| repo_name (string, 5–100 chars) | path (string, 4–231 chars) | language (1 class) | license (15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, 0–8.16k chars) | middle (string, 3–512 chars) | suffix (string, 0–8.17k chars) |
|---|---|---|---|---|---|---|---|---|
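
The prefix, middle, and suffix columns hold contiguous slices of a single source file (a fill-in-the-middle split), so concatenating them recovers the sampled file text. Below is a minimal sketch of reading such rows with the Hugging Face `datasets` library; the dataset path is a placeholder for wherever this table is actually published.

```python
# Minimal sketch: read the columns described above and rebuild each file.
# "someuser/python-fim-corpus" is a hypothetical placeholder path.
from datasets import load_dataset

ds = load_dataset("someuser/python-fim-corpus", split="train")

for row in ds.select(range(3)):
    # prefix + middle + suffix are contiguous slices of one source file.
    full_source = row["prefix"] + row["middle"] + row["suffix"]
    print(row["repo_name"], row["path"], row["license"], row["score"], len(full_source))
```
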
rmk135/objects
|
examples/miniapps/factory-patterns/factory_of_factories.py
|
Python
|
bsd-3-clause
| 1,394
| 0
|
"""`Factory of Factories` pattern."""
from dependency_injector import containers, providers
class SqlAlchemyDatabaseService:
def __init__(self, session, base_class):
self.session = session
self.base_class = base_class
class TokensService:
def __init__(self, id_generator, database):
self.id_generator = id_generator
self.database = database
class Token:
...
class UsersService:
def __init__(self, id_generator, database):
self.id_generator = id_generator
self.database = database
class User:
...
# Sample objects
session = object()
id_generator = object()
class Container(containers.DeclarativeContainer):
database_factory = providers.Factory(
providers.Factory,
SqlAlchemyDatabaseService,
session=session,
)
token_service = providers.Factory(
TokensService,
id_generator=id_generator,
database=database_factory(base_class=Token),
)
user_service = providers.Factory(
UsersService,
id_generator=id_generator,
database=database_factory(base_class=User),
)
if __name__ == '__main__':
container = Container()
token_service = container.token_service()
assert token_service.database.base_class is Token
user_service = container.user_service()
assert user_service.database.base_class is User
|
mezz64/home-assistant
|
homeassistant/components/media_source/local_source.py
|
Python
|
apache-2.0
| 7,330
| 0.000546
|
"""Local Media Source Implementation."""
from __future__ import annotations
import mimetypes
from pathlib import Path
from aiohttp import web
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.media_player.const import MEDIA_CLASS_DIRECTORY
from homeassistant.components.media_player.errors import BrowseError
from homeassistant.core import HomeAssistant, callback
from homeassistant.util import raise_if_invalid_path
from .const import DOMAIN, MEDIA_CLASS_MAP, MEDIA_MIME_TYPES
from .error import Unresolvable
from .models import BrowseMediaSource, MediaSource, MediaSourceItem, PlayMedia
@callback
def async_setup(hass: HomeAssistant) -> None:
"""Set up local media source."""
source = LocalSource(hass)
hass.data[DOMAIN][DOMAIN] = source
hass.http.register_view(LocalMediaView(hass, source))
class LocalSource(MediaSource):
"""Provide local directories as media sources."""
name: str = "Local Media"
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize local source."""
super().__init__(DOMAIN)
self.hass = hass
@callback
def async_full_path(self, source_dir_id: str, location: str) -> Path:
"""Return full path."""
return Path(self.hass.config.media_dirs[source_dir_id], location)
@callback
def async_parse_identifier(self, item: MediaSourceItem) -> tuple[str, str]:
"""Parse identifier."""
if not item.identifier:
# Empty source_dir_id and location
return "", ""
source_dir_id, location = item.identifier.split("/", 1)
if source_dir_id not in self.hass.config.media_dirs:
raise Unresolvable("Unknown source directory.")
try:
raise_if_invalid_path(location)
except ValueError as err:
raise Unresolvable("Invalid path.") from err
return source_dir_id, location
async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
"""Resolve media to a url."""
source_dir_id, location = self.async_parse_identifier(item)
if source_dir_id == "" or source_dir_id not in self.hass.config.media_dirs:
raise Unresolvable("Unknown source directory.")
mime_type, _ = mimetypes.guess_type(
str(self.async_full_path(source_dir_id, location))
)
assert isinstance(mime_type, str)
return PlayMedia(f"/media/{item.identifier}", mime_type)
async def async_browse_media(self, item: MediaSourceItem) -> BrowseMediaSource:
"""Return media."""
try:
source_dir_id, location = self.async_parse_identifier(item)
except Unresolvable as err:
raise BrowseError(str(err)) from err
result = await self.hass.async_add_executor_job(
self._browse_media, source_dir_id, location
)
return result
def _browse_media(self, source_dir_id: str, location: str) -> BrowseMediaSource:
"""Browse media."""
# If only one media dir is configured, use that as the local media root
if source_dir_id == "" and len(self.hass.config.media_dirs) == 1:
source_dir_id = list(self.hass.config.media_dirs)[0]
# Multiple folders, root is requested
if source_dir_id == "":
if location:
raise BrowseError("Folder not found.")
base = BrowseMediaSource(
domain=DOMAIN,
identifier="",
media_class=MEDIA_CLASS_DIRECTORY,
media_content_type=None,
title=self.name,
can_play=False,
can_expand=True,
children_media_class=MEDIA_CLASS_DIRECTORY,
)
base.children = [
self._browse_media(source_dir_id, "")
for source_dir_id in self.hass.config.media_dirs
]
return base
full_path = Path(self.hass.config.media_dirs[source_dir_id], location)
if not full_path.exists():
if location == "":
raise BrowseError("Media directory does not exist.")
raise BrowseError("Path does not exist.")
if not full_path.is_dir():
raise BrowseError("Path is not a directory.")
result = self._build_item_response(source_dir_id, full_path)
if not result:
raise BrowseError("Unknown source directory.")
return result
def _build_item_response(
self, source_dir_id: str, path: Path, is_child: bool = False
) -> BrowseMediaSource | None:
mime_type, _ = mimetypes.guess_type(str(path))
is_file = path.is_file()
is_dir = path.is_dir()
# Make sure it's a file or directory
if not is_file and not is_dir:
return None
# Check that it's a media file
if is_file and (
not mime_type or mime_type.split("/")[0] not in MEDIA_MIME_TYPES
):
return None
title = path.name
if is_dir:
title += "/"
media_class = MEDIA_CLASS_DIRECTORY
if mime_type:
media_class = MEDIA_CLASS_MAP.get(
mime_type.split("/")[0], MEDIA_CLASS_DIRECTORY
)
media = BrowseMediaSource(
domain=DOMAIN,
identifier=f"{source_dir_id}/{path.relative_to(self.hass.config.media_dirs[source_dir_id])}",
media_class=media_class,
media_content_type=mime_type or "",
title=title,
can_play=is_file,
can_expand=is_dir,
)
if is_file or is_child:
return media
# Append first level children
media.children = []
for child_path in path.iterdir():
child = self._build_item_response(source_dir_id, child_path, True)
if child:
media.children.append(child)
# Sort children showing directories first, then by name
media.children.sort(key=lambda child: (child.can_play, child.title))
return media
class LocalMediaView(HomeAssistantView):
"""
Local Media Finder View.
Returns media files in config/media.
"""
url = "/media/{source_dir_id}/{location:.*}"
name = "media"
def __init__(self, hass: HomeAssistant, source: LocalSource) -> None:
"""Initialize the media view."""
self.hass = hass
self.source = source
async def get(
self, request: web.Request, source_dir_id: str, location: str
) -> web.FileResponse:
"""Start a GET request."""
try:
raise_if_invalid_path(location)
except ValueError as err:
raise web.HTTPBadRequest() from err
if source_dir_id not in self.hass.config.media_dirs:
raise web.HTTPNotFound()
media_path = self.source.async_full_path(source_dir_id, location)
# Check that the file exists
if not media_path.is_file():
raise web.HTTPNotFound()
# Check that it's a media file
mime_type, _ = mimetypes.guess_type(str(media_path))
if not mime_type or mime_type.split("/")[0] not in MEDIA_MIME_TYPES:
raise web.HTTPNotFound()
return web.FileResponse(media_path)
|
callowayproject/django-objectpermissions
|
objectpermissions/signals.py
|
Python
|
apache-2.0
| 227
| 0.008811
|
import django.dispatch
# Whenever a permission object is saved, it sends out the signal. This allows
# models to keep their permissions in sync
permission_changed = django.dispatch.Signal(providing_args=('to_whom', 'to_what'))
|
googleapis/python-retail
|
samples/interactive-tutorials/product/import_products_bq_test.py
|
Python
|
apache-2.0
| 1,917
| 0.002087
|
# Copyright 2022 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import subprocess
from setup_product.setup_cleanup import (
create_bq_dataset,
create_bq_table,
delete_bq_table,
upload_data_to_bq_table,
)
def test_import_products_bq(table_id_prefix):
dataset = "products"
valid_products_table = f"{table_id_prefix}products"
product_schema = "../resources/product_schema.json"
valid_products_source_file = "../resources/products.json"
create_bq_dataset(dataset)
create_bq_table(dataset, valid_products_table, product_schema)
upload_data_to_bq_table(
dataset, valid_products_table, valid_products_source_file, product_schema
)
output = str(
subprocess.check_output(
f"python import_products_big_query_table.py {dataset} {valid_products_table}",
shell=True,
)
)
    delete_bq_table(dataset, valid_products_table)
assert re.match(".*import products from big query table request.*", output)
assert re.match(".*the operation was started.*", output)
assert re.match(
".*projects/.*/locations/global/catalogs/default_catalog/branches/0/operations/import-products.*",
output,
)
assert re.match(".*number of successfully imported products.*?316.*", output)
assert re.match(".*number of failures during the importing.*?
|
0.*", output)
|
desec-io/desec-stack
|
test/e2e2/spec/test_api_rrset.py
|
Python
|
mit
| 2,838
| 0.002467
|
import pytest
from conftest import DeSECAPIV1Client
@pytest.mark.parametrize("init_rrsets", [
{
('www', 'A'): (3600, {'1.2.3.4'}),
('www', 'AAAA'): (3600, {'::1'}),
('one', 'CNAME'): (3600, {'some.example.net.'}),
('other', 'TXT'): (3600, {'"foo" "bar"', '"bar" "foo"'}),
}
])
@pytest.mark.parametrize("rrsets", [
{ # create three RRsets
('a' * 63, 'A'): (7000, {'4.3.2.1', '7.6.5.4'}),
('b', 'PTR'): (7000, {'1.foo.bar.com.', '2.bar.foo.net.'}),
('c.' + 'a' * 63, 'MX'): (7000, {'10 mail.something.net.'}),
},
{ # update three RRsets
('www', 'A'): None, # ensure value from init_rrset is still there
('www', 'AAAA'): (7000, {'6666::6666', '7777::7777'}),
('one', 'CNAME'): (7000, {'other.example.net.'}),
('other', 'TXT'): (7000, {'"foobar"'}),
},
{ # delete three RRsets
('www', 'A'): (7000, {}),
('www', 'AAAA'): None, # ensure value from init_rrset is still there
('one', 'CNAME'): (7000, {}),
('other', 'TXT'): (7000, {}),
},
{ # create, update, delete
('a' * 63, 'A'): (7000, {'4.3.2.1', '7.6.5.4'}),
('www', 'A'): None, # ensure value from init_rrset is still there
('www', 'AAAA'): (7000, {'6666::6666', '7777::7777'}),
('one', 'CNAME'): None, # ensure value from init_rrset is still there
('other', 'TXT'): (7000, {}),
},
{ # complex usecase
        ('', 'A'): (3600, {'1.2.3.4', '255.254.253.252'}), # create apex record
('*', 'MX'): (3601, {'0 mx.example.net.'}), # create wildcard record
('www', 'AAAA'): (3602, {}), # remove existing record
('www', 'A'): (7000, {'4.3.2.1', '7.6.5.4'}), # update existing record
('one', 'A'): (3603, {'1.1.1.1'}), # configure A instead of ...
('one', 'CNAME'): (3603, {}), # ... CNAME
('other', 'CNAME'): (3603, {'cname.example.com.'}), # configure CNAME instead of ...
('other', 'TXT'): (3600, {}), # ... TXT
('nonexistent', 'DNAME'): (3600, {}), # delete something that doesn't exist
        ('sub', 'CDNSKEY'): (3600, {'257 3 15 l02Woi0iS8Aa25FQkUd9RMzZHJpBoRQwAQEX1SxZJA4='}), # non-apex DNSSEC
        ('sub', 'CDS'): (3600, {'35217 15 2 401781b934e392de492ec77ae2e15d70f6575a1c0bc59c5275c04ebe80c6614c'}), # ditto
# ('sub', 'DNSKEY'): (3600, {'257 3 15 l02Woi0iS8Aa25FQkUd9RMzZHJpBoRQwAQEX1SxZJA4='}) # no pdns support >= 4.6
},
])
def test(api_user_domain_rrsets: DeSECAPIV1Client, rrsets: dict):
api_user_domain_rrsets.patch(f"/domains/{api_user_domain_rrsets.domain}/rrsets/", data=[
{"subname": k[0], "type": k[1], "ttl": v[0], "records": list(v[1])}
for k, v in rrsets.items()
if v is not None
])
api_user_domain_rrsets.assert_rrsets(rrsets)
|
thefirstwind/s3qloss
|
tests/t4_adm.py
|
Python
|
gpl-3.0
| 2,378
| 0.002103
|
'''
t4_adm.py - this file is part of S3QL (http://s3ql.googlecode.com)
Copyright (C) 2008-2009 Nikolaus Rath <[email protected]>
This program can be distributed under the terms of the GNU GPLv3.
'''
from __future__ import division, print_function, absolute_import
from s3ql.backends import local
from s3ql.backends.common import BetterBackend
import shutil
import sys
import tempfile
import unittest2 as unittest
import subprocess
import os.path
if __name__ == '__main__':
mypath = sys.argv[0]
else:
mypath = __file__
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(mypath), '..'))
class AdmTests(unittest.TestCase):
def setUp(self):
self.cache_dir = tempfile.mkdtemp()
self.backend_dir = tempfile.mkdtemp()
self.storage_url = 'local://' + self.backend_dir
self.passphrase = 'oeut3d'
def tearDown(self):
shutil.rmtree(self.cache_dir)
shutil.rmtree(self.backend_dir)
def mkfs(self):
proc = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'bin', 'mkfs.s3ql'),
'-L', 'test fs', '--max-obj-size', '500',
'--cachedir', self.cache_dir, '--quiet',
self.storage_url ], stdin=subprocess.PIPE)
print(self.passphrase, file=proc.stdin)
print(self.passphrase, file=proc.stdin)
proc.stdin.close()
self.assertEqual(proc.wait(), 0)
def test_passphrase(self):
self.mkfs()
passphrase_new = 'sd982jhd'
        proc = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'bin', 's3qladm'),
                                 '--quiet', 'passphrase',
self.storage_url ], stdin=subprocess.PIPE)
print(self.passphrase, file=proc.stdin)
print(passphrase_new, file=proc.stdin)
print(passphrase_new, file=proc.stdin)
proc.stdin.close()
self.assertEqual(proc.wait(), 0)
plain_backend = local.Backend(self.storage_url, None, None)
backend = BetterBackend(passphrase_new, 'bzip2', plain_backend)
self.assertTrue(isinstance(backend['s3ql_passphrase'], str))
# Somehow important according to pyunit documentation
def suite():
return unittest.makeSuite(AdmTests)
# Allow calling from command line
if __name__ == "__main__":
unittest.main()
|
ludovic-bouguerra/tutorial-travis-docker
|
webservice/views.py
|
Python
|
gpl-3.0
| 237
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
from django.http import HttpResponse
def hello_world_view(request):
return HttpResponse("hello world", content_type="text/plain")
|
andersonjonathan/Navitas
|
navitas/contents/migrations/0018_auto_20170329_1549.py
|
Python
|
mit
| 483
| 0.00207
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import contents.models
class Migration(migrations.Migration):
dependencies = [
('contents', '0017_auto_20170329_1504'),
]
operations = [
migrations.AlterField(
model_name='frontpageimage',
name='image',
            field=models.ImageField(null=True, upload_to=contents.models.get_front_page_image_path),
),
]
|
progdupeupl/pdp_website
|
pdp/utils/tests.py
|
Python
|
agpl-3.0
| 3,776
| 0
|
# coding: utf-8
#
# This file is part of Progdupeupl.
#
# Progdupeupl is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Progdupeupl is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Progdupeupl. If not, see <http://www.gnu.org/licenses/>.
"""Tests for utils app."""
import unittest
import hashlib
from django.contrib.auth.models import User
from django_dynamic_fixture import G
from pdp.member.models import Profile, ActivationToken
from pdp.utils.templatetags.profile import profile
from pdp.utils.templatetags.interventions import interventions_topics
from pdp.utils.paginator import paginator_range
from pdp.utils import mail
class TemplateTagsTests(unittest.TestCase):
"""Test for the custom template tags about users."""
def setUp(self):
self.user = G(User)
def test_profile_none(self):
"""Test the output of profile templatetag if profile does not exist."""
self.assertEqual(None, profile(self.user))
def test_profile_existing(self):
"""Test the output of profile templatetag if profile does exist."""
p = G(Profile, user=self.user)
self.assertEqual(p, profile(self.user))
def test_interventions_none(self):
"""Test templatetags when no topic should match."""
self.assertEqual(interventions_topics(self.user), {'unread': [],
'read': []})
class PaginatorRangeTests(unittest.TestCase):
"""Tests for the paginator_range function."""
def test_out_of_range(self):
self.assertRaises(ValueError, lambda: paginator_range(3, 2))
def test_one(self):
result = paginator_range(1, 1)
self.assertEqual(result, [1])
def test_small(self):
result = paginator_range(2, 3)
self.assertEqual(result, [1, 2, 3])
def test_small_limit(self):
result = paginator_range(1, 4)
self.assertEqual(result, [1, 2, 3, 4])
def test_big_start(self):
result = paginator_range(1, 10)
self.assertEqual(result, [1, 2, None, 10])
def test_big_start_limit(self):
result = paginator_range(3, 10)
self.assertEqual(result, [1, 2, 3, 4, None, 10])
def test_big_middle(self):
result = paginator_range(5, 10)
self.assertEqual(result, [1, None, 4, 5, 6, None, 10])
def test_big_end(self):
result = paginator_range(10, 10)
self.assertEqual(result, [1, None, 9, 10])
def test_big_end_limit(self):
result = paginator_range(7, 10)
self.assertEqual(result, [1, None, 6, 7, 8, 9, 10])
class MailTests(unittest.TestCase):
"""Tests for the mail utilities."""
def test_send_templated_mail(self):
recipients = ['test1@localhost']
result = mail.send_templated_mail(
subject='Fake subject',
template='base.txt',
context={},
recipients=recipients
)
self.assertEqual(result, 1)
def test_send_mail_to_confirm_registration(self):
user = G(User, username='Blaireau1', email='test1@localhost')
link = hashlib.sha1('blbl'.encode('ascii')).hexdigest()
token = G(ActivationToken, user=user, token=link)
result = mail.send_mail_to_confirm_registration(token)
self.assertEqual(result, 1)
|
hickey/amforth
|
core/devices/atmega16u4/device.py
|
Python
|
gpl-2.0
| 10,989
| 0.070252
|
# Partname: ATmega16U4
# generated automatically, do not edit
MCUREGS = {
'WDTCSR': '&96',
'WDTCSR_WDIF': '$80',
'WDTCSR_WDIE': '$40',
'WDTCSR_WDP': '$27',
'WDTCSR_WDCE': '$10',
'WDTCSR_WDE': '$08',
'PORTD': '&43',
'DDRD': '&42',
'PIND': '&41',
'SPCR': '&76',
'SPCR_SPIE': '$80',
'SPCR_SPE': '$40',
'SPCR_DORD': '$20',
'SPCR_MSTR': '$10',
'SPCR_CPOL': '$08',
'SPCR_CPHA': '$04',
'SPCR_SPR': '$03',
'SPSR': '&77',
'SPSR_SPIF': '$80',
'SPSR_WCOL': '$40',
'SPSR_SPI2X': '$01',
'SPDR': '&78',
'UDR1': '&206',
'UCSR1A': '&200',
'UCSR1A_RXC1': '$80',
'UCSR1A_TXC1': '$40',
'UCSR1A_UDRE1': '$20',
'UCSR1A_FE1': '$10',
'UCSR1A_DOR1': '$08',
'UCSR1A_UPE1': '$04',
'UCSR1A_U2X1': '$02',
'UCSR1A_MPCM1': '$01',
'UCSR1B': '&201',
'UCSR1B_RXCIE1': '$80',
'UCSR1B_TXCIE1': '$40',
'UCSR1B_UDRIE1': '$20',
'UCSR1B_RXEN1': '$10',
'UCSR1B_TXEN1': '$08',
'UCSR1B_UCSZ12': '$04',
'UCSR1B_RXB81': '$02',
'UCSR1B_TXB81': '$01',
'UCSR1C': '&202',
'UCSR1C_UMSEL1': '$C0',
'UCSR1C_UPM1': '$30',
'UCSR1C_USBS1': '$08',
'UCSR1C_UCSZ1': '$06',
'UCSR1C_UCPOL1': '$01',
'UBRR1': '&204',
'SPMCSR': '&87',
'SPMCSR_SPMIE': '$80',
'SPMCSR_RWWSB': '$40',
'SPMCSR_SIGRD': '$20',
'SPMCSR_RWWSRE': '$10',
'SPMCSR_BLBSET': '$08',
'SPMCSR_PGWRT': '$04',
'SPMCSR_PGERS': '$02',
'SPMCSR_SPMEN': '$01',
'EEAR': '&65',
'EEDR': '&64',
'EECR': '&63',
'EECR_EEPM': '$30',
'EECR_EERIE': '$08',
'EECR_EEMPE': '$04',
'EECR_EEPE': '$02',
'EECR_EERE': '$01',
'OCR0B': '&72',
'OCR0A': '&71',
'TCNT0': '&70',
'TCCR0B': '&69',
'TCCR0B_FOC0A': '$80',
'TCCR0B_FOC0B': '$40',
'TCCR0B_WGM02': '$08',
'TCCR0B_CS0': '$07',
'TCCR0A': '&68',
'TCCR0A_COM0A': '$C0',
'TCCR0A_COM0B': '$30',
'TCCR0A_WGM0': '$03',
'TIMSK0': '&110',
'TIMSK0_OCIE0B': '$04',
'TIMSK0_OCIE0A': '$02',
'TIMSK0_TOIE0': '$01',
'TIFR0': '&53',
'TIFR0_OCF0B': '$04',
'TIFR0_OCF0A': '$02',
'TIFR0_TOV0': '$01',
'GTCCR': '&67',
'GTCCR_TSM': '$80',
'GTCCR_PSRSYNC': '$01',
'TCCR3A': '&144',
'TCCR3A_COM3A': '$C0',
'TCCR3A_COM3B': '$30',
'TCCR3A_COM3C': '$0C',
'TCCR3A_WGM3': '$03',
'TCCR3B': '&145',
'TCCR3B_ICNC3': '$80',
'TCCR3B_ICES3': '$40',
'TCCR3B_WGM3': '$18',
'TCCR3B_CS3': '$07',
'TCCR3C': '&146',
'TCCR3C_FOC3A': '$80',
'TCCR3C_FOC3B': '$40',
'TCCR3C_FOC3C': '$20',
'TCNT3': '&148',
'OCR3A': '&152',
'OCR3B': '&154',
'OCR3C': '&156',
'ICR3': '&150',
'TIMSK3': '&113',
'TIMSK3_ICIE3': '$20',
'TIMSK3_OCIE3C': '$08',
'TIMSK3_OCIE3B': '$04',
'TIMSK3_OCIE3A': '$02',
'TIMSK3_TOIE3': '$01',
'TIFR3': '&56',
'TIFR3_ICF3': '$20',
'TIFR3_OCF3C': '$08',
'TIFR3_OCF3B': '$04',
'TIFR3_OCF3A': '$02',
'TIFR3_TOV3': '$01',
'TCCR1A': '&128',
'TCCR1A_COM1A': '$C0',
'TCCR1A_COM1B': '$30',
'TCCR1A_COM1C': '$0C',
'TCCR1A_WGM1': '$03',
'TCCR1B': '&129',
'TCCR1B_ICNC1': '$80',
'TCCR1B_ICES1': '$40',
'TCCR1B_WGM1': '$18',
'TCCR1B_CS1': '$07',
'TCCR1C': '&130',
'TCCR1C_FOC1A': '$80',
'TCCR1C_FOC1B': '$40',
'TCCR1C_FOC1C': '$20',
'TCNT1': '&132',
'OCR1A': '&136',
'OCR1B': '&138',
'OCR1C': '&140',
'ICR1': '&134',
'TIMSK1': '&111',
'TIMSK1_ICIE1': '$20',
'TIMSK1_OCIE1C': '$08',
'TIMSK1_OCIE1B': '$04',
'TIMSK1_OCIE1A': '$02',
'TIMSK1_TOIE1': '$01',
'TIFR1': '&54',
'TIFR1_ICF1': '$20',
'TIFR1_OCF1C': '$08',
'TIFR1_OCF1B': '$04',
'TIFR1_OCF1A': '$02',
'TIFR1_TOV1': '$01',
'OCDR': '&81',
'MCUCR': '&85',
'MCUCR_JTD': '$80',
'MCUSR': '&84',
'MCUSR_JTRF': '$10',
'EICRA': '&105',
'EICRA_ISC3': '$C0',
'EICRA_ISC2': '$30',
'EICRA_ISC1': '$0C',
'EICRA_ISC0': '$03',
'EICRB': '&106',
'EICRB_ISC7': '$C0',
'EICRB_ISC6': '$30',
'EICRB_ISC5': '$0C',
'EICRB_ISC4': '$03',
'EIMSK': '&61',
'EIMSK_INT': '$FF',
'EIFR': '&60',
'EIFR_INTF': '$FF',
'PCMSK0': '&107',
'PCIFR': '&59',
'PCIFR_PCIF0': '$01',
'PCICR': '&104',
'PCICR_PCIE0': '$01',
'TCCR4A': '&192',
'TCCR4A_COM4A': '$C0',
'TCCR4A_COM4B': '$30',
'TCCR4A_FOC4A': '$08',
'TCCR4A_FOC4B': '$04',
'TCCR4A_PWM4A': '$02',
'TCCR4A_PWM4B': '$01',
'TCCR4B': '&193',
'TCCR4B_PWM4X': '$80',
'TCCR4B_PSR4': '$40',
'TCCR4B_DTPS4': '$30',
'TCCR4B_CS4': '$0F',
'TCCR4C': '&194',
'TCCR4C_COM4A1S': '$80',
'TCCR4C_COM4A0S': '$40',
'TCCR4C_COM4B1S': '$20',
'TCCR4C_COM4B0S': '$10',
'TCCR4C_COM4D': '$0C',
'TCCR4C_FOC4D': '$02',
'TCCR4C_PWM4D': '$01',
'TCCR4D': '&195',
'TCCR4D_FPIE4': '$80',
'TCCR4D_FPEN4': '$40',
'TCCR4D_FPNC4': '$20',
'TCCR4D_FPES4': '$10',
'TCCR4D_FPAC4': '$08',
'TCCR4D_FPF4': '$04',
'TCCR4D_WGM4': '$03',
'TCCR4E': '&196',
'TCCR4E_TLOCK4': '$80',
'TCCR4E_ENHC4': '$40',
'TCCR4E_OC4OE': '$3F',
'TCNT4': '&190',
'TC4H': '&191',
'OCR4A': '&207',
'OCR4B': '&208',
'OCR4C': '&209',
'OCR4D': '&210',
'TIMSK4': '&114',
'TIMSK4_OCIE4D': '$80',
'TIMSK4_OCIE4A': '$40',
'TIMSK4_OCIE4B': '$20',
'TIMSK4_TOIE4': '$04',
'TIFR4': '&57',
'TIFR4_OCF4D': '$80',
'TIFR4_OCF4A': '$40',
'TIFR4_OCF4B': '$20',
'TIFR4_TOV4': '$04',
'DT4': '&212',
'DT4_DT4L': '$FF',
'PORTB': '&37',
'DDRB': '&36',
'PINB': '&35',
'PORTC': '&40',
'DDRC': '&39',
'PINC': '&38',
'PORTE': '&46',
'DDRE': '&45',
'PINE': '&44',
'PORTF': '&49',
'DDRF': '&48',
'PINF': '&47',
'ADMUX': '&124',
'ADMUX_REFS': '$C0',
'ADMUX_ADLAR': '$20',
'ADMUX_MUX': '$1F',
'ADCSRA': '&122',
'ADCSRA_ADEN': '$80',
'ADCSRA_ADSC': '$40',
'ADCSRA_ADATE': '$20',
'ADCSRA_ADIF': '$10',
'ADCSRA_ADIE': '$08',
'ADCSRA_ADPS': '$07',
'ADC': '&120',
'ADCSRB': '&123',
'ADCSRB_ADHSM': '$80',
'ADCSRB_MUX5': '$20',
'ADCSRB_ADTS': '$17',
'DIDR0': '&126',
'DIDR0_ADC7D': '$80',
'DIDR0_ADC6D': '$40',
'DIDR0_ADC5D': '$20',
'DIDR0_ADC4D': '$10',
'DIDR0_ADC3D': '$08',
'DIDR0_ADC2D': '$04',
'DIDR0_ADC1D': '$02',
'DIDR0_ADC0D': '$01',
'DIDR2': '&125',
'DIDR2_ADC13D': '$20',
'DIDR2_ADC12D': '$10',
'DIDR2_ADC11D': '$08',
'DIDR2_ADC10D': '$04',
'DIDR2_ADC9D': '$02',
'DIDR2_ADC8D': '$01',
'ACSR': '&80',
'ACSR_ACD': '$80',
'ACSR_ACBG': '$40',
'ACSR_ACO': '$20',
'ACSR_ACI': '$10',
'ACSR_ACIE': '$08',
'ACSR_ACIC': '$04',
'ACSR_ACIS': '$03',
'DIDR1': '&127',
'DIDR1_AIN1D': '$02',
'DIDR1_AIN0D': '$01',
'SREG': '&95',
'SREG_I': '$80',
'SREG_T': '$40',
'SREG_H': '$20',
'SREG_S': '$10',
'SREG_V': '$08',
'SREG_N': '$04',
'SREG_Z': '$02',
'SREG_C': '$01',
'SP': '&93',
'OSCCAL': '&102',
'RCCTRL': '&103',
'RCCTRL_RCFREQ': '$01',
'CLKPR': '&97',
'CLKPR_CLKPCE': '$80',
'CLKPR_CLKPS': '$0F',
'SMCR': '&83',
'SMCR_SM': '$0E',
'SMCR_SE': '$01',
'EIND': '&92',
'GPIOR2': '&75',
'GPIOR2_GPIOR': '$FF',
'GPIOR1': '&74',
'GPIOR1_GPIOR': '$FF',
'GPIOR0': '&62',
'GPIOR0_GPIOR07': '$80',
'GPIOR0_GPIOR06': '$40',
'GPIOR0_GPIOR05': '$20',
'GPIOR0_GPIOR04': '$10',
'GPIOR0_GPIOR03': '$08',
'GPIOR0_GPIOR02': '$04',
'GPIOR0_GPIOR01': '$02',
'GPIOR0_GPIOR00': '$01',
'PRR1': '&101',
'PRR1_PRUSB': '$80',
'PRR1_PRTIM3': '$08',
'PRR1_PRUSART1': '$01',
'PRR0': '&100',
'PRR0_PRTWI': '$80',
'PRR0_PRTIM2': '$40',
'PRR0_PRTIM0': '$20',
'PRR0_PRTIM1': '$08',
'PRR0_PRSPI': '$04',
'PRR0_PRUSART0': '$02',
'PRR0_PRADC': '$01',
'CLKSTA': '&199',
'CLKSTA_RCON': '$02',
'CLKSTA_EXTON': '$01',
'CLKSEL1': '&198',
'CLKSEL1_RCCKSEL': '$F0',
'CLKSEL1_EXCKSEL': '$0F',
'CLKSEL0': '&197',
'CLKSEL0_RCSUT': '$C0',
'CLKSEL0_EXSUT': '$30',
'CLKSEL0_RCE': '$08',
'CLKSEL0_EXTE': '$04',
'CLKSEL0_CLKS': '$01',
'PLLCSR': '&73',
'PLLCSR_PINDIV': '$10',
'PLLCSR_PLLE': '$02',
'PLLCSR_PLOCK': '$01',
'PLLFRQ': '&82',
'PLLFRQ_PINMUX': '$80',
'PLLFRQ_PLLUSB': '$40',
'PLLFRQ_PLLTM': '$30',
'PLLFRQ_PDIV': '$0F',
'UEINT': '&244',
'UEBCHX': '&243',
'UEBCLX': '&242',
'UEDATX': '&241',
'UEDATX_DAT': '$FF',
'UEIENX': '&240',
'UEIENX_FLERRE': '$80',
'UEIENX_NAKINE': '$40',
'UEIENX_NAKOUTE': '$10',
'UEIENX_RXSTPE': '$
|
ffalcinelli/wstunnel
|
wstunnel/client.py
|
Python
|
lgpl-3.0
| 10,007
| 0.001799
|
# -*- coding: utf-8 -*-
# Copyright (C) 2013 Fabio Falcinelli
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import socket
from tornado import httpclient
from tornado.ioloop import IOLoop
from tornado.tcpserver import TCPServer
from tornado.websocket import WebSocketClientConnection
from wstunnel.toolbox import tuple_to_address
from wstunnel.exception import EndpointNotAvailableException
from wstunnel.filters import FilterException
__author__ = "fabio"
logger = logging.getLogger(__name__)
def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None, **kwargs):
"""Client-side websocket support.
Takes a url and returns a Future whose result is a
`WebSocketClientConnection`.
"""
options = httpclient.HTTPRequest._DEFAULTS.copy()
options.update(kwargs)
if io_loop is None:
io_loop = IOLoop.current()
request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout,
validate_cert=kwargs.get("validate_cert", True))
request = httpclient._RequestProxy(request, options)
conn = WebSocketClientConnection(io_loop, request)
if callback is not None:
io_loop.add_future(conn.connect_future, callback)
return conn.connect_future
class WebSocketProxy(TCPServer):
"""
Listen on a port and delegate the accepted connection to a WebSocketLocalProxyHandler
"""
def __init__(self, port, ws_url, **kwargs):
super(WebSocketProxy, self).__init__(kwargs.get("io_loop"),
kwargs.get("ssl_options"))
self.bind(port,
kwargs.get("address", ''),
kwargs.get("family", socket.AF_UNSPEC),
kwargs.get("backlog", 128))
self.ws_url = ws_url
self.ws_options = kwargs.get("ws_options", {})
self.filters = kwargs.get("filters", [])
self.serving = False
self.ws_conn = None
self._address_list = []
@property
def address_list(self):
return self._address_list
def handle_stream(self, stream, address):
"""
Handle a new client connection with a proxy over websocket
"""
logger.info("Got connection from %s on %s" % (tuple_to_address(stream.socket.getpeername()),
tuple_to_address(stream.socket.getsockname())))
self.ws_conn = WebSocketProxyConnection(self.ws_url, stream, address,
filters=self.filters,
ws_options=self.ws_options)
self.ws_conn.connect()
def start(self, num_processes=1):
super(WebSocketProxy, self).start(num_processes)
self._address_list = [(s.getsockname()[0], s.getsockname()[1]) for s in self._sockets.values()]
self.serving = True
def stop(self):
super(WebSocketProxy, self).stop()
self.serving = False
def __str__(self):
return "WebSocketProxy %s" % (" | ".join(["%s --> %s" %
("%s:%d" % (a, p), self.ws_url) for (a, p) in self.address_list]))
class WebSocketProxyConnection(object):
"""
Handles the client connection and works as a proxy over a websocket connection
"""
def __init__(self, url, io_stream, address, ws_options=None, **kwargs):
self.url = url
self.io_loop = kwargs.get("io_loop")
self.connect_timeout = kwargs.get("connect_timeout", None)
self.keep_alive = kwargs.get("keep_alive", None)
self.ws_options = ws_options
self.io_stream, self.address = io_stream, address
self.filters = kwargs.get("filters", [])
self.io_stream.set_close_callback(self.on_close)
self.ws_conn = None
def connect(self):
logger.info("Connecting WebSocket at url %s" % self.url)
websocket_connect(self.url,
self.io_loop,
callback=self.on_open,
connect_timeout=self.connect_timeout,
**self.ws_options)
def on_open(self, ws_conn):
"""
When the websocket connection is handshaked, start reading for data over the client socket
connection
"""
try:
self.ws_conn = ws_conn.result()
except httpclient.HTTPError as e:
#TODO: change with raise EndpointNotAvailableException(message="The server endpoint is not available") from e
raise EndpointNotAvailableException("The server endpoint is not available", cause=e)
self.ws_conn.on_message = self.on_message
self.ws_conn.release_callback = self.on_close
self.io_stream.read_until_close(self.on_close, streaming_callback=self.on_peer_message)
def on_message(self, message):
"""
On a message received from websocket, send back to client peer
"""
try:
data = None if message is None else bytes(message)
for filtr in self.filters:
data = filtr.ws_to_socket(data=data)
if data:
self.io_stream.write(data)
except FilterException as e:
logger.exception(e)
self.on_close()
def on_close(self, *args, **kwargs):
"""
Handles the close event from the client socket
"""
logger.info("Closing connection with client at {0}:{1}".format(*self.address))
logger.debug("Received args %s and %s", args, kwargs)
if not self.io_stream.closed():
self.io_stream.close()
def on_peer_message(self, message):
"""
On data received from client peer, forward through WebSocket
"""
try:
data = None if message is None else bytes(message)
for filtr in self.filters:
data = filtr.socket_to_ws(data=data)
if data:
self.ws_conn.write_message(data, binary=True)
except FilterException as e:
logger.exception(e)
self.on_close()
class WSTunnelClient(object):
"""
Manages redirects from local ports to remote websocket servers
"""
def __init__(self, proxies=None, address='', family=socket.AF_UNSPEC, io_loop=None, ssl_options=None,
ws_options=None):
self.stream_options = {
"address": address,
"family": family,
"io_loop": io_loop,
"ssl_options": ssl_options,
}
self.ws_options = ws_options or {}
self.proxies = proxies or {}
self.serving = False
self._num_proc = 1
if proxies:
for port, ws_url in proxies.items():
self.add_proxy(port, WebSocketProxy(port=port,
ws_url=ws_url,
ws_options=self.ws_options,
**self.stream_options))
def add_proxy(self, key, ws_proxy):
"""
Adds a proxy to the list.
If the tunnel is serving connections, the proxy gets started.
"""
self.proxies[key] = ws_proxy
if self.serving:
ws_proxy.start(self._num_proc)
logger.info("Started %s" % ws_proxy)
def remove_proxy(self, key):
"""
Removes a proxy from the list.
If the tunnel is serving connections, the proxy gets stopped.
"""
ws_proxy = self.proxies.get
|
mixturemodel-flow/tensorflow
|
tensorflow/python/estimator/model_fn.py
|
Python
|
apache-2.0
| 12,164
| 0.004357
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Classes and methods related to model_fn."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import six
from tensorflow.python.estimator.export.export_output import ExportOutput
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.training import monitored_session
from tensorflow.python.training import session_run_hook
from tensorflow.python.util import nest
class ModeKeys(object):
"""Standard names for model modes.
The following standard keys are defined:
* `TRAIN`: training mode.
* `EVAL`: evaluation mode.
* `PREDICT`: inference mode.
"""
TRAIN = 'train'
EVAL = 'eval'
PREDICT = 'infer'
LOSS_METRIC_KEY = 'loss'
AVERAGE_LOSS_METRIC_KEY = 'average_loss'
class EstimatorSpec(
collections.namedtuple('EstimatorSpec', [
'predictions', 'loss', 'train_op', 'eval_metric_ops',
'export_outputs', 'training_chief_hooks', 'training_hooks',
'scaffold', 'evaluation_hooks'
])):
"""Ops and objects returned from a `model_fn` and passed to `Estimator`.
`EstimatorSpec` fully defines the model to be run by `Estimator`.
"""
def __new__(cls,
mode,
predictions=None,
loss=None,
train_op=None,
eval_metric_ops=None,
export_outputs=None,
training_chief_hooks=None,
training_hooks=None,
scaffold=None,
evaluation_hooks=None):
"""Creates a validated `EstimatorSpec` instance.
Depending on the value of `mode`, different arguments are required. Namely
* For `mode == ModeKeys.TRAIN`: required fields are `loss` and `train_op`.
* For `mode == ModeKeys.EVAL`: required field is `loss`.
* For `mode == ModeKeys.PREDICT`: required fields are `predictions`.
model_fn can populate all arguments independent of mode. In this case, some
    arguments will be ignored by `Estimator`. E.g. `train_op` will be ignored
in eval and infer modes. Example:
```python
def my_model_fn(mode, features, labels):
predictions = ...
loss = ...
      train_op = ...
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op)
```
Alternatively, model_fn can just populate the arguments appropriate to the
given mode. Example:
```python
def my_model_fn(mode, features, labels):
if (mode == tf.estimator.ModeKeys.TRAIN or
mode == tf.estimator.ModeKeys.EVAL):
loss = ...
else:
loss = None
if mode == tf.estimator.ModeKeys.TRAIN:
train_op = ...
else:
train_op = None
if mode == tf.estimator.ModeKeys.PREDICT:
predictions = ...
else:
predictions = None
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op)
```
Args:
mode: A `ModeKeys`. Specifies if this is training, evaluation or
prediction.
predictions: Predictions `Tensor` or dict of `Tensor`.
loss: Training loss `Tensor`. Must be either scalar, or with shape `[1]`.
train_op: Op for the training step.
eval_metric_ops: Dict of metric results keyed by name. The values of the
dict are the results of calling a metric function, namely a
`(metric_tensor, update_op)` tuple.
export_outputs: Describes the output signatures to be exported to
`SavedModel` and used during serving.
A dict `{name: output}` where:
* name: An arbitrary name for this output.
* output: an `ExportOutput` object such as `ClassificationOutput`,
`RegressionOutput`, or `PredictOutput`.
Single-headed models only need to specify one entry in this dictionary.
Multi-headed models should specify one entry for each head, one of
which must be named using
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY.
training_chief_hooks: Iterable of `tf.train.SessionRunHook` objects to
run on the chief worker during training.
training_hooks: Iterable of `tf.train.SessionRunHook` objects to run
on all workers during training.
scaffold: A `tf.train.Scaffold` object that can be used to set
initialization, saver, and more to be used in training.
evaluation_hooks: Iterable of `tf.train.SessionRunHook` objects to
run during evaluation.
Returns:
A validated `EstimatorSpec` object.
Raises:
ValueError: If validation fails.
TypeError: If any of the arguments is not the expected type.
"""
# Validate train_op.
if train_op is None:
if mode == ModeKeys.TRAIN:
raise ValueError('Missing train_op.')
else:
_check_is_tensor_or_operation(train_op, 'train_op')
# Validate loss.
if loss is None:
if mode in (ModeKeys.TRAIN, ModeKeys.EVAL):
raise ValueError('Missing loss.')
else:
loss = _check_is_tensor(loss, 'loss')
loss_shape = loss.get_shape()
if loss_shape.num_elements() not in (None, 1):
raise ValueError('Loss must be scalar, given: {}'.format(loss))
if not loss_shape.is_compatible_with(tensor_shape.scalar()):
loss = array_ops.reshape(loss, [])
# Validate predictions.
if predictions is None:
if mode == ModeKeys.PREDICT:
raise ValueError('Missing predictions.')
predictions = {}
else:
if isinstance(predictions, dict):
predictions = {
k: _check_is_tensor(v, 'predictions[{}]'.format(k))
for k, v in six.iteritems(predictions)
}
else:
predictions = _check_is_tensor(predictions, 'predictions')
# Validate eval_metric_ops.
if eval_metric_ops is None:
eval_metric_ops = {}
else:
if not isinstance(eval_metric_ops, dict):
raise TypeError(
'eval_metric_ops must be a dict, given: {}'.format(eval_metric_ops))
for key, metric_value_and_update in six.iteritems(eval_metric_ops):
if (not isinstance(metric_value_and_update, tuple) or
len(metric_value_and_update) != 2):
raise TypeError(
'Values of eval_metric_ops must be (metric_value, update_op) '
'tuples, given: {} for key: {}'.format(
metric_value_and_update, key))
metric_value, metric_update = metric_value_and_update
for metric_value_member in nest.flatten(metric_value):
# Allow (possibly nested) tuples for metric values, but require that
# each of them be Tensors or Operations.
_check_is_tensor_or_operation(metric_value_member,
'eval_metric_ops[{}]'.format(key))
_check_is_tensor_or_operation(metric_update,
'eval_metric_ops[{}]'.format(key))
# Validate export_outputs.
if export_outputs is not None:
if not isinstance(export_outputs, dict):
raise TypeError('export_outputs must be dict, given: {}'.format(
export_outputs))
for v in six.itervalues(export_outputs):
if not isinstance(v, ExportOutput):
|
Chedi/airflow
|
airflow/contrib/operators/bigquery_operator.py
|
Python
|
apache-2.0
| 2,155
| 0.003712
|
import logging
from airflow.contrib.hooks.bigquery_hook import BigQueryHook
from airflow.models import BaseOperator
from airflow.utils import apply_defaults
class BigQueryOperator(BaseOperator):
"""
Executes BigQuery SQL queries in a specific BigQuery database
"""
template_fields = ('bql', 'destination_dataset_table')
template_ext = ('.sql',)
ui_color = '#e4f0e8'
@apply_defaults
def __init__(self,
bql,
destination_dataset_table = False,
write_disposition = 'WRITE_EMPTY',
bigquery_conn_id='bigquery_default',
delegate_to=None,
*args,
**kwargs):
"""
Create a new BigQueryOperator.
:param bql: the sql code to be executed
:type bql: Can receive a str representing a sql statement,
a list of str (sql statements), or reference to a template file.
            Template references are recognized by str ending in '.sql'
:param destination_dataset_table: A dotted dataset.table that, if set,
will store the results of the query.
:type destination_dataset_table: string
        :param bigquery_conn_id: reference to a specific BigQuery hook.
:type bigquery_conn_id: string
:param delegate_to: The account to impersonate, if any.
For this to work, the service account making the request must have domain-wide delegation enabled.
:type delegate_to: string
"""
super(BigQueryOperator, self).__init__(*args, **kwargs)
self.bql = bql
self.destination_dataset_table = destination_dataset_table
self.write_disposition = write_disposition
self.bigquery_conn_id = bigquery_conn_id
self.delegate_to = delegate_to
def execute(self, context):
logging.info('Executing: %s', str(self.bql))
hook = BigQueryHook(bigquery_conn_id=self.bigquery_conn_id, delegate_to=self.delegate_to)
conn = hook.get_conn()
cursor = conn.cursor()
cursor.run_query(self.bql, self.destination_dataset_table, self.write_disposition)
|
arenadata/ambari
|
dev-support/docker/docker/bin/ambaribuild.py
|
Python
|
apache-2.0
| 8,584
| 0.033667
|
#!/usr/bin/python
# coding: utf-8
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess, time, sys
import json
import datetime
from optparse import OptionParser
SKIP_TEST="-DskipTests"
AMBARI_AUTH_HEADERS = "--header 'Authorization:Basic YWRtaW46YWRtaW4=' --header 'X-Requested-By: PIVOTAL'"
AMBARI_BUILD_DOCKER_ROOT = "/tmp/ambari-build-docker"
NO_EXIT_SLEEP_TIME=60
RETRY_MAX=20
def git_deep_cleaning():
proc = subprocess.Popen("git clean -xdf",
shell=True,
cwd="/tmp/ambari")
return proc.wait()
def ambariUnitTest():
proc = subprocess.Popen("mvn -fae clean install",
shell=True,
cwd="/tmp/ambari")
return proc.wait()
def buildAmbari(stack_distribution, supplemental_distribution=None):
stack_distribution_param = ("-Dstack.distribution=" + stack_distribution) if stack_distribution is not None else ""
supplemental_distribution_param = ("-Dsupplemental.distribution=" + supplemental_distribution) if supplemental_distribution is not None else ""
proc = subprocess.Popen("mvn -B clean install package rpm:rpm -Dmaven.clover.skip=true -Dfindbugs.skip=true "
+ SKIP_TEST + " "
+ stack_distribution_param + " "
+ supplemental_distribution_param + " "
+ " -Dpython.ver=\"python >= 2.6\"",
shell=True,
cwd="/tmp/ambari")
return proc.wait()
def install_ambari_server():
proc = subprocess.Popen("sudo yum install -y ambari-server-*.x86_64.rpm",
shell=True,
cwd="/tmp/ambari/ambari-server/target/rpm/ambari-server/RPMS/x86_64")
return proc.wait()
def install_ambari_agent():
proc = subprocess.Popen("sudo yum install -y ambari-agent-*.x86_64.rpm",
shell=True,
cwd="/tmp/ambari/ambari-agent/target/rpm/ambari-agent/RPMS/x86_64")
return proc.wait()
def setup_ambari_server():
proc = subprocess.Popen("echo -e '\n\n\n\n' | sudo ambari-server setup",
shell=True)
return proc.wait()
def start_ambari_server(debug=False):
proc = subprocess.Popen("sudo ambari-server start" + (" --debug" if debug else ""),
shell=True)
return proc.wait()
def start_dependant_services():
retcode = 0
proc = subprocess.Popen("sudo service sshd start", shell=True)
retcode += proc.wait()
proc = subprocess.Popen("sudo service ntpd start", shell=True)
retcode += proc.wait()
return retcode
def configure_ambari_agent():
proc = subprocess.Popen("hostname -f", stdout=subprocess.PIPE, shell=True)
hostname = proc.stdout.read().rstrip()
proc = subprocess.Popen("sudo sed -i 's/hostname=localhost/hostname=" + hostname + "/g' /etc/ambari-agent/conf/ambari-agent.ini",
shell=True)
return proc.wait()
def start_ambari_agent(wait_until_registered = True):
retcode = 0
proc = subprocess.Popen("service ambari-agent start",
shell=True)
retcode += proc.wait()
if wait_until_registered:
if not wait_until_ambari_agent_registered():
print "ERROR: ambari-agent was not registered."
sys.exit(1)
return retcode
def wait_until_ambari_agent_registered():
'''
return True if ambari agent is found registered.
return False if timeout
'''
count = 0
while count < RETRY_MAX:
count += 1
proc = subprocess.Popen("curl " +
"http://localhost:8080/api/v1/hosts " +
AMBARI_AUTH_HEADERS,
stdout=subprocess.PIPE,
shell=True)
hosts_result_string = proc.stdout.read()
hosts_result_json = json.loads(hosts_result_string)
if len(hosts_result_json["items"]) != 0:
return True
time.sleep(5)
return False
def post_blueprint():
proc = subprocess.Popen("curl -X POST -D - " +
"-d @single-node-HDP-2.1-blueprint1.json http://localhost:8080/api/v1/blueprints/myblueprint1 " +
AMBARI_AUTH_HEADERS ,
cwd=AMBARI_BUILD_DOCKER_ROOT + "/blueprints",
shell=True)
return proc.wait()
def create_cluster():
proc = subprocess.Popen("curl -X POST -D - " +
"-d @single-node-hostmapping1.json http://localhost:8080/api/v1/clusters/mycluster1 " +
AMBARI_AUTH_HEADERS ,
cwd=AMBARI_BUILD_DOCKER_ROOT + "/blueprints",
shell=True)
return proc.wait()
# Loop so the Docker container does not exit
def no_exit():
print ""
print "loop to not to exit docker container..."
print ""
while True:
time.sleep(NO_EXIT_SLEEP_TIME)
class ParseResult:
is_deep_clean = False
is_rebuild = False
stack_distribution = None
supplemental_distribution = None
is_test = False
is_install_server = False
is_install_agent = False
is_deploy = False
is_server_debug = False
def parse(argv):
result = ParseResult()
if len(argv) >=2:
parser = OptionParser()
parser.add_option("-c", "--clean",
dest="is_deep_clean",
action="store_true",
default=False,
help="if this option
|
is set, git clean -xdf is executed for the ambari local git re
|
po")
parser.add_option("-b", "--rebuild",
dest="is_rebuild",
action="store_true",
default=False,
help="set this flag if you want to rebuild Ambari code")
parser.add_option("-s", "--stack_distribution",
dest="stack_distribution",
help="set a stack distribution. [HDP|PHD|BIGTOP]. Make sure -b is also set when you set a stack distribution")
parser.add_option("-x", "--supplemental_distribution",
dest="supplemental_distribution",
help="set a supplement stack distribution in addition to the primary one. [BigInsights]. Make sure -b is also set when you set a supplement stack distribution")
parser.add_option("-d", "--server_debug",
dest="is_server_debug",
action="store_true",
default=False,
help="set a debug option for ambari-server")
(options, args) = parser.parse_args(argv[1:])
if options.is_deep_clean:
result.is_deep_clean = True
if options.is_rebuild:
result.is_rebuild = True
if options.stack_distribution:
result.stack_distribution = options.stack_distribution
if options.supplemental_distribution:
result.supplemental_distribution = options.supplemental_distribution
if options.is_server_debug:
result.is_server_debug = True
if argv[0] == "test":
result.is_test = True
if argv[0] == "server":
result.is_install_server = True
if argv[0] == "agent":
result.is_install_server = True
result.is_install_agent = True
if argv[0] == "deploy":
result.is_install_server = True
result.is_install_agent = True
result.is_deploy = True
return result
if __name__ == "__main__":
if len(sys.argv) == 1:
print "specify one of test, server, agent or deploy"
sys.exit(1)
start = datetime.datetime.utcnow()
# test: execute unit test
# server: install ambari-server
# with or without rebuild
# agent: install ambari-server and ambari-agent
# with or without rebuild
# deploy: install ambari-server, ambari-agent and deploy Hadoop
# with or without rebuild
parsed_args = parse(sys.argv[1:])
if parsed_args.is_deep_clean:
retcode = git_deep_cleaning()
if retcode != 0: sys.exit(retcode)
if parsed_args.is_test:
retcode = ambariUnitTest()
end = datetime.datetime.utcnow()
print ""
print "Duration: " + str((end-start).seconds) + " seconds"
sys.exit(retcode)
if parsed_args.is_rebuild:
retcode = buildAmbari(parsed_args.stack_distribution, supplemental_distribution=parsed_args.supplemental_distribution)
if retcode != 0: sys.exit(retcode)
if parsed_args.is_install_server:
retcode = install_ambari_server()
if retcode != 0: sys.exit(retcode)
retcode = setup_ambari_server()
if retcode != 0: sys.exit(retcode)
retcode = start_ambari_server(parsed_args.is_server_debug)
if retcode != 0: sys.exit(retcode)
retcode = start_dependant_services()
if retcode != 0: sys.exit(retcode)
if parsed_args.is_install_agent:
retcode = install_ambari_agent()
if retcode != 0: sys.exit(retcode)
retcode = configure_ambari_agent()
if retcode != 0:
|
ryplo/helpme
|
setup.py
|
Python
|
mit
| 959
| 0
|
#!/usr/bin/env python
from setuptools import setup, find_packages
with open('pypd/version.py') as version_file:
exec(compile(version_file.read(), version_file.name, 'exec'))
options = {
'name': 'pypd',
'version': __version__,
'packages': find_packages(),
'scripts': [],
'description': 'A python client for PagerDuty API',
'author': 'JD Cumpson',
'author_email': '[email protected]',
'maintainer_email': '[email protected]',
'license': 'MIT',
'url': 'https://github.com/PagerDuty/pypd',
'download_url': 'https://github.com/PagerDuty/pypd/archive/master.tar.gz',
'classifiers': [
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
        'Topic :: Software Development :: Libraries :: Python Modules',
],
'install_requires': ['ujson', 'requests'],
'tests_require': [],
'cmdclass': {}
}
setup(**options)
|
hmendozap/auto-sklearn
|
test/test_pipeline/components/classification/test_extra_trees.py
|
Python
|
bsd-3-clause
| 3,415
| 0.000586
|
import unittest
from autosklearn.pipeline.components.classification.extra_trees import \
ExtraTreesClassifier
from autosklearn.pipeline.util import _test_classifier, \
_test_classifier_iterative_fit, _test_classifier_predict_proba
import numpy as np
import sklearn.metrics
import sklearn.ensemble
class ExtraTreesComponentTest(unittest.TestCase):
def test_default_configuration(self):
for i in range(10):
predictions, targets = \
_test_classifier(ExtraTreesClassifier)
self.assertAlmostEqual(0.95999999999999996,
sklearn.metrics.accuracy_score(targets, predictions))
def test_default_configuration_predict_proba(self):
for i in range(10):
            predictions, targets = \
_test_classifier_predict_proba(ExtraTreesClassifier)
self.assertAlmostEqual(0.12052046298054782,
sklearn.metrics.log_loss(
targets, predictions))
def test_default_configuration_sparse(self):
for i in range(10):
predictions, targets = \
                _test_classifier(ExtraTreesClassifier, sparse=True)
self.assertAlmostEqual(0.71999999999999997,
sklearn.metrics.accuracy_score(targets,
predictions))
def test_default_configuration_iterative_fit(self):
for i in range(10):
predictions, targets = \
_test_classifier_iterative_fit(ExtraTreesClassifier)
self.assertAlmostEqual(0.93999999999999995,
sklearn.metrics.accuracy_score(targets,
predictions))
def test_default_configuration_binary(self):
for i in range(10):
predictions, targets = \
_test_classifier(ExtraTreesClassifier, make_binary=True)
self.assertAlmostEqual(1,
sklearn.metrics.accuracy_score(targets,
predictions))
def test_default_configuration_multilabel(self):
for i in range(10):
predictions, targets = \
_test_classifier(ExtraTreesClassifier, make_multilabel=True)
self.assertAlmostEqual(0.97060428849902536,
sklearn.metrics.average_precision_score(
targets, predictions))
def test_default_configuration_predict_proba_multilabel(self):
for i in range(10):
predictions, targets = \
_test_classifier_predict_proba(ExtraTreesClassifier,
make_multilabel=True)
self.assertEqual(predictions.shape, ((50, 3)))
self.assertAlmostEqual(0.98976738180772728,
sklearn.metrics.average_precision_score(
targets, predictions))
def test_target_algorithm_multioutput_multiclass_support(self):
cls = sklearn.ensemble.ExtraTreesClassifier()
X = np.random.random((10, 10))
y = np.random.randint(0, 1, size=(10, 10))
# Running this without an exception is the purpose of this test!
cls.fit(X, y)
|
tallstreet/Whoosh-AppEngine
|
tests/test_indexing.py
|
Python
|
apache-2.0
| 11,352
| 0.019204
|
import unittest
from os import mkdir
from os.path import exists
from shutil import rmtree
from whoosh import fields, index, qparser, store, writing
class TestIndexing(unittest.TestCase):
def make_index(self, dirname, schema):
if not exists(dirname):
mkdir(dirname)
st = store.FileStorage(dirname)
ix = index.Index(st, schema, create = True)
return ix
def destroy_index(self, dirname):
if exists(dirname):
rmtree(dirname)
def test_creation(self):
s = fields.Schema()
s.add("content", fields.TEXT(phrase = True))
s.add("title", fields.TEXT(stored = True))
s.add("path", fields.ID(stored = True))
s.add("tags", fields.KEYWORD(stored = True))
s.add("quick", fields.NGRAM)
s.add("note", fields.STORED)
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = writing.IndexWriter(ix)
w.add_document(title = u"First", content = u"This is the first document", path = u"/a",
tags = u"first second third", quick = u"First document", note = u"This is the first document")
w.start_document()
w.add_field("content", u"Let's try this again")
w.add_field("title", u"Second")
w.add_field("path", u"/b")
w.add_field("tags", u"Uno Dos Tres")
w.add_field("quick", u"Second document")
w.add_field("note", u"This is the second document")
w.end_document()
w.commit()
def test_integrity(self):
s = fields.Schema(name = fields.TEXT, value = fields.TEXT)
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = writing.IndexWriter(ix)
w.add_document(name = u"Yellow brown", value = u"Blue red green purple?")
w.add_document(name = u"Alpha beta", value = u"Gamma delta epsilon omega.")
w.commit()
w = writing.IndexWriter(ix)
w.add_document(name = u"One two", value = u"Three four five.")
w.commit()
tr = ix.term_reader()
self.assertEqual(ix.doc_count_all(), 3)
self.assertEqual(list(tr.lexicon("name")), ["alpha", "beta", "brown", "one", "two", "yellow"])
def test_lengths(self):
s = fields.Schema(f1 = fields.KEYWORD(stored = True, scorable = True),
f2 = fields.KEYWORD(stored = True, scorable = True))
ix = self.make_index("testindex", s)
try:
w = ix.writer()
tokens = u"ABCDEFG"
from itertools import cycle, islice
lengths = [10, 20, 2, 102, 45, 3, 420, 2]
for length in lengths:
w.add_document(f2 = u" ".join(islice(cycle(tokens), length)))
w.commit()
dr = ix.doc_reader()
ls1 = [dr.doc_field_length(i, "f1") for i in xrange(0, len(lengths))]
ls2 = [dr.doc_field_length(i, "f2") for i in xrange(0, len(lengths))]
self.assertEqual(ls1, [0]*len(lengths))
self.assertEqual(ls2, lengths)
dr.close()
ix.close()
finally:
self.destroy_index("testindex")
def test_lengths_ram(self):
s = fields.Schema(f1 = fields.KEYWORD(stored = True, scorable = True),
f2 = fields.KEYWORD(stored = True, scorable = True))
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = writing.IndexWriter(ix)
w.add_document(f1 = u"A B C D E", f2 = u"X Y Z")
w.add_document(f1 = u"B B B B C D D Q", f2 = u"Q R S T")
w.add_document(f1 = u"D E F", f2 = u"U V A B C D E")
w.commit()
dr = ix.doc_reader()
ls1 = [dr.doc_field_length(i, "f1") for i in xrange(0, 3)]
ls2 = [dr.doc_field_length(i, "f2") for i in xrange(0, 3)]
self.assertEqual(dr[0]["f1"], "A B C D E")
self.assertEqual(dr.doc_field_length(0, "f1"), 5)
self.assertEqual(dr.doc_field_length(1, "f1"), 8)
self.assertEqual(dr.doc_field_length(2, "f1"), 3)
self.assertEqual(dr.doc_field_length(0, "f2"), 3)
self.assertEqual(dr.doc_field_length(1, "f2"), 4)
self.assertEqual(dr.doc_field_length(2, "f2"), 7)
self.assertEqual(ix.field_length("f1"), 16)
self.assertEqual(ix.field_length("f2"), 14)
def test_merged_lengths(self):
s = fields.Schema(f1 = fields.KEYWORD(stored = True, scorable = True),
f2 = fields.KEYWORD(stored = True, scorable = True))
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = writing.IndexWriter(ix)
w.add_document(f1 = u"A B C", f2 = u"X")
w.add_document(f1 = u"B C D E", f2 = u"Y Z")
w.commit()
w = writing.IndexWriter(ix)
w.add_document(f1 = u"A", f2 = u"B C D E X Y")
w.add_document(f1 = u"B C", f2 = u"X")
w.commit(writing.NO_MERGE)
w = writing.IndexWriter(ix)
w.add_document(f1 = u"A B X Y Z", f2 = u"B C")
w.add_document(f1 = u"Y X", f2 = u"A B")
w.commit(writing.NO_MERGE)
dr = ix.doc_reader()
self.assertEqual(dr[0]["f1"], u"A B C")
self.assertEqual(dr.doc_field_length(0, "f1"), 3)
self.assertEqual(dr.doc_field_length(2, "f2"), 6)
self.assertEqual(dr.doc_field_length(4, "f1"), 5)
def test_frequency_keyword(self):
s = fields.Schema(content = fields.KEYWORD)
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = ix.writer()
w.add_document(content = u"A B C D E")
w.add_document(content = u"B B B B C D D")
w.add_document(content = u"D E F")
w.commit()
tr = ix.term_reader()
self.assertEqual(tr.doc_frequency("content", u"B"), 2)
self.assertEqual(tr.frequency("content", u"B"), 5)
self.assertEqual(tr.doc_frequency("content", u"E"), 2)
self.assertEqual(tr.frequency("content", u"E"), 2)
self.assertEqual(tr.doc_frequency("content", u"A"), 1)
self.assertEqual(tr.frequency("content", u"A"), 1)
self.assertEqual(tr.doc_frequency("content", u"D"), 3)
self.assertEqual(tr.frequency("content", u"D"), 4)
self.assertEqual(tr.doc_frequency("content", u"F"), 1)
self.assertEqual(tr.frequency("content", u"F"), 1)
self.assertEqual(tr.doc_frequency("content", u"Z"), 0)
self.assertEqual(tr.frequency("content", u"Z"), 0)
self.assertEqual(list(tr), [(0, u"A", 1, 1), (0, u"B", 2, 5),
(0, u"C", 2, 2), (0, u"D", 3, 4),
(0, u"E", 2, 2), (0, u"F", 1, 1)])
def test_frequency_text(self):
s = fields.Schema(content = fields.KEYWORD)
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = ix.writer()
w.add_document(content = u"alfa bravo charlie delta echo")
w.add_document(content = u"bravo bravo bravo bravo charlie delta delta")
w.add_document(content = u"delta echo foxtrot")
w.commit()
tr = ix.term_reader()
self.assertEqual(tr.doc_frequency("content", u"bravo"), 2)
self.assertEqual(tr.frequency("content", u"bravo"), 5)
self.assertEqual(tr.doc_frequency("content", u"echo"), 2)
self.assertEqual(tr.frequency("content", u"echo"), 2)
self.assertEqual(tr.doc_frequency("content", u"alfa"), 1)
self.assertEqual(tr.frequency("content", u"alfa"), 1)
self.assertEqual(tr.doc_frequency("content", u"delta"), 3)
self.assertEqual(tr.frequency("content", u"delta"), 4)
self.assertEqual(tr.doc_frequency("content", u"foxtrot"), 1)
self.assertEqual(tr.frequency("content", u"foxtrot"), 1)
self.assertEqual(tr.doc_frequency("content", u"zulu"), 0)
self.assertEqual(tr.frequency("content", u"zulu"), 0)
self.assertEqual(list(tr), [(0, u"alfa", 1, 1), (0, u"bravo
|
dcosentino/edx-platform
|
lms/djangoapps/oai/settings.py
|
Python
|
agpl-3.0
| 1,073
| 0.008388
|
from django.conf import settings
from datetime import timedelta
# Endpoint settings
OAI_BASE_URL="http"
if settings.HTTPS == "on":
OAI_BASE_URL="https"
OAI_BASE_URL=OAI_BASE_URL+"://"+settings.SITE_NAME
REPOSITORY_NAME = settings.PLATFORM_NAME
ADMIN_EMAIL = settings.TECH_SUPPORT_EMAIL
OAI_ENDPOINT_NAME = 'oai'
RESULTS_LIMIT = 100
RESUMPTION_TOKEN_VALIDITY = timedelta(hours=6)
METADATA_FORMAT = 'oai_dc'
OWN_SET_PREFIX = settings.PLATFORM_NAME
DISABLE_PRINT_OWN_SET_PREFIX= True
RESUMPTION_TOKEN_SALT = 'change_me' # salt used to generate resumption tokens
if hasattr(settings, 'OAI_SETTINGS'):
OAI_ENDPOINT_NAME = settings.OAI_SETTINGS.get('OAI_ENDPOINT_NAME')
RESULTS_LIMIT = settings.OAI_SETTINGS.get('RESULTS_LIMIT') or RESULTS_LIMIT
METADATA_FORMAT = settings.OAI_SETTINGS.get('METADATA_FORMAT') or METADATA_FORMAT
RESUMPTION_TOKEN_SALT = settings.OAI_SETTINGS.get('RESUMPTION_TOKEN_SALT') or RESUMPTION_TOKEN_SALT
DISABLE_PRINT_OWN_SET_PREFIX = settings.OAI_SETTINGS.get('DISABLE_PRINT_OWN_SET_PREFIX') or DISABLE_PRINT_OWN_SET_PREFIX
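# Illustrative sketch (not part of the original module): the overrides above only
# apply when the Django settings define an OAI_SETTINGS dict. A minimal,
# hypothetical example for a deployment's settings file:
#
#     OAI_SETTINGS = {
#         'OAI_ENDPOINT_NAME': 'oai',
#         'RESULTS_LIMIT': 50,
#         'METADATA_FORMAT': 'oai_dc',
#         'RESUMPTION_TOKEN_SALT': 'use-a-long-random-value',
#         'DISABLE_PRINT_OWN_SET_PREFIX': True,
#     }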
|
akalipetis/djoser
|
djoser/compat.py
|
Python
|
mit
| 267
| 0
|
from djoser.conf import settings
__all__ = ['settings']
def get_user_email(user):
    email_field_name = get_user_email_field_name(user)
return getattr(user, email_field_name, None)
def get_user_email_field_name(user):
return user.get_email_field_name()
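# Illustrative usage sketch (not part of djoser): get_user_email() resolves the
# email attribute through the model's own get_email_field_name(), so it also
# works for custom user models whose email field is not named "email".
# Assuming a configured Django project:
#
#     from django.contrib.auth import get_user_model
#     user = get_user_model()(email="[email protected]")
#     assert get_user_email(user) == "[email protected]"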
|
MikeDMorgan/scRNAseq
|
pipeline_docs/pipeline_scRnaseq/__init__.py
|
Python
|
mit
| 24
| 0
|
from trackers import *
|
emesene/emesene
|
emesene/gui/qt4ui/TrayIcon.py
|
Python
|
gpl-3.0
| 5,102
| 0
|
# -*- coding: utf-8 -*-
# This file is part of emesene.
#
# emesene is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# emesene is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with emesene; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
''' This module contains the tray icon's class'''
import sys
import PyQt4.QtGui as QtGui
import gui
import extension
from e3 import status
class TrayIcon (QtGui.QSystemTrayIcon, gui.BaseTray):
'''A class that implements the tray icon of emesene for Qt4'''
NAME = 'TrayIcon'
DESCRIPTION = 'Qt4 Tray Icon'
AUTHOR = 'Gabriele "Whisky" Visconti'
WEBSITE = ''
def __init__(self, handler, main_window=None):
'''
constructor
handler -- a e3common.Handler.TrayIconHandler object
'''
gui.BaseTray.__init__(self, handler)
QtGui.QSystemTrayIcon.__init__(self)
self._main_window = main_window
self.menu = None
self._conversations = None
self.setIcon(QtGui.QIcon(gui.theme.image_theme.logo))
self.activated.connect(self._on_tray_icon_clicked)
self.set_login()
# TODO: this is for mac os, and should be changed in the
# future (probably no tray icon at all, just the dock icon)
if sys.platform == 'darwin':
icon = QtGui.QIcon(gui.theme.image_theme.logo)
qt_app = QtGui.QApplication.instance()
qt_app.setWindowIcon(icon)
qt_app.setApplicationName('BHAWH')
else:
self.show()
def set_login(self):
'''Called when the login window is shown. Sets a proper
context menu in the Tray Icon.'''
tray_login_menu_cls = extension.get_default('tray login menu')
self.menu = tray_login_menu_cls(self.handler, self._main_window)
self.setIcon(QtGui.QIcon(gui.theme.image_theme.logo_panel))
self.setToolTip("emesene")
if sys.platform == 'darwin':
QtGui.qt_mac_set_dock_menu(self.menu)
else:
self.setContextMenu(self.menu)
def set_main(self, session):
'''Called when the main window is shown. Stores the contact list
and registers the callback for the status_change_succeed event'''
gui.BaseTray.set_main(self, session)
if self.menu:
self.menu.unsubscribe()
tray_main_menu_cls = extension.get_default('tray main menu')
self.menu = tray_main_menu_cls(self.handler, self._main_window)
self.setToolTip("emesene - " + self.handler.session.account.account)
self._on_status_change_succeed(self.handler.session.account.status)
if sys.platform == 'darwin':
QtGui.qt_mac_set_dock_menu(self.menu)
else:
self.setContextMenu(self.menu)
def set_conversations(self, conversations):
'''Store a reference to the conversation page'''
self._conversations = conversations
def set_visible(self, visible):
'''Changes icon's visibility'''
self.setVisible(visible)
def _on_tray_icon_clicked(self, reason):
'''This slot is called when the user clicks the tray icon.
Toggles main window's visibility'''
if not self._main_window:
return
        if reason == QtGui.QSystemTrayIcon.Trigger:
if not self._main_window.isVisible():
                self._main_window.show()
self._main_window.activateWindow()
self._main_window.raise_()
else: # visible
if self._main_window.isActiveWindow():
self._main_window.hide()
else:
self._main_window.activateWindow()
self._main_window.raise_()
elif reason == QtGui.QSystemTrayIcon.Context:
if self.menu:
self.menu.show()
def _on_contact_attr_changed(self, *args):
"""
This is called when a contact changes something
"""
self.menu.list._on_contact_change_something(*args)
def _on_status_change_succeed(self, stat):
"""
This is called when status is successfully changed
"""
if stat not in status.ALL or stat == -1:
return
self.setIcon(QtGui.QIcon(
gui.theme.image_theme.status_icons_panel[stat]))
def hide(self):
self.unsubscribe()
QtGui.QSystemTrayIcon.setVisible(self, False)
def unsubscribe(self):
self.disconnect_signals()
if self.menu:
self.menu.unsubscribe()
|
Callek/build-relengapi
|
relengapi/lib/time.py
|
Python
|
mpl-2.0
| 304
| 0
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import datetime
import pytz
def now():
    return datetime.datetime.utcnow().replace(tzinfo=pytz.UTC)
|
CRImier/pyLCI
|
output/drivers/mcp23008.py
|
Python
|
apache-2.0
| 2,598
| 0.007698
|
import smbus
from time import sleep
def delay(time):
sleep(time/1000.0)
def delayMicroseconds(time):
sleep(time/1000000.0)
from hd44780 import HD44780
class Screen(HD44780):
"""A driver for MCP23008-based I2C LCD backpacks. The one tested had "WIDE.HK" written on it."""
def __init__(self, bus=1, addr=0x27, debug=False, **kwargs):
"""Initialises the ``Screen`` object.
Kwargs:
* ``bus``: I2C bus number.
* ``addr``: I2C address of the board.
* ``debug``: enables printing out LCD commands.
* ``**kwargs``: all the other arguments, get passed further to HD44780 constructor
"""
self.bus_num = bus
self.bus = smbus.SMBus(self.bus_num)
if type(addr) in [str, unicode]:
addr = int(addr, 16)
self.addr = addr
self.debug = debug
self.i2c_init()
HD44780.__init__(self, debug=self.debug, **kwargs)
    def i2c_init(self):
"""Inits the MCP23017 IC for desired operation."""
self.setMCPreg(0x05, 0x0c)
self.setMCPreg(0x00, 0x00)
def write_byte(self, byte, char_mode=False):
"""Takes a byte and sends the high nibble, then the low nibble (as per HD44780 doc). Passes ``char_mode`` to ``self.write4bits``."""
if self.debug and not char_mode:
print(hex(byte))
self.write4bits(byte >> 4, char_mode)
self.write4bits(byte & 0x0F, char_mode)
def write4bits(self, data, char_mode=False):
"""Writes a nibble to the display. If ``char_mode`` is set, holds the RS line high."""
if char_mode:
data |= 0x10
self.setMCPreg(0x0a, data)
data ^= 0x80
delayMicroseconds(1.0)
self.setMCPreg(0x0a, data)
data ^= 0x80
delayMicroseconds(1.0)
self.setMCPreg(0x0a, data)
delay(1.0)
def setMCPreg(self, reg, val):
"""Sets the MCP23017 register."""
self.bus.write_byte_data(self.addr, reg, val)
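# Illustrative note (not part of the original driver): write_byte() splits every
# HD44780 byte into two 4-bit transfers, high nibble first. For the "clear
# display" command 0x01:
#
#     0x01 >> 4   == 0x00   # high nibble, sent first
#     0x01 & 0x0F == 0x01   # low nibble, sent second
#
# write4bits() then writes each nibble to GPIO register 0x0a and toggles bit
# 0x80 to strobe the data into the LCD controller.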
if __name__ == "__main__":
screen = Screen(bus=1, addr=0x27, cols=16, rows=2, debug=True, autoscroll=False)
line = "0123456789012345"
if True:
screen.display_data(line, line[::-1])
sleep(1)
screen.display_data(line[::-1], line)
sleep(1)
screen.clear()
|
ChameleonCloud/blazar
|
blazar/db/exceptions.py
|
Python
|
apache-2.0
| 1,705
| 0
|
# Copyright (c) 2014 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
from blazar import exceptions
from blazar.i18n import _
LOG = logging.getLogger(__name__)
class BlazarDBException(exceptions.BlazarException):
msg_fmt = _('An unknown database exception occurred')
class BlazarDBDuplicateEntry(BlazarDBException):
msg_fmt = _('Duplicate entry for %(columns)s in %(model)s model was found')
class BlazarDBNotFound(BlazarDBException):
msg_fmt = _('%(id)s %(model)s was not found')
class BlazarDBInvalidFilter(BlazarDBException):
msg_fmt = _('%(query_filter)s is invalid')
class BlazarDBInvalidFilterOperator(BlazarDBException):
msg_fmt = _('%(filter_operator)s is invalid')
class BlazarDBExtraCapabilitiesNotEnabled(BlazarDBException):
    msg_fmt = _('%(resource_type)s does not have extra capabilities enabled.')
class BlazarDBInvalidExtraCapability(BlazarDBException):
msg_fmt = _('%(property_name)s does not exist for resource type '
'%(resource_type)s.')
class BlazarDBForbiddenExtraCapability(BlazarDBException):
msg_fmt = _('%(property_name)s cannot be set as an extra capability')
|
stamen/fieldpapers
|
decoder/apiutils.py
|
Python
|
gpl-2.0
| 6,689
| 0.007325
|
from urlparse import urljoin
from os.path import dirname, basename
from xml.etree import ElementTree
from mimetypes import guess_type
from StringIO import StringIO
import requests
def update_print(apibase, password, print_id, progress):
"""
"""
params = {'id': print_id}
data = dict(progress=progress, password=password)
res = requests.post(urljoin(apibase, '/update-atlas.php'), params=params, data=data)
    assert res.status_code == 200, 'POST to update-atlas.php resulting in status %s instead of 200' % res.status_code
def finish_print(apibase, password, print_id, print_info):
"""
"""
params = {'id': print_id}
print_info.update(dict(password=password))
res = requests.post(urljoin(apibase, '/finish-atlas.php'), params=params, data=print_info)
    assert res.status_code == 200, 'POST to finish-atlas.php resulting in status %s instead of 200' % res.status_code
def update_scan(apibase, password, scan_id, progress):
"""
"""
params = {'id': scan_id}
data = {'password': password,
'progress': progress}
res = requests.post(urljoin(apibase, '/update-scan.php'), params=params, data=data)
    assert res.status_code == 200, 'POST to update-scan.php resulting in status %s instead of 200' % res.status_code
def finish_scan(apibase, password, scan_id, uploaded_file, print_id, print_page_number, print_href, min_coord, max_coord, geojpeg_bounds):
"""
"""
params = {'id': scan_id}
data = {
'print_id': print_id,
'print_page_number': print_page_number,
'print_href': print_href,
'password': password,
'uploaded_file': uploaded_file,
'has_geotiff': 'yes',
'has_geojpeg': 'yes',
'has_stickers': 'no',
'min_row': min_coord.row, 'max_row': max_coord.row,
'min_column': min_coord.column, 'max_column': max_coord.column,
'min_zoom': min_coord.zoom, 'max_zoom': max_coord.zoom,
'geojpeg_bounds': '%.8f,%.8f,%.8f,%.8f' % geojpeg_bounds
}
res = requests.post(urljoin(apibase, '/finish-scan.php'), params=params, data=data)
    assert res.status_code == 200, 'POST to finish-scan.php resulting in status %s instead of 200' % res.status_code
def fail_scan(apibase, password, scan_id):
"""
"""
params = {'id': scan_id}
data = {'password': password}
res = requests.post(urljoin(apibase, '/fail-scan.php'), params=params, data=data)
# TODO when does this fail? this failing shouldn't be fatal
    assert res.status_code == 200, 'POST to fail-scan.php resulting in status %s instead of 200' % res.status_code
def finish_form(apibase, password, form_id, action_url, http_method, title, fields):
"""
"""
data = dict(password=password, action_url=action_url, http_method=http_method, title=title)
for (index, field) in enumerate(fields):
data['fields[%d][name]' % index] = field['name']
data['fields[%d][label]' % index] = field['label']
data['fields[%d][type]' % index] = field['type']
params = {'id': form_id}
res = requests.post(urljoin(apibase, '/finish-form.php'), params=params, data=data)
    assert res.status_code == 200, 'POST to finish-form.php resulting in status %s instead of 200' % res.status_code
def fail_form(apibase, password, form_id):
"""
"""
params = {'id': form_id}
data = {'password': password}
res = requests.post(urljoin(apibase, '/fail-form.php'), params=params, data=data)
    assert res.status_code == 200, 'POST to fail-form.php resulting in status %s instead of 200' % res.status_code
def upload(params, file_path, file_contents, apibase, password):
""" Upload a file via the API append.php form input provision thingie.
This allows uploads to either target S3 or the app itself.
"""
params.update(dict(password=password,
dirname=dirname(file_path),
mimetype=(guess_type(file_path)[0] or '')))
res = requests.get(urljoin(apibase, '/append.php'), params=params, headers=dict(Accept='application/paperwalking+xml'))
form = ElementTree.parse(StringIO(res.text)).getroot()
if form.tag == 'form':
form_action = form.attrib['action']
inputs = form.findall('.//input')
fields = {}
files = {}
for input in inputs:
            if input.attrib['type'] != 'file' and 'name' in input.attrib:
                fields[input.attrib['name']] = input.attrib['value']
elif input.attrib['type'] == 'file':
files[input.attrib['name']] = (basename(file_path), file_contents)
if len(files) == 1:
base_url = [el.text for el in form.findall(".//*") if el.get('id', '') == 'base-url'][0]
resource_url = urljoin(base_url, file_path)
res = requests.post(urljoin(apibase, form_action), data=fields, files=files)
assert res.status_code in range(200, 308), 'POST of file to %s resulting in status %s instead of 2XX/3XX' % (form_action, res.status_code)
return resource_url
raise Exception('Did not find a form with a file input, why is that?')
def append_print_file(print_id, file_path, file_contents, apibase, password):
""" Upload a print.
"""
params = {
"print": print_id,
}
return upload(params, file_path, file_contents, apibase, password)
def append_scan_file(scan_id, file_path, file_contents, apibase, password):
""" Upload a scan.
"""
params = {
"scan": scan_id,
}
return upload(params, file_path, file_contents, apibase, password)
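# Illustrative usage sketch (not part of the original module): both append
# helpers only fill in the query parameter and delegate to upload(). The API
# base, password, IDs and file names below are hypothetical:
#
#     url = append_scan_file('scan-123', 'scans/scan-123/preview.jpg',
#                            open('preview.jpg', 'rb').read(),
#                            'https://fieldpapers.example.org/', 's3cret')
#     # `url` is the resource URL computed by upload() from the form's base-url.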
def get_print_info(print_url):
"""
"""
print print_url
res = requests.get(print_url, headers=dict(Accept='application/paperwalking+xml'))
if res.status_code == 404:
raise Exception("No such atlas: %s" % print_url)
print_ = ElementTree.parse(StringIO(res.text)).getroot()
print_id = print_.attrib['id']
paper = print_.find('paper').attrib['size']
orientation = print_.find('paper').attrib['orientation']
layout = print_.find('paper').attrib.get('layout', 'full-page')
north = float(print_.find('bounds').find('north').text)
south = float(print_.find('bounds').find('south').text)
east = float(print_.find('bounds').find('east').text)
west = float(print_.find('bounds').find('west').text)
print print_id, north, west, south, east, paper, orientation, layout
return print_id, north, west, south, east, paper, orientation, layout
|
MingLin-home/Ming_slim
|
preprocessing/cifarnet_preprocessing.py
|
Python
|
gpl-3.0
| 4,252
| 0.007291
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Provides utilities to preprocess images in CIFAR-10.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
_PADDING = 4
slim = tf.contrib.slim
def preprocess_for_train(image,
output_height,
output_width,
padding=_PADDING):
"""Preprocesses the given image for training.
Note that the actual resizing scale is sampled from
[`resize_size_min`, `resize_size_max`].
Args:
image: A `Tensor` representing an image of arbitrary size.
output_height: The height of the image after preprocessing.
output_width: The width of the image after preprocessing.
    padding: The amount of padding before and after each dimension of the image.
Returns:
A preprocessed image.
"""
tf.summary.image('image', tf.expand_dims(image, 0))
# Transform the image to floats.
image = tf.to_float(image)
if padding > 0:
image = tf.pad(image, [[padding, padding], [padding, padding], [0, 0]])
# image = tf.image.resize_images(image,(output_height,output_width))
# Randomly crop a [height, width] section of the image.
distorted_image = tf.random_crop(image,
[32, 32, 3])
# Randomly flip the image horizontally.
  distorted_image = tf.image.random_flip_left_right(distorted_image)
tf.summary.image('distorted_image', tf.expand_dims(distorted_image, 0))
# Because these operations are not commutative, consider randomizing
  # the order of their operation.
distorted_image = tf.image.random_brightness(distorted_image,
max_delta=63)
  distorted_image = tf.image.random_contrast(distorted_image,
lower=0.2, upper=1.8)
# Subtract off the mean and divide by the variance of the pixels.
return tf.image.per_image_standardization(distorted_image)
def preprocess_for_eval(image, output_height, output_width):
"""Preprocesses the given image for evaluation.
Args:
image: A `Tensor` representing an image of arbitrary size.
output_height: The height of the image after preprocessing.
output_width: The width of the image after preprocessing.
Returns:
A preprocessed image.
"""
tf.summary.image('image', tf.expand_dims(image, 0))
# Transform the image to floats.
image = tf.to_float(image)
# image = tf.image.resize_images(image, (output_height, output_width))
# Resize and crop if needed.
resized_image = tf.image.resize_image_with_crop_or_pad(image,
output_width,
output_height)
tf.summary.image('resized_image', tf.expand_dims(resized_image, 0))
# Subtract off the mean and divide by the variance of the pixels.
return tf.image.per_image_standardization(resized_image)
def preprocess_image(image, output_height, output_width, is_training=False):
"""Preprocesses the given image.
Args:
image: A `Tensor` representing an image of arbitrary size.
output_height: The height of the image after preprocessing.
output_width: The width of the image after preprocessing.
is_training: `True` if we're preprocessing the image for training and
`False` otherwise.
Returns:
A preprocessed image.
"""
if is_training:
return preprocess_for_train(image, output_height, output_width)
else:
return preprocess_for_eval(image, output_height, output_width)
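# Illustrative usage sketch (not part of the original module), assuming the
# TF 1.x graph-mode API this file targets:
#
#     raw = tf.zeros([32, 32, 3], dtype=tf.uint8)   # stand-in for a decoded image
#     train_img = preprocess_image(raw, 32, 32, is_training=True)
#     eval_img = preprocess_image(raw, 32, 32, is_training=False)
#
# Both results are float tensors standardized per image by
# tf.image.per_image_standardization().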
|
nicfit/nicfit.py
|
examples/asyncio_example.py
|
Python
|
mit
| 371
| 0.005391
|
#!/usr/bin/env python
import time
from nicfit.aio import Application
async def _main(args):
print(args)
print("Sleeping 2...")
time.sleep(2)
print("Sleeping 0...")
return 0
def atexit():
print("atexit")
app = Application(_main, atexit=atexit)
app.arg_parser.add_argument("--example", help="Example cli")
app.run()
assert not "will not execute"
|
steveandroulakis/mytardis
|
tardis/tardis_portal/tests/test_rmexperiment.py
|
Python
|
bsd-3-clause
| 4,827
| 0.008494
|
from compare import expect
from django.contrib.auth.models import User
from django.test import TestCase
from django.test.client import Client
from django.core.management import call_command
import sys
from tardis.tardis_portal.models import \
Experiment, Dataset, Dataset_File, ExperimentACL, License, UserProfile, \
ExperimentParameterSet, ExperimentParameter, DatasetParameterSet, DatafileParameterSet
def _create_test_user():
user_ = User(username='tom',
first_name='Thomas',
last_name='Atkins',
email='[email protected]')
user_.save()
UserProfile(user=user_).save()
return user_
def _create_license():
license_ = License(name='Creative Commons Attribution-NoDerivs 2.5 Australia',
url='http://creativecommons.org/licenses/by-nd/2.5/au/',
internal_description='CC BY 2.5 AU',
allows_distribution=True)
license_.save()
return license_
def _create_test_experiment(user, license_):
experiment = Experiment(title='Norwegian Blue',
description='Parrot + 40kV',
created_by=user)
experiment.public_access = Experiment.PUBLIC_ACCESS_FULL
experiment.license = license_
experiment.save()
experiment.author_experiment_set.create(order=0,
author="John Cleese",
url="http://nla.gov.au/nla.party-1")
experiment.author_experiment_set.create(order=1,
author="Michael Palin",
url="http://nla.gov.au/nla.party-2")
acl = ExperimentACL(experiment=experiment,
pluginId='django_user',
entityId=str(user.id),
isOwner=True,
canRead=True,
canWrite=True,
canDelete=True,
aclOwnershipType=ExperimentACL.OWNER_OWNED)
acl.save()
return experiment
def _create_test_dataset(nosDatafiles):
ds_ = Dataset(description='happy snaps of plumage')
ds_.save()
for i in range (0, nosDatafiles) :
df_ = Dataset_File(dataset=ds_, url='http://planet-python.org/' + str(_next_id()))
df_.save()
ds_.save()
return ds_
def _create_test_data():
# Create 2 experiments with 3 datasets, one of which is in both experiments.
user_ = _create_test_user()
license_ = _create_license()
exp1_ = _create_test_experiment(user_, license_)
exp2_ = _create_test_experiment(user_, license_)
ds1_ = _create_test_dataset(1)
ds2_ = _create_test_dataset(2)
ds3_ = _create_test_dataset(3)
ds1_.experiments.add(exp1_);
ds2_.experiments.add(exp1_);
ds2_.experiments.add(exp2_);
ds3_.experiments.add(exp2_);
ds1_.save()
ds2_.save()
ds3_.save()
exp1_.save()
exp2_.save()
return (exp1_, exp2_)
_counter = 1
def _next_id():
global _counter
res = _counter
_counter += 1
return res
class RmExperimentTestCase(TestCase):
def setUp(self):
pass
def testList(self):
(exp1_, exp2_) = _create_test_data()
expect(Dataset_File.objects.all().count()).to_be(6)
expect(len(exp1_.get_datafiles())).to_be(3)
expect(len(exp2_.get_datafiles())).to_be(5)
# Check that --list doesn't remove anything
call_command('rmexperiment', exp1_.pk, list=True)
expect(Dataset_File.objects.all().count()).to_be(6)
expect(len(exp1_.get_datafiles())).to_be(3)
expect(len(exp2_.get_datafiles())).to_be(5)
def testRemove(self):
(exp1_, exp2_) = _create_test_data()
expect(Dataset_File.objects.all().count()).to_be(6)
        expect(len(exp1_.get_datafiles())).to_be(3)
expect(len(exp2_.get_datafiles())).to_be(5)
        # Remove first experiment and check that the shared dataset hasn't been removed
call_command('rmexperiment', exp1_.pk, confirmed=True)
expect(Dataset_File.objects.all().count()).to_be(5)
expect(len(exp2_.get_datafiles())).to_be(5)
#Remove second experiment
call_command('rmexperiment', exp2_.pk, confirmed=True)
expect(Dataset_File.objects.all().count()).to_be(0)
#Check that everything else has been removed too
expect(ExperimentACL.objects.all().count()).to_be(0)
expect(ExperimentParameterSet.objects.all().count()).to_be(0)
expect(ExperimentParameter.objects.all().count()).to_be(0)
expect(DatasetParameterSet.objects.all().count()).to_be(0)
expect(DatafileParameterSet.objects.all().count()).to_be(0)
def tearDown(self):
pass
|
lapisdecor/bzoinq
|
bzoinq/playit.py
|
Python
|
mit
| 240
| 0
|
import subprocess
from pkg_resources import resource_filename
def playit(file):
"""
Function used to play a sound file
"""
filepath = resource_filename(__name__, 'sound/' + file)
    subprocess.Popen(["paplay", filepath])
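# Illustrative usage (not part of the original module; the file name is
# hypothetical and must exist under the package's sound/ directory):
#
#     playit("alarm.ogg")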
|
wazo-pbx/xivo-auth
|
alembic/versions/97e2d9949db_revert_add_plugin_event_acl_to_the_.py
|
Python
|
gpl-3.0
| 2,816
| 0.00071
|
"""revert: add plugin event acl to the admin backend
Revision ID: 97e2d9949db
Revises: 1e5140290977
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '97e2d9949db'
down_revision = '1e5140290977'
POLICY_NAME = 'wazo_default_admin_policy'
ACL_TEMPLATES = ['events.plugin.#']
policy_table = sa.sql.table(
'auth_policy', sa.Column('uuid', sa.String(38)), sa.Column('name', sa.String(80))
)
acl_template_table = sa.sql.table(
'auth_acl_template', sa.Column('id', sa.Integer), sa.Column('template', sa.Text)
)
policy_template = sa.sql.table(
'auth_policy_template',
sa.Column('policy_uuid', sa.String(38)),
sa.Column('template_id', sa.Integer),
)
def _find_acl_template(conn, acl_template):
query = (
sa.sql.select([acl_template_table.c.id])
.where(acl_template_table.c.template == acl_template)
.limit(1)
)
return conn.execute(query).scalar()
def _find_acl_templates(conn, acl_templates):
acl_template_ids = []
for acl_template in acl_templates:
acl_template_id = _find_acl_template(conn, acl_template)
if acl_template_id:
acl_template_ids.append(acl_template_id)
return acl_template_ids
def _get_policy_uuid(conn, policy_name):
policy_query = sa.sql.select([policy_table.c.uuid]).where(
policy_table.c.name == policy_name
)
for policy in conn.execute(policy_query).fetchall():
return policy[0]
def _insert_acl_template(conn, acl_templates):
acl_template_ids = []
for acl_template in acl_templates:
acl_template_id = _find_acl_template(conn, acl_template)
if not acl_template_id:
query = (
acl_template_table.insert()
.returning(acl_template_table.c.id)
.values(template=acl_template)
)
acl_template_id = conn.execute(query).scalar()
        acl_template_ids.append(acl_template_id)
return acl_template_ids
def downgrade():
conn = op.get_bind()
policy_uuid = _get_policy_uuid(conn, POLICY_NAME)
acl_template_ids = _insert_acl_template(conn, ACL_TEMPLATES)
op.bulk_insert(
policy_template,
[
{'policy_uuid': policy_uuid, 'template_id': template_id}
for template_id in acl_template_ids
],
)
def upgrade():
    conn = op.get_bind()
acl_template_ids = _find_acl_templates(conn, ACL_TEMPLATES)
if acl_template_ids:
policy_uuid = _get_policy_uuid(conn, POLICY_NAME)
delete_query = policy_template.delete().where(
sa.sql.and_(
policy_template.c.policy_uuid == policy_uuid,
policy_template.c.template_id.in_(acl_template_ids),
)
)
op.execute(delete_query)
|
martylee/Python
|
CSC410-Project-1-master/minic/pretty_minic.py
|
Python
|
gpl-2.0
| 658
| 0.00152
|
from . import minic_ast
class PrettyGenerator(object):
def __init__(self):
# Statements start with indentation of self.indent_level spaces, using
# the _make_indent method
#
self.indent_level = 0
def _make_indent(self):
        return ' ' * self.indent_level
def visit(self, node):
        method = 'visit_' + node.__class__.__name__
return getattr(self, method, self.generic_visit)(node)
def generic_visit(self, node):
#~ print('generic:', type(node))
if node is None:
return ''
else:
return ''.join(self.visit(c) for c_name, c in node.children())
|
allebacco/PyNodeGraph
|
pynodegraph/__init__.py
|
Python
|
mit
| 286
| 0.006993
|
from node_view import NodeGraphView
from node_scene import NodeGraphScene
from items.node_item import NodeItem
from items.connection_item import ConnectionItem
from items.connector_item import BaseConnectorItem, IOConnectorItem, InputConnectorItem, OutputConnectorItem
import node_utils
|
sha-red/django-shared-utils
|
shared/utils/views/alphabetical_pagination.py
|
Python
|
mit
| 1,768
| 0.002828
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db.models import F, Func, Value
class AlphabeticalPaginationMixin(object):
alphabetical_pagination_field = 'name'
def get_alphabetical_pagination_field(self):
return self.alphabetical_pagination_field
def get_selected_letter(self):
return self.request.GET.get('letter', 'a')
def get_base_queryset(self):
"""
Queryset before applying pagination filters.
"""
qs = super(AlphabeticalPaginationMixin, self).get_queryset().exclude(
**{self.get_alphabetical_pagination_field(): ''}
)
return qs
def get_queryset(self):
qs = self.get_base_queryset()
# FIXME Select Umlauts (using downgrade and also downgrade sort_name field?)
# FIXME Select on TRIM/LEFT as in get_letter_choices
filter = {
"{}__istartswith".format(self.get_alphabetical_pagination_field()):
self.get_selected_letter()}
        return qs.filter(**filter).order_by(self.alphabetical_pagination_field)
def get_letter_choices(self):
return self.get_base_queryset().annotate(name_lower=Func(
Func(
Func(
F(self.get_alphabetical_pagination_field()), function='LOWER'),
function='TRIM'),
Value("1"), function='LEFT')).order_by(
'name_lower').distinct('name_lower').values_list('name_lower', flat=True)
def get_context_data(self, **kwargs):
context = super(AlphabeticalPaginationMixin, self).get_context_data(**kwargs)
context['selected_letter'] = self.get_selected_letter()
context['alphabet'] = self.get_letter_choices()
return context
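# Illustrative usage sketch (not part of the original module): the mixin is
# meant to be combined with a Django class-based list view. The model and field
# below are hypothetical:
#
#     from django.views.generic import ListView
#
#     class AuthorListView(AlphabeticalPaginationMixin, ListView):
#         model = Author
#         alphabetical_pagination_field = 'last_name'
#
# The template receives `alphabet` (available first letters) and
# `selected_letter` in the context, and `?letter=b` filters the queryset.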
|
wazo-pbx/xivo-auth
|
alembic/versions/2d4882d39dbb_add_graphql_acl_to_users.py
|
Python
|
gpl-3.0
| 3,250
| 0.001538
|
"""add graphql ACL to users
Revision ID: 2d4882d39dbb
Revises: c4d0e9ec46a9
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2d4882d39dbb'
down_revision = 'dc2848563b53'
POLICY_NAME = 'wazo_default_user_policy'
ACL_TEMPLATES = ['dird.graphql.me']
policy_table = sa.sql.table(
    'auth_policy', sa.Column('uuid', sa.String(38)), sa.Column('name', sa.String(80))
)
acl_template_table = sa.sql.table(
'auth_acl_template', sa.Column('id', sa.Integer), sa.Column('template', sa.Text)
)
policy_template = sa.sql.table(
'auth_policy_template',
sa.Column('policy_uuid', sa.String(38)),
sa.Column('template_id', sa.Integer),
)
def _find_acl_template(conn, acl_template):
query = (
sa.sql.select([acl_template_table.c.id])
.where(acl_template_table.c.template == acl_template)
.limit(1)
)
return conn.execute(query).scalar()
def _find_acl_templates(conn, acl_templates):
acl_template_ids = []
for acl_template in acl_templates:
acl_template_id = _find_acl_template(conn, acl_template)
if acl_template_id:
acl_template_ids.append(acl_template_id)
return acl_template_ids
def _get_policy_uuid(conn, policy_name):
policy_query = sa.sql.select([policy_table.c.uuid]).where(
policy_table.c.name == policy_name
)
for policy in conn.execute(policy_query).fetchall():
return policy[0]
def _insert_acl_template(conn, acl_templates):
acl_template_ids = []
for acl_template in acl_templates:
acl_template_id = _find_acl_template(conn, acl_template)
if not acl_template_id:
query = (
acl_template_table.insert()
.returning(acl_template_table.c.id)
.values(template=acl_template)
)
acl_template_id = conn.execute(query).scalar()
acl_template_ids.append(acl_template_id)
return acl_template_ids
def _get_acl_template_ids(conn, policy_uuid):
query = sa.sql.select([policy_template.c.template_id]).where(
policy_template.c.policy_uuid == policy_uuid
)
return [acl_template_id for (acl_template_id,) in conn.execute(query).fetchall()]
def upgrade():
conn = op.get_bind()
policy_uuid = _get_policy_uuid(conn, POLICY_NAME)
if not policy_uuid:
return
acl_template_ids = _insert_acl_template(conn, ACL_TEMPLATES)
acl_template_ids_already_associated = _get_acl_template_ids(conn, policy_uuid)
for template_id in set(acl_template_ids) - set(acl_template_ids_already_associated):
query = policy_template.insert().values(
policy_uuid=policy_uuid, template_id=template_id
)
conn.execute(query)
def downgrade():
conn = op.get_bind()
acl_template_ids = _find_acl_templates(conn, ACL_TEMPLATES)
if not acl_template_ids:
return
policy_uuid = _get_policy_uuid(conn, POLICY_NAME)
if not policy_uuid:
return
delete_query = policy_template.delete().where(
sa.sql.and_(
policy_template.c.policy_uuid == policy_uuid,
policy_template.c.template_id.in_(acl_template_ids),
)
)
op.execute(delete_query)
|
PEAT-AI/Automato
|
Surveillance/make_ndvi.py
|
Python
|
gpl-3.0
| 1,020
| 0.009804
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# ndvi_test.py
#
# Copyright 2015 rob <rob@Novu>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
'''just an annoying dummy to get rid of Gtk2 and Gtk3 incompatibilities'''
from infrapix import infrapix
import sys
infrapix.ndvi(sys.argv[1],sys.argv[2], show_histogram = True,)
|
ssssam/ansible-modules-core
|
packaging/os/apt.py
|
Python
|
gpl-3.0
| 24,239
| 0.004868
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Flowroute LLC
# Written by Matthew Williams <[email protected]>
# Based on yum module written by Seth Vidal <skvidal at fedoraproject.org>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: apt
short_description: Manages apt-packages
description:
    - Manages I(apt) packages (such as for Debian/Ubuntu).
version_added: "0.0.2"
options:
name:
description:
- A package name, like C(foo), or package specifier with version, like C(foo=1.0). Name wildcards (fnmatch) like C(apt*) and version wildcards like C(foo=1.0*) are also supported.
    required: false
default: null
state:
description:
- Indicates the desired package state. C(latest) ensures that the latest version is installed. C(build-dep) ensures the package build dependencies are installed.
required: false
default: present
choices: [ "latest", "absent", "present", "build-dep" ]
update_cache:
description:
- Run the equivalent of C(apt-get update) before the operation. Can be run as part of the package installation or as a separate step.
required: false
default: no
choices: [ "yes", "no" ]
cache_valid_time:
description:
- If C(update_cache) is specified and the last run is less or equal than I(cache_valid_time) seconds ago, the C(update_cache) gets skipped.
required: false
default: no
purge:
description:
- Will force purging of configuration files if the module state is set to I(absent).
required: false
default: no
choices: [ "yes", "no" ]
default_release:
description:
- Corresponds to the C(-t) option for I(apt) and sets pin priorities
required: false
default: null
install_recommends:
description:
- Corresponds to the C(--no-install-recommends) option for I(apt). Default behavior (C(yes)) replicates apt's default behavior; C(no) does not install recommended packages. Suggested packages are never installed.
required: false
default: yes
choices: [ "yes", "no" ]
force:
description:
- If C(yes), force installs/removes.
required: false
default: "no"
choices: [ "yes", "no" ]
upgrade:
description:
- 'If yes or safe, performs an aptitude safe-upgrade.'
- 'If full, performs an aptitude full-upgrade.'
- 'If dist, performs an apt-get dist-upgrade.'
- 'Note: This does not upgrade a specific package, use state=latest for that.'
version_added: "1.1"
required: false
default: "yes"
choices: [ "yes", "safe", "full", "dist"]
dpkg_options:
description:
- Add dpkg options to apt command. Defaults to '-o "Dpkg::Options::=--force-confdef" -o "Dpkg::Options::=--force-confold"'
- Options should be supplied as comma separated list
required: false
default: 'force-confdef,force-confold'
deb:
description:
- Path to a .deb package on the remote machine.
required: false
version_added: "1.6"
requirements: [ python-apt, aptitude ]
author: Matthew Williams
notes:
- Three of the upgrade modes (C(full), C(safe) and its alias C(yes)) require C(aptitude), otherwise
C(apt-get) suffices.
'''
EXAMPLES = '''
# Update repositories cache and install "foo" package
- apt: name=foo update_cache=yes
# Remove "foo" package
- apt: name=foo state=absent
# Install the package "foo"
- apt: name=foo state=present
# Install the version '1.00' of package "foo"
- apt: name=foo=1.00 state=present
# Update the repository cache and update package "nginx" to latest version using default release squeeze-backport
- apt: name=nginx state=latest default_release=squeeze-backports update_cache=yes
# Install latest version of "openjdk-6-jdk" ignoring "install-recommends"
- apt: name=openjdk-6-jdk state=latest install_recommends=no
# Update all packages to the latest version
- apt: upgrade=dist
# Run the equivalent of "apt-get update" as a separate step
- apt: update_cache=yes
# Only run "update_cache=yes" if the last one is more than 3600 seconds ago
- apt: update_cache=yes cache_valid_time=3600
# Pass options to dpkg on run
- apt: upgrade=dist update_cache=yes dpkg_options='force-confold,force-confdef'
# Install a .deb package
- apt: deb=/tmp/mypackage.deb
# Install the build dependencies for package "foo"
- apt: pkg=foo state=build-dep
'''
import traceback
# added to stave off future warnings about apt api
import warnings
warnings.filterwarnings('ignore', "apt API not stable yet", FutureWarning)
import os
import datetime
import fnmatch
import itertools
# APT related constants
APT_ENV_VARS = dict(
DEBIAN_FRONTEND = 'noninteractive',
DEBIAN_PRIORITY = 'critical',
LANG = 'C'
)
DPKG_OPTIONS = 'force-confdef,force-confold'
APT_GET_ZERO = "0 upgraded, 0 newly installed"
APTITUDE_ZERO = "0 packages upgraded, 0 newly installed"
APT_LISTS_PATH = "/var/lib/apt/lists"
APT_UPDATE_SUCCESS_STAMP_PATH = "/var/lib/apt/periodic/update-success-stamp"
HAS_PYTHON_APT = True
try:
import apt
import apt.debfile
import apt_pkg
except ImportError:
HAS_PYTHON_APT = False
def package_split(pkgspec):
parts = pkgspec.split('=', 1)
if len(parts) > 1:
return parts[0], parts[1]
else:
return parts[0], None
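# Illustrative behaviour of package_split() (not part of the original module):
#
#     package_split('foo')      -> ('foo', None)
#     package_split('foo=1.0*') -> ('foo', '1.0*')
#
# Only the first '=' is split on, so a version string containing '=' is kept intact.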
def package_versions(pkgname, pkg, pkg_cache):
try:
versions = set(p.version for p in pkg.versions)
except AttributeError:
# assume older version of python-apt is installed
# apt.package.Package#versions require python-apt >= 0.7.9.
pkg_cache_list = (p for p in pkg_cache.Packages if p.Name == pkgname)
pkg_versions = (p.VersionList for p in pkg_cache_list)
versions = set(p.VerStr for p in itertools.chain(*pkg_versions))
return versions
def package_version_compare(version, other_version):
try:
return apt_pkg.version_compare(version, other_version)
except AttributeError:
return apt_pkg.VersionCompare(version, other_version)
def package_status(m, pkgname, version, cache, state):
try:
        # get the package from the cache, as well as the
        # low-level apt_pkg.Package object which contains
        # state fields not directly accessible from the
# higher-level apt.package.Package object.
pkg = cache[pkgname]
ll_pkg = cache._cache[pkgname] # the low-level package object
except KeyError:
if state == 'install':
try:
if cache.get_providing_packages(pkgname):
return False, True, False
m.fail_json(msg="No package matching '%s' is available" % pkgname)
except AttributeError:
# python-apt version too old to detect virtual packages
# mark as upgradable and let apt-get install deal with it
return False, True, False
else:
return False, False, False
try:
has_files = len(pkg.installed_files) > 0
except UnicodeDecodeError:
has_files = True
except AttributeError:
has_files = False # older python-apt cannot be used to determine non-purged
try:
package_is_installed = ll_pkg.current_state == apt_pkg.CURSTATE_INSTALLED
except AttributeError: # python-apt 0.7.X has very weak low-level object
try:
# might not be necessary as python-apt post-0.7.X should have current_state property
package_is_installed = pkg.is_installed
except AttributeError:
# assume older version of python-apt is installed
            package_is_installed = pkg.isInstalled
|
tilemapjp/OSGeo.GDAL.Xamarin
|
gdal-1.11.0/swig/python/samples/gdal2grd.py
|
Python
|
mit
| 4,946
| 0.006066
|
#!/usr/bin/env python
###############################################################################
# $Id: gdal2grd.py 27044 2014-03-16 23:41:27Z rouault $
#
# Project: GDAL Python samples
# Purpose: Script to write out ASCII GRD rasters (used in Golden Software
# Surfer)
# from any source supported by GDAL.
# Author: Andrey Kiselev, [email protected]
#
###############################################################################
# Copyright (c) 2003, Andrey Kiselev <[email protected]>
# Copyright (c) 2009, Even Rouault <even dot rouault at mines-paris dot org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
try:
from osgeo import gdal
from osgeo.gdalconst import *
gdal.TermProgress = gdal.TermProgress_nocb
except ImportError:
import gdal
from gdalconst import *
try:
import numpy as Numeric
Numeric.arrayrange = Numeric.arange
except ImportError:
import Numeric
import sys
# =============================================================================
def Usage():
print('Usage: gdal2grd.py [-b band] [-quiet] infile outfile')
print('Write out ASCII GRD rasters (used in Golden Software Surfer)')
print('')
print(' -b band Select a band number to convert (1 based)')
print(' -quiet Do not report any diagnostic information')
print(' infile Name of the input GDAL supported file')
print(' outfile Name of the output GRD file')
print('')
sys.exit(1)
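# Illustrative invocation (not part of the original script; file names are
# hypothetical):
#
#     python gdal2grd.py -b 1 -quiet input.tif output.grd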
# =============================================================================
infile = None
outfile = None
iBand = 1
quiet = 0
# Parse command line arguments.
i = 1
while i < len(sys.argv):
arg = sys.argv[i]
if arg == '-b':
i = i + 1
iBand = int(sys.argv[i])
elif arg == '-quiet':
quiet = 1
elif infile is None:
infile = arg
elif outfile is None:
outfile = arg
else:
Usage()
i = i + 1
if infile is None:
Usage()
if outfile is None:
Usage()
indataset = gdal.Open(infile, GA_ReadOnly)
if indataset is None:
print('Cannot open', infile)
sys.exit(2)
geotransform = indataset.GetGeoTransform()
band = indataset.GetRasterBand(iBand)
if band == None:
print('Cannot load band', iBand, 'from the', infile)
sys.exit(2)
if not quiet:
print('Size is ',indataset.RasterXSize,'x',indataset.RasterYSize,'x',indataset.RasterCount)
print('Projection is ',indataset.GetProjection())
print('Origin = (',geotransform[0], ',',geotransform[3],')')
print('Pixel Size = (',geotransform[1], ',',geotransform[5],')')
print('Converting band number',iBand,'with type',gdal.GetDataTypeName(band.DataType))
# Header printing
fpout = open(outfile, "wt")
fpout.write("DSAA\n")
fpout.write(str(band.XSize) + " " + str(band.YSize) + "\n")
fpout.write(str(geotransform[0] + geotransform[1] / 2) + " " +
str(geotransform[0] + geotransform[1] * (band.XSize - 0.5)) + "\n")
if geotransform[5] < 0:
fpout.write(str(geotransform[3] + geotransform[5] * (band.YSize - 0.5)) + " " +
str(geotransform[3] + geotransform[5] / 2) + "\n")
else:
fpout.write(str(geotransform[3] + geotransform[5] / 2) + " " +
str(geotransform[3] + geotransform[5] * (band.YSize - 0.5)) + "\n")
fpout.write(str(band.ComputeRasterMinMax(0)[0]) + " " +
str(band.ComputeRasterMinMax(0)[1]) + "\n")
for i in range(band.YSize - 1, -1, -1):
scanline = band.ReadAsArray(0, i, band.XSize, 1, band.XSize, 1)
j = 0
while j < band.XSize:
fpout.write(str(scanline[0, j]))
j = j + 1
if j % 10: # Print no more than 10 values per line
fpout.write(" ")
else:
fpout.write("\n")
fpout.write("\n")
# Display progress report on terminal
if not quiet:
gdal.TermProgress(float(band.YSize - i) / band.YSize)
|
Snifer/BurpSuite-Plugins
|
faraday/model/workspace.py
|
Python
|
gpl-2.0
| 20,725
| 0.008733
|
#!/usr/bin/env python
'''
Faraday Penetration Test IDE - Community Version
Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/)
See the file 'doc/LICENSE' for the license information
'''
import os
import model.api
import model
import time
import datetime
from model.report import ReportManager
from model.diff import HostDiff
from model.container import ModelObjectContainer, CouchedModelObjectContainer
from model.conflict import Conflict
from model.hosts import Host
from model.guiapi import notification_center as notifier
import mockito
from config.configuration import getInstanceConfiguration
CONF = getInstanceConfiguration()
import json
import shutil
from persistence.orm import WorkspacePersister
from managers.all import PersistenceManagerFactory, CouchdbManager, FSManager
class Workspace(object):
"""
Handles a complete workspace (or project)
It contains a reference to the model and the command execution
history for all users working on the same workspace.
It has a list with all existing workspaces just in case user wants to
open a new one.
"""
def __init__(self, name, manager, shared=CONF.getAutoShareWorkspace()):
self.name = name
self.description = ""
self.customer = ""
self.start_date = datetime.date(1,1,1)
self.finish_date = datetime.date(1,1,1)
self.id = name
self._command_history = None
self._model_controller = None
self._workspace_manager = manager
self.shared = shared
self._path = os.path.join(CONF.getPersistencePath(), name)
self._persistence_excluded_filenames = ["categories.xml", "workspace.xml"]
self.container = ModelObjectContainer()
self.__conflicts = []
self._object_factory = model.common.factory
self._object_factory.register(model.hosts.Host)
self._report_path = os.path.join(CONF.getReportPath(), name)
self._report_ppath = os.path.join(self._report_path,"process")
if not os.path.exists(self._report_path):
os.mkdir(self._report_path)
if not os.path.exists(self._report_ppath):
os.mkdir(self._report_ppath)
def _notifyWorkspaceNoConnection(self):
notifier.showPopup("Couchdb Connection lost. Defaulting to memory. Fix network and try again in 5 minutes.")
def getReportPath(self):
return self._report_path
    def saveObj(self, obj): raise NotImplementedError("Abstract method")
    def delObj(self, obj): raise NotImplementedError("Abstract method")
def remove(self, host):
del self.container[host.getID()]
self.delObj(host)
def save(self): raise NotImplementedError("Abstract method")
def load(self): raise NotImplementedError("Abstract method")
    def setModelController(self, model_controller):
self._model_controller = model_controller
def getContainee(self):
return self.container
def set_path(self, path):
self._path = path
def get_path(self):
return self._path
def set_report_path(self, path):
        self._report_path = path
if not os.path.exists(self._report_path):
os.mkdir(self._report_path)
self._workspace_manager.report_manager.path = self.report_path
def get_report_path(self):
return self._report_path
path = property(get_path, set_path)
report_path = property(get_report_path, set_report_path)
def isActive(self):
return self.name == self._workspace_manager.getActiveWorkspace().name
def getAllHosts(self):
return self._model_controller.getAllHosts()
def getDeletedHosts(self):
return self._model_controller.getDeletedHosts()
def cleanDeletedHosts(self):
self._model_controller.cleanDeletedHosts()
def verifyConsistency(self):
hosts = self.getAllHosts()
hosts_counter = 0
for h1 in hosts[:-1]:
hosts_counter += 1
for h2 in hosts[hosts_counter:]:
if h1 == h2 :
diff = HostDiff(h1, h2)
if diff.existDiff():
self.addConflict(Conflict(h1, h2))
return len(self.getConflicts())
def getDataManager(self):
return self._dmanager
def addConflict(self, conflict):
self.__conflicts.append(conflict)
def getConflicts(self):
return self.__conflicts
def clearConflicts(self):
self.__conflicts.clear()
def resolveConflicts(self):
pass
def conflictResolved(self, conflict):
self.__conflicts.remove(conflict)
class WorkspaceOnFS(Workspace):
def __init__(self, name, manager, shared=CONF.getAutoShareWorkspace()):
Workspace.__init__(self, name, manager, shared)
self._dmanager = FSManager(self._path)
@staticmethod
def isAvailable():
return True
def saveObj(self, obj):
host = obj.getHost()
try:
model.api.devlog("Saving host to FileSystem")
model.api.devlog("Host, %s" % host.getID())
host_as_dict = host._toDict(full=True)
filepath = os.path.join(self._path, host.getID() + ".json")
with open(filepath, "w") as outfile:
json.dump(host_as_dict, outfile, indent = 2)
except Exception:
model.api.devlog("Failed while persisting workspace to filesystem, enough perms and space?")
def delObj(self, obj):
if obj.class_signature == "Host":
self._dmanager.removeObject(obj.getID())
return
host = obj.getHost()
self.saveObj(host)
def syncFiles(self):
self.load()
def load(self):
files = os.listdir(self._path)
files = filter(lambda f: f.endswith(".json") and f not in
self._persistence_excluded_filenames, files)
modelobjectcontainer = self.getContainee()
for filename in files:
newHost = self.__loadHostFromFile(filename)
modelobjectcontainer[newHost.getID()] = newHost
notifier.workspaceLoad(self.getAllHosts())
def __loadHostFromFile(self, filename):
if os.path.basename(filename) in self._persistence_excluded_filenames:
model.api.devlog("skipping file %s" % filename)
return
else:
model.api.devlog("loading file %s" % filename)
infilepath = os.path.join(self._path, filename)
host_dict = {}
try:
with open(infilepath) as infile:
host_dict = json.load(infile)
except Exception, e:
model.api.log("An error ocurred while parsing file %s\n%s" %
(filename, str(e)), "ERROR")
return mockito.mock()
try:
newHost = Host(name=None, dic=host_dict)
return newHost
except Exception, e:
model.api.log("Could not load host from file %s" % filename, "ERROR")
model.api.devlog(str(e))
return None
class WorkspaceOnCouch(Workspace):
"""A Workspace that is syncronized in couchdb"
|
garbear/EventGhost
|
plugins/Serial/__init__.py
|
Python
|
gpl-2.0
| 13,068
| 0.003367
|
# This file is part of EventGhost.
# Copyright (C) 2005 Lars-Peter Voss <[email protected]>
#
# EventGhost is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# EventGhost is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EventGhost; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# TODO: Use of eg.SerialThread instead of eg.SerialPort
import eg
eg.RegisterPlugin(
name = "Serial Port",
author = "Bitmonster",
version = "1.1." + "$LastChangedRevision$".split()[1],
canMultiLoad = True,
description = "Arbitrary communication through a serial port.",
)
class Text:
port = "Port:"
baudrate = "Baudrate:"
bytesize = "Number of bits:"
parity = "Parity:"
parities = ['No parity', 'Odd', 'Even'] #, 'Mark', 'Space']
stopbits = "Stopbits:"
flowcontrol = "Flow control:"
handshakes = ['None', 'Xon / Xoff', 'Hardware']
generateEvents = "Generate events on incoming data"
terminator = "Terminator:"
eventPrefix = "Event prefix:"
encoding = "Encoding:"
codecChoices = [
"System code page",
"HEX",
"Latin-1",
"UTF-8",
"UTF-16",
"Python string escape",
]
class Write:
name = "Write Data"
description = (
"Writes some text through the serial port."
"\n\n<p>"
"You can use Python string escapes to send non-printable "
"characters. Some examples:<p>"
"\\n will send a Linefeed (LF)<br>"
"\\r will send a Carriage Return (CR)<br>"
"\\t will send a Horizontal Tab (TAB)<br>"
"\\x0B will send the ASCII character with the hexcode 0B<br>"
"\\\\ will send a single Backslash."
)
class Read:
name = "Read Data"
description = (
"Reads data from the serial port."
"\n\n<p>"
"This action returns the data through <i>eg.result</i>, as any "
"action does that is returning data. So you have to use "
'<a href="http://www.eventghost.net/docs/scripting">'
"Python scripting</a> to do anything with the result."
"<p>"
"Using this action and enabling event generation in the plugin "
"cannot be used at the same time, as one of it will always eat "
"the data away from the other."
)
read_all = "Read as many bytes as are currently available"
read_some = "Read exactly this number of bytes:"
read_time = "and wait this maximum number of milliseconds for them:"
import wx
import threading
import win32event
import win32file
import codecs
import binascii
BAUDRATES = [
'110', '300', '600', '1200', '2400', '4800', '9600', '14400', '19200',
'38400', '57600', '115200', '128000', '256000'
]
def MyHexDecoder(input):
return (binascii.b2a_hex(input).upper(), len(input))
DECODING_FUNCS = [
codecs.getdecoder(eg.systemEncoding),
MyHexDecoder,
codecs.getdecoder("latin1"),
codecs.getdecoder("utf8"),
codecs.getdecoder("utf16"),
codecs.getencoder("string_escape"),
]
class Serial(eg.RawReceiverPlugin):
text = Text
def __init__(self):
eg.RawReceiverPlugin.__init__(self)
self.AddAction(Write)
self.AddAction(Read)
self.serial = None
self.buffer = ""
def __start__(
self,
port,
baudrate,
bytesize=8,
parity=0,
stopbits=0,
handshake=0,
generateEvents=False,
terminator="",
prefix="Serial",
encodingNum=0,
):
xonxoff = 0
rtscts = 0
if handshake == 1:
xonxoff = 1
elif handshake == 2:
rtscts = 1
try:
self.serial = eg.SerialPort(
port,
baudrate=baudrate,
bytesize=(5, 6, 7, 8)[bytesize],
stopbits=(1, 2)[stopbits],
parity=('N', 'O', 'E')[parity],
xonxoff=xonxoff,
rtscts=rtscts,
)
except:
self.serial = None
raise self.Exceptions.SerialOpenFailed
self.serial.timeout = 1.0
self.serial.setRTS()
if generateEvents:
self.decoder = DECODING_FUNCS[encodingNum]
self.terminator = eg.ParseString(terminator).decode('string_escape')
self.info.eventPrefix = prefix
self.stopEvent = win32event.CreateEvent(None, 1, 0, None)
self.receiveThread = threading.Thread(target=self.ReceiveThread, name="SerialThread")
self.receiveThread.start()
else:
self.receiveThread = None
def __stop__(self):
if self.serial is not None:
if self.receiveThread:
win32event.SetEvent(self.stopEvent)
self.receiveThread.join(1.0)
self.serial.close()
self.serial = None
def HandleChar(self, ch):
self.buffer += ch
pos = self.buffer.find(self.terminator)
if pos != -1:
eventstring = self.buffer[:pos]
if eventstring:
self.TriggerEvent(self.decoder(eventstring)[0])
self.buffer = self.buffer[pos+len(self.terminator):]
def ReceiveThread(self):
from win32event import (
ResetEvent,
MsgWaitForMultipleObjects,
QS_ALLINPUT,
WAIT_OBJECT_0,
WAIT_TIMEOUT,
)
from win32file import ReadFile, AllocateReadBuffer, GetOverlappedResult
from win32api import GetLastError
continueLoop = True
overlapped = self.serial._overlappedRead
hComPort = self.serial.hComPort
hEvent = overlapped.hEvent
stopEvent = self.stopEvent
n = 1
waitingOnRead = False
buf = AllocateReadBuffer(n)
while continueLoop:
if not waitingOnRead:
ResetEvent(hEvent)
hr, _ = ReadFile(hComPort, buf, overlapped)
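                # hr == 997 is winerror.ERROR_IO_PENDING: the overlapped read
                # has been queued and will complete asynchronously.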
if hr == 997:
waitingOnRead = True
elif hr == 0:
pass
#n = GetOverlappedResult(hComPort, overlapped, 1)
#self.HandleChar(str(buf))
else:
self.PrintError("error")
raise
rc = MsgWaitForMultipleObjects(
(hEvent, stopEvent),
0,
1000,
QS_ALLINPUT
)
if rc == WAIT_OBJECT_0:
n = GetOverlappedResult(hComPort, overlapped, 1)
if n:
self.HandleChar(str(buf))
#else:
# print "WAIT_OBJECT_0", n, str(buf[:n])
waitingOnRead = False
elif rc == WAIT_OBJECT_0+1:
continueLoop = False
elif rc == WAIT_TIMEOUT:
pass
else:
self.PrintError("unknown message")
    def Configure(
        self,
        port=0,
        baudrate=9600,
        bytesize=3,
        parity=0,
stopbits=0,
handshake=0,
generateEvents=False,
terminator="\\r",
prefix="Serial",
encodingNum=0,
):
text = self.text
panel = eg.ConfigPanel()
portCtrl = panel.SerialPortChoice(
|
abstract-open-solutions/djc.recipe2
|
djc/recipe2/recipe.py
|
Python
|
bsd-3-clause
| 11,686
| 0.00077
|
import logging, os, random
from zc.buildout import UserError, easy_install
from zc.recipe.egg import Egg
SETTINGS_TEMPLATE = '''
from %(settings_module)s import *
SECRET_KEY = "%(secret)s"
%(settings_override)s
'''
SCRIPT_TEMPLATES = {
'wsgi': easy_install.script_header + '''
%(relative_paths_setup)s
import sys
sys.path[0:0] = [
%(path)s,
]
%(initialization)s
import os
try:
from django.core.wsgi import get_wsgi_application
IS_14_PLUS = True
except ImportError:
from django.core.handlers.wsgi import WSGIHandler
IS_14_PLUS = False
os.environ['DJANGO_SETTINGS_MODULE'] = "%(module_name)s%(attrs)s"
def app_factory(global_config, **local_config):
"""This function wraps our simple WSGI app so it
can be used with paste.deploy"""
if IS_14_PLUS:
return get_wsgi_application()
else:
return WSGIHandler()
application = app_factory(%(arguments)s)
''',
'manage': easy_install.script_header + '''
%(relative_paths_setup)s
import sys
sys.path[0:0] = [
%(path)s,
]
%(initialization)s
import os
try:
from django.core.management import execute_from_command_line
IS_14_PLUS = True
except ImportError:
from django.core.management import ManagementUtility
IS_14_PLUS = False
os.environ['DJANGO_SETTINGS_MODULE'] = "%(module_name)s%(attrs)s"
if IS_14_PLUS:
execute_from_command_line(%(arguments)s)
else:
utility = ManagementUtility(%(arguments)s)
utility.execute()
'''
}
class Recipe(object):
wsgi_file = 'wsgi.py'
settings_file = 'settings.py'
sites_default = 'sites'
site_settings_template = '%(name)s_site_config'
secret_cfg = '.secret.cfg'
def __init__(self, buildout, name, options):
self.buildout, self.name, self.options = buildout, name, options
self.logger = logging.getLogger(name)
self.options['location'] = os.path.join(
self.buildout['buildout']['parts-directory'], self.name
)
self.options.setdefault('extra-paths', '')
self.options.setdefault('environment-vars', '')
self.options.setdefault('sites-directory', self.sites_default)
self.options.setdefault('settings-override', '')
self.options.setdefault('settings-file', self.settings_file)
self.options.setdefault('wsgi-file', self.wsgi_file)
        self.options.setdefault('manage-py-file', 'django')
self.eggs = [ ]
if 'eggs' in self.buildout['buildout']:
self.eggs.extend(self.buildout['buildout']['eggs'].split())
if 'eggs' in self.options:
self.eggs.extend(self.options['eggs'].split())
self.working_set = None
self.extra_paths = [ self.options['location'] ]
sites_path = os.path.join(
self.buildout['buildout']['directory'],
self.options['sites-directory']
)
if os.path.isdir(sites_path):
self.extra_paths.append(sites_path)
if os.path.isdir(sites_path) and 'settings-module' not in self.options:
            # Check if the user has created a module %(name)s_site_config
settings_module = self.site_settings_template % {
'name': self.name
}
settings_module_path = os.path.join(sites_path, settings_module)
initpy = os.path.join(settings_module_path, '__init__.py')
settingspy = os.path.join(settings_module_path, 'settings.py')
if os.path.isdir(settings_module_path) and \
os.path.isfile(initpy) and os.path.isfile(settingspy):
self.options.setdefault('settings-module',
'%s.settings' % settings_module)
self.extra_paths.extend(self.options['extra-paths'].split())
self.secret_key = None
def setup_working_set(self):
egg = Egg(
self.buildout, 'Django', self.options
)
self.working_set = egg.working_set(self.eggs)
def setup_secret(self):
secret_file = os.path.join(
self.buildout['buildout']['directory'],
self.secret_cfg
)
if os.path.isfile(secret_file):
stream = open(secret_file, 'rb')
data = stream.read().decode('utf-8').strip()
stream.close()
self.logger.debug("Read secret: %s" % data)
else:
stream = open(secret_file, 'wb')
chars = u'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
data = u''.join([random.choice(chars) for __ in range(50)])
stream.write(data.encode('utf-8')+u"\n")
stream.close()
self.logger.debug(
"Generated secret: %s (and written to %s)" % (data, secret_file)
)
self.secret_key = data
return secret_file
def setup_module_file(self, module, name, data):
with open(os.path.join(module, name), 'wb') as stream:
stream.write(data)
def get_settings(self, static_directory=None, media_directory=None):
if 'settings-module' not in self.options:
raise UserError(
("You should specify 'settings-module' in %(name)s "
"or create a module named '"+self.site_settings_template+"' "
"in '%(sites)s' with a 'settings.py' file in it") % {
'name': self.name,
'sites': self.options['sites-directory']
}
)
settings_override = self.options['settings-override']
if static_directory is not None:
settings_override += '\nSTATIC_ROOT = "%s"\n' % (
static_directory,
)
if media_directory is not None:
settings_override += '\nMEDIA_ROOT = "%s"\n' % (
media_directory,
)
return SETTINGS_TEMPLATE % {
'settings_module': self.options['settings-module'],
'secret': self.secret_key,
'settings_override': settings_override
}
def setup_directories(self):
result = []
for directory in [ 'static-directory', 'media-directory' ]:
result.append(None)
if directory in self.options:
path = os.path.join(
self.buildout['buildout']['directory'],
self.options[directory]
)
if not os.path.isdir(path):
os.makedirs(path)
result[-1] = path
return result
def get_initialization(self):
# The initialization code is expressed as a list of lines
initialization = []
# Gets the initialization code: the tricky part here is to preserve
# indentation.
        # Since buildout strips leading whitespace, anyone who wants to
        # preserve indentation must prefix the lines with '>>> ' or '... '
raw_value = self.options.get('initialization', '')
is_indented = False
indentations = ('>>> ', '... ')
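        # A hedged illustration of the convention described above (the locale
        # lines are only an example): a buildout section containing
        #
        #   initialization =
        #       >>> import locale
        #       ... locale.setlocale(locale.LC_ALL, '')
        #
        # is unwrapped by the loop below into the two plain lines
        #   import locale
        #   locale.setlocale(locale.LC_ALL, '')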
for line in raw_value.splitlines():
if line != "":
if len(initialization) == 0:
if line.startswith(indentations[0]):
is_indented = True
else:
if is_indented and not line.startswith(indentations[1]):
raise UserError(
("Line '%s' should be indented "
"properly but is not") % line
)
if is_indented:
line = line[4:]
initialization.append(line)
# Gets the environment-vars option and generates code to set the
        # environment variables via os.environ
environment_vars = []
for line in self.options.get('environment-vars', '').splitlines():
line = line.strip()
if len(line) > 0:
try:
var_name, raw_value = line.split(' ', 1)
except ValueError:
raise RuntimeError(
"Bad djc.recipe2 environment-vars contents: %s" % line
)
|
mirobot/mirobot-py
|
setup.py
|
Python
|
mit
| 623
| 0.033708
|
from distutils.core import setup
setup(
name = 'mirobot',
packages = ['mirobot'],
version = '1.0.3',
description = 'A Python library to control Mirobot (http://mirobot.io)',
author = 'Ben Pirt',
author_email = '[email protected]',
url = 'https://github.com/mirobot/mirobot-py',
download_url = 'https://github.com/mirobot/mirobot-py/tarball/v1.0.2',
  keywords = ['robotics', 'control', 'mirobot'],
classifiers = ['Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Education', 'License :: OSI Approved :: MIT License'],
install_requires=[
"websocket-client",
],
)
|
python-rope/rope
|
rope/base/__init__.py
|
Python
|
lgpl-3.0
| 161
| 0
|
"""Base rope packag
|
e
This package contains rope core modules that are used by other modules
and packages.
"""
__all__ = ["project", "libutils", "exceptio
|
ns"]
|
perimosocordiae/sparray
|
bench/benchmarks/ops.py
|
Python
|
mit
| 1,302
| 0.016129
|
import scipy.sparse as ss
import warnings
warnings.simplefilter('ignore', ss.SparseEfficiencyWarning)
from sparray import FlatSparray
class Operations(object):
params = [['FlatSparray', 'csr_matrix']]
param_names = ['arr_type']
def setup(self, arr_type):
mat = ss.rand(3000, 4000, density=0.1, format='csr')
if arr_type == 'FlatSparray':
self.arr = FlatSparray.from_spmatrix(mat)
else:
self.arr = mat
def time_scalar_multiplication(self, arr_type):
self.arr * 3
def time_sum(self, arr_type):
self.arr.sum()
def time_getitem_scalar(self, arr_type):
self.arr[154, 145]
def time_getitem_subarray(self, arr_type):
self.arr[:5, :5]
def time_getitem_row(self, arr_type):
self.arr[876]
def time_getitem_col(self, arr_type):
self.arr[:,273]
def time_diagonal(self, arr_type):
self.arr.diagonal()
class ImpureOperations(object):
params = [['FlatSparray', 'csr_matrix']]
param_names = ['arr_type']
number = 1 # make sure we re-run setup() before each timing
def setup(self, arr_type):
mat = ss.rand(3000, 4000, density=0.1, format='csr')
if arr_type == 'FlatSparray':
self.arr = FlatSparray.from_spmatrix(mat)
else:
self.arr = mat
def time_setdiag(self, arr_type):
self.arr.setdiag(99)
|
tariq786/datafying_bitcoin
|
sp_batch_hdfs.py
|
Python
|
gpl-3.0
| 3,652
| 0.026013
|
from pyspark import SparkConf, SparkContext
from jsonrpc.authproxy import AuthServiceProxy
import json
import sys
#This is batch processing of bitcoind (locally run bitcoin daemon)
#RPC (Remote Procedure Call) block's json stored
#in HDFS. Currently 187,990 blocks' json representation is
#stored in HDFS. The HDFS file size is around 6.5GB
#The output of this program is block_number and the corresponding
#transaction fee in units of Satoshi. This data is written to HBASE
#table.
#The program takes only 69 minutes to run, while the streaming version
#of the program takes 177 minutes.
#It is a good illustration of the time-space (memory) tradeoff.
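#Hedged sketch of the pipeline described above (values are illustrative only):
#  HDFS text line -> json.loads -> split(":") fields of one block
#  (block_number, generation tx id) -> bitcoind RPC decoderawtransaction
#  tx fee ~= coinbase output value - 25 (the block subsidy at that height)
#  HBase row: key = block_number, column tx_fee_col:tx_fee = fee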
conf = SparkConf().setMaster("local").setAppName("bitcoin_TransactionFee_calcultor")
sc = SparkContext(conf=conf)
rpcuser="bitcoinrpc"
rpcpassword="5C3Y6So6sCRPgBao8KyWV2bYpTHZt5RCVAiAg5JmTnHr"
rpcip = "127.0.0.1"
bitcoinrpc = AuthServiceProxy("http://"+rpcuser+":"+rpcpassword+"@"+rpcip+":8332")
#function SaveRecord: saves tx_fee for a block to hbase database
def SaveRecord(tx_fee_rdd):
host = 'localhost' #sys.argv[1]
table = 'tx_fee_table_sp_batch' #needs to be created before hand in hbase shell
conf = {"hbase.zookeeper.quorum": host,
"hbase.mapred.outputtable": table,
"mapreduce.outputformat.class": "org.apache.hadoop.hbase.mapreduce.TableOutputFormat",
"mapreduce.job.output.key.class": "org.apache.hadoop.hbase.io.ImmutableBytesWritable",
"mapreduce.job.output.value.class": "org.apache.hadoop.io.Writable"}
keyConv = "org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter"
valueConv = "org.apache.spark.examples.pythonconverters.StringListToPut
|
Converter"
#row key id,id, cfamily=tx_fee_col,column_name = tx_fee, column_value=x
#datamap = tx_fee_rdd.map(lambda x: ("tx_fee",x) )
#( rowkey , [ row key , column family , column name , value ] )
datamap = tx_fee_rdd.map(lambda x: (str(x[0]),
[str(x[0]),"tx_fee_col","tx_fee",str(x[1])])
)
datamap.saveAsNewAPIHadoopDataset(conf=conf,
keyConverter=keyConv,
valueConverter=valueConv)
def get_tx_fee(gen_tx):
gen_tx_json = bitcoinrpc.decoderawtransaction(bitcoinrpc.getrawtransaction(gen_tx))
return gen_tx_json
content_rdd = sc.textFile("hdfs://ec2-52-21-47-235.compute-1.amazonaws.com:9000/bitcoin/block_chain_full.txt")
#The file below is for testing purposes
#content_rdd = sc.textFile("file:///home/ubuntu/unix_practice/bitcoin/2_blocks.txt")
dump_rdd = content_rdd.map(lambda x: json.dumps(x)).map(lambda x : x.decode('unicode_escape').encode('ascii','ignore'))
#print dump_rdd.take(2)
load_rdd = dump_rdd.map(lambda x: json.loads(x))
#print load_rdd.take(2)
split_blk_rdd = load_rdd.map(lambda x: x.split(":"))
#tx = load_rdd.filter(lambda x: "tx" in x)
#print split_blk_rdd.take(split_blk_rdd.count())
gen_tx_rdd = split_blk_rdd.map(lambda x : (x[8][1:7],x[6][4:68]) ) #this gets generation transactions
#print "*************HERE***************"
#print gen_tx_rdd.take(gen_tx_rdd.count()) #from the blocks
tx_json_rdd = gen_tx_rdd.map(lambda x : (x[0],get_tx_fee(x[1])) ) #function call
#print tx_json_rdd.take(tx_json_rdd.count())
tx_fee_rdd = tx_json_rdd.map(lambda x : (x[0],x[1].items()
[3][1][0]["value"]-25) )#.filter(lambda x : "value" in x)
#print tx_fee_rdd.take(tx_fee_rdd.count())
SaveRecord(tx_fee_rdd) #function call
#just to display values for debugging
#val_lst = tx_fee_rdd.take(tx_fee_rdd.count()) #use [3][1]
#print val_lst
|
dockermeetupsinbordeaux/docker-zabbix-sender
|
docker_zabbix_sender/stats.py
|
Python
|
apache-2.0
| 2,565
| 0.00234
|
# encoding: utf-8
"""Provides collection of events emitters"""
import time
from . import EndPoint
def container_count(host_fqdn, docker_client, statistics):
"""
Emit events providing:
- number of containers
- number of running containers
- number of crashed containers
:param host_fqdn: FQDN of the host where the docker-zabbix-daemon is running, for instance docker.acme.com
:type host_fqdn: string
:param docker_client: instance of docker.Client see http://docker-py.readthedocs.org/en/latest/api/
:type docker_client: docker.Client
:param statistics: List of dicts providing collected container statistics. see Docker stats API call on https://docs.docker.com/reference/api/docker_remote_api_v1.17/#get-container-stats-based-on-resource-usage
:return: list of dicts providing additional events to push to Zabbix.
Each dict is composed of 4 keys:
- hostname
- timestamp
- key
- value
"""
running = 0
crashed = 0
now = int(time.time())
containers = docker_client.containers(all=True)
for container in containers:
status = container['Status']
if status.startswith('Up'):
running += 1
elif not status.startswith('Exited (0)'):
crashed += 1
data = {
'all': len(containers),
'running': running,
'crashed': crashed,
}
return [
{
'hostname': '-',
'timestamp': now,
'key': EndPoint.EVENT_KEY_PREFIX + 'count.' + key,
'value': value
}
for key, value in data.items()
]
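# Hedged illustration (not part of the original module): with three containers
# of which two report a status starting with 'Up' and one 'Exited (1) ...',
# container_count() is expected to return three event dicts shaped like
#   {'hostname': '-', 'timestamp': 1400000000,
#    'key': EndPoint.EVENT_KEY_PREFIX + 'count.crashed', 'value': 1}
# (the timestamp above is made up).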
def container_ip(host_fqdn, docker_client, statistics):
"""Emit the ip addresses of containers.
"""
for stat in statistics:
containerId = stat['id']
details = docker_client.inspect_container(containerId)
yield {
            'hostname': EndPoint.container_hostname(host_fqdn, stat['name']),
'timestamp': stat['timestamp'],
'key': EndPoint.EVENT_KEY_PREFIX + 'ip',
'value': details['NetworkSettings']['IPAddress']
}
def cpu_count(host_fqdn, docker_client, statistics):
"""Emit the number of CPU available for each container.
"""
for stat in statistics:
yield {
'hostname': EndPoint.container_hostname(host_fqdn, stat['name']),
'timestamp': stat['timestamp'],
'key': EndPoint.EVENT_KEY_PREFIX + 'cpu.count',
'value': len(stat['cpu_stats']['cpu_usage']['percpu_usage'])
}
|
rocky/python-uncompyle6
|
test/simple_source/bug33/05_nonlocal.py
|
Python
|
gpl-3.0
| 230
| 0.004348
|
# From Python 3.6 functools.py
# Bug was in detecting "nonlocal" access
def not_bug():
cache_token = 5
def register():
nonlocal cache_token
return cache_token == 5
return register()
assert not_bug()
|
MOOOWOOO/Q400K
|
app/user/__init__.py
|
Python
|
gpl-3.0
| 125
| 0.008
|
# coding: utf-8
from flask import Blueprint
__author__ = 'Jux.Liu'
user = Blueprint('user', __name__)
from . import views
|
jtiki/djangocms-cascade
|
cmsplugin_cascade/link/plugin_base.py
|
Python
|
mit
| 2,939
| 0.003403
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import apps
from django.forms import widgets
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.utils.safestring import mark_safe
from cmsplugin_cascade.fields import PartialFormField
from cmsplugin_cascade.plugin_base import CascadePluginBase
from .forms import LinkForm
class LinkPluginBase(CascadePluginBase):
text_enabled = True
allow_children = False
parent_classes = []
require_parent = False
glossary_fields = (
PartialFormField('target',
widgets.RadioSelect(choices=(('', _("Same Window")), ('_blank', _("New Window")),
('_parent', _("Parent Window")), ('_top', _("Topmost Frame")),)),
initial='',
label=_("Link Target"),
help_text=_("Open Link in other target.")
),
PartialFormField('title',
widgets.TextInput(),
label=_("Title"),
help_text=_("Link's Title")
),
)
html_tag_attributes = {'title': 'title', 'target': 'target'}
# map field from glossary to these form fields
glossary_field_map = {'link': ('link_type', 'cms_page', 'ext_url', 'mail_to',)}
@classmethod
def get_link(cls, obj):
link = obj.glossary.get('link', {})
linktype = link.get('type')
if linktype == 'exturl':
            return '{url}'.format(**link)
if linktype == 'email':
return 'mailto:{email}'.format(**link)
# otherwise try to resolve by model
if 'model' in link and 'pk' in link:
if not hasattr(obj, '_link_model'):
Model = apps.get_model(*link['model'].split('.'))
try:
obj._link_model = Model.objects.get(pk=link['pk'])
except Model.DoesNotExist:
obj._link_model = None
if obj._link_model:
return obj._link_model.get_absolute_url()
def get_ring_bases(self):
bases = super(LinkPluginBase, self).get_ring_bases()
bases.append('LinkPluginBase')
return bases
def get_form(self, request, obj=None, **kwargs):
kwargs.setdefault('form', LinkForm.get_form_class())
return super(LinkPluginBase, self).get_form(request, obj, **kwargs)
@python_2_unicode_compatible
class LinkElementMixin(object):
"""
A mixin class to convert a CascadeElement into a proxy model for rendering the ``<a>`` element.
Note that a Link inside the Text Editor Plugin is rendered using ``str(instance)`` rather
than ``instance.content``.
"""
def __str__(self):
return self.content
@property
def link(self):
return self.plugin_class.get_link(self)
@property
def content(self):
return mark_safe(self.glossary.get('link_content', ''))
|
aplicatii-romanesti/allinclusive-kodi-pi
|
.kodi/addons/plugin.video.salts/scrapers/__init__.py
|
Python
|
apache-2.0
| 4,030
| 0.005707
|
__all__ = ['scraper', 'local_scraper', 'pw_scraper', 'uflix_scraper', 'watchseries_scraper', 'movie25_scraper', 'merdb_scraper', '2movies_scraper', 'icefilms_scraper',
'movieshd_scraper', 'yifytv_scraper', 'viooz_scraper', 'filmstreaming_scraper', 'myvideolinks_scraper', 'filmikz_scraper', 'clickplay_scraper', 'nitertv_scraper',
           'iwatch_scraper', 'ororotv_scraper', 'view47_scraper', 'vidics_scraper', 'oneclickwatch_scraper', 'istreamhd_scraper', 'losmovies_scraper', 'movie4k_scraper',
'noobroom_scraper', 'solar_scraper', 'vkbox_scraper', 'directdl_scraper', 'movietv_scraper', 'moviesonline7_scraper', 'streamallthis_scraper', 'afdah_scraper',
'streamtv_scraper', 'moviestorm_scraper', 'wmo_scraper', 'zumvo_scraper', 'wso_scraper', 'tvrelease_scraper', 'hdmz_scraper', 'ch131_scraper', 'watchfree_scraper',
           'pftv_scraper', 'flixanity_scraper', 'cmz_scraper', 'movienight_scraper', 'gvcenter_scraper', 'alluc_scraper', 'afdahorg_scraper', 'xmovies8_scraper',
'yifystreaming_scraper', 'mintmovies_scraper', 'playbox_scraper', 'shush_proxy', 'mvsnap_scraper', 'pubfilm_scraper', 'pctf_scraper', 'rlssource_scraper',
'couchtunerv1_scraper', 'couchtunerv2_scraper', 'tunemovie_scraper', 'watch8now_scraper', 'megabox_scraper', 'dizilab_scraper', 'beinmovie_scraper',
'dizimag_scraper', 'ayyex_scraper']
import re
import os
import xbmcaddon
import xbmc
import datetime
import time
from salts_lib import log_utils
from salts_lib.constants import VIDEO_TYPES
from . import scraper # just to avoid editor warning
from . import *
class ScraperVideo:
def __init__(self, video_type, title, year, trakt_id, season='', episode='', ep_title='', ep_airdate=''):
assert(video_type in (VIDEO_TYPES.__dict__[k] for k in VIDEO_TYPES.__dict__ if not k.startswith('__')))
self.video_type = video_type
self.title = title
self.year = year
self.season = season
self.episode = episode
self.ep_title = ep_title
self.trakt_id = trakt_id
self.ep_airdate = None
if ep_airdate:
try: self.ep_airdate = datetime.datetime.strptime(ep_airdate, "%Y-%m-%d").date()
except (TypeError, ImportError): self.ep_airdate = datetime.date(*(time.strptime(ep_airdate, '%Y-%m-%d')[0:3]))
def __str__(self):
return '|%s|%s|%s|%s|%s|%s|%s|' % (self.video_type, self.title, self.year, self.season, self.episode, self.ep_title, self.ep_airdate)
def update_xml(xml, new_settings, cat_count):
new_settings.insert(0, '<category label="Scrapers %s">' % (cat_count))
new_settings.append(' </category>')
new_str = '\n'.join(new_settings)
match = re.search('(<category label="Scrapers %s">.*?</category>)' % (cat_count), xml, re.DOTALL | re.I)
if match:
old_settings = match.group(1)
if old_settings != new_settings:
xml = xml.replace(old_settings, new_str)
else:
log_utils.log('Unable to match category: %s' % (cat_count), xbmc.LOGWARNING)
return xml
def update_settings():
path = xbmcaddon.Addon().getAddonInfo('path')
full_path = os.path.join(path, 'resources', 'settings.xml')
try:
with open(full_path, 'r') as f:
xml = f.read()
except:
raise
new_settings = []
cat_count = 1
old_xml = xml
classes = scraper.Scraper.__class__.__subclasses__(scraper.Scraper)
for cls in sorted(classes, key=lambda x: x.get_name().upper()):
new_settings += cls.get_settings()
if len(new_settings) > 90:
xml = update_xml(xml, new_settings, cat_count)
new_settings = []
cat_count += 1
if new_settings:
xml = update_xml(xml, new_settings, cat_count)
if xml != old_xml:
try:
with open(full_path, 'w') as f:
f.write(xml)
except:
raise
else:
log_utils.log('No Settings Update Needed', xbmc.LOGDEBUG)
update_settings()
|
daniaki/Enrich2
|
enrich2/sequence/aligner.py
|
Python
|
gpl-3.0
| 21,237
| 0.002025
|
# Copyright 2016-2017 Alan F Rubin, Daniel C Esposito
#
# This file is part of Enrich2.
#
# Enrich2 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Enrich2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Enrich2. If not, see <http://www.gnu.org/licenses/>.
"""
Enrich2 aligner module
======================
Module for alignment of variants to the wild type sequence.
This module is optional, and using it will dramatically increase runtime when
counting variants. It is only recommended for users who need to count
insertion and deletion variants (i.e. not coding sequences).
"""
from ctypes import c_int
import numpy as np
import logging
from ..base.utils import log_message
_AMBIVERT = False
try:
from ambivert.ambivert import gapped_alignment_to_cigar
from ambivert import align
# Reset the logging handlers after loading ambivert
for handler in logging.getLogger("ambivert").handlers:
handler.close()
logging.getLogger('ambivert').handlers = []
for handler in logging.getLogger().handlers:
handler.close()
logging.getLogger().handlers = []
logging.captureWarnings(False)
_AMBIVERT = True
except ImportError:
pass
__all__ = [
"Aligner"
]
#: Default similarity matrix used by the aligner.
#: User-defined matrices must have this format.
_simple_similarity = {
'A': {'A': 1, 'C': -1, 'G': -1, 'T': -1, 'N': 0, 'X': 0},
'C': {'A': -1, 'C': 1, 'G': -1, 'T': -1, 'N': 0, 'X': 0},
'G': {'A': -1, 'C': -1, 'G': 1, 'T': -1, 'N': 0, 'X': 0},
'T': {'A': -1, 'C': -1, 'G': -1, 'T': 1, 'N': 0, 'X': 0},
'N': {'A': 0, 'C': 0, 'G': 0, 'T': 0, 'N': 0, 'X': 0},
'X': {'A': 0, 'C': 0, 'G': 0, 'T': 0, 'N': 0, 'X': 0},
'gap_open': -1,
'gap_extend': 0
}
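# Hedged example (not part of the original module): a user-defined matrix in
# the same format, with a stronger mismatch penalty and affine-style gap costs.
# Any such dict must keep the 'gap_open' / 'gap_extend' keys and the symmetric
# nucleotide-to-nucleotide mapping that Aligner.__init__ below checks for,
# e.g. Aligner(similarity=_example_user_similarity, backend='enrich2').
_example_user_similarity = {
    'A': {'A': 2, 'C': -3, 'G': -3, 'T': -3, 'N': 0, 'X': 0},
    'C': {'A': -3, 'C': 2, 'G': -3, 'T': -3, 'N': 0, 'X': 0},
    'G': {'A': -3, 'C': -3, 'G': 2, 'T': -3, 'N': 0, 'X': 0},
    'T': {'A': -3, 'C': -3, 'G': -3, 'T': 2, 'N': 0, 'X': 0},
    'N': {'A': 0, 'C': 0, 'G': 0, 'T': 0, 'N': 0, 'X': 0},
    'X': {'A': 0, 'C': 0, 'G': 0, 'T': 0, 'N': 0, 'X': 0},
    'gap_open': -5,
    'gap_extend': -1,
}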
class Aligner(object):
"""
Class for performing local alignment of two DNA sequences.
This class implements `Needleman-Wunsch <http://en.wikipedia.org/wiki/
Needleman%E2%80%93Wunsch_algorithm>`_ local alignment.
The :py:class:`~enrich2.sequence.aligner.Aligner` requires a scoring matrix
    when created. The format is a nested dictionary, with special ``'gap_open'``
    and ``'gap_extend'`` entries for the gap opening and gap extension
    penalties.
The ``'X'`` nucleotide is a special case for unresolvable mismatches in
:py:class:`~enrich2.libraries.overlap.OverlapSeqLib` variant data.
Parameters
----------
similarity : `dict`
Similarity matrix used by the aligner, must contain a cost mapping
between each of 'A', 'C', 'G', 'T', 'N', 'X'.
backend : {'ambivert', 'enrich2'}, default: 'ambivert'
Select the alignment backend. If backend is 'ambivert' then
similarity is ignored.
Attributes
----------
similarity : `dict`
Similarity matrix used by the aligner, must contain a cost mapping
between each of 'A', 'C', 'G', 'T', 'N', 'X'.
matrix : :py:class:`~numpy.ndarray`
The dynamically computed cost matrix.
seq1 : `str`
Reference sequence.
seq2 : `str`
The sequence that is to be aligned.
calls : `int`
Number of times `align` has been performed.
Methods
-------
align
        Align two sequences using ``Needleman-Wunsch``.
Notes
-----
This class implements `Needleman-Wunsch <http://en.wikipedia.org/wiki/
Needleman%E2%80%93Wunsch_algorithm>`_ local alignment.
"""
_MAT = 1 # match
_INS = 2 # insertion (with respect to wild type)
_DEL = 3 # deletion (with respect to wild type)
_END = 4 # end of traceback
def __init__(self, similarity=_simple_similarity, backend='ambivert'):
similarity_keys = list(similarity.keys())
if 'gap_open' in similarity_keys:
similarity_keys.remove('gap_open')
if 'gap_extend' in similarity_keys:
similarity_keys.remove('gap_extend')
for key in similarity_keys:
            keys_map_to_dicts = all(x in similarity[key]
                                    for x in similarity_keys)
            # True when this row is missing entries for some keys (asymmetric).
            asymmetrical = len(similarity[key]) != len(similarity_keys)
            if not keys_map_to_dicts or asymmetrical:
                raise ValueError("Asymmetrical alignment scoring matrix")
self.similarity = similarity
if 'gap_open' not in self.similarity:
raise ValueError(
"No gap_open open penalty in alignment scoring matrix.")
if 'gap_extend' not in self.similarity:
raise ValueError(
"No gap_open extend penalty in alignment scoring matrix.")
self.matrix = None
self.seq1 = None
self.seq2 = None
self.calls = 0
# TODO: uncomment aligner backend
# global _AMBIVERT
# if backend == 'ambivert' and _AMBIVERT:
# self.align = self.align_ambivert
# log_message(
# logging_callback=logging.info,
# msg="Using ambivert alignment backend.",
# extra={'oname': 'Aligner'}
# )
# else:
# self.align = self.align_enrich2
# log_message(
# logging_callback=logging.info,
# msg="Using enrich2 alignment backend.",
# extra={'oname': 'Aligner'}
# )
self.align = self.align_enrich2
log_message(
logging_callback=logging.info,
msg="Using enrich2 alignment backend.",
extra={'oname': 'Aligner'}
)
def align_ambivert(self, seq1, seq2):
"""
Aligns the two sequences, *seq1* and *seq2* and returns a list of
tuples describing the differences between the sequences.
The tuple format is ``(i, j, type, length)``, where ``i`` and ``j``
are the positions in *seq1* and *seq2*, respectively, and type is one
of ``"match"``, ``"mismatch"``, ``"insertion"``, or ``"deletion"``.
For indels, the ``length`` value is the number of bases inserted or
deleted with respect to *seq1* starting at ``i``.
Parameters
----------
seq1 : `str`
Reference sequence.
seq2 : `str`
The sequence that is to be aligned.
Returns
-------
`list`
list of tuples describing the differences between the sequences.
"""
if not isinstance(seq1, str):
raise TypeError("First sequence must be a str type")
if not isinstance(seq2, str):
raise TypeError("Second sequence must be a str type")
if not seq1:
raise ValueError("First sequence must not be empty.")
if not seq2:
raise ValueError("Second sequence must not be empty.")
self.matrix = np.ndarray(
shape=(len(seq1) + 1, len(seq2) + 1),
dtype=np.dtype([('score', np.int), ('trace', np.byte)])
)
seq1 = seq1.upper()
seq2 = seq2.upper()
a1, a2, *_ = self.needleman_wunsch(
seq1, seq2,
gap_open=self.similarity['gap_open'],
gap_extend=self.similarity['gap_extend']
)
backtrace = cigar_to_backtrace(
seq1, seq2,
gapped_alignment_to_cigar(a1, a2)[0]
)
return backtrace
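    # Hedged illustration of the return format documented above: if seq2 is
    # missing one base that is present in the reference seq1, the backtrace is
    # expected to contain a tuple such as (i, j, "deletion", 1), where i and j
    # are the positions reached in seq1 and seq2 (the exact positions depend
    # on the alignment and are not guaranteed here).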
def align_enrich2(self, seq1, seq2):
"""
Aligns the two sequences, *seq1* and *seq2* and returns a list of
tuples describing the differences between the sequences.
The tuple format is ``(i, j, type, length)``, where ``i`` and ``j``
are the positions in *seq1* and *seq2*, respectively, and type is one
|
SUSE/spacewalk-osad2
|
setup.py
|
Python
|
gpl-2.0
| 4,111
| 0.002189
|
#!/usr/bin/env python
from distutils.core import setup, run_setup, Command
import zmq.auth
import shutil
import os
OSAD2_PATH = os.path.dirname(os.path.realpath(__file__))
OSAD2_SERVER_CERTS_DIR = "/etc/rhn/osad2-server/certs/"
OSAD2_SERVER_PUB_KEY = os.path.join(OSAD2_SERVER_CERTS_DIR, "public_keys/server.key")
OSAD2_SERVER_PRIVATE_KEY = os.path.join(OSAD2_SERVER_CERTS_DIR, "private_keys/server.key_secret")
OSAD2_CLIENT_SETUP_FILE = os.path.join(OSAD2_PATH, "setup_client.py")
PKGNAME_FILE = os.path.join(OSAD2_PATH, "PKGNAME")
class OSAD2Command(Command):
def _create_curve_certs(self, name):
print "Creating CURVE certificates for '%s'..." % name
pk_file, sk_file = zmq.auth.create_certificates(OSAD2_SERVER_CERTS_DIR,
name)
# OSAD2 certificates storage
pk_dst = os.path.join(OSAD2_SERVER_CERTS_DIR, "public_keys")
sk_dst = os.path.join(OSAD2_SERVER_CERTS_DIR, "private_keys")
shutil.move(pk_file, pk_dst)
shutil.move(sk_file, sk_dst)
pk_dst = os.path.join(pk_dst, name + ".key")
sk_dst = os.path.join(sk_dst, name + ".key_secret")
print pk_dst
print sk_dst
return pk_dst, sk_dst
class CreateServerCommand(OSAD2Command):
description = "Create and install CURVE server key"
user_options = []
def initialize_options(self):
self.name = None
def finalize_options(self):
assert os.path.isdir(OSAD2_SERVER_CERTS_DIR), \
'Certificates storage dir doesn\'t exist: %s' % OSAD2_SERVER_CERTS_DIR
server_keyfile = os.path.join(OSAD2_SERVER_CERTS_DIR, 'private_keys/server.key_secret')
assert not os.path.isfile(server_keyfile), 'Server key already exists'
def run(self):
self._create_curve_certs("server")
class CreateClientCommand(OSAD2Command):
description = "Create a new client. Generate a RPM package"
user_options = [
('name=', None, 'Specify the new client name.'),
]
def initialize_options(self):
self.name = None
def finalize_options(self):
        assert self.name, 'You must specify a client name'
assert os.path.isdir(OSAD2_SERVER_CERTS_DIR), \
'Certificates storage dir doesn\'t exist: %s' % OSAD2_SERVER_CERTS_DIR
keyfile = os.path.join(OSAD2_SERVER_CERTS_DIR, "public_keys/" + self.name + '.key')
server_keyfile = os.path.join(OSAD2_SERVER_CERTS_DIR, 'private_keys/server.key_secret')
assert os.path.isfile(server_keyfile), 'Server key doesn\'t exist'
assert not os.path.isfile(keyfile), 'Client name already exists'
def run(self):
pk_file, sk_file = self._create_curve_certs(self.name)
# Temporary key storage for RPM build
import shutil
shutil.copy(pk_file, "etc/client.key_secret")
shutil.copy(OSAD2_SERVER_PUB_KEY, "etc/")
self._build_client_rpm()
def _build_client_rpm(self):
print "Creating RPM package for '%s'..." % self.name
open(PKGNAME_FILE, "w").write(self.name)
run_setup(OSAD2_CLIENT_SETUP_FILE, script_args=["bdist_rpm", "--quiet"])
os.remove(PKGNAME_FILE)
os.remove("etc/client.key_secret")
os.remove("etc/server.key")
setup(name='spacewalk-osad2-server',
version='alpha',
license='GPLv2',
description='An alternative OSA dispatcher module for Spacewalk',
long_description='This is an experiment to improve osad, a service '
'that simulates instant execution of actions in a '
'Spacewalk environment.',
platforms=['All'],
packages=['osad2', 'osad2.server'],
scripts=['bin/osad2_server.py'],
data_files=[
('/etc/rhn/osad2-server/', ['etc/osad_server.prod.cfg']),
('/etc/rhn/osad2-server/certs/private_keys/', []),
('/etc/rhn/osad2-server/certs/public_keys/', []),
],
cmdclass={'createclient': CreateClientCommand,
'createserver': CreateServerCommand})
|
OscarES/serpentinetracker
|
examples/atf/atfExt.py
|
Python
|
gpl-3.0
| 1,975
| 0.017722
|
import pylab as pl
import scipy as sp
from serpentine import *
from elements import *
import visualize
class AtfExt :
def __init__(self) :
print 'AtfExt:__init__'
# set twiss parameters
mytwiss = Twiss()
mytwiss.betax = 6.85338806855804
mytwiss.alphax = 1.11230788371885
mytwiss.etax = 3.89188697330735e-012
mytwiss.etaxp = 63.1945125619190e-015
mytwiss.betay = 2.941
|
29410712918
mytwiss.alphay = -1.91105724003646
mytwiss.etay = 0
mytwiss.etayp = 0
mytwiss.nemitx = 5.08807339588144e-006
mytwiss.nemity = 50.8807339588144e-009
mytwiss.sigz = 8.00000000000000e-003
mytwiss.sigP = 1.03999991965541e-003
mytwiss.pz_cor = 0
# load beam line
self.atfFull = Serpentine(line='newATF2lat.aml',twiss=mytwiss)
        self.atfExt = Serpentine(line=beamline.Line(self.atfFull.beamline[947:]),twiss=mytwiss)
# zero zero cors
self.atfExt.beamline.ZeroCors()
# Track
self.atfExt.Track()
readings = self.atfExt.GetBPMReadings()
# Visualisation
self.v = visualize.Visualize()
def moverCalibration(self, mag, bpms) :
pass
def correctorCalibration(self, corr, bpms) :
pass
def bba(self, mag, bpm) :
pass
def magMoverCalibration(self, mag, bpm) :
pass
def setMagnet(self,name, value) :
ei = self.atfExt.beamline.FindEleByName(name)
print ei
e = self.atfExt.beamline[ei[0]]
e.B = value
def plotOrbit(self) :
self.v.PlotBPMReadings(self.atfExt)
def plotTwiss(self) :
self.v.PlotTwiss(self.atfExt)
def run(self) :
self.atfExt.Track()
def jitterBeam(self) :
r = 1+sp.random.standard_normal()
# self.s.beam_in.x[5,:] = (1+r/3e4)*self.nominalE
# print r,self.s.BeamIn.x[5,:]
|
repotvsupertuga/tvsupertuga.repository
|
script.module.cryptolib/lib/cryptopy/cipher/rijndael_test.py
|
Python
|
gpl-2.0
| 9,568
| 0.037312
|
#! /usr/bin/env python
""" cryptopy.cipher.rijndael_test
Tests for the rijndael encryption algorithm
Copyright (c) 2002 by Paul A. Lambert
Read LICENSE.txt for license information.
"""
from cryptopy.cipher.rijndael import Rijndael
from cryptopy.cipher.base import noPadding
from binascii import a2b_hex
import unittest
class Rijndael_TestVectors(unittest.TestCase):
""" Test Rijndael algorithm using know values."""
def testGladman_dev_vec(self):
""" All 25 combinations of block and key size.
These test vectors were generated by Dr Brian Gladman
using the program aes_vec.cpp <[email protected]> 24th May 2001.
vectors in file: dev_vec.txt
http://fp.gladman.plus.com/cryptography_technology/rijndael/index.htm
"""
def RijndaelTestVec(i, key, pt, ct):
""" Run single AES test vector with any legal blockSize
and any legal key size. """
bkey, plainText, cipherText = a2b_hex(key), a2b_hex(pt), a2b_hex(ct)
kSize = len(bkey)
bSize = len(cipherText) # set block size to length of block
alg = Rijndael(bkey, keySize=kSize, blockSize=bSize, padding=noPadding())
self.assertEqual( alg.encrypt(plainText), cipherText )
self.assertEqual( alg.decrypt(cipherText), plainText )
RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 16 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c',
pt = '3243f6a8885a308d313198a2e0370734',
ct = '3925841d02dc09fbdc118597196a0b32')
RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 20 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160',
pt = '3243f6a8885a308d313198a2e0370734',
ct = '231d844639b31b412211cfe93712b880')
RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 24 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5',
pt = '3243f6a8885a308d313198a2e0370734',
ct = 'f9fb29aefc384a250340d833b87ebc00')
RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 28 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90',
pt = '3243f6a8885a308d313198a2e0370734',
ct = '8faa8fe4dee9eb17caa4797502fc9d3f')
RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 32 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe',
pt = '3243f6a8885a308d313198a2e0370734',
ct = '1a6e6c2c662e7da6501ffb62bc9e93f3')
RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 16 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c',
pt = '3243f6a8885a308d313198a2e03707344a409382',
ct = '16e73aec921314c29df905432bc8968ab64b1f51')
RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 20 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160',
pt = '3243f6a8885a308d313198a2e03707344a409382',
ct = '0553eb691670dd8a5a5b5addf1aa7450f7a0e587')
RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 24 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5',
pt = '3243f6a8885a308d313198a2e03707344a409382',
ct = '73cd6f3423036790463aa9e19cfcde894ea16623')
RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 28 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90',
pt = '3243f6a8885a308d313198a2e03707344a409382',
ct = '601b5dcd1cf4ece954c740445340bf0afdc048df')
RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 32 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe',
pt = '3243f6a8885a308d313198a2e03707344a409382',
ct = '579e930b36c1529aa3e86628bacfe146942882cf')
RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 16 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d',
ct = 'b24d275489e82bb8f7375e0d5fcdb1f481757c538b65148a')
RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 20 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d',
ct = '738dae25620d3d3beff4a037a04290d73eb33521a63ea568')
RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 24 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d',
ct = '725ae43b5f3161de806a7c93e0bca93c967ec1ae1b71e1cf')
RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 28 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d',
ct = 'bbfc14180afbf6a36382a061843f0b63e769acdc98769130')
RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 32 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d',
ct = '0ebacf199e3315c2e34b24fcc7c46ef4388aa475d66c194c')
RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 16 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9',
ct = 'b0a8f78f6b3c66213f792ffd2a61631f79331407a5e5c8d3793aceb1')
RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 20 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9',
ct = '08b99944edfce33a2acb131183ab0168446b2d15e958480010f545e3')
RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 24 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9',
ct = 'be4c597d8f7efe22a2f7e5b1938e2564d452a5bfe72399c7af1101e2')
RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 28 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9',
ct = 'ef529598ecbce297811b49bbed2c33bbe1241d6e1a833dbe119569e8')
RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 32 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9',
ct = '02fafc200176ed05deb8edb82a3555b0b10d47a388dfd59cab2f6c11')
RijndaelTestVec( i = 'dev_vec.txt 32 byte block, 16 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa98ec4e6c8',
ct = '7d15479076b69a46ffb3b3beae97ad8313f622f67fedb487de9f06b9ed9c8f19')
RijndaelTestVec( i = 'dev_vec.txt 32 byte block, 20 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160',
|
reneploetz/mesos
|
src/python/cli_new/lib/cli/plugins/base.py
|
Python
|
apache-2.0
| 5,461
| 0.000366
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Plugin's Base Class
"""
import sys
import cli
from cli.docopt import docopt
PLUGIN_NAME = "base-plugin"
PLUGIN_CLASS = "PluginBase"
VERSION = "Mesos Plugin Base 1.0"
SHORT_HELP = "This is the base plugin from which all other plugins inherit."
USAGE = \
"""
{short_help}
Usage:
mesos {plugin} (-h | --help)
mesos {plugin} --version
mesos {plugin} <command> (-h | --help)
mesos {plugin} [options] <command> [<args>...]
Options:
-h --help Show this screen.
--version Show version info.
Commands:
{commands}
"""
SUBCOMMAND_USAGE = \
"""{short_help}
Usage:
mesos {plugin} {command} (-h | --help)
mesos {plugin} {command} --version
mesos {plugin} {command} [options] {arguments}
Options:
{flags}
Description:
{long_help}
"""
class PluginBase():
"""
Base class from which all CLI plugins should inherit.
"""
# pylint: disable=too-few-public-methods
COMMANDS = {}
def __setup__(self, command, argv):
pass
def __module_reference__(self):
return sys.modules[self.__module__]
def __init__(self, settings, config):
# pylint: disable=invalid-name
self.PLUGIN_NAME = PLUGIN_NAME
self.PLUGIN_CLASS = PLUGIN_CLASS
self.VERSION = VERSION
self.SHORT_HELP = SHORT_HELP
self.USAGE = USAGE
module = self.__module_reference__()
if hasattr(module, "PLUGIN_NAME"):
self.PLUGIN_NAME = getattr(module, "PLUGIN_NAME")
if hasattr(module, "PLUGIN_CLASS"):
self.PLUGIN_CLASS = getattr(module, "PLUGIN_CLASS")
if hasattr(module, "VERSION"):
self.VERSION = getattr(module, "VERSION")
if hasattr(module, "SHORT_HELP"):
self.SHORT_HELP = getattr(module, "SHORT_HELP")
if hasattr(module, "USAGE"):
self.USAGE = getattr(module, "USAGE")
self.settings = settings
self.config = config
def __autocomplete__(self, command, current_word, argv):
# pylint: disable=unused-variable,unused-argument,
# attribute-defined-outside-init
return ("default", [])
def __autocomplete_base__(self, current_word, argv):
option = "default"
# <command>
comp_words = list(self.COMMANDS.keys())
comp_words = cli.util.completions(comp_words, current_word, argv)
if comp_words is not None:
return (option, comp_words)
# <args>...
comp_words = self.__autocomplete__(argv[0], current_word, argv[1:])
# In general, we expect a tuple to be returned from __autocomplete__,
# with the first element being a valid autocomplete option, and the
# second being a list of completion words. However, in the common
# case we usually use the default option, so it's OK for a plugin to
# just return a list. We will add the "default" option for them.
if isinstance(comp_words, tuple):
option, comp_words = comp_words
return (option, comp_words)
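    # Hedged example of the convention above: a subclass whose
    # __autocomplete__ simply returns ["--flag", "subcmd"] ends up yielding
    # ("default", ["--flag", "subcmd"]) from this wrapper.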
def main(self, argv):
"""
Main method takes argument from top level mesos and parses them
to call the appropriate method.
"""
command_strings = cli.util.format_commands_help(self.COMMANDS)
usage = self.USAGE.format(
plugin=self.PLUGIN_NAME,
short_help=self.SHORT_HELP,
commands=command_strings)
arguments = docopt(
usage,
argv=argv,
version=self.VERSION,
program="mesos " + self.PLUGIN_NAME,
options_first=True)
cmd = arguments["<command>"]
argv = arguments["<args>"]
if cmd in self.COMMANDS.keys():
if "external" not in self.COMMANDS[cmd]:
argument_format, short_help, long_help, flag_format = \
cli.util.format_subcommands_help(self.COMMANDS[cmd])
usage = SUBCOMMAND_USAGE.format(
plugin=self.PLUGIN_NAME,
command=cmd,
arguments=argument_format,
flags=flag_format,
short_help=short_help,
long_help=long_help)
arguments = docopt(
usage,
argv=argv,
program="mesos " + self.PLUGIN_NAME + " " + cmd,
version=self.VERSION,
options_first=True)
if "alias" in self.COMMANDS[cmd]:
cmd = self.COMMANDS[cmd]["alias"]
self.__setup__(cmd, argv)
return getattr(self, cmd.replace("-", "_"))(arguments)
return self.main(["--help"])
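# Hedged sketch (not part of the original file): a minimal plugin built on the
# base class above. The exact schema expected in COMMANDS is defined by
# cli.util.format_commands_help/format_subcommands_help and is not reproduced
# here; this only illustrates the inheritance and dispatch pattern, where
# main() resolves "<command>" to a method of the same name.
class ExamplePlugin(PluginBase):
    """
    Minimal example plugin; a 'ping' command would be dispatched to ping().
    """
    COMMANDS = {}  # populate according to cli.util's expected schema

    def ping(self, arguments):
        return "pong"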
|
sigma-geosistemas/django-tenants
|
dts_test_project/dts_test_project/settings.py
|
Python
|
mit
| 3,048
| 0.000656
|
"""
Django settings for dts_test_project project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
TENANT_APPS_DIR = os.path.join(BASE_DIR, os.pardir)
sys.path.insert(0, TENANT_APPS_DIR)
sys.path.insert(0, BASE_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'cl1)b#c&xmm36z3e(quna-vb@ab#&gpjtdjtpyzh!qn%bc^xxn'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
SHARED_APPS = (
'django_tenants', # mandatory
'customers', # you must list the app where your tenant model resides in
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
TENANT_APPS = (
'dts_test_app',
)
TENANT_MODEL = "customers.Client" # app.Model
TENANT_DOMAIN_MODEL = "customers.Domain" # app.Model
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
INSTALLED_APPS = list(SHARED_APPS) + [app for app in TENANT_APPS if app not in SHARED_APPS]
ROOT_URLCONF = 'dts_test_project.urls'
WSGI_APPLICATION = 'dts_test_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django_tenants.postgresql_backend',
'NAME': 'dts_test_project',
'USER': 'postgres',
'PASSWORD': os.environ.get('DATABASE_PASSWORD', 'root'),
'HOST': os.environ.get('DATABASE_HOST', 'localhost'),
'PORT': '',
}
}
DATABASE_ROUTERS = (
'django_tenants.routers.TenantSyncRouter',
)
MIDDLEWARE = (
'tenant_tutorial.middleware.TenantTutorialMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.media',
    'django.core.context_processors.static',
'django.contrib.messages.context_processors.messages',
)
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
|
jendrikseipp/rednotebook-elementary
|
win/build-installer.py
|
Python
|
gpl-2.0
| 831
| 0.002407
|
#! /usr/bin/env python3
import argparse
import logging
import os
from utils import run
logging.basicConfig(level=logging.INFO)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('dist_dir')
parser.add_argument('version')
return parser.parse_args()
args = parse_args()
DIR = os.path.dirname(os.path.abspath(__file__))
BASE_DIR = os.path.dirname(DIR)
DIST_DIR = os.path.abspath(args.dist_dir)
DRIVE_C = os.path.join(DIST_DIR, 'drive_c')
WINE_RN_DIR = os.path.join(DRIVE_C, 'rednotebook')
WINE_RN_WIN_DIR = os.path.join(WINE_RN_DIR, 'win')
os.environ['WINEPREFIX'] = DIST_DIR
ISCC = os.path.join(DRIVE_C, 'Program Files (x86)', 'Inno Setup 5', 'ISCC.exe')
VERSION_PARAM = '/dREDNOTEBOOK_VERSION=%s' % args.version
run(['wine', ISCC, VERSION_PARAM, 'rednotebook.iss'], cwd=WINE_RN_WIN_DIR)
|
seanbell/opensurfaces
|
server/licenses/admin.py
|
Python
|
mit
| 621
| 0.00161
|
from django.contrib import admin
from common.admin import AutoUserMixin
from licenses.models import License
class LicenseAdmin(AutoUserMixin, admin.ModelAdmin):
fieldsets = [
(None, {
'fields': ['added', 'name', 'url', 'creative_commons',
'cc_attribution', 'cc_noncommercial',
'cc_no_deriv', 'cc_share_alike']
}),
]
    # fields
readonly_fields = ['added']
list_display = ['name', 'url']
# field display
list_filter = ['name', 'added']
search_fields = ['name', 'url']
admin.site.register(License, LicenseAdmin)
|
ajylee/gpaw-rtxs
|
gpaw/lcao/pwf2.py
|
Python
|
gpl-3.0
| 16,299
| 0.00135
|
import numpy as np
from ase import Hartree
from gpaw.aseinterface import GPAW
from gpaw.lcao.overlap import NewTwoCenterIntegrals
from gpaw.utilities import unpack
from gpaw.utilities.tools import tri2full, lowdin
from gpaw.lcao.tools import basis_subset2, get_bfi2
from gpaw.coulomb import get_vxc as get_ks_xc
from gpaw.utilities.blas import r2k, gemm
from gpaw.lcao.projected_wannier import dots, condition_number, eigvals, \
get_bfs, get_lcao_projections_HSP
def get_rot(F_MM, V_oM, L):
eps_M, U_MM = np.linalg.eigh(F_MM)
indices = eps_M.real.argsort()[-L:]
U_Ml = U_MM[:, indices]
U_Ml /= np.sqrt(dots(U_Ml.T.conj(), F_MM, U_Ml).diagonal())
U_ow = V_oM.copy()
U_lw = np.dot(U_Ml.T.conj(), F_MM)
for col1, col2 in zip(U_ow.T, U_lw.T):
norm = np.linalg.norm(np.hstack((col1, col2)))
col1 /= norm
col2 /= norm
return U_ow, U_lw, U_Ml
def get_lcao_xc(calc, P_aqMi, bfs=None, spin=0):
nq = len(calc.wfs.ibzk_qc)
nao = calc.wfs.setups.nao
dtype = calc.wfs.dtype
if bfs is None:
bfs = get_bfs(calc)
if calc.density.nt_sg is None:
calc.density.interpolate()
nt_sg = calc.density.nt_sg
vxct_sg = calc.density.finegd.zeros(calc.wfs.nspins)
calc.hamiltonian.xc.calculate(calc.density.finegd, nt_sg, vxct_sg)
vxct_G = calc.wfs.gd.zeros()
calc.hamiltonian.restrict(vxct_sg[spin], vxct_G)
Vxc_qMM = np.zeros((nq, nao, nao), dtype)
for q, Vxc_MM in enumerate(Vxc_qMM):
bfs.calculate_potential_matrix(vxct_G, Vxc_MM, q)
tri2full(Vxc_MM, 'L')
# Add atomic PAW corrections
for a, P_qMi in P_aqMi.items():
D_sp = calc.density.D_asp[a][:]
H_sp = np.zeros_like(D_sp)
calc.hamiltonian.xc.calculate_paw_correction(calc.wfs.setups[a],
D_sp, H_sp)
H_ii = unpack(H_sp[spin])
for Vxc_MM, P_Mi in zip(Vxc_qMM, P_qMi):
Vxc_MM += dots(P_Mi, H_ii, P_Mi.T.conj())
return Vxc_qMM * Hartree
def get_xc2(calc, w_wG, P_awi, spin=0):
if calc.density.nt_sg is None:
calc.density.interpolate()
nt_g = calc.density.nt_sg[spin]
vxct_g = calc.density.finegd.zeros()
calc.hamiltonian.xc.get_energy_and_potential(nt_g, vxct_g)
vxct_G = calc.wfs.gd.empty()
calc.hamiltonian.restrict(vxct_g, vxct_G)
# Integrate pseudo part
Nw = len(w_wG)
xc_ww = np.empty((Nw, Nw))
r2k(.5 * calc.wfs.gd.dv, w_wG, vxct_G * w_wG, .0, xc_ww)
tri2full(xc_ww, 'L')
# Add atomic PAW corrections
for a, P_wi in P_awi.items():
D_sp = calc.density.D_asp[a][:]
H_sp = np.zeros_like(D_sp)
calc.wfs.setups[a].xc_correction.calculate_energy_and_derivatives(
D_sp, H_sp)
H_ii = unpack(H_sp[spin])
xc_ww += dots(P_wi, H_ii, P_wi.T.conj())
return xc_ww * Hartree
class ProjectedWannierFunctionsFBL:
"""PWF in the finite band limit.
::
--N
|w_w> = > |psi_n> U_nw
--n=1
"""
def __init__(self, V_nM, No, ortho=False):
Nw = V_nM.shape[1]
assert No <= Nw
V_oM, V_uM = V_nM[:No], V_nM[No:]
F_MM = np.dot(V_uM.T.conj(), V_uM)
U_ow, U_lw, U_Ml = get_rot(F_MM, V_oM, Nw - No)
self.U_nw = np.vstack((U_ow, dots(V_uM, U_Ml, U_lw)))
# stop here ?? XXX
self.S_ww = self.rotate_matrix(np.ones(1))
if ortho:
lowdin(self.U_nw, self.S_ww)
self.S_ww = np.identity(Nw)
self.norms_n = np.dot(self.U_nw, np.linalg.solve(
self.S_ww, self.U_nw.T.conj())).diagonal()
def rotate_matrix(self, A_nn):
if A_nn.ndim == 1:
return np.dot(self.U_nw.T.conj() * A_nn, self.U_nw)
else:
return dots(self.U_nw.T.conj(), A_nn, self.U_nw)
def rotate_projections(self, P_ani):
P_awi = {}
for a, P_ni in P_ani.items():
P_awi[a] = np.tensordot(self.U_nw, P_ni, axes=[[0], [0]])
return P_awi
def rotate_function(self, psit_nG):
return np.tensordot(self.U_nw, psit_nG, axes=[[0], [0]])
class ProjectedWannierFunctionsIBL:
"""PWF in the infinite band limit.
::
--No --Nw
|w_w> = > |psi_o> U_ow + > |f_M> U_Mw
--o=1 --M=1
"""
def __init__(self, V_nM, S_MM, No, lcaoindices=None):
        Nw = V_nM.shape[1]
assert No <= Nw
self.V_oM, V_uM = V_nM[:No], V_nM[No:]
F_MM = S_MM - np.dot(self.V_oM.T.conj(), self.V_oM)
U_ow, U_lw, U_Ml = get_rot(F_MM, self.V_oM, Nw - No)
self.U_Mw = np.dot(U_Ml, U_lw)
self.U_ow = U_ow - np.dot(self.V_oM, self.U_Mw)
if lcaoindices is not None:
for i in lcaoindices:
self.U_ow[:, i] = 0.0
self.U_Mw[:, i] = 0.0
self.U_Mw[i, i] = 1.0
        # stop here ?? XXX
self.S_ww = self.rotate_matrix(np.ones(1), S_MM)
P_uw = np.dot(V_uM, self.U_Mw)
self.norms_n = np.hstack((
np.dot(U_ow, np.linalg.solve(self.S_ww, U_ow.T.conj())).diagonal(),
np.dot(P_uw, np.linalg.solve(self.S_ww, P_uw.T.conj())).diagonal()))
def rotate_matrix(self, A_o, A_MM):
assert A_o.ndim == 1
A_ww = dots(self.U_ow.T.conj() * A_o, self.V_oM, self.U_Mw)
A_ww += np.conj(A_ww.T)
A_ww += np.dot(self.U_ow.T.conj() * A_o, self.U_ow)
A_ww += dots(self.U_Mw.T.conj(), A_MM, self.U_Mw)
return A_ww
def rotate_projections(self, P_aoi, P_aMi, indices=None):
if indices is None:
U_ow = self.U_ow
U_Mw = self.U_Mw
else:
U_ow = self.U_ow[:, indices]
U_Mw = self.U_Mw[:, indices]
P_awi = {}
for a, P_oi in P_aoi.items():
P_awi[a] = np.tensordot(U_Mw, P_aMi[a], axes=[[0], [0]])
if len(U_ow) > 0:
P_awi[a] += np.tensordot(U_ow, P_oi, axes=[[0], [0]])
return P_awi
def rotate_function(self, psit_oG, bfs, q=-1, indices=None):
if indices is None:
U_ow = self.U_ow
U_Mw = self.U_Mw
else:
U_ow = self.U_ow[:, indices]
U_Mw = self.U_Mw[:, indices]
w_wG = np.zeros((U_ow.shape[1],) + psit_oG.shape[1:])
if len(U_ow) > 0:
gemm(1., psit_oG, U_ow.T.copy(), 0., w_wG)
bfs.lcao_to_grid(U_Mw.T.copy(), w_wG, q)
return w_wG
class PWFplusLCAO(ProjectedWannierFunctionsIBL):
def __init__(self, V_nM, S_MM, No, pwfmask, lcaoindices=None):
Nw = V_nM.shape[1]
self.V_oM = V_nM[:No]
dtype = V_nM.dtype
# Do PWF optimization for pwfbasis submatrix only!
Npwf = len(pwfmask.nonzero()[0])
pwfmask2 = np.outer(pwfmask, pwfmask)
s_MM = S_MM[pwfmask2].reshape(Npwf, Npwf)
v_oM = self.V_oM[:, pwfmask]
f_MM = s_MM - np.dot(v_oM.T.conj(), v_oM)
nw = len(s_MM)
assert No <= nw
u_ow, u_lw, u_Ml = get_rot(f_MM, v_oM, nw - No)
u_Mw = np.dot(u_Ml, u_lw)
u_ow = u_ow - np.dot(v_oM, u_Mw)
# Determine U for full lcao basis
self.U_ow = np.zeros((No, Nw), dtype)
for U_w, u_w in zip(self.U_ow, u_ow):
np.place(U_w, pwfmask, u_w)
self.U_Mw = np.identity(Nw, dtype)
np.place(self.U_Mw, pwfmask2, u_Mw.flat)
if lcaoindices is not None:
for i in lcaoindices:
self.U_ow[:, i] = 0.0
self.U_Mw[:, i] = 0.0
self.U_Mw[i, i] = 1.0
self.S_ww = self.rotate_matrix(np.ones(1), S_MM)
self.norms_n = None
def set_lcaoatoms(calc, pwf, lcaoatoms):
ind = get_bfi(calc, lcaoatoms)
for i in ind:
pwf.U_ow[:, i] = 0.0
pwf.U_Mw[:, i] = 0.0
pwf.U_Mw[i, i] = 1.0
class PWF2:
def __init__(self, gpwfilename, fixedenergy=0., spin=0, ibl=True,
basis='sz', zero_fermi=False, pwfbasis=None, lcaoatoms=None,
projection_data=None):
calc = GPAW
|
eiri/nixie
|
tests/test_nixie_errors.py
|
Python
|
mit
| 579
| 0.015544
|
import unittest, uuid
from nixie.core import Nixie, KeyError
class NixieErrorsTestCase(unittest.TestCase):
|
def test_read_missing(self):
nx = Nixie()
self.assertIsNone(nx.read('missing'))
def test_update_missing(self):
nx = Nixie()
with self.assertRaises(KeyError):
nx.update('missing')
def test_update_with_wrong_value(self):
nx = Nixie()
key = nx.create()
with self.assertRaises(ValueError):
|
nx.update(key, 'a')
def test_delete_missing(self):
nx = Nixie()
with self.assertRaises(KeyError):
nx.delete('missing')
|
MERegistro/meregistro
|
meregistro/shortcuts.py
|
Python
|
bsd-3-clause
| 533
| 0
|
from django.shortcuts import render_to_response
from django.core.context_processors import csrf
from django.conf import settings
def my_render(request, template, context={}):
|
context.update(csrf(request))
context['STATIC_URL'] = settings.STATIC_URL
context['flash'] = request.get_flash()
context['user'] = request.user
context['user_perfil'] = request.get_perfil()
|
context['credenciales'] = set(request.get_credenciales())
context['settings'] = settings
return render_to_response(template, context)
|
ozgurgunes/django-cmskit
|
cmskit/articles/forms.py
|
Python
|
mit
| 1,271
| 0.008655
|
# -*- coding: utf-8 -*-
from django import forms
from django.conf import settings
from cmskit.articles.models import Index, Article
from cms.plugin_pool import plugin_pool
from cms.plugins.text.widgets.wymeditor_widget import WYMEditor
from cms.plugins.text.settings import USE_TINYMCE
def get_editor_widget():
"""
Returns the Django form Widget to be used for
the text area
"""
#plugins = plugin_pool.get_text_enabled_plugins(self.placeholder, self.page)
if USE_TINYMCE and "tinymce" in settings.INSTALLED_APPS:
from cms.plugins.text.widgets.tinymce_widget import TinyMCEEditor
return TinyMCEEditor()
else:
return WYMEditor()
class IndexForm(forms.ModelForm):
class Meta:
model = Index
def __init__(self, *args, **kwargs):
super(IndexForm, self).__init__(*args, **kwargs)
choices = [self.fields['page'].choices.__iter__().next()]
|
for page in self.fields['page'].queryset:
choices.append(
(page.id, ''.join(['- '*page.level, page.__unicode__()]))
)
self.fields['page'].choices = choices
class ArticleForm(forms.ModelForm):
body = forms.CharField(widget=get_editor_widget())
class Meta:
model = Article
|
|
wehr-lab/RPilot
|
autopilot/core/pilot.py
|
Python
|
gpl-3.0
| 26,194
| 0.005612
|
"""
"""
import os
import sys
import datetime
import logging
import argparse
import threading
import time
import socket
import json
import base64
import subprocess
import warnings
import numpy as np
import pandas as pd
from scipy.stats import linregress
import tables
warnings.simplefilter('ignore', category=tables.NaturalNameWarning)
from autopilot import prefs
from autopilot.core.loggers import init_logger
if __name__ == '__main__':
# Parse arguments - this should have been called with a .json prefs file passed
# We'll try to look in the default location first
parser = argparse.ArgumentParser(description="Run an autopilot")
parser.add_argument('-f', '--prefs', help="Location of .json prefs file (created during setup_autopilot.py)")
args = parser.parse_args()
if not args.prefs:
prefs_file = '/usr/autopilot/prefs.json'
if not os.path.exists(prefs_file):
raise Exception("No Prefs file passed, and file not in default location")
warnings.warn('No prefs file passed, loaded from default location. Should pass explicitly with -f')
else:
prefs_file = args.prefs
prefs.init(prefs_file)
if prefs.get('AUDIOSERVER') or 'AUDIO' in prefs.get('CONFIG'):
if prefs.get('AUDIOSERVER') == 'pyo':
from autopilot.stim.sound import pyoserver
else:
from autopilot.stim.sound import jackclient
from autopilot.core.networking import Pilot_Station, Net_Node, Message
from autopilot import external
from autopilot import tasks
from autopilot.hardware import gpio
########################################
class Pilot:
"""
Drives the Raspberry Pi
Coordinates the hardware and networking objects to run tasks.
Typically used with a connection to a :class:`.Terminal` object to
coordinate multiple subjects and tasks, but a high priority for future releases
is to do the (trivial amount of) work to make this class optionally
standalone.
Called as a module with the -f flag to give the location of a prefs file, eg::
python pilot.py -f prefs_file.json
if the -f flag is not passed, looks in the default location for prefs
(ie. `/usr/autopilot/prefs.json`)
Needs the following prefs (typically established by :mod:`.setup.setup_pilot`):
* **NAME** - The name used by networking objects to address this Pilot
* **BASEDIR** - The base directory for autopilot files (/usr/autopilot)
* **PUSHPORT** - Router port used by the Terminal we connect to.
|
* **TERMINALIP** - IP Address of our upstream Terminal.
* **MSGPORT** - Port used by our own networking object
* **HARDWARE** - Any hardware and its mapping to GPIO pins. No pins are required to be set, instead each
task defines which pins it needs. Currently the default configuration asks for
* POKES - :class:`.hardware.Beambreak`
* LEDS - :class:`.hardware.LED_RGB`
* PORTS - :class:`.hardware.Solenoid`
* **AUDIOSERVER** - Which type, if any, audio server to use (`'jack'`, `'pyo'`, or `'none'`)
|
* **NCHANNELS** - Number of audio channels
* **FS** - Sampling rate of audio output
* **JACKDSTRING** - string used to start the jackd server, see `the jack manpages <https://linux.die.net/man/1/jackd>`_ eg::
jackd -P75 -p16 -t2000 -dalsa -dhw:sndrpihifiberry -P -rfs -n3 -s &
* **PIGPIOMASK** - Binary mask of pins for pigpio to control, see `the pigpio docs <http://abyz.me.uk/rpi/pigpio/pigpiod.html>`_ , eg::
1111110000111111111111110000
* **PULLUPS** - Pin (board) numbers to pull up on boot
* **PULLDOWNS** - Pin (board) numbers to pull down on boot.
Attributes:
name (str): The name used to identify ourselves in :mod:`.networking`
task (:class:`.tasks.Task`): The currently instantiated task
running (:class:`threading.Event`): Flag used to control task running state
stage_block (:class:`threading.Event`): Flag given to a task to signal when task stages finish
file_block (:class:`threading.Event`): Flag used to wait for file transfers
state (str): 'RUNNING', 'STOPPING', 'IDLE' - signals what this pilot is up to
pulls (list): list of :class:`~.hardware.Pull` objects to keep pins pulled up or down
server: Either a :func:`~.sound.pyoserver.pyo_server` or :class:`~.jackclient.JackClient` , sound server.
node (:class:`.networking.Net_Node`): Our Net_Node we use to communicate with our main networking object
networking (:class:`.networking.Pilot_Station`): Our networking object to communicate with the outside world
ip (str): Our IPv4 address
listens (dict): Dictionary mapping message keys to methods used to process them.
logger (:class:`logging.Logger`): Used to log messages and network events.
"""
logger = None
# Events for thread handling
running = None
stage_block = None
file_block = None
quitting = None
"""mp.Event to signal when process is quitting"""
# networking - our internal and external messengers
node = None
networking = None
# audio server
server = None
def __init__(self, splash=True):
if splash:
with open(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'setup', 'welcome_msg.txt'), 'r') as welcome_f:
welcome = welcome_f.read()
print('')
for line in welcome.split('\n'):
print(line)
print('')
sys.stdout.flush()
self.name = prefs.get('NAME')
if prefs.get('LINEAGE') == "CHILD":
self.child = True
self.parentid = prefs.get('PARENTID')
else:
self.child = False
self.parentid = 'T'
self.logger = init_logger(self)
self.logger.debug('pilot logger initialized')
# Locks, etc. for threading
self.running = threading.Event() # Are we running a task?
self.stage_block = threading.Event() # Are we waiting on stage triggers?
self.file_block = threading.Event() # Are we waiting on file transfer?
self.quitting = threading.Event()
self.quitting.clear()
# init pigpiod process
self.init_pigpio()
# Init audio server
if prefs.get('AUDIOSERVER') or 'AUDIO' in prefs.get('CONFIG'):
self.init_audio()
# Init Station
# Listen dictionary - what do we do when we receive different messages?
self.listens = {
'START': self.l_start, # We are being passed a task and asked to start it
'STOP' : self.l_stop, # We are being asked to stop running our task
'PARAM': self.l_param, # A parameter is being changed
'CALIBRATE_PORT': self.l_cal_port, # Calibrate a water port
'CALIBRATE_RESULT': self.l_cal_result, # Compute curve and store result
'BANDWIDTH': self.l_bandwidth # test our bandwidth
}
# spawn_network gives us the independent message-handling process
self.networking = Pilot_Station()
self.networking.start()
self.node = Net_Node(id = "_{}".format(self.name),
upstream = self.name,
port = prefs.get('MSGPORT'),
listens = self.listens,
instance=False)
self.logger.debug('pilot networking initialized')
# if we need to set pins pulled up or down, do that now
self.pulls = []
if prefs.get( 'PULLUPS'):
for pin in prefs.get('PULLUPS'):
self.pulls.append(gpio.Digital_Out(int(pin), pull='U', polarity=0))
if prefs.get( 'PULLDOWNS'):
for pin in prefs.get('PULLDOWNS'):
self.pulls.append(gpio.Digital_Out(int(pin), pull='D', polarity=1))
self.logger.debug('pullups and pulldowns set')
# check if the calibration file needs to be updated
# Set and update state
self.state = 'IDLE' # or 'Running'
self.update_state()
# Since we're starting up, h
|
ndparker/wolfe
|
wolfe/scheduler/_job.py
|
Python
|
apache-2.0
| 6,561
| 0
|
# -*- coding: ascii -*-
r"""
:Copyright:
Copyright 2014 - 2016
Andr\xe9 Malo or his licensors, as applicable
:License:
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
======
Jobs
======
Jobs have been entered into the scheduler once. They may even be finished
already.
"""
if __doc__: # pragma: no cover
# pylint: disable = redefined-builtin
__doc__ = __doc__.encode('ascii').decode('unicode_escape')
__author__ = r"Andr\xe9 Malo".encode('ascii').decode('unicode_escape')
__docformat__ = "restructuredtext en"
import collections as _collections
import itertools as _it
from .. import _graph
from .. import interfaces as _interfaces
from .. import _lock
#: Exception raised on cycles, when a todo DAG is resolved
DependencyCycle = _graph.DependencyCycle
#: Job ID sequence
#:
#: :Type: callable
_gen_id = _it.count(1).next
def last_job_id():
"""
Determine the largest job ID assigned until now
:Return: The ID. It is ``0`` if no job ID has been assigned yet (job IDs
start with ``1``)
:Rtype: ``int``
"""
# this inspects the counter iterable by calling pickling methods and
# retrieving the next value from there and then subtracting one.
# __reduce__ returns the factory ('count') and the argument tuple
# containing the initial value (advanced with each call to next())
# pylint: disable = no-member
return _gen_id.__self__.__reduce__()[1][0] - 1
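def _example_last_job_id():
    """Hedged illustration (editor's addition, not original wolfe code).

    itertools.count pickles as ``(count, (next_value,))``, which is what the
    ``__reduce__`` trick above relies on.
    """
    gen = _it.count(1).next     # same Python 2 spelling as _gen_id above
    gen(); gen(); gen()         # hands out 1, 2, 3
    # __reduce__ now reports (itertools.count, (4,)), so 4 - 1 == 3 IDs used
    return gen.__self__.__reduce__()[1][0] - 1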
class Job(object):
"""
Job after it has been scheduled.
:See: `JobInterface`
"""
__implements__ = [_interfaces.JobInterface]
def __init__(self, job_id, desc, group, locks, importance, not_before,
extra, predecessors, attempts):
"""
Initialization
:Parameters:
`job_id` : ``int``
Job ID
`desc` : `TodoDescription`
Job description
`group` : ``str``
Job Group
`locks` : iterable
List of locks that need to be acquired (``(`LockInterface`, ...)``)
`importance` : ``int``
Job importance
`not_before` : various
execute job not before this time. Special formats are allowed:
``int``
Number of seconds from now (delay)
``datetime.datetime``
a specific point in time (server time). Use UTC if you can. For
naive date times, UTC is assumed.
If omitted or ``None``, ``0`` is assumed.
`extra` : ``dict``
Extra job data
`predecessors` : iterable
List of jobs to be run successfully before this one
(``(int, ...)``)
`attempts` : ``list``
execution attempts (``[ExecutionAttemptInterface, ...]``)
"""
self.id = job_id
self.desc = desc
self.group = group
self.locks = _lock.validate(locks)
self.locks_waiting = None
self.importance = importance
self.extra = extra
self.predecessors = set()
self.predecessors_waiting = None
self.attempts = attempts
self.not_before = not_before
for item in predecessors or ():
self.depend_on(item)
def depend_on(self, job_id):
"""
Add predecessor job ID
Duplicates are silently ignored.
:See: `interfaces.JobInterface.depend_on`
"""
assert self.predecessors_waiting is None
try:
job_id = int(job_id)
except TypeError:
raise ValueError("Invalid job_id: %r" % (job_id,))
if job_id < 1 or job_id >= self.id:
raise ValueError("Invalid job_id: %r" % (job_id,))
self.predecessors.add(job_id)
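# Hedged illustration (editor's addition, not original wolfe code): for a job
# with id 7, depend_on() accepts anything int()-convertible in [1, 6] and the
# predecessor set silently absorbs duplicates, e.g.
#
#   job.depend_on(3); job.depend_on('3')   # predecessors == {3}
#   job.depend_on(7)                       # ValueError (must be < self.id)
#   job.depend_on(0)                       # ValueError (must be >= 1)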
def job_from_todo(todo):
"""
Construct Job from Todo
:Parameters:
`todo` : `Todo`
Todo to construct from
:Return: New job instance
:Rtype: `JobInterface`
"""
return Job(
_gen_id(), todo.desc, todo.group, todo.locks, todo.importance,
todo.not_before, {}, set(), []
)
def joblist_from_todo(todo):
"""
Construct a list of jobs from Todo graph
:Parameters:
`todo` : `Todo`
todo to be inspected.
:Return: List of jobs (``[JobInterface, ...]``)
:Rtype: ``list``
"""
jobs, todos, virtuals = [], {}, {}
toinspect = _collections.deque([(todo, None)])
graph = _graph.DependencyGraph()
# 1) fill the dependency graph with the todo nodes (detects cycles, too)
try:
while toinspect:
todo, parent = toinspect.pop()
todo_id = id(todo)
if todo_id in todos:
virtual_id, pre, _ = todos[todo_id]
else:
pre = []
virtual_id = len(virtuals)
todos[todo_id] = virtual_id, pre, todo
virtuals[virtual_id] = todo_id
for parent_id in todo.predecessors():
graph.add((False, parent_id), (True, virtual_id))
pre.append((False, parent_id))
for succ in todo.successors():
toinspect.appendleft((succ, (True, virtual_id)))
|
if parent is not None:
graph.add(parent, (True, virtual_id))
pre.append(parent)
else:
graph.add((False, None), (True, virtual_id))
|
except DependencyCycle as e:
# remap to our input (todos and not some weird virtual IDs)
raise DependencyCycle([
todos[virtuals[tup[1]]][2] for tup in e.args[0]
])
# 2) resolve the graph (create topological order)
id_mapping = {}
for is_virtual, virtual_id in graph.resolve():
if is_virtual:
_, pres, todo = todos[virtuals[virtual_id]]
job = job_from_todo(todo)
for is_virtual, pre in pres:
if is_virtual:
pre = id_mapping[pre]
job.depend_on(pre)
id_mapping[virtual_id] = job.id
jobs.append(job)
return jobs
|
horkko/biomaj
|
biomaj/notify.py
|
Python
|
agpl-3.0
| 2,306
| 0.001735
|
from builtins import str
from builtins import object
import smtplib
import email.utils
from biomaj.workflow import Workflow
import logging
import sys
if sys.version < '3':
from email.MIMEText import MIMEText
else:
from email.mime.text import MIMEText
class Notify(object):
"""
Send notifications
"""
@staticmethod
def notifyBankAction(bank):
if not bank.config.get('mail.smtp.host') or bank.session is None:
|
logging.info('Notify:none')
return
admins = bank.config.get('mail.admin')
if not admins:
logging.info('Notify: no mail.admin defined')
return
admin_list = admins.split(',')
logging.info('Notify:' + bank.config.get('mail.admin'))
mfrom = bank.config.get('mail.from')
|
log_file = bank.config.log_file
msg = MIMEText('')
if log_file:
fp = None
if sys.version < '3':
fp = open(log_file, 'rb')
else:
fp = open(log_file, 'r')
msg = MIMEText(fp.read(2000000))
fp.close()
msg['From'] = email.utils.formataddr(('Author', mfrom))
msg['Subject'] = 'BANK[' + bank.name + '] - STATUS[' + str(bank.session.get_status(Workflow.FLOW_OVER)) + '] - UPDATE[' + str(bank.session.get('update')) + '] - REMOVE[' + str(bank.session.get('remove')) + ']' + ' - RELEASE[' + str(bank.session.get('release')) + ']'
logging.info(msg['subject'])
server = None
for mto in admin_list:
msg['To'] = email.utils.formataddr(('Recipient', mto))
try:
server = smtplib.SMTP(bank.config.get('mail.smtp.host'))
if bank.config.get('mail.tls') is not None and str(bank.config.get('mail.tls')) == 'true':
server.starttls()
if bank.config.get('mail.user') is not None and str(bank.config.get('mail.user')) != '':
server.login(bank.config.get('mail.user'), bank.config.get('mail.password'))
server.sendmail(mfrom, [mto], msg.as_string())
except Exception as e:
logging.error('Could not send email: ' + str(e))
finally:
if server is not None:
server.quit()
|
karstenw/nodebox-pyobjc
|
examples/New Functions/twyg/demo1.py
|
Python
|
mit
| 1,032
| 0.014535
|
from __future__ import print_function
import os
twyg = ximport('twyg')
# reload(twyg)
datafiles = list(filelist( os.path.abspath('example-data')))
datafile = choice(datafiles)
configs = [ 'boxes', 'bubbles', 'edge', 'flowchart', 'hive', 'ios', 'jellyfish',
|
'junction1', 'junction2', 'modern', 'nazca', 'rounded', 'square',
'synapse', 'tron']
colorschemes = [ 'aqua', 'azure', 'bordeaux', 'clay', 'cmyk', 'cobalt', 'colors21',
'crayons', 'earth', 'forest', 'grape', 'honey', 'inca', 'jelly', 'kelp',
'mango', 'mellow', 'merlot', 'milkshake', 'mint-gray', 'mint', 'moon',
|
'mustard', 'neo', 'orbit', 'pastels', 'quartz', 'salmon', 'tentacle',
'terracotta', 'turquoise', 'violet']
config = choice(configs)
colorscheme = choice(colorschemes)
margins = ['10%', '5%']
print( config )
print( colorscheme )
print( os.path.basename(datafile) )
print()
twyg.generate_output_nodebox(datafile, config, colorscheme=colorscheme, margins=margins)
|
python-attrs/attrs
|
tests/test_slots.py
|
Python
|
mit
| 18,010
| 0
|
# SPDX-License-Identifier: MIT
"""
Unit tests for slots-related functionality.
"""
import pickle
import sys
import types
import weakref
import pytest
import attr
from attr._compat import PY2, PYPY, just_warn, make_set_closure_cell
# Pympler doesn't work on PyPy.
try:
from pympler.asizeof import asizeof
has_pympler = True
except BaseException: # Won't be an import error.
has_pympler = False
@attr.s
class C1(object):
x = attr.ib(validator=attr.validators.instance_of(int))
y = attr.ib()
def method(self):
return self.x
@classmethod
def classmethod(cls):
return "clsmethod"
@staticmethod
|
def staticmethod():
return "staticmethod"
if not PY2:
def my_class(self):
return __class__
def my_super(self):
|
"""Just to test out the no-arg super."""
return super().__repr__()
@attr.s(slots=True, hash=True)
class C1Slots(object):
x = attr.ib(validator=attr.validators.instance_of(int))
y = attr.ib()
def method(self):
return self.x
@classmethod
def classmethod(cls):
return "clsmethod"
@staticmethod
def staticmethod():
return "staticmethod"
if not PY2:
def my_class(self):
return __class__
def my_super(self):
"""Just to test out the no-arg super."""
return super().__repr__()
def test_slots_being_used():
"""
The class is really using __slots__.
"""
non_slot_instance = C1(x=1, y="test")
slot_instance = C1Slots(x=1, y="test")
assert "__dict__" not in dir(slot_instance)
assert "__slots__" in dir(slot_instance)
assert "__dict__" in dir(non_slot_instance)
assert "__slots__" not in dir(non_slot_instance)
assert set(["__weakref__", "x", "y"]) == set(slot_instance.__slots__)
if has_pympler:
assert asizeof(slot_instance) < asizeof(non_slot_instance)
non_slot_instance.t = "test"
with pytest.raises(AttributeError):
slot_instance.t = "test"
assert 1 == non_slot_instance.method()
assert 1 == slot_instance.method()
assert attr.fields(C1Slots) == attr.fields(C1)
assert attr.asdict(slot_instance) == attr.asdict(non_slot_instance)
def test_basic_attr_funcs():
"""
Comparison, `__eq__`, `__hash__`, `__repr__`, `attrs.asdict` work.
"""
a = C1Slots(x=1, y=2)
b = C1Slots(x=1, y=3)
a_ = C1Slots(x=1, y=2)
# Comparison.
assert b > a
assert a_ == a
# Hashing.
hash(b) # Just to assert it doesn't raise.
# Repr.
assert "C1Slots(x=1, y=2)" == repr(a)
assert {"x": 1, "y": 2} == attr.asdict(a)
def test_inheritance_from_nonslots():
"""
Inheritance from a non-slotted class works.
Note that a slotted class inheriting from an ordinary class loses most of
the benefits of slotted classes, but it should still work.
"""
@attr.s(slots=True, hash=True)
class C2Slots(C1):
z = attr.ib()
c2 = C2Slots(x=1, y=2, z="test")
assert 1 == c2.x
assert 2 == c2.y
assert "test" == c2.z
c2.t = "test" # This will work, using the base class.
assert "test" == c2.t
assert 1 == c2.method()
assert "clsmethod" == c2.classmethod()
assert "staticmethod" == c2.staticmethod()
assert set(["z"]) == set(C2Slots.__slots__)
c3 = C2Slots(x=1, y=3, z="test")
assert c3 > c2
c2_ = C2Slots(x=1, y=2, z="test")
assert c2 == c2_
assert "C2Slots(x=1, y=2, z='test')" == repr(c2)
hash(c2) # Just to assert it doesn't raise.
assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
def test_nonslots_these():
"""
Enhancing a dict class using 'these' works.
This will actually *replace* the class with another one, using slots.
"""
class SimpleOrdinaryClass(object):
def __init__(self, x, y, z):
self.x = x
self.y = y
self.z = z
def method(self):
return self.x
@classmethod
def classmethod(cls):
return "clsmethod"
@staticmethod
def staticmethod():
return "staticmethod"
C2Slots = attr.s(
these={"x": attr.ib(), "y": attr.ib(), "z": attr.ib()},
init=False,
slots=True,
hash=True,
)(SimpleOrdinaryClass)
c2 = C2Slots(x=1, y=2, z="test")
assert 1 == c2.x
assert 2 == c2.y
assert "test" == c2.z
with pytest.raises(AttributeError):
c2.t = "test" # We have slots now.
assert 1 == c2.method()
assert "clsmethod" == c2.classmethod()
assert "staticmethod" == c2.staticmethod()
assert set(["__weakref__", "x", "y", "z"]) == set(C2Slots.__slots__)
c3 = C2Slots(x=1, y=3, z="test")
assert c3 > c2
c2_ = C2Slots(x=1, y=2, z="test")
assert c2 == c2_
assert "SimpleOrdinaryClass(x=1, y=2, z='test')" == repr(c2)
hash(c2) # Just to assert it doesn't raise.
assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
def test_inheritance_from_slots():
"""
Inheriting from an attrs slotted class works.
"""
@attr.s(slots=True, hash=True)
class C2Slots(C1Slots):
z = attr.ib()
@attr.s(slots=True, hash=True)
class C2(C1):
z = attr.ib()
c2 = C2Slots(x=1, y=2, z="test")
assert 1 == c2.x
assert 2 == c2.y
assert "test" == c2.z
assert set(["z"]) == set(C2Slots.__slots__)
assert 1 == c2.method()
assert "clsmethod" == c2.classmethod()
assert "staticmethod" == c2.staticmethod()
with pytest.raises(AttributeError):
c2.t = "test"
non_slot_instance = C2(x=1, y=2, z="test")
if has_pympler:
assert asizeof(c2) < asizeof(non_slot_instance)
c3 = C2Slots(x=1, y=3, z="test")
assert c3 > c2
c2_ = C2Slots(x=1, y=2, z="test")
assert c2 == c2_
assert "C2Slots(x=1, y=2, z='test')" == repr(c2)
hash(c2) # Just to assert it doesn't raise.
assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
def test_inheritance_from_slots_with_attribute_override():
"""
Inheriting from a slotted class doesn't re-create existing slots
"""
class HasXSlot(object):
__slots__ = ("x",)
@attr.s(slots=True, hash=True)
class C2Slots(C1Slots):
# y re-defined here but it shouldn't get a slot
y = attr.ib()
z = attr.ib()
@attr.s(slots=True, hash=True)
class NonAttrsChild(HasXSlot):
# Parent class has slot for "x" already, so we skip it
x = attr.ib()
y = attr.ib()
z = attr.ib()
c2 = C2Slots(1, 2, "test")
assert 1 == c2.x
assert 2 == c2.y
assert "test" == c2.z
assert {"z"} == set(C2Slots.__slots__)
na = NonAttrsChild(1, 2, "test")
assert 1 == na.x
assert 2 == na.y
assert "test" == na.z
assert {"__weakref__", "y", "z"} == set(NonAttrsChild.__slots__)
def test_inherited_slot_reuses_slot_descriptor():
"""
We reuse slot descriptor for an attr.ib defined in a slotted attr.s
"""
class HasXSlot(object):
__slots__ = ("x",)
class OverridesX(HasXSlot):
@property
def x(self):
return None
@attr.s(slots=True)
class Child(OverridesX):
x = attr.ib()
assert Child.x is not OverridesX.x
assert Child.x is HasXSlot.x
c = Child(1)
assert 1 == c.x
assert set() == set(Child.__slots__)
ox = OverridesX()
assert ox.x is None
def test_bare_inheritance_from_slots():
"""
Inheriting from a bare attrs slotted class works.
"""
@attr.s(
init=False, eq=False, order=False, hash=False, repr=False, slots=True
)
class C1BareSlots(object):
x = attr.ib(validator=attr.validators.instance_of(int))
y = attr.ib()
def method(self):
return self.x
@classmethod
def classmethod(cls):
return "clsmethod"
@staticmethod
def staticmethod():
return "staticmethod"
@attr.s(init=False, eq=False, order=False, hash=False, repr=False)
class C1Bare(object):
x = attr.ib(validator=
|
Debian/dak
|
daklib/conftest.py
|
Python
|
gpl-2.0
| 1,325
| 0.000755
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
|
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from daklib.database.all import Base
Session = sessionmaker()
@pytest.fixture(scope='session')
def engine():
engine = create_engine('sqlite://', echo=True)
Base.metadata.create_all(engine)
return engine
@pytest.yield_fixture
def session(engine):
connection = engine.connect()
trans = connection.begin()
session = Session(bind=connection)
yield session
session.close()
trans.rollback()
connection.close()
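# Hedged usage sketch (editor's addition): a test placed in a module next to
# this conftest can simply request the fixture; whatever it adds to the
# session is rolled back by the fixture teardown above, so the shared
# in-memory engine stays clean between tests.
def test_session_example(session):
    assert session.is_active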
|
johnbachman/belpy
|
indra/sources/eidos/processor.py
|
Python
|
mit
| 18,394
| 0
|
import re
import copy
import logging
import datetime
import objectpath
from indra.statements import *
logger = logging.getLogger(__name__)
class EidosProcessor(object):
"""This processor extracts INDRA Statements from Eidos JSON-LD output.
Parameters
----------
json_dict : dict
A JSON dictionary containing the Eidos extractions in JSON-LD format.
Attributes
----------
statements : list[indra.statements.Statement]
A list of INDRA Statements that were extracted by the processor.
"""
def __init__(self, json_dict, grounding_ns=None):
self.doc = EidosDocument(json_dict)
self.grounding_ns = grounding_ns
self.statements = []
def extract_causal_relations(self):
"""Extract causal relations as Statements."""
# Get the extractions that are labeled as directed and causal
relations = [e for e in self.doc.extractions if
'DirectedRelation' in e['labels'] and
'Causal' in e['labels']]
# For each relation, we try to extract an INDRA Statement and
# save it if it's valid
for relation in relations:
stmt = self.get_causal_relation(relation)
if stmt is not None:
self.statements.append(stmt)
def extract_correlations(self):
events = [e for e in self.doc.extractions if
'UndirectedRelation' in e['labels'] and
'Correlation' in e['labels']]
for event in events:
# For now, just take the first source and first destination.
# Later, might deal with hypergraph representation.
arg_ids = find_args(event, 'argument')
if len(arg_ids) != 2:
logger.warning('Skipping correlation with not 2 arguments.')
continue
# Resolve coreferences by ID
arg_ids = [self.doc.coreferences.get(arg_id, arg_id)
for arg_id in arg_ids]
# Get the actual entities
args = [self.doc.entities[arg_id] for arg_id in arg_ids]
# Make Events from the entities
members = [self.get_event(arg) for arg in args]
# Get the evidence
evidence = self.get_evidence(event)
st = Association(members, evidence=[evidence])
self.statements.append(st)
def extract_events(self):
events = [e for e in self.doc.extractions if
'Concept-Expanded' in e['labels']]
for event_entry in events:
event = self.get_event(event_entry)
evidence = self.get_evidence(event_entry)
event.evidence = [evidence]
if not event.context and evidence.context:
event.context = copy.deepcopy(evidence.context)
evidence.context = None
self.statements.append(event)
def get_event_by_id(self, event_id):
# Resolve coreferences by ID
event_id = self.doc.coreferences.get(event_id, event_id)
# Get the actual entity
event = self.doc.entities[event_id]
return self.get_event(event)
def get_event(self, event):
concept = self.get_concept(event)
states = event.get('states', [])
extracted_states = self.extract_entity_states(states)
polarity = extracted_states.get('polarity')
adjectives = extracted_states.get('adjectives')
delta = QualitativeDelta(polarity=polarity, adjectives=adjectives)
timex = extracted_states.get('time_context', None)
geo = extracted_states.get('geo_context', None)
context = WorldContext(time=timex, geo_location=geo) \
if timex or geo else None
stmt = Event(concept, delta=delta, context=context)
return stmt
def get_causal_relation(self, relation):
# For now, just take the first source and first destination.
# Later, might deal with hypergraph representation.
subj_id = find_arg(relation, 'source')
obj_id = find_arg(relation, 'destination')
if subj_id is None or obj_id is None:
return None
subj = self.get_event_by_id(subj_id)
obj = self.get_event_by_id(obj_id)
evidence = self.get_evidence(relation)
# We also put the adjectives and polarities into annotations since
# they could otherwise get squashed upon preassembly
evidence.annotations['subj_polarity'] = subj.delta.polarity
evidence.annotations['obj_polarity'] = obj.delta.polarity
evidence.annotations['subj_adjectives'] = subj.delta.adjectives
evidence.annotations['obj_adjectives'] = obj.delta.adjectives
evidence.annotations['subj_context'] = subj.context.to_json() if \
subj.context else {}
evidence.annotations['obj_context'] = obj.context.to_json() if \
obj.context else {}
st = Influence(subj, obj, evidence=[evidence])
return st
def get_evidence(self, relation):
"""Return the Evidence object for the INDRA Statment."""
provenance = relation.get('provenance')
# First try looking up the full sentence through provenance
text = None
context = None
if provenance:
sentence_tag = provenance[0].get('sentence')
if sentence_tag and '@id' in sentence_tag:
sentence_id = sentence_tag['@id']
sentence = self.doc.sentences.get(sentence_id)
if sentence is not None:
text = _sanitize(sentence['text'])
# Here we try to get the title of the document and set it
# in the provenance
doc_id = provenance[0].get('document', {}).get('@id')
if doc_id:
title = self.doc.documents.get(doc_id, {}).get('title')
if title:
provenance[0]['document']['title'] = title
annotations = {'found_by': relation.get('rule'),
'provenance': provenance}
|
if self.doc.dct is not None:
annotations['document_creation_time'] = self.doc.dct.to_json()
|
epistemics = {}
negations = self.get_negation(relation)
hedgings = self.get_hedging(relation)
if hedgings:
epistemics['hedgings'] = hedgings
if negations:
# This is the INDRA standard to show negation
epistemics['negated'] = True
# But we can also save the texts associated with the negation
# under annotations, just in case it's needed
annotations['negated_texts'] = negations
# If that fails, we can still get the text of the relation
if text is None:
text = _sanitize(relation.get('text'))
ev = Evidence(source_api='eidos', text=text, annotations=annotations,
context=context, epistemics=epistemics)
return ev
@staticmethod
def get_negation(event):
"""Return negation attached to an event.
Example: "states": [{"@type": "State", "type": "NEGATION",
"text": "n't"}]
"""
states = event.get('states', [])
if not states:
return []
negs = [state for state in states
if state.get('type') == 'NEGATION']
neg_texts = [neg['text'] for neg in negs]
return neg_texts
@staticmethod
def get_hedging(event):
"""Return hedging markers attached to an event.
Example: "states": [{"@type": "State", "type": "HEDGE",
"text": "could"}
"""
states = event.get('states', [])
if not states:
return []
hedgings = [state for state in states
if state.get('type') == 'HEDGE']
hedging_texts = [hedging['text'] for hedging in hedgings]
return hedging_texts
def extract_entity_states(self, states):
if states is None:
return {'polarity': None, 'adjectives': []}
polarity = None
adjectives = []
time_context = None
geo_context = None
for
|
dana-i2cat/felix
|
modules/resource/manager/stitching-entity/src/handler/geni/v3/extensions/sfa/trust/gid.py
|
Python
|
apache-2.0
| 10,122
| 0.004149
|
#----------------------------------------------------------------------
# Copyright (c) 2008 Board of Trustees, Princeton University
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
##
# Implements SFA GID. GIDs are based on certificates, and the GID class is a
# descendant of the certificate class.
##
import xmlrpclib
import uuid
|
from handler.geni.v3.extensions.sfa.trust.certificate import Certificate
from handler.geni.v3.extensions.sfa.util.faults import GidInvalidParentHrn, GidParentHrn
from handler.geni.v3.extensions.sfa.util.sfalogging import logger
from handler.geni.v3.extensions.sfa.util.xrn import hrn_to_urn, urn_to_hrn, hrn_authfor_hrn
|
##
# Create a new uuid. Returns the UUID as a string.
def create_uuid():
return str(uuid.uuid4().int)
##
# GID is a tuple:
# (uuid, urn, public_key)
#
# UUID is a unique identifier and is created by the python uuid module
# (or the utility function create_uuid() in gid.py).
#
# HRN is a human readable name. It is a dotted form similar to a backward domain
# name. For example, planetlab.us.arizona.bakers.
#
# URN is a human readable identifier of form:
# "urn:publicid:IDN+toplevelauthority[:sub-auth.]*[\res. type]\ +object name"
# For example, urn:publicid:IDN+planetlab:us:arizona+user+bakers
#
# PUBLIC_KEY is the public key of the principal identified by the UUID/HRN.
# It is a Keypair object as defined in the cert.py module.
#
# It is expected that there is a one-to-one pairing between UUIDs and HRN,
# but it is uncertain how this would be inforced or if it needs to be enforced.
#
# These fields are encoded using xmlrpc into the subjectAltName field of the
# x509 certificate. Note: Call encode() once the fields have been filled in
# to perform this encoding.
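# Hedged illustration (editor's addition): after encode(), the subjectAltName
# payload is simply the comma-separated fields, for example (values made up):
#
#   URI:urn:publicid:IDN+planetlab:us:arizona+user+bakers,
#   URI:urn:uuid:00000000-0000-0000-0000-0000075bcd15, email:bakers@example.edu
#
# decode() below reverses exactly this packing the first time a get_*()
# accessor needs a field.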
class GID(Certificate):
uuid = None
hrn = None
urn = None
email = None # for adding to the SubjectAltName
##
# Create a new GID object
#
# @param create If true, create the X509 certificate
# @param subject If subject!=None, create the X509 cert and set the subject name
# @param string If string!=None, load the GID from a string
# @param filename If filename!=None, load the GID from a file
# @param lifeDays life of GID in days - default is 1825==5 years
def __init__(self, create=False, subject=None, string=None, filename=None, uuid=None, hrn=None, urn=None, lifeDays=1825):
Certificate.__init__(self, lifeDays, create, subject, string, filename)
if subject:
logger.debug("Creating GID for subject: %s" % subject)
if uuid:
self.uuid = int(uuid)
if hrn:
self.hrn = hrn
self.urn = hrn_to_urn(hrn, 'unknown')
if urn:
self.urn = urn
self.hrn, type = urn_to_hrn(urn)
def set_uuid(self, uuid):
if isinstance(uuid, str):
self.uuid = int(uuid)
else:
self.uuid = uuid
def get_uuid(self):
if not self.uuid:
self.decode()
return self.uuid
def set_hrn(self, hrn):
self.hrn = hrn
def get_hrn(self):
if not self.hrn:
self.decode()
return self.hrn
def set_urn(self, urn):
self.urn = urn
self.hrn, type = urn_to_hrn(urn)
def get_urn(self):
if not self.urn:
self.decode()
return self.urn
# Will be stuffed into subjectAltName
def set_email(self, email):
self.email = email
def get_email(self):
if not self.email:
self.decode()
return self.email
def get_type(self):
if not self.urn:
self.decode()
_, t = urn_to_hrn(self.urn)
return t
##
# Encode the GID fields and package them into the subject-alt-name field
# of the X509 certificate. This must be called prior to signing the
# certificate. It may only be called once per certificate.
def encode(self):
if self.urn:
urn = self.urn
else:
urn = hrn_to_urn(self.hrn, None)
str = "URI:" + urn
if self.uuid:
str += ", " + "URI:" + uuid.UUID(int=self.uuid).urn
if self.email:
str += ", " + "email:" + self.email
self.set_data(str, 'subjectAltName')
##
# Decode the subject-alt-name field of the X509 certificate into the
# fields of the GID. This is automatically called by the various get_*()
# functions in this class.
def decode(self):
data = self.get_data('subjectAltName')
dict = {}
if data:
if data.lower().startswith('uri:http://<params>'):
dict = xmlrpclib.loads(data[11:])[0][0]
else:
spl = data.split(', ')
for val in spl:
if val.lower().startswith('uri:urn:uuid:'):
dict['uuid'] = uuid.UUID(val[4:]).int
elif val.lower().startswith('uri:urn:publicid:idn+'):
dict['urn'] = val[4:]
elif val.lower().startswith('email:'):
# FIXME: Ensure there isn't cruft in that address...
# EG look for email:copy,....
dict['email'] = val[6:]
self.uuid = dict.get("uuid", None)
self.urn = dict.get("urn", None)
self.hrn = dict.get("hrn", None)
self.email = dict.get("email", None)
if self.urn:
self.hrn = urn_to_hrn(self.urn)[0]
##
# Dump the credential to stdout.
#
# @param indent specifies a number of spaces to indent the output
# @param dump_parents If true, also dump the parents of the GID
def dump(self, *args, **kwargs):
print self.dump_string(*args,**kwargs)
def dump_string(self, indent=0, dump_parents=False):
result=" "*(indent-2) + "GID\n"
result += " "*indent + "hrn:" + str(self.get_hrn()) +"\n"
result += " "*indent + "urn:" + str(self.get_urn()) +"\n"
result += " "*indent + "uuid:" + str(self.get_uuid()) + "\n"
if self.get_email() is not None:
result += " "*indent + "email:" + str(self.get_email()) + "\n"
filename=self.get_filename()
if filename: result += "Filename %s\n"%filename
if self.parent and dump_parents:
result += " "*indent + "parent:\n"
result += self.parent.dump_string(indent+4, dump_parents)
return result
##
# Verify the chain of authenticity of the GID. First perform the checks
# of the certificate class (verifying that each parent signs the child,
# etc). In addition, GIDs also confirm that the parent's HRN is a prefix
# of the child's HRN, and the parent is of type 'authority'.
#
# Verifying these prefixes prevents a rogue authority from signing a GID
# for a principal that is not a member of that authority. For example,
# planetlab.us.arizona cannot sign a GID for planetlab.us.princeton.foo.
def verify_chain(self, trusted_c
|
meltiseugen/DocumentsFlow
|
DocumentsFlow/settings.py
|
Python
|
mit
| 3,117
| 0.001283
|
"""
Django settings for DocumentsFlow project.
Generated by 'django-admin startproject' using Django 1.10.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '-3v-w43(q0sg$!%e+i@#f#=w(j40i=afhjrmyedj-+x36+z'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'DocumentsFlow.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
|
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
|
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'DocumentsFlow.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
|
rparent/django-lock-tokens
|
tests/settings.py
|
Python
|
mit
| 1,481
| 0
|
# -*- coding: utf-8
from __future__ import absolute_import, unicode_literals
|
import django
DEBUG = True
USE_TZ = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "o0fy)a6pmew*fe9b+^wf)96)2j8)%6oz555d7by7_(*i!b8wj8"
|
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
}
}
ROOT_URLCONF = "tests.urls"
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.staticfiles",
"django.contrib.admin",
"django.contrib.messages",
"lock_tokens.apps.LockTokensConfig",
"tests",
]
SITE_ID = 1
STATIC_URL = '/static/'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
]
}
},
]
if django.VERSION >= (1, 10):
MIDDLEWARE = (
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
)
else:
MIDDLEWARE_CLASSES = (
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
)
|
strahlex/pymachinetalk
|
pymachinetalk/tests/test_dns_sd.py
|
Python
|
mit
| 13,743
| 0.000291
|
# coding=utf-8
import pytest
@pytest.fixture
def dns_sd():
from pymachinetalk import dns_sd
return dns_sd
@pytest.fixture
def sd():
from pymachinetalk import dns_sd
sd = dns_sd.ServiceDiscovery()
return sd
def test_registeringServicesFromServiceContainerWorks(dns_sd, sd):
service = dns_sd.Service()
discoverable = dns_sd.ServiceContainer()
discoverable.services.append(service)
sd.register(discoverable)
assert service in sd.services
def test_registeringServiceDirectlyWorks(dns_sd, sd):
service = dns_sd.Service()
sd.register(service)
assert service in sd.services
def test_registeringAnythingElseFails(sd):
item = object()
try:
sd.register(item)
except TypeError:
assert True
assert item not in sd.services
def test_registeringWhenRunningThrowsError(dns_sd, sd):
service = dns_sd.Service()
def dummy():
pass
sd._start_discovery = dummy
sd.start()
try:
sd.register(service)
except RuntimeError:
assert True
assert service not in sd.services
def test_unregisteringServiceDirectlyWorks(dns_sd, sd):
service = dns_sd.Service()
sd.register(service)
sd.unregister(service)
assert service not in sd.services
def test_unregisteringServicesFromServiceContainerWorks(dns_sd, sd):
service = dns_sd.Service()
discoverable = dns_sd.ServiceContainer()
discoverable.services.append(service)
sd.register(discoverable)
sd.unregister(discoverable)
assert service not in sd.services
def test_unregisteringAnythingElseFails(sd):
item = 34
try:
sd.unregister(item)
except TypeError:
assert True
assert item not in sd.services
def test_unregisteringWhenRunningThrowsError(dns_sd, sd):
service = dns_sd.Service()
def dummy():
pass
sd._start_discovery = dummy
sd.start()
try:
sd.unregister(service)
except RuntimeError:
assert True
assert service not in sd.services
class ServiceInfoFactory(object):
def create(
self,
base_type='machinekit',
domain='local',
sd_protocol='tcp',
name='Hugo on Franz',
service=b'halrcomp',
uuid=b'12345678',
host='127.0.0.1',
protocol='tcp',
port=12345,
version=0,
properties=None,
server='127.0.0.1',
address=None,
):
from zeroconf import ServiceInfo
typestring = '_%s._%s.%s.' % (base_type, sd_protocol, domain)
dsn = b'%s://%s:%i' % (protocol.encode(), host.encode(), port)
if properties is None:
properties = {
b'uuid': uuid,
b'service': service,
b'dsn': dsn,
b'version': version,
}
return ServiceInfo(
type_=typestring,
name='%s %s.%s' % (name, host, typestring),
properties=properties,
address=(address or host).encode(),
port=port,
server=server,
)
@pytest.fixture
def zeroconf(mocker):
from zeroconf import Zeroconf
service_info = ServiceInfoFactory().create()
zeroconf_stub = mocker.stub(name='get_service_info')
zeroconf_stub.return_value = service_info
stub_object = Zeroconf()
stub_object.get_service_info = zeroconf_stub
return stub_object
@pytest.fixture
def zeroconf_without_service_info(mocker):
from zeroconf import Zeroconf
zeroconf_stub = mocker.stub(name='get_service_info')
zeroconf_stub.return_value = None
stub_object = Zeroconf()
stub_object.get_service_info = zeroconf_stub
return stub_object
def test_serviceDiscoveredUpdatesRegisteredServices(dns_sd, sd, zeroconf):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
assert service.ready is True
def test_serviceDisappearedUpdatesRegisteredServices(dns_sd, sd, zeroconf):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
sd.remove_service(
zeroconf,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
assert service.ready is False
def test_stoppingServiceDiscoveryResetsAllServices(dns_sd, sd, zeroconf):
service1 = dns_sd.Service(type_='halrcomp')
sd.register(service1)
service2 = dns_sd.Service(type_='halrcmd')
sd.register(service2)
sd.browser = object() # dummy
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
sd.stop()
assert service1.ready is False
assert service2.ready is False
def test_serviceDiscoveredWithoutServiceInfoDoesNotUpdateRegisteredServices(
dns_sd, sd, zeroconf_without_service_info
):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.add_service(
zeroconf_without_service_info,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
assert service.ready is False
def test_serviceDisappearedWithoutServiceInfoDoesNotUpdateRegisteredServices(
dns_sd, sd, zeroconf_without_service_info
):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
service.ready = True
sd.remove_service(
zeroconf_without_service_info,
'_machinekit._tcp.local.',
|
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
assert service.ready is True
def test_serviceInfoSetsAllRelevantValuesOfService(dns_sd):
service = dns_sd.Service(type_='halrcomp')
service_info = ServiceInfoFactory().create(
name='Foo on Bar',
uuid=b'987654321',
|
version=5,
host='10.0.0.10',
protocol='tcp',
port=12456,
server='sandybox.local',
)
service.add_service_info(service_info)
assert service.uri == 'tcp://10.0.0.10:12456'
assert service.name == service_info.name
assert service.uuid == '987654321'
assert service.version == 5
assert service.host_name == 'sandybox.local'
assert service.host_address == '10.0.0.10'
def test_serviceInfoResolvesLocalHostnameIfMatched(dns_sd):
service = dns_sd.Service(type_='halrcomp')
service_info = ServiceInfoFactory().create(
host='sandybox.local',
protocol='tcp',
port=12456,
server='sandybox.local',
address='10.0.0.10',
)
service.add_service_info(service_info)
assert service.uri == 'tcp://10.0.0.10:12456'
def test_serviceInfoReturnsRawUriIfHostnameIsNotMatched(dns_sd):
service = dns_sd.Service(type_='halrcomp')
service_info = ServiceInfoFactory().create(
host='thinkpad.local',
protocol='tcp',
port=12456,
server='sandybox.local',
address='10.0.0.10',
)
service.add_service_info(service_info)
assert service.uri == 'tcp://thinkpad.local:12456'
def test_serviceInfoWithIncompleteValuesIsIgnoredByService(dns_sd):
service = dns_sd.Service(type_='launcher')
service_info = ServiceInfoFactory().create(properties={})
service.add_service_info(service_info)
assert service.uri == ''
assert service.uuid == ''
assert service.version == b''
def test_removingServiceInfoResetsAllRelevantValuesOfService(dns_sd):
service = dns_sd.Service(type_='blahus')
service_info = ServiceInfoFactory().create()
service.add_service_info(service_info)
service.remove_service_info(service_info)
assert service.uri == ''
assert service.name == ''
assert service.uuid == ''
assert service.version == 0
assert service.host_name == ''
assert service.host_address == ''
def test_clearingServiceInfosResetsValuesOfService(dns_sd):
service = dns_sd.Service(type_='foobar')
service.add_service_info(ServiceInfoFactory().create())
service.a
|
shivaco/selfbot
|
cogs/role_perms.py
|
Python
|
gpl-3.0
| 398
| 0.012563
|
from discord.ext import commands
class Github:
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def permrole(self, ctx, argument:str):
await self.bot.say(';py for perm in discord.utils.get(ctx.message.server.roles, name="{}").permissions: print(perm)'.format(argument))
|
|
def setup(bot):
bot.add_cog(Github(bot))
|
mattvperry/ktane-py
|
ktane/__init__.py
|
Python
|
mit
| 173
| 0.005848
|
from .command_line_mixins import CommandLineMixins
from .module import Module
from .console_app import ConsoleApp
__all__ = ['CommandLineMixins', 'Module', 'ConsoleApp']
|
|
|
JarryShaw/jsntlib
|
src/NTLArchive/__init__.py
|
Python
|
gpl-3.0
| 100
| 0
|
# -*- coding: utf-8 -*-
|
'''
|
Caution:
For Python 2.7, `__init__.py` file in folders is necessary.
'''
|
sirca/clusterous
|
clusterous/defaults.py
|
Python
|
apache-2.0
| 3,073
| 0.002603
|
# Copyright 2015 Nicta
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
"""
Module for storing default and static values
"""
local_config_dir = '~/.clusterous'
local_session_data_dir = local_config_dir + '/' + 'session'
local_environment_dir = local_config_dir + '/' + 'environment'
cached_cluster_file = 'cluster_spec.yml'
cached_environment_file = 'environment.yml'
cached_cluster_file_path = local_environment_dir + '/' + cached_cluster_file
cached_environment_file_path = local_environment_dir + '/' + cached_environment_file
remote_environment_dir = '/home/ubuntu/environment'
current_nat_ip_file = local_config_dir + '/' + 'current_controller'
cluster_info_file = local_config_dir + '/' + 'cluster_info.yml'
taggable_name_re = re.compile('^[\w-]+$') # For user supplied strings such as cluster name
taggable_name_max_length = 64 # Arbitrary but ample, keeping in mind AWS keys can be max 127 chars
nat_name_format = '{0}-nat'
nat_name_tag_value = 'nat'
nat_instance_type = 't2.micro'
controller_name_format = '{0}-controller'
controller_name_tag_value = 'controller'
controller_instance_type = 't2.small'
node_name_format = '{0}-node-{1}'
instance_tag_key = '@clusterous'
instance_node_type_tag_key = 'NodeType'
registry_s3_path = '/docker-registry'
central_logging_name_format = '{0}-central-logging'
central_logging_name_tag_value = 'central-logging'
central_logging_instance_type = 't2.small'
|
default_zone = 'a'
controller_root_volume_size = 50 # GB
cluster_username = 'ubuntu'
cluster_user_home_dir = '/home/ubuntu'
shared_volume_path = '/home/data/'
shared_volume_size = 20 # GB
remote_scripts_dir = 'ansible/remote'
default_cluster_def_filename = 'default_cluster.yml'
|
remote_host_scripts_dir = 'clusterous'
remote_host_key_file = 'key.pem'
remote_host_vars_file = 'vars.yml'
container_id_script_file = 'container_id.sh'
mesos_port = 5050
marathon_port = 8080
central_logging_port = 8081
nat_ssh_port_forwarding = 22000
# How many seconds to wait for all Marathon applications to reach "started" state
# Currently 30 minutes
app_launch_start_timeout = 1800
app_destroy_timeout = 60
def get_script(filename):
"""
Takes script relative filename, returns absolute path
Assumes this file is in Clusterous source root, uses __file__
"""
return '{0}/{1}/{2}'.format(os.path.dirname(__file__), 'scripts', filename)
def get_remote_dir():
"""
Return full path of remote scripts directory
"""
return '{0}/{1}/{2}'.format(os.path.dirname(__file__), 'scripts', remote_scripts_dir)
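# Hedged usage note (editor's addition): if this module lived at
# /opt/clusterous/clusterous/defaults.py, then get_script('create_cluster.sh')
# would resolve to /opt/clusterous/clusterous/scripts/create_cluster.sh and
# get_remote_dir() to /opt/clusterous/clusterous/scripts/ansible/remote.
# The install path and the script name are made-up examples.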
|
JocelynDelalande/moulinette-yunohost
|
lib/yunohost/hook.py
|
Python
|
agpl-3.0
| 10,947
| 0.004111
|
# -*- coding: utf-8 -*-
""" License
Copyright (C) 2013 YunoHost
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses
"""
""" yunohost_hook.py
Manage hooks
"""
import os
import sys
import re
import json
import errno
import subprocess
from shlex import split as arg_split
from moulinette.core import MoulinetteError
from moulinette.utils.log import getActionLogger
hook_folder = '/usr/share/yunohost/hooks/'
custom_hook_folder = '/etc/yunohost/hooks.d/'
logger = getActionLogger('yunohost.hook')
def hook_add(app, file):
"""
    Store hook script to filesystem
Keyword argument:
app -- App to link with
file -- Script to add (/path/priority-file)
"""
path, filename = os.path.split(file)
priority, action = _extract_filename_parts(filename)
try: os.listdir(custom_hook_folder + action)
except OSError: os.makedirs(custom_hook_folder + action)
finalpath = custom_hook_folder + action +'/'+ priority +'-'+ app
os.system('cp %s %s' % (file, finalpath))
os.system('chown -hR admin: %s' % hook_folder)
return { 'hook': finalpath }
def hook_remove(app):
"""
Remove hooks linked to a specific app
Keyword argument:
app -- Scripts related to app will be removed
"""
try:
for action in os.listdir(custom_hook_folder):
for script in os.listdir(custom_hook_folder + action):
if script.endswith(app):
os.remove(custom_hook_folder + action +'/'+ script)
except OSError: pass
def hook_list(action, list_by='name', show_info=False):
"""
List available hooks for an action
Keyword argument:
action -- Action name
list_by -- Property to list hook by
show_info -- Show hook information
"""
result = {}
# Process the property to list hook by
if list_by == 'priority':
if show_info:
def _append_hook(d, priority, name, path):
# Use the priority as key and a dict of hooks names
# with their info as value
value = { 'path': path }
try:
d[priority][name] = value
except KeyError:
d[priority] = { name: value }
else:
def _append_hook(d, priority, name, path):
# Use the priority as key and the name as value
try:
d[priority].add(name)
except KeyError:
d[priority] = set([name])
elif list_by == 'name' or list_by == 'folder':
if show_info:
def _append_hook(d, priority, name, path):
# Use the name as key and a list of hooks info - the
# executed ones with this name - as value
l = d.get(name, list())
for h in l:
# Only one priority for the hook is accepted
if h['priority'] == priority:
# Custom hooks overwrite system ones and they
                        # are appended at the end - so overwrite it
if h['path'] != path:
h['path'] = path
return
l.append({ 'priority': priority, 'path': path })
d[name] = l
else:
if list_by == 'name':
result = set()
def _append_hook(d, priority, name, path):
# Add only the name
d.add(name)
else:
raise MoulinetteError(errno.EINVAL, m18n.n('hook_list_by_invalid'))
def _append_folder(d, folder):
# Iterate over and add hook from a folder
for f in os.listdir(folder + action):
path = '%s%s/%s' % (folder, action, f)
priority, name = _extract_filename_parts(f)
_append_hook(d, priority, name, path)
try:
# Append system hooks first
if list_by == 'folder':
result['system'] = dict() if show_info else set()
_append_folder(result['system'], hook_folder)
else:
_append_folder(result, hook_folder)
except OSError:
logger.debug("system hook folder not found for action '%s' in %s",
action, hook_folder)
try:
# Append custom hooks
if list_by == 'folder':
result['custom'] = dict() if show_info else set()
_append_folder(result['custom'], custom_hook_folder)
else:
_append_folder(result, custom_hook_folder)
except OSError:
logger.debug("custom hook folder not found for action '%s' in %s",
action, custom_hook_folder)
return { 'hooks': result }
def hook_callback(action, hooks=[], args=None):
"""
    Execute all scripts bound to an action
Keyword argument:
action -- Action name
hooks -- List of hooks names to execute
args -- Ordered list of arguments to pass to the script
"""
result = { 'succeed': list(), 'failed': list() }
hooks_dict = {}
# Retrieve hooks
if not hooks:
hooks_dict = hook_list(action, list_by='priority',
show_info=True)['hooks']
else:
hooks_names = hook_list(action, list_by='name',
show_info=True)['hooks']
# Iterate over given hooks names list
for n in hooks:
try:
hl = hooks_names[n]
except KeyError:
raise MoulinetteError(errno.EINVAL,
m18n.n('hook_name_unknown', n))
# Iterate over hooks with this name
for h in hl:
# Update hooks dict
d = hooks_dict.get(h['priority'], dict())
d.update({ n: { 'path': h['path'] }})
hooks_dict[h['priority']] = d
if not hooks_dict:
return result
# Format arguments
if args is None:
args = []
elif not isinstance(args, list):
args = [args]
# Iterate over hooks and execute them
for priority in sorted(hooks_dict):
for name, info in iter(hooks_dict[priority].items()):
filename = '%s-%s' % (priority, name)
try:
hook_exec(info['path'], args=args)
except:
logger.exception("error while executing hook '%s'",
info['path'])
result['failed'].append(filename)
else:
result['succeed'].append(filename)
return result
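# --- Illustrative sketch (not part of the original module) ---
# hook_callback() above walks hooks_dict in ascending priority order and
# collects "<priority>-<name>" entries into the succeed/failed lists. This
# standalone fragment reproduces just that ordering logic with made-up data.
def _demo_hook_order():
    hooks_dict = {
        '50': {'conf_regen': {'path': '/etc/yunohost/hooks.d/x/50-conf_regen'}},
        '05': {'early_hook': {'path': '/etc/yunohost/hooks.d/x/05-early_hook'}},
    }
    executed = []
    for priority in sorted(hooks_dict):
        for name, info in hooks_dict[priority].items():
            executed.append('%s-%s' % (priority, name))
    return executed  # ['05-early_hook', '50-conf_regen']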
def hook_check(file):
"""
Parse the script file and get arguments
Keyword argument:
file -- File to check
"""
try:
with open(file[:file.index('scripts/')] + 'manifest.json') as f:
manifest = json.loads(str(f.read()))
except:
raise MoulinetteError(errno.EIO, m18n.n('app_manifest_invalid'))
action = file[file.index('scripts/') + 8:]
if 'arguments' in manifest and action in manifest['arguments']:
return manifest['arguments'][action]
else:
return {}
def hook_exec(file, args=None):
"""
Execute hook from a file with arguments
Keyword argument:
file -- Script to execute
args -- Arguments to pass to the script
"""
from moulinette.utils.stream import NonBlockingStreamReader
from yunohost.app import _value_for_locale
if isinstance(args, list):
arg_list = args
else:
        required_args = hook_check(file)
|
nischal2002/m-quiz-2016
|
quiz.py
|
Python
|
mit
| 1,704
| 0.016432
|
import sys # this allows you to read the user input from keyboard also called "stdin"
import classOne # This imports all the classOne functions
import classTwo # This imports all the classTwo functions
import classThree # This imports all the classThree functions
import classFour # This imports all the classFour functions
TIMEOUT=10 # this is the amount of time you will wait for an answer in Seconds. 10 means 10 seconds
MAX_CLASS=5
QUIZ_INSTRUCTIONS = """
Get ready for the quiz. You will have 10 questions out of which you
will need 8 right to win the prize. You will have """ + str(TIMEOUT) + """ seconds
to answer each question. Press Enter to start."""
def getUsersClass(): #main
''' This function will get the user's class. It will compare the class with MAX_CLASS and
will return False if it is more than the MAX_CLASS. Class also has to be a natural number '''
print("Please tell me which Class you are in? ")
try:
usersClass = int(sys.stdin.readline().strip())
if (usersClass < 1 or usersClass > MAX_CLASS) :
print("No Quiz available for Class " + str(usersClass))
return False
else :
return usersClass
except :
print("Exception")
return False
if __name__ == '__main__':
while(True) :
usersClass = getUsersClass()
if (usersClass != False) :
break
print(QUIZ_INSTRUCTIONS)
sys.stdin.readline()
if (usersClass == 1) :
classOne.classOneQuiz()
elif (usersClass == 2) :
classTwo.classTwoQuiz()
elif(usersClass == 3):
        classThree.classThreeQuiz()
elif(usersClass == 4):
classFour.classFourQuiz()
|
Kreis-Unna/PostNAS_Search
|
PostNAS_AccessControl.py
|
Python
|
gpl-2.0
| 12,848
| 0.006305
|
# -*- coding: utf-8 -*-
import os
import getpass
import json
from qgis.PyQt.QtCore import QSettings
from qgis.PyQt.QtSql import QSqlDatabase, QSqlQuery
from qgis.PyQt.QtWidgets import QMessageBox
from qgis.core import *
import qgis.core
class PostNAS_AccessControl:
def __init__(self, username = None):
if(username == None):
self.username = getpass.getuser().lower()
else:
self.username = username.lower()
self.access = None
self.name = None
self.db = self.__loadDB()
def setUsername(self,username):
self.username = username
if(self.checkUserExists()):
sql = "SELECT name,access FROM public.postnas_search_access_control WHERE lower(username) = :username"
self.__openDB()
queryLoadUserData = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
queryLoadUserData.prepare(sql)
queryLoadUserData.bindValue(":username",self.getUsername())
queryLoadUserData.exec_()
if(queryLoadUserData.size() == 1):
while(queryLoadUserData.next()):
self.setName(queryLoadUserData.value(queryLoadUserData.record().indexOf("name")))
self.setAccess(queryLoadUserData.value(queryLoadUserData.record().indexOf("access")))
def setAccess(self,access):
self.access = access
def setName(self,name):
self.name = name
def getUsername(self):
return self.username.lower()
def getAccess(self):
return self.access
def getName(self):
return self.name
def __checkUsername(self):
pass
def checkAccessControlIsActive(self):
if os.path.isfile(os.path.dirname(os.path.realpath(__file__)) + '\config.json'):
with open(os.path.dirname(os.path.realpath(__file__)) + '\config.json') as config_file:
config = json.load(config_file)
accessControl = config['accessControl']
pass
else:
settings = QSettings("PostNAS", "PostNAS-Suche")
accessControl = settings.value("accessControl")
if(accessControl == 1):
if (self.checkAccessTable() == False):
accessControl = 0
else:
if (self.checkAccessTable() == True):
accessControl = 1
if os.path.isfile(os.path.dirname(os.path.realpath(__file__)) + '\config.json'):
config['accessControl'] = accessControl
with open(os.path.dirname(os.path.realpath(__file__)) + '\config.json', 'w') as config_file:
json.dump(config, config_file)
else:
settings.setValue("accessControl", accessControl)
if(accessControl == 1):
return True
else:
return False
def checkAccessTable(self):
sql = "SELECT table_name FROM information_schema.tables WHERE table_name = 'postnas_search_access_control'";
self.__openDB()
query = QSqlQuery(self.db)
query.exec_(sql)
if(query.size() > 0):
return True
else:
return False
def createAccessTable(self):
file_path = os.path.dirname(os.path.realpath(__file__)) + "/create_accesstable/create_table.sql"
sql = open(file_path).read()
self.__openDB()
query = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
query.exec_(sql)
if(query.lastError().number() == -1):
return True
else:
return False
def checkAccessTableHasAdmin(self):
sql = "SELECT lower(username) FROM public.postnas_search_access_control WHERE access = 0";
self.__openDB()
query = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
query.exec_(sql)
if(query.size() > 0):
return True
else:
return False
def insertUser(self):
if(self.getUsername() != None):
self.__openDB()
sql = "INSERT INTO public.postnas_search_access_control (username,name,access) VALUES (:username,:name,:access)"
query = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
query.prepare(sql)
query.bindValue(":username",self.getUsername().lower())
query.bindValue(":name",self.name)
query.bindValue(":access",self.access)
query.exec_()
if(query.lastError().number() == -1):
return True
else:
return False
else:
return False
def insertAdminUser(self):
self.access = 0
return self.insertUser()
def updateUser(self,username_old):
if(self.getUsername() != None):
self.__openDB()
sql = "UPDATE public.postnas_search_access_control SET username = :username, name = :name, access = :access WHERE username = :username_old"
query = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
query.prepare(sql)
query.bindValue(":username",self.getUsername().lower())
query.bindValue(":username_old",username_old)
query.bindValue(":name",self.name)
|
query.bindValue(":access",self.access)
query.exec_()
if(query.lastError().number() == -1):
return True
else:
QgsMessageLog.logMessage("Datenbankfehler beim Update: " + query.lastError().text(),'PostNAS-Suche', Qgis.Critical)
return False
else:
return False
    def checkUserIsAdmin(self):
if(self.getUsername() != None):
self.__openDB()
sql = "SELECT lower(username) as username FROM public.postnas_search_access_control WHERE access = 0 AND lower(username) = :username"
query = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
query.prepare(sql)
query.bindValue(":username",self.getUsername())
query.exec_()
if(query.lastError().number() == -1):
if(query.size() > 0):
return True
else:
return False
else:
return False
else:
return False
def checkUserHasEigentuemerAccess(self):
if(self.getUsername() != None):
self.__openDB()
sql = "SELECT lower(username) as username FROM public.postnas_search_access_control WHERE access IN (0,1) AND lower(username) = :username"
queryEigentuemerAccess = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
queryEigentuemerAccess.prepare(sql)
queryEigentuemerAccess.bindValue(":username",self.getUsername())
queryEigentuemerAccess.exec_()
if(queryEigentuemerAccess.lastError().number() == -1):
if(queryEigentuemerAccess.size() > 0):
return True
else:
return False
else:
return False
else:
return False
def loadUserAccessTable(self):
sql = "SELECT lower(username) as username,name,bezeichnung FROM public.postnas_search_access_control LEFT JOIN public.postnas_search_accessmode ON postnas_search_access_control.access = postnas_search_accessmode.id";
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
self.__openDB()
queryLoadAccessTable = QSqlQuery(self.db)
queryLoadAccessTable.prepare(sql)
        queryLoadAccessTable.exec_()
|
dandeliondeathray/niancat-micro
|
slackrest/features/steps/chat.py
|
Python
|
apache-2.0
| 2,243
| 0.001337
|
from behave import given, when, then
from slackrest.app import SlackrestApp
from slackrest.command import Visibility, Method
import json
class GiveMeAReply:
pattern = '!givemeareply'
url_format = '/reply'
visibility = Visibility.Any
body = None
method = Method.GET
class GiveMeANotification:
pattern = '!givemeanotification'
url_format = '/notify'
visibility = Visibility.Any
body = None
method = Method.GET
class MakeAPost:
pattern = '!makeapost'
url_format = '/makeapost'
visibility = Visibility.Any
method = Method.POST
@classmethod
def body(cls, **kwargs):
return json.dumps({'param': 'value'})
commands = [GiveMeAReply, GiveMeANotification, MakeAPost]
@given(u'Slackrest is connected to Slack')
def step_impl(context):
context.app = SlackrestApp(context.chat_url, commands, context.notification_channel_id)
context.app.run_async()
context.slack_events.await_event(event_type='login')
@when(u'I send "{message}" from channel "{channel_id}"')
def step_impl(context, message, channel_id):
user_id = 'U123456'
msg = {'type': 'message', 'text': message, 'channel': channel_id, 'user': user_id}
context.slack_events.send_message(msg)
@then(u'I should get a message in channel "{channel_id}"')
def step_impl(context, channel_id):
event = context.slack_events.await_event(event_type='message')
assert event['message']['channel'] == channel_id
@then(u'I should get a message containing "{msg}"')
def step_impl(context, msg):
event = context.slack_events.await_event(event_type='message')
print("Got message containing '{}'".format(event['message']['text']))
print("Got messag
|
e containing '{}'".format(event['message'][
|
'text']))
assert msg in event['message']['text']
@given(u'I set the notification channel to "{notification_channel_id}"')
def step_impl(context, notification_channel_id):
context.notification_channel_id = notification_channel_id
@given(u'I map "!givemeareply" to /reply')
def step_impl(context):
pass
@given(u'I map "!givemeanotification" to /notify')
def step_impl(context):
pass
@given(u'the chat bot is at {url}')
def step_impl(context, url):
context.chat_url = url
|
cypreess/django-getpaid
|
docs/conf.py
|
Python
|
mit
| 8,687
| 0.001381
|
# -*- coding: utf-8 -*-
#
# django-getpaid documentation build configuration file, created by
# sphinx-quickstart on Mon Jul 16 21:16:46 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
import django
import getpaid
sys.path.append(
os.path.join(os.path.join(os.path.dirname(__file__), os.pardir), os.pardir)
)
sys.path.append(
os.path.join(
os.path.join(os.path.join(os.path.dirname(__file__), os.pardir), os.pardir),
"example",
)
)
sys.path.append(
os.path.join(
os.path.join(os.path.join(os.path.dirname(__file__), os.pardir), os.pardir),
"django-getpaid",
)
)
sys.path.insert(0, os.path.abspath("../example"))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example.settings")
django.setup()
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx_rtd_theme",
"sphinx.ext.autosectionlabel",
]
autodoc_member_order = "bysource"
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "django-getpaid"
copyright = "2012-2013 Krzysztof Dorosz, 2013-2020 Dominik Kozaczko"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "2.2"
# The full version, including alpha/beta/rc tags.
release = getpaid.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar.  Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = "django-getpaiddoc"
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
(
"index",
"django-getpaid.tex",
"django-getpaid Documentation",
"Sunscrapers",
"manual",
)
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
("index", "django-getpaid", "django-getpaid Documentation", ["Sunscrapers"], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
|
antonsergeev/tesselect
|
gwyddion_import.py
|
Python
|
mit
| 1,734
| 0.008661
|
# Script loads 3d data from text file (after Gwyddion text importing of AFM file)
import re
import numpy as np
def ReadData(file_name):
'''
Load 3d data array from a text file. The text file is imported from Gwyddion (free SPM data analysis software).
Parameters
----------
file_name : str
Relative path to a text file
Returns
-------
data : ndarray
MxM matrix of SPM data
width : float
Width of image (in meters)
height : float
Height of image (in meters)
pixel_height : float
Height of one pixel (in meters)
height_unit : float
Measurement unit coefficient (in unit/meter)
'''
comments = [] # List of comments in text file
f = open(file_name)
for line in f:
if line.startswith('#'):
comments.append(line)
else:
break
f.close()
rex = r"(\d+[.]\d+)\s(\S+)" # regular expression for image size searching
width_match = re.search(rex, comments[1])
height_match = re.search(rex, comments[2])
if (width_match.group(2) == 'µm') and (height_match.group(2) == 'µm'):
width_unit = 1e-6
height_unit = 1e-6
else:
raise ValueError("Attention! The measurement units aren't micrometers!") # My data was only in micrometers :)
width = float(width_match.group(1)) * width_unit
height = float(height_match.group(1)) * height_unit
data = np.genfromtxt(file_name) # NumPy function for data importing
M = np.shape(data)[0] # ---!!--- Needs to add rectangular area ---!!---
pixel_height = height/M
return data, width, height, pixel_height, height_unit
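# --- Illustrative sketch (not part of the original module) ---
# Demonstrates what the size-matching regular expression used in ReadData()
# extracts from a typical Gwyddion header comment; the sample line below is
# an assumed example, not taken from a real export.
if __name__ == '__main__':
    rex = r"(\d+[.]\d+)\s(\S+)"
    sample = "# Width: 5.000 µm"
    match = re.search(rex, sample)
    print(match.group(1), match.group(2))  # '5.000' 'µm' -> width = 5.0 * 1e-6 metres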
|
MQFN/MQFN
|
bbmq/server/models.py
|
Python
|
apache-2.0
| 3,993
| 0.004758
|
# -------------------------------- Database models----------------------------------------------------------------------
import sys, os
import sqlalchemy
from sqlalchemy import create_engine
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
import secrets
import settings
MYSQL_USERNAME = secrets.MYSQL_USERNAME
MYSQL_PASSWORD = secrets.MYSQL_PASSWORD
MYSQL_HOSTNAME = secrets.MYSQL_HOSTNAME
MYSQL_DATABASE_NAME = secrets.MYSQL_DATABASE_NAME
MYSQL_HOST_PORT = secrets.MYSQL_HOST_PORT
MAX_MESSAGE_SIZE = settings.MAX_MESSAGE_SIZE
database_url = 'mysql://{}:{}@{}:{}/{}'.format(MYSQL_USERNAME, MYSQL_PASSWORD, MYSQL_HOSTNAME, MYSQL_HOST_PORT,
                                               MYSQL_DATABASE_NAME)
engine = create_engine(database_url)
from sqlalchemy import Column, Integer, String, Boolean, DateTime, Text
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import ForeignKey
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class ModelManager(object):
"""
Model manager
"""
@classmethod
def create_session(cls, engine):
"""
create a session based
:param engine: engine object
:return: returns the created session object
"""
Session = sessionmaker(bind=engine)
session = Session()
return session
@classmethod
def add_to_session(cls, session, obj):
"""
add the object to the session
:param obj:
:param session: session object
:return:
"""
session.add(obj)
@classmethod
def commit_session(cls, session):
"""
commit to session
:param session:
:return:
"""
session.commit()
@classmethod
def delete_from_session(cls, session, obj):
"""
delete the object from the session
:param session:
:return:
"""
session.delete(obj)
@classmethod
def rollback_session(cls, session):
"""
rollback the current session
:param session:
:return:
"""
session.rollback()
@classmethod
def close_session(cls, session):
"""
close the current session
:param session:
:return:
"""
session.close()
class Queue(Base):
"""
Queues model class
"""
__tablename__ = "Queue"
id = Column(Integer, primary_key=True)
name = Column(String(20), unique=True)
created_timestamp = Column(DateTime)
message = relationship("Message", back_populates="queue")
def __repr__(self):
"""
representation of the Queue class
:return:
"""
return "<Queue (name: {}, created_timestamp: {})>".format(self.name, self.created_timestamp)
class Message(Base):
"""
Message model class
"""
__tablename__ = "Message"
id = Column(Integer, primary_key=True)
queue_id = Column(Integer, ForeignKey('Queue.id'))
is_fetched = Column(Boolean, default=False)
content = Column(Text)
publish_timestamp = Column(DateTime)
consumed_timestamp = Column(DateTime)
queue = relationship("Queue", back_populates="message")
# The consumed_timestamp should ideally have a null value for default but that is not feasible here so
# for checking we will first check whether the is_fetched value is true, if so we consider the consumed_timestamp
# as the date and time when the message was dequeued.
def __repr__(self):
"""
representation of the Message class
:return:
"""
return "<Message (queue_id: {}, is_fetched: {}, content: {}...{}, publish_timestamp: {}, " \
"consumed_timestamp: {})>".format(self.queue_id, self.is_fetched, self.content[:10],self.content[10:],
self.publish_timestamp, self.consumed_timestamp)
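# --- Hypothetical usage sketch (not part of the original module) ---
# Exercises the models and ModelManager helpers against an in-memory SQLite
# engine instead of the configured MySQL server, so it can run without
# credentials; the queue name and timestamp are arbitrary example values.
if __name__ == '__main__':
    from datetime import datetime
    demo_engine = create_engine('sqlite:///:memory:')
    Base.metadata.create_all(demo_engine)           # create the Queue and Message tables
    session = ModelManager.create_session(demo_engine)
    q = Queue(name='demo_queue', created_timestamp=datetime.now())
    ModelManager.add_to_session(session, q)
    ModelManager.commit_session(session)
    print(session.query(Queue).all())               # [<Queue (name: demo_queue, ...)>]
    ModelManager.close_session(session)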
|
Orav/kbengine
|
kbe/src/lib/python/Lib/distutils/versionpredicate.py
|
Python
|
lgpl-3.0
| 5,298
| 0.002643
|
"""Module for parsing and testing package version predicate strings.
"""
import re
import distutils.version
import operator
re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)",
re.ASCII)
# (package) (rest)
re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses
re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$")
# (comp) (version)
def splitUp(pred):
"""Parse a single version comparison.
Return (comparison string, StrictVersion)
"""
res = re_splitComparison.match(pred)
if not res:
raise ValueError("bad package restriction syntax: %r" % pred)
    comp, verStr = res.groups()
return (comp, distutils.version.StrictVersion(verStr))
compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq,
">": operator.gt, ">=": operator.ge, "!=": operator.ne}
class VersionPredicate:
"""Parse and test package version predicates.
>>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)')
The `name` attribute provides the full dotted name that is given::
>>> v.name
'pyepat.abc'
The str() of a `VersionPredicate` provides a normalized
human-readable version of the expression::
>>> print(v)
pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3)
    The `satisfied_by()` method can be used to determine whether a given
version number is included in the set described by the version
restrictions::
>>> v.satisfied_by('1.1')
True
>>> v.satisfied_by('1.4')
True
>>> v.satisfied_by('1.0')
False
>>> v.satisfied_by('4444.4')
False
>>> v.satisfied_by('1555.1b3')
False
`VersionPredicate` is flexible in accepting extra whitespace::
>>> v = VersionPredicate(' pat( == 0.1 ) ')
>>> v.name
'pat'
>>> v.satisfied_by('0.1')
True
>>> v.satisfied_by('0.2')
False
If any version numbers passed in do not conform to the
restrictions of `StrictVersion`, a `ValueError` is raised::
>>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)')
Traceback (most recent call last):
...
ValueError: invalid version number '1.2zb3'
    If the module or package name given does not conform to what's
allowed as a legal module or package name, `ValueError` is
raised::
>>> v = VersionPredicate('foo-bar')
Traceback (most recent call last):
...
ValueError: expected parenthesized list: '-bar'
>>> v = VersionPredicate('foo bar (12.21)')
Traceback (most recent call last):
...
ValueError: expected parenthesized list: 'bar (12.21)'
"""
def __init__(self, versionPredicateStr):
"""Parse a version predicate string.
"""
# Fields:
# name: package name
# pred: list of (comparison string, StrictVersion)
versionPredicateStr = versionPredicateStr.strip()
if not versionPredicateStr:
raise ValueError("empty package restriction")
match = re_validPackage.match(versionPredicateStr)
if not match:
raise ValueError("bad package name in %r" % versionPredicateStr)
self.name, paren = match.groups()
paren = paren.strip()
if paren:
match = re_paren.match(paren)
if not match:
raise ValueError("expected parenthesized list: %r" % paren)
str = match.groups()[0]
self.pred = [splitUp(aPred) for aPred in str.split(",")]
if not self.pred:
raise ValueError("empty parenthesized list in %r"
% versionPredicateStr)
else:
self.pred = []
def __str__(self):
if self.pred:
seq = [cond + " " + str(ver) for cond, ver in self.pred]
return self.name + " (" + ", ".join(seq) + ")"
else:
return self.name
def satisfied_by(self, version):
"""True if version is compatible with all the predicates in self.
The parameter version must be acceptable to the StrictVersion
constructor. It may be either a string or StrictVersion.
"""
for cond, ver in self.pred:
if not compmap[cond](version, ver):
return False
return True
_provision_rx = None
def split_provision(value):
"""Return the name and optional version number of a provision.
The version number, if given, will be returned as a `StrictVersion`
instance, otherwise it will be `None`.
>>> split_provision('mypkg')
('mypkg', None)
>>> split_provision(' mypkg( 1.2 ) ')
('mypkg', StrictVersion ('1.2'))
"""
global _provision_rx
if _provision_rx is None:
_provision_rx = re.compile(
"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$",
re.ASCII)
value = value.strip()
m = _provision_rx.match(value)
if not m:
raise ValueError("illegal provides specification: %r" % value)
ver = m.group(2) or None
if ver:
ver = distutils.version.StrictVersion(ver)
return m.group(1), ver
|
datagovuk/ckanext-archiver
|
ckanext/archiver/commands.py
|
Python
|
mit
| 19,891
| 0.001408
|
import logging
import os
import sys
import time
import re
import shutil
import itertools
import ckan.plugins as p
from pylons import config
from ckan.lib.cli import CkanCommand
from ckan.lib.helpers import OrderedDict
REQUESTS_HEADER = {'content-type': 'application/json'}
class Archiver(CkanCommand):
'''
Download and save copies of all package resources.
The result of each download attempt is saved to the CKAN task_status table,
so the information can be used later for QA analysis.
Usage:
paster archiver init
- Creates the database table archiver needs to run
paster archiver update [{package-name/id}|{group-name/id}]
- Archive all resources or just those belonging to a specific
package or group, if specified
paster archiver clean-status
- Cleans the TaskStatus records that contain the status of each
archived resource, whether it was successful or not, with errors.
It does not change the cache_url etc. in the Resource
paster archiver clean-cached-resources
- Removes all cache_urls and other references to resource files on
disk.
paster archiver view [{dataset name/id}]
            - Views archival info, in general and, if you specify one, about
a particular dataset\'s resources.
paster archiver report [outputfile]
- Generates a report on orphans, either resources where the path
does not exist, or files on disk that don't have a corresponding
              resource. The outputfile parameter is the name of the CSV output
from running the report
paster archiver delete-orphans [outputfile]
- Deletes orphans that are files on disk with no corresponding
resource. This uses the report command and will write out a
report to [outputfile]
paster archiver migrate-archive-dirs
- Migrate the layout of the archived resource directories.
Previous versions of ckanext-archiver stored resources on disk
at: {resource-id}/filename.csv and this version puts them at:
{2-chars-of-resource-id}/{resource-id}/filename.csv
Running this moves them to the new locations and updates the
cache_url on each resource to reflect the new location.
paster archiver migrate
- Updates the database schema to include new fields.
'''
# TODO
# paster archiver clean-files
# - Remove all archived resources
summary = __doc__.split('\n')[0]
usage = __doc__
min_args = 0
max_args = 2
def __init__(self, name):
super(Archiver, self).__init__(name)
self.parser.add_option('-q', '--queue',
action='store',
dest='queue',
help='Send to a particular queue')
def command(self):
"""
Parse command line arguments and call appropriate method.
"""
if not self.args or self.args[0] in ['--help', '-h', 'help']:
print self.usage
sys.exit(1)
cmd = self.args[0]
self._load_config()
# Initialise logger after the config is loaded, so it is not disabled.
self.log = logging.getLogger(__name__)
if cmd == 'update':
self.update()
elif cmd == 'clean-status':
self.clean_status()
elif cmd == 'clean-cached-resources':
self.clean_cached_resources()
elif cmd == 'view':
if len(self.args) == 2:
self.view(self.args[1])
else:
self.view()
elif cmd == 'report':
if len(self.args) != 2:
self.log.error('Command requires a parameter, the name of the output')
return
self.report(self.args[1], delete=False)
elif cmd == 'delete-orphans':
if len(self.args) != 2:
self.log.error('Command requires a parameter, the name of the output')
return
self.report(self.args[1], delete=True)
elif cmd == 'init':
import ckan.model as model
from ckanext.archiver.model import init_tables
init_tables(model.meta.engine)
self.log.info('Archiver tables are initialized')
elif cmd == 'migrate-archive-dirs':
self.migrate_archive_dirs()
elif cmd == 'migrate':
self.migrate()
else:
self.log.error('Command %s not recognized' % (cmd,))
def update(self):
from ckan import model
from ckanext.archiver import lib
packages = []
resources = []
if len(self.args) > 1:
for arg in self.args[1:]:
# try arg as a group id/name
group = model.Group.get(arg)
if group:
if group.is_organization:
packages.extend(
model.Session.query(model.Package)
.filter_by(owner_org=group.id))
else:
packages.extend(group.packages(with_private=True))
if not self.options.queue:
self.options.queue = 'bulk'
continue
# try arg as a package id/name
pkg = model.Package.get(arg)
if pkg:
packages.append(pkg)
if not self.options.queue:
self.options.queue = 'priority'
continue
# try arg as a resource id
res = model.Resource.get(arg)
if res:
resources.append(res)
if not self.options.queue:
self.options.queue = 'priority'
continue
else:
self.log.error('Could not recognize as a group, package '
'or resource: %r', arg)
sys.exit(1)
else:
# all packages
pkgs = model.Session.query(model.Package)\
.filter_by(state='active')\
.order_by('name').all()
packages.extend(pkgs)
if not self.options.queue:
self.options.queue = 'bulk'
if packages:
self.log.info('Datasets to archive: %d', len(packages))
if resources:
self.log.info('Resources to archive: %d', len(resources))
if not (packages or resources):
self.log.error('No datasets or resources to process')
sys.exit(1)
self.log.info('Queue: %s', self.options.queue)
for package in packages:
if p.toolkit.check_ckan_version(max_version='2.2.99'):
# earlier CKANs had ResourceGroup
pkg_resources = \
[res for res in
itertools.chain.from_iterable(
(rg.resources_all
for rg in package.resource_groups_all)
)
if res.state == 'active']
else:
pkg_resources = \
[res for res in package.resources_all
if res.state == 'active']
self.log.info('Queuing dataset %s (%s resources)',
package.name, len(pkg_resources))
lib.create_archiver_package_task(package, self.options.queue)
time.sleep(0.1) # to try to avoid Redis getting overloaded
for resource in resources:
if p.toolkit.check_ckan_version(max_version='2.2.99'):
package = resource.resource_group.package
else:
package = resource.package
self.log.info('Queuing resource %s/%s', package.name, resource.id)
lib.create_archiver_resource_task(resource, self.options.queue)
            time.sleep(0.05)
|
|
gappleto97/Senior-Project
|
main.py
|
Python
|
mit
| 2,530
| 0.001581
|
from common import bounty, peers, settings
from common.safeprint import safeprint
from multiprocessing import Queue, Value
from time import sleep, time
import pickle
def sync():
from multiprocessing import Manager
man = Manager()
items = {'config': man.dict(),
'peerList': man.list(),
'bountyList': man.list(),
'bountyLock': bounty.bountyLock,
'keyList': man.list()}
items['config'].update(settings.config)
items['peerList'].extend(peers.peerlist)
items['bountyList'].extend(bounty.bountyList)
safeprint(items)
peers.sync(items)
return items
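# --- Illustrative sketch (not part of this project) ---
# sync() above shares config/peer/bounty state across processes through a
# multiprocessing.Manager; this standalone fragment shows the same proxy-dict
# pattern with dummy data (the port number is arbitrary).
def _bump_port(shared):
    # runs in the child process; the mutation is visible to the parent
    shared['port'] = shared['port'] + 1
def _manager_sharing_demo():
    from multiprocessing import Manager, Process
    man = Manager()
    shared = man.dict({'port': 4444})
    child = Process(target=_bump_port, args=(shared,))
    child.start()
    child.join()
    return dict(shared)  # {'port': 4445}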
def initParallels():
queue = Queue()
live = Value('b', True)
ear = peers.listener(settings.config['port'], settings.config['outbound'], queue, live, settings.config['server'])
ear.daemon = True
ear.items = sync()
ear.start()
mouth = peers.propagator(settings.config['port'] + 1, live)
mouth.daemon = True
mouth.items = ear.items
mouth.start()
feedback = []
stamp = time()
while queue.empty():
if time() - 15 > stamp:
break
global ext_ip, ext_port
ext_ip = ""
ext_port = -1
try:
feedback = queue.get(False)
settings.outbound = feedback[0]
if settings.outbound is not True:
ext_ip, ext_port = feedback[1:3]
except:
safeprint("No feedback received from listener")
return live
def main():
# Begin Init
settings.setup()
try:
import miniupnpc
except:
safeprint("Miniupnpc is not installed. Running in outbound only mode")
settings.config['outbound'] = True
safeprint("settings are:")
safeprint(settings.config)
live = initParallels()
global ext_ip, ext_port
peers.initializePeerConnections(settings.config['port'], ext_ip, ext_port)
# End Init
# Begin main loop
if settings.config.get('seed'):
safeprint("Seed mode activated")
try:
while True and not settings.config.get('test'):
sleep(0.1)
except KeyboardInterrupt:
safeprint("Keyboard Interrupt")
elif settings.config.get('server'):
safeprint("Server mode activated")
else:
safeprint("Client mode activated")
# End main loop
# Begin shutdown
safeprint("Beginning exit process")
live.value = False
settings.saveSettings()
peers.saveToFile()
bounty.saveToFile()
# End shutdown
if __name__ == "__main__":
main()
|
Kniyl/mezzanine
|
mezzanine/core/middleware.py
|
Python
|
bsd-2-clause
| 12,608
| 0.000317
|
from __future__ import unicode_literals
from future.utils import native_str
from django.contrib import admin
from django.contrib.auth import logout
from django.contrib.messages import error
from django.contrib.redirects.models import Redirect
from django.core.exceptions import MiddlewareNotUsed
from django.core.urlresolvers import reverse, resolve
from django.http import (HttpResponse, HttpResponseRedirect,
HttpResponsePermanentRedirect, HttpResponseGone)
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.template import Template, RequestContext
from django.utils.cache import get_max_age
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mezzanine.conf import settings
from mezzanine.core.models import SitePermission
from mezzanine.core.management.commands.createdb import (DEFAULT_USERNAME,
DEFAULT_PASSWORD)
from mezzanine.utils.cache import (cache_key_prefix, nevercache_token,
cache_get, cache_set, cache_installed)
from mezzanine.utils.device import templates_for_device
from mezzanine.utils.sites import current_site_id, templates_for_host
from mezzanine.utils.urls import next_url
_deprecated = {
"AdminLoginInterfaceSelector": "AdminLoginInterfaceSelectorMiddleware",
"DeviceAwareUpdateCacheMiddleware": "UpdateCacheMiddleware",
"DeviceAwareFetchFromCacheMiddleware": "FetchFromCacheMiddleware",
}
class _Deprecated(object):
def __init__(self, *args, **kwargs):
from warnings import warn
msg = "mezzanine.core.middleware.%s is deprecated." % self.old
if self.new:
msg += (" Please change the MIDDLEWARE_CLASSES setting to use "
"mezzanine.core.middleware.%s" % self.new)
warn(msg)
for old, new in _deprecated.items():
globals()[old] = type(native_str(old),
(_Deprecated,),
{"old": old, "new": new})
class AdminLoginInterfaceSelectorMiddleware(object):
"""
Checks for a POST from the admin login view and if authentication is
successful and the "site" interface is selected, redirect to the site.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
login_type = request.POST.get("mezzanine_login_interface")
if login_type and not request.user.is_authenticated():
response = view_func(request, *view_args, **view_kwargs)
if request.user.is_authenticated():
if login_type == "admin":
next = request.get_full_path()
username = request.user.get_username()
if (username == DEFAULT_USERNAME and
request.user.check_password(DEFAULT_PASSWORD)):
error(request, mark_safe(_(
"Your account is using the default password, "
"please <a href='%s'>change it</a> immediately.")
% reverse("user_change_password",
args=(request.user.id,))))
else:
next = next_url(request) or "/"
return HttpResponseRedirect(next)
else:
return response
return None
class SitePermissionMiddleware(object):
"""
Marks the current user with a ``has_site_permission`` which is
used in place of ``user.is_staff`` to achieve per-site staff
access.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
has_site_permission = False
if request.user.is_superuser:
has_site_permission = True
elif request.user.is_staff:
lookup = {"user": request.user, "sites": current_site_id()}
try:
SitePermission.objects.get(**lookup)
except SitePermission.DoesNotExist:
admin_index = reverse("admin:index")
if request.path.startswith(admin_index):
logout(request)
view_func = admin.site.login
extra_context = {"no_site_permission": True}
return view_func(request, extra_context=extra_context)
else:
has_site_permission = True
request.user.has_site_permission = has_site_permission
class TemplateForDeviceMiddleware(object):
"""
Inserts device-specific templates to the template list.
"""
def process_template_response(self, request, response):
if hasattr(response, "template_name"):
if not isinstance(response.template_name, Template):
templates = templates_for_device(request,
response.template_name)
response.template_name = templates
return response
class TemplateForHostMiddleware(object):
"""
Inserts host-specific templates to the template list.
"""
def process_template_response(self, request, response):
if hasattr(response, "template_name"):
if not isinstance(response.template_name, Template):
templates = templates_for_host(request,
response.template_name)
response.template_name = templates
return response
class UpdateCacheMiddleware(object):
"""
Response phase for Mezzanine's cache middleware. Handles caching
the response, and then performing the second phase of rendering,
for content enclosed by the ``nevercache`` tag.
"""
def process_response(self, request, response):
# Caching is only applicable for text-based, non-streaming
# responses. We also skip it for non-200 statuses during
# development, so that stack traces are correctly rendered.
is_text = response.get("content-type", "").startswith("text")
valid_status = response.status_code == 200
streaming = getattr(response, "streaming", False)
if not is_text or streaming or (settings.DEBUG and not valid_status):
return response
# Cache the response if all the required conditions are met.
# Response must be marked for updating by the
# ``FetchFromCacheMiddleware`` having a cache get miss, the
# user must not be authenticated, the HTTP status must be OK
# and the response mustn't include an expiry age, indicating it
# shouldn't be cached.
marked_for_update = getattr(request, "_update_cache", False)
anon = hasattr(request, "user") and not request.user.is_authenticated()
timeout = get_max_age(response)
if timeout is None:
timeout = settings.CACHE_MIDDLEWARE_SECONDS
if anon and valid_status and marked_for_update and timeout:
cache_key = cache_key_prefix(request) + request.get_full_path()
_cache_set = lambda r: cache_set(cache_key, r.content, timeout)
if callable(getattr(response, "render", None)):
response.add_post_render_callback(_cache_set)
else:
_cache_set(response)
# Second phase rendering for non-cached template code and
# content. Split on the delimiter the ``nevercache`` tag
# wrapped its contents in, and render only the content
# enclosed by it, to avoid possible template code injection.
token = nevercache_token()
try:
token = token.encode('utf-8')
except AttributeError:
pass
parts = response.content.split(token)
# Restore csrf token from cookie - check the response
# first as it may be being set for the first time.
csrf_token = None
try:
csrf_token = response.cookies[settings.CSRF_COOKIE_NAME].value
except KeyError:
try:
csrf_token = request.COOKIES[settings.CSRF_COOKIE_NAME]
except KeyError:
pass
if csrf_token:
request.META["CSRF_COOKIE"] = csrf_token
        context = RequestContext(request)
|
ademilly/waterflow
|
waterflow/_version.py
|
Python
|
mit
| 16,749
| 0
|
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.16 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
keywords = {"refnames": git_refnames, "full": git_full}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = ""
cfg.parentdir_prefix = "waterflow-"
cfg.versionfile_source = "waterflow/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
return None
return stdout
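# --- Illustrative sketch (not part of versioneer) ---
# run_command() tries each candidate executable in turn and returns its
# decoded, stripped stdout, or None when the command is missing or fails.
# This assumes a git binary is available on PATH.
def _demo_run_command():
    out = run_command(["git"], ["--version"])
    return out  # e.g. "git version 2.34.1", or None if git cannot be run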
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes
both the project name and a version string.
"""
dirname = os.path.basename(root)
if not dirname.startswith(parentdir_prefix):
if verbose:
print("guessing rootdir is '%s', but '%s' doesn't start with "
"prefix '%s'" % (root, dirname, parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None}
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs-tags))
if verbose:
print("likely tags: %s" % ",".join(s
|
orted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None
}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags"}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
if not os.path.exists(os.path.join(root, ".git")):
if verbose:
print("no .git in %s" % root)
raise NotThisMethod("no .git directory")
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
|
OpenMORA/mora-base
|
scripts/morautils.py
|
Python
|
gpl-3.0
| 2,018
| 0.041625
|
# Utility functions for OpenMORA scripts
#
# Part of OpenMora - https://github.com/OpenMORA
import os, sys, string
import platform
import yaml
def get_mora_paths():
""" Returns a list of paths with MORA modules, from the env var MORA_PATH
"""
if not 'MORA_PATH' in os.environ:
print('**ERROR** Environment variable MORA_PATH not set')
sys.exit(1)
sMoraPaths=os.environ['MORA_PATH'];
if platform.system()=="Windows":
sPathDelim = ";"
else:
sPathDelim = ":"
morabase_dir="";
return sMoraPaths.split(sPathDelim)
def get_morabase_dir():
""" Returns the path of "mora-base" pkg
"""
	mora_paths = get_mora_paths() # Get env vars
for p in mora_paths:
tstPath = os.path.normpath(p + "/mora-base")
if os.path.exists(tstPath):
morabase_dir = tstPath
	if (len(morabase_dir)==0) or (not os.path.exists(morabase_dir)):
print("Couldn't detect mora-base in MORA_PATH!!")
sys.exit(1)
return morabase_dir
import sys, math
def progress(percent):
''' source: http://gunslingerc0de.wordpress.com/2010/08/13/python-command-line-progress-bar/ '''
width = 74
marks = math.floor(width * (percent / 100.0))
spaces = math.floor(width - marks)
loader = '[' + ('=' * int(marks)) + (' ' * int(spaces)) + ']'
if percent >= 100:
percent = 100
sys.stdout.write("%s %d%%\r" % (loader, percent))
	if percent >= 100:
		sys.stdout.write("\n")
sys.stdout.flush()
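# --- Illustrative usage sketch (not part of the original script) ---
# Drives the progress() bar above with a dummy loop; the sleep only makes the
# redraw visible when run interactively.
def _progress_demo():
	from time import sleep
	for pct in range(0, 101, 10):
		progress(pct)
		sleep(0.05)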
def get_pkgs_root():
'''Returns the path to the parent directory of mora-base'''
morabase_dir = get_morabase_dir()
pkgs_root = os.path.dirname(morabase_dir)
return pkgs_root
def read_distro_file():
'''Returns the yaml contents of the distro file'''
morabase_dir = get_morabase_dir()
pkgs_root = os.path.dirname(morabase_dir)
sDistroFile = os.path.normpath( morabase_dir + "/distro/openmora-pkgs.yaml")
assert os.path.exists(sDistroFile)
assert os.path.exists(pkgs_root + "/mora-base")
# Parse distro file:
fil = open(sDistroFile, 'r')
distro = yaml.load(fil)
fil.close()
#print distro
return distro
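# Minimal usage sketch for the helpers above, assuming MORA_PATH is set and a
# mora-base checkout exists; the 'packages' key is a guess at the distro file
# layout and is for illustration only.
from morautils import get_morabase_dir, read_distro_file, progress
base = get_morabase_dir()                  # e.g. /home/user/mora/mora-base
distro = read_distro_file()                # parsed distro/openmora-pkgs.yaml
pkgs = list(distro.get('packages', {}))    # hypothetical key name
for i, name in enumerate(pkgs, start=1):
    progress(100.0 * i / len(pkgs))        # draw the text progress bar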
|
trafi/djinni
|
test-suite/handwritten-src/python/test_proxying.py
|
Python
|
apache-2.0
| 5,511
| 0.009254
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from foo_receiver import FooReceiver
from foo_listener_bf import FooListenerBfHelper
from PyCFFIlib_cffi import ffi, lib
import gc
class FooListenerBfImpl:
def delete_fl_in_fl(self):
print ("Not to be used")
def on_string_change(self, prs):
print ("FooListenerImpl.py: on_string_change prs", prs)
self._prs = prs
return self._prs
def get_string(self):
return self._prs
def set_listener_bf(self,fl):
self._fl = fl
def get_listener_bf(self):
return self._fl
def set_binary(self,b):
print ("setting Binary in FooListenerBfImpl ", b)
self._b = b
def get_binary(self):
return self._b
def send_return(self,fl):
return fl
def create():
# TODO: decide if we want to have this here or make checks in the helper.frompy for all
# methods to exist as attributes on the class more lenient
print ("I don't use it but the +p +c plus the check in fromPy for having all methods needs me to have this")
def fr_set_get(fr, fl, s):
fr.add_listener_bf(fl)
assert fr.set_private_bf_string(s) == s, "test_interface_back_forth failed"
# assert fl._prs == s, "test_interface_back_forth failed"
assert fr.get_listener_bf_string() == s, "test_interface_back_forth failed"
# back and forth via regular calls from python to cpp
def test_interface_back_forth():
print ("start test len ", len(FooListenerBfHelper.c_data_set))
fr = FooReceiver.create()
fl = FooListenerBfImpl() # python implementation of listener
fl_cpp = fr.get_foo_listener_bf() # cpp implementation of listener
# both direct and indirect test for python impl of FooListenerBf
fr_set_get(fr, fl, "Hello world!")
# both direct and indirect test for cpp impl of FooListenerBf
fr_set_get(fr, fl_cpp, "Goodbye world!")
fr_set_get(fr, fl_cpp, "Goodbye world!")
# send python implementation back and forth and see that it can still be used, and that no wrapper was added
fl_1 = fr.send_return(fl)
fl_2 = fr.send_return(fl_1)
fr_set_get(fr, fl_2, "Hello")
assert fl == fl_1 and fl_1 == fl_2, "test_interface_back_forth failed"
    # send cpp implementation back and forth and see that it can still be used, and handles hold same implementation
fl_cpp_1 = fr.send_return(fl_cpp)
fl_cpp_2 = fr.send_return(fl_cpp_1)
fr_set_get(fr, fl_cpp_2, "Goodbye")
    assert lib.equal_handles_cw__foo_listener_bf(fl_cpp._cpp_impl, fl_cpp_1._cpp_impl) and \
lib.equal_handles_cw__foo_listener_bf(fl_cpp_1._cpp_impl, fl_cpp_2._cpp_impl)
fl = fl_1 = fl_2 = fl_cpp = fl_cpp_1 = fl_cpp_2 = None
gc.collect()
fr = None
gc.collect()
assert 0 == len(FooListenerBfHelper.c_data_set)
def fr_fl_set_get(fr, fl_in_fl, b):
fr.set_listener_bf_in_listener_bf(fl_in_fl)
fr.set_binary_in_listener_bf_in_listener_bf(b)
assert b == fr.get_binary_in_listener_bf_in_listener_bf(), "test_interface_back_forth failed"
# back and forth via callbacks cpp to python
def test_interface_callback_back_forth():
fr = FooReceiver.create()
fl = FooListenerBfImpl()
fr.add_listener_bf(fl)
fl_in_fl = FooListenerBfImpl()
b = b'Some Binary 11'
fr_fl_set_get(fr, fl_in_fl, b) # listener 1 in python, listener 2 in python
fl_in_fl_1 = fr.in_listener_bf_send_return(fl_in_fl)
fl_in_fl_2 = fr.in_listener_bf_send_return(fl_in_fl_1)
assert fl_in_fl == fl_in_fl_1 and fl_in_fl_1 == fl_in_fl_2, "test_interface_back_forth failed"
fr_fl_set_get(fr, fl_in_fl_2, b) # listener 1 in python, listener 2 in python after back&forth
fl_in_fl = fr.get_foo_listener_bf()
b = b'Some Other Binary 12'
fr_fl_set_get(fr, fl_in_fl, b) # listener 1 in python, listener 2 in cpp
fl_in_fl_1 = fr.in_listener_bf_send_return(fl_in_fl)
fl_in_fl_2 = fr.in_listener_bf_send_return(fl_in_fl_1)
assert lib.equal_handles_cw__foo_listener_bf(fl_in_fl._cpp_impl, fl_in_fl_1._cpp_impl) and \
lib.equal_handles_cw__foo_listener_bf(fl_in_fl_1._cpp_impl, fl_in_fl_2._cpp_impl)
fr_fl_set_get(fr, fl_in_fl_2, b) # listener 1 in python, listener 2 in cpp after back&forth
fl = fr.get_foo_listener_bf()
fr.add_listener_bf(fl)
fl_in_fl = FooListenerBfImpl()
b = b'Some Binary 21'
fr_fl_set_get(fr, fl_in_fl, b) # listener 1 in cpp, listener 2 in python
fl_in_fl_1 = fr.in_listener_bf_send_return(fl_in_fl)
fl_in_fl_2 = fr.in_listener_bf_send_return(fl_in_fl_1)
assert fl_in_fl == fl_in_fl_1 and fl_in_fl_1 == fl_in_fl_2, "test_interface_back_forth failed"
fr_fl_set_get(fr, fl_in_fl_2, b) # listener 1 in cpp, listener 2 in python after back&forth
fl_in_fl = fr.get_foo_listener_bf()
b = b'Some Other Binary 22'
fr_fl_set_get(fr, fl_in_fl, b) # listener 1 in cpp, listener 2 in cpp
fl_in_fl_1 = fr.in_listener_bf_send_return(fl_in_fl)
fl_in_fl_2 = fr.in_listener_bf_send_return(fl_in_fl_1)
assert lib.equal_handles_cw__foo_listener_bf(fl_in_fl._cpp_impl, fl_in_fl_1._cpp_impl) and \
lib.equal_handles_cw__foo_listener_bf(fl_in_fl_1._cpp_impl, fl_in_fl_2._cpp_impl)
fr_fl_set_get(fr, fl_in_fl_2, b) # listener 1 in cpp, listener 2 in cpp after back&forth
fl = fl_in_fl = fl_in_fl_1 = fl_in_fl_2 = None
gc.collect()
fr = None
gc.collect()
assert 0 == len(FooListenerBfHelper.c_data_set)
|
kernevil/samba
|
python/samba/tests/dckeytab.py
|
Python
|
gpl-3.0
| 2,161
| 0
|
# Tests for source4/libnet/py_net_dckeytab.c
#
# Copyright (C) David Mulder <[email protected]> 2018
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
import string
from samba.net import Net
from samba import enable_net_export_keytab
from samba import tests
from samba.param import LoadParm
enable_net_export_keytab()
def open_bytes(filename):
    if sys.version_info[0] == 3:
return open(filename, errors='ignore')
else:
return open(filename, 'rb')
class DCKeytabTests(tests.TestCase):
def setUp(self):
super(DCKeytabTests, self).setUp()
self.lp = LoadParm()
self.lp.load_default()
self.creds = self.insta_creds(template=self.get_credentials())
self.ktfile = os.path.join(self.lp.get('private dir'), 'test.keytab')
self.principal = self.creds.get_principal()
def tearDown(self):
super(DCKeytabTests, self).tearDown()
os.remove(self.ktfile)
def test_export_keytab(self):
net = Net(None, self.lp)
net.export_keytab(keytab=self.ktfile, principal=self.principal)
assert os.path.exists(self.ktfile), 'keytab was not created'
with open_bytes(self.ktfile) as bytes_kt:
result = ''
for c in bytes_kt.read():
if c in string.printable:
result += c
principal_parts = self.principal.split('@')
assert principal_parts[0] in result and \
principal_parts[1] in result, \
'Principal not found in generated keytab'
|
robdennis/sideboard
|
sideboard/tests/test_server.py
|
Python
|
bsd-3-clause
| 19,922
| 0.001757
|
from __future__ import unicode_literals
import json
import socket
from uuid import uuid4
from time import sleep
from urllib import urlencode
from random import randrange
from unittest import TestCase
from Queue import Queue, Empty
from contextlib import closing
from urlparse import urlparse, parse_qsl
import pytest
import cherrypy
import requests
from rpctools.jsonrpc import ServerProxy
from ws4py.server.cherrypyserver import WebSocketPlugin
import sideboard.websockets
from sideboard.lib import log, config, subscribes, notifies, services, cached_property, WebSocket
from sideboard.tests import service_patcher, config_patcher
from sideboard.tests.test_sa import Session
@pytest.mark.functional
class SideboardServerTest(TestCase):
port = config['cherrypy']['server.socket_port']
jsonrpc_url = 'http://127.0.0.1:{}/jsonrpc'.format(port)
jsonrpc = ServerProxy(jsonrpc_url)
rsess_username = 'unit_tests'
@staticmethod
def assert_port_open(port):
with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(('0.0.0.0', port))
@staticmethod
def assert_can_connect_to_localhost(port):
for i in range(50):
try:
socket.create_connection(('127.0.0.1', port)).close()
except Exception as e:
sleep(0.1)
else:
break
else:
raise e
@classmethod
def start_cherrypy(cls):
class Root(object):
@cherrypy.expose
def index(self):
cherrypy.session['username'] = cls.rsess_username
return cls.rsess_username
cherrypy.tree.apps.pop('/mock_login', None)
cherrypy.tree.mount(Root(), '/mock_login')
cls.assert_port_open(cls.port)
cherrypy.config.update({'engine.autoreload_on': False})
cherrypy.engine.start()
cherrypy.engine.wait(cherrypy.engine.states.STARTED)
cls.assert_can_connect_to_localhost(cls.port)
@classmethod
def stop_cherrypy(cls):
cherrypy.engine.stop()
cherrypy.engine.wait(cherrypy.engine.states.STOPPED)
cherrypy.engine.state = cherrypy.engine.states.EXITING
# ws4py does not support stopping and restarting CherryPy
sideboard.websockets.websocket_plugin.unsubscribe()
sideboard.websockets.websocket_plugin = WebSocketPlugin(cherrypy.engine)
sideboard.websockets.websocket_plugin.subscribe()
@classmethod
def setUpClass(cls):
super(SideboardServerTest, cls).setUpClass()
cls.start_cherrypy()
cls.ws = cls.patch_websocket(services.get_websocket())
cls.ws.connect(max_wait=5)
assert cls.ws.connected
@classmethod
def tearDownClass(cls):
cls.stop_cherrypy()
super(SideboardServerTest, cls).tearDownClass()
@staticmethod
def patch_websocket(ws):
ws.q = Queue()
ws.fallback = ws.q.put
return ws
def wait_for(self, func, *args, **kwargs):
for i in range(50):
cherrypy.engine.publish('main') # since our unit tests don't call cherrypy.engine.block, we must publish this event manually
try:
result = func(*args, **kwargs)
assert result or result is None
except:
sleep(0.1)
else:
break
else:
raise AssertionError('wait timed out')
def wait_for_eq(self, target, func, *args, **kwargs):
try:
self.wait_for(lambda: target == func(*args, **kwargs))
except:
raise AssertionError('{!r} != {!r}'.format(target, func(*args, **kwargs)))
def wait_for_ne(self, target, func, *args, **kwargs):
try:
self.wait_for(lambda: target != func(*args, **kwargs))
except:
raise AssertionError('{!r} == {!r}'.format(target, func(*args, **kwargs)))
@cached_property
def rsess(self):
rsess = requests.Session()
rsess.trust_env = False
self._get(rsess, '/mock_login')
return rsess
def url(self, path, **query_params):
params = dict(parse_qsl(urlparse(path).query))
params.update(query_params)
url = 'http://127.0.0.1:{}{}'.format(self.port, urlparse(path).path)
if params:
url += '?' + urlencode(params)
return url
def _get(self, rsess, path, **params):
return rsess.get(self.url(path, **params))
def get(self, path, **params):
return self._get(self.rsess, path, **params).content
def get_json(self, path, **params):
return self._get(self.rsess, path, **params).json()
def open_ws(self):
return self.patch_websocket(WebSocket(connect_immediately=True, max_wait=5))
def next(self, ws=None, timeout=2):
return (ws or self.ws).q.get(timeout=timeout)
def assert_incoming(self, ws=None, client=None, timeout=1, **params):
data = self.next(ws, timeout)
assert (client or self.client) == data.get('client')
for key, val in params.items():
assert val == data[key]
def assert_no_response(self):
pytest.raises(Empty, self.next)
    def assert_error_with(self, *args, **kwargs):
if args:
self.ws.ws.send(str(args[0]))
else:
self.ws._send(**kwargs)
assert 'error' in self.next()
def call(self, **params):
callback = 'callback{}'.format(randrange(1000000))
        self.ws._send(callback=callback, **params)
result = self.next()
assert callback == result['callback']
return result
def subscribe(self, **params):
params.setdefault('client', self.client)
return self.call(**params)
def unsubscribe(self, client=None):
self.call(action='unsubscribe', client=client or self.client)
class JsonrpcTest(SideboardServerTest):
@pytest.fixture(autouse=True)
def override(self, service_patcher):
service_patcher('testservice', self)
def get_message(self, name):
return 'Hello {}!'.format(name)
def send_json(self, body, content_type='application/json'):
if isinstance(body, dict):
body['id'] = self._testMethodName
resp = requests.post(self.jsonrpc_url, data=json.dumps(body),
headers={'Content-Type': 'application/json'})
assert resp.json
return resp.json()
def test_rpctools(self):
assert 'Hello World!' == self.jsonrpc.testservice.get_message('World')
def test_content_types(self):
for ct in ['text/html', 'text/plain', 'application/javascript', 'text/javascript', 'image/gif']:
response = self.send_json({
'method': 'testservice.get_message',
'params': ['World']
}, content_type=ct)
            assert 'Hello World!' == response['result'], 'Expected success with valid request using Content-Type {}'.format(ct)
class TestWebsocketSubscriptions(SideboardServerTest):
@pytest.fixture(autouse=True)
def override(self, service_patcher, config_patcher):
config_patcher(1, 'ws.call_timeout')
service_patcher('self', self)
def echo(self, s):
self.echoes.append(s)
return s
def slow_echo(self, s):
sleep(2)
return s
@subscribes('names')
def get_names(self):
return self.names
@notifies('names')
def change_name(self, name=None):
self.names[-1] = name or uuid4().hex
@notifies('names')
def change_name_then_error(self):
self.names[:] = reversed(self.names)
self.fail()
def indirectly_change_name(self):
self.change_name(uuid4().hex)
@subscribes('places')
def get_places(self):
return self.places
@notifies('places')
def change_place(self):
self.places[0] = uuid4().hex
@subscribes('names', 'places')
def get_names_and_places(self):
return self.names + self.places
def setUp(self):
        SideboardServerTest.setUp(self)
|
mhvk/astropy
|
astropy/cosmology/__init__.py
|
Python
|
bsd-3-clause
| 830
| 0
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
""" astropy.cosmology contains classes and functions for cosmological
distance measures and other cosmology-related calculations.
See the `Astropy documentation
<https://docs.astropy.org/en/latest/cosmology/index.html>`_ for more
detailed usage examples and references.
"""
from . import core, flrw, funcs, parameter, units, utils
from . import io # needed before 'realizations' # isort: split
from . import realizations
from .core import *
from .flrw import *
from .funcs import *
from .parameter import *
from .realizations import *
from .utils import *
__all__ = (core.__all__ + flrw.__all__        # cosmology classes
           + realizations.__all__             # instances thereof
           + funcs.__all__ + parameter.__all__ + utils.__all__)  # utils
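# Minimal usage sketch of the re-exported cosmology API above; FlatLambdaCDM and
# luminosity_distance are standard astropy.cosmology names, the numbers are
# illustrative only.
from astropy.cosmology import FlatLambdaCDM
cosmo = FlatLambdaCDM(H0=70, Om0=0.3)      # flat LCDM, H0 in km/s/Mpc
d_L = cosmo.luminosity_distance(1.0)       # astropy Quantity in Mpc at z = 1
print(d_L)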
|
amcat/amcat
|
navigator/views/tests/test_article_views.py
|
Python
|
agpl-3.0
| 10,397
| 0.003078
|
from django.core.urlresolvers import reverse
from django.test import Client
from amcat.models import ArticleSet, Sentence, Article, Role
from amcat.tools import amcattest, sbd
import navigator.forms
from navigator.views.article_views import ArticleSplitView, handle_split, get_articles, ArticleDetailsView
class TestSplitArticles(amcattest.AmCATTestCase):
def create_test_sentences(self):
article = amcattest.create_test_article(byline="foo", text="Dit is. Tekst.\n\n"*3 + "Einde.")
sbd.create_sentences(article)
return article, article.sentences.all()
@amcattest.use_elastic
def test_article_split_view(self):
from amcat.models import Role, ProjectRole
article, sentences = self.create_test_sentences()
aset = amcattest.create_test_set(0)
aset.add_articles([article])
user = amcattest.create_test_user(username="fred", password="secret")
ProjectRole.objects.create(user=user, project=aset.project, role=Role.objects.get(label="admin"))
# Only test the very basic; if a simple split works we trust the view
# to use handle_split(), which is tested more extensively below.
url = reverse("navigator:" + ArticleSplitView.get_view_name(), args=[aset.project.id, article.id])
client = Client()
client.login(username="fred", password="secret")
response = client.post(url, {
"add_to_new_set": "test_article_split_view_set",
"remove_from_all_sets": "on",
"add_splitted_to_new_set": "",
"sentence-%s" % sentences[1].id: "on"
})
        new_set = ArticleSet.objects.filter(name="test_article_split_view_set")
self.assertEqual(response.status_code, 200)
self.assertTrue(new_set.exists())
self.assertEqual(article, new_set[0].articles.all()[0])
@amcattest.use_elastic
def test_handle_split(self):
from amcat.tools import amcattest
from functools import partial
        article, sentences = self.create_test_sentences()
project = amcattest.create_test_project()
aset1 = amcattest.create_test_set(4, project=project)
aset2 = amcattest.create_test_set(5, project=project)
aset3 = amcattest.create_test_set(0)
# Creates a codingjob for each articleset, as handle_split should account
# for "codedarticlesets" as well.
cj1 = amcattest.create_test_job(articleset=aset1)
cj2 = amcattest.create_test_job(articleset=aset2)
cj3 = amcattest.create_test_job(articleset=aset3)
for _set in [aset1, aset2]:
for _article in _set.articles.all():
sbd.create_sentences(_article)
a1, a2 = aset1.articles.all()[0], aset2.articles.all()[0]
aset1.add_articles([article])
aset3.add_articles([a1])
form = partial(navigator.forms.SplitArticleForm, project, article, initial={
"remove_from_sets": False
})
# Test form defaults (should do nothing!)
f = form(dict())
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
self.assertEquals(5, aset1.articles.all().count())
self.assertEquals(5, aset2.articles.all().count())
self.assertEquals(1, aset3.articles.all().count())
self.assertTrue(self.article_in(cj1, aset1, article))
self.assertFalse(self.article_in(cj2, aset2, article))
self.assertFalse(self.article_in(cj3, aset3, article))
# Passing invalid form should raise exception
f = form(dict(add_to_sets=[-1]))
self.assertFalse(f.is_valid())
self.assertRaises(ValueError, handle_split, f, project, article, Sentence.objects.none())
# Test add_to_new_set
f = form(dict(add_to_new_set="New Set 1"))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
aset = project.all_articlesets().filter(name="New Set 1")
self.assertTrue(aset.exists())
self.assertEquals(project, aset[0].project)
# Test add_to_sets
f = form(dict(add_to_sets=[aset3.id]))
self.assertFalse(f.is_valid())
f = form(dict(add_to_sets=[aset2.id]))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
self.assertTrue(self.article_in(cj2, aset2, article))
# Test add_splitted_to_new_set
f = form(dict(add_splitted_to_new_set="New Set 2"))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
aset = project.all_articlesets().filter(name="New Set 2")
self.assertTrue(aset.exists())
self.assertEquals(project, aset[0].project)
self.assertEquals(1, aset[0].articles.count())
self.assertFalse(self.article_in(None, aset[0], article))
# Test add_splitted_to_sets
f = form(dict(add_splitted_to_sets=[aset2.id]))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
self.assertTrue(article in aset2.articles.all())
# Test remove_from_sets
f = form(dict(remove_from_sets=[aset1.id]))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
self.assertTrue(article not in aset1.articles.all())
# Test remove_from_all_sets
aset1.add_articles([article])
aset2.add_articles([article])
aset3.add_articles([article])
f = form(dict(remove_from_all_sets=True))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
self.assertTrue(aset1 in project.all_articlesets())
self.assertTrue(aset2 in project.all_articlesets())
self.assertFalse(aset3 in project.all_articlesets())
self.assertFalse(self.article_in(cj1, aset1, article))
self.assertFalse(self.article_in(cj2, aset2, article))
self.assertTrue(self.article_in(cj3, aset3, article))
def article_in(self, codingjob, articleset, article):
from amcat.tools.amcates import ES
ES().refresh()
if codingjob is not None:
if not codingjob.coded_articles.filter(article=article):
return False
return article.id in (articleset.get_article_ids() | articleset.get_article_ids(use_elastic=True))
class TestArticleViews(amcattest.AmCATTestCase):
@amcattest.use_elastic
def create_test_sentences(self):
article = amcattest.create_test_article(text="foo\n\nDit is. Tekst.\n\n"*3 + "Einde.")
sbd.create_sentences(article)
return article, article.sentences.all()
@amcattest.use_elastic
def test_get_articles(self):
from amcat.models import Sentence
_get_articles = lambda a, s : list(get_articles(a, s))
# Should raise exception if sentences not in article
article, sentences = self.create_test_sentences()
s1 = Sentence.objects.filter(id=amcattest.create_test_sentence().id)
self.assertRaises(ValueError, _get_articles, article, s1)
# Should raise an exception if we try to split on title
self.assertRaises(ValueError, _get_articles, article, sentences.filter(parnr=1))
# Should return a "copy", with byline in "text" property
arts = _get_articles(article, Sentence.objects.none())
Article.create_articles(arts)
self.assertEquals(len(arts), 1)
sbd.create_sentences(arts[0])
self.assertEquals(
[s.sentence for s in sentences[1:]],
[s.sentence for s in arts[0].sentences.all()[1:]]
)
self.assertTrue("foo" in arts[0].text)
# Should be able to split on byline
self.assertEquals(2, len(_get_articles(article, sentences[1:2])))
a, b = _get_articles(article, sentences[4:5])
# Check if text on splitted articles contains expected
self.assertTrue("Einde" not in a.text)
self.assertTrue("Einde" in b.text)
@amcattest.use_elastic
def test_permissions(self)
|
s0hvaperuna/Not-a-bot
|
bot/commands.py
|
Python
|
mit
| 2,568
| 0.000389
|
import logging
from discord.ext import commands
from bot.cooldowns import CooldownMapping, Cooldown
from bot.globals import Auth
from utils.utilities import is_owner, check_blacklist, no_dm
terminal = logging.getLogger('terminal')
def command(*args, **attrs):
if 'cls' not in attrs:
attrs['cls'] = Command
return commands.command(*args, **attrs)
def group(name=None, **attrs):
"""Uses custom Group class"""
if 'cls' not in attrs:
        attrs['cls'] = Group
return commands.command(name=name, **attrs)
def cooldown(rate, per, type=commands.BucketType.default):
"""See `commands.cooldown` docs"""
def decorator(func):
if isinstance(func, Command):
func._buckets = CooldownMapping(Cooldown(rate, per, type))
else:
func.__commands_cooldown__ = Cooldown(rate, per, type)
return func
return decorator
class Command(commands.Command):
def __init__(self, func, **kwargs):
# Init called twice because commands are copied
super(Command, self).__init__(func, **kwargs)
self._buckets = CooldownMapping(self._buckets._cooldown)
self.owner_only = kwargs.pop('owner_only', False)
self.auth = kwargs.pop('auth', Auth.NONE)
self.checks.insert(0, check_blacklist)
if self.owner_only:
terminal.info(f'registered owner_only command {self.name}')
self.checks.insert(0, is_owner)
if 'no_pm' in kwargs or 'no_dm' in kwargs:
self.checks.insert(0, no_dm)
def undo_use(self, ctx):
"""Undoes one use of command"""
if self._buckets.valid:
bucket = self._buckets.get_bucket(ctx.message)
bucket.undo_one()
class Group(Command, commands.Group):
def __init__(self, *args, **attrs): # skipcq: PYL-W0231
Command.__init__(self, *args, **attrs)
self.invoke_without_command = attrs.pop('invoke_without_command', False)
def group(self, *args, **kwargs):
def decorator(func):
kwargs.setdefault('parent', self)
result = group(*args, **kwargs)(func)
self.add_command(result)
return result
return decorator
def command(self, *args, **kwargs):
def decorator(func):
if 'owner_only' not in kwargs:
kwargs['owner_only'] = self.owner_only
kwargs.setdefault('parent', self)
result = command(*args, **kwargs)(func)
self.add_command(result)
return result
return decorator
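# Minimal usage sketch for the custom decorators above inside a discord.py cog;
# the cog, command name, and cooldown values are made-up examples.
from discord.ext import commands as dpy_commands
from bot.commands import command, cooldown
class Example(dpy_commands.Cog):
    @command(name='ping')
    @cooldown(rate=1, per=5.0, type=dpy_commands.BucketType.user)
    async def ping(self, ctx):
        # one use per user every 5 seconds, enforced by the custom CooldownMapping
        await ctx.send('pong')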
|
BleuLlama/LlamaPyArdy
|
Python/devices/lib_RC2014_BusSupervisor.py
|
Python
|
mit
| 4,512
| 0.068927
|
#!/usr/bin/python
################################################################################
# Bus Supervisor Interface
#
# - interfaces to the MCP23017 and PCF8574 IO expander chips
#
# The logic for this was ported from Dr Scott M. Baker's project:
# http://www.smbaker.com/z80-retrocomputing-4-bus-supervisor
#
################################################################################
from libArdySer import ArdySer
from lib_GenericChip import GenericChip
from GS_Timing import delay
from lib_MCP23017_IOExpander16 import MCP23017_IOExpander16
from lib_PCF8574_IOExpander8 import PCF8574_IOExpander8
# Direction masks used below (MCP23017 IODIR convention: 1 = input, 0 = output).
# These constant names are assumed locally; the expander libraries may provide their own.
IOALLINPUT = 0xFF
IOALLOUTPUT = 0x00
class RC2014_BusSupervisor:
##################################
# class variables
ardy = None
cpuIoData = None
# A0-A7 - Data byte
# B0-B7 - Bus control
M1 = 0x01 # B0
CLK = 0x02 # B1
INT = 0x04 # B2
MREQ = 0x08 # B3
WR = 0x10 # B4
RD = 0x20 # B5
IORQ = 0x40 # B6
BUSACK = 0x80 # B7
cpuControl = None
# 0x0F - control, clock, etc
BUSREQ = 0x01
RESET = 0x02
CLKEN = 0x04
CLKOUT = 0x08
# 0xF0 - unused
cpuAddress = None
# A0-A7, B0-B7 - Address lines (reversed)
# our mirror values here
data = 0
dataControl = 0
dataAddress = 0
##############################
    def bitReverse( self, data ):
retval = 0
if( (data & 0x80) == 0x80 ): retval = retval | 0x01
if( (data & 0x40) == 0x40 ): retval = retval | 0x02
if( (data & 0x20) == 0x20 ): retval = retval | 0x04
if( (data & 0x10) == 0x10 ): retval = retval | 0x08
if( (data & 0x08) == 0x08 ): retval = retval | 0x10
if( (data & 0x04) == 0x04 ): retval = retval | 0x20
if( (data & 0x02) == 0x02 ): retval = retval | 0x40
if( (data & 0x01) == 0x01 ): retval = retval | 0x80
return retval
##################################
# Initialization
def __init__( self, _ardy, _i2cAddr8 = None ):
# set the arduino object
baseAddr = _i2cAddr8
if _i2cAddr8 is None:
baseAddr = 0x21
self.data = 0
self.dataControl = 0
self.dataAddress = 0
self.ardy = _ardy
self.cpuIoData = MCP23017_IOExpander16( _ardy, baseAddr + 0 )
self.cpuControl = PCF8574_IOExpander8( _ardy, baseAddr + 1 )
self.cpuAddress = MCP23017_IOExpander16( _ardy, baseAddr + 2 )
self.ClearAllExpanders()
    def ClearAllExpanders( self ):
        # clear data register (single-argument Direction calls, matching how the
        # address expander is configured below; the two-argument form looked garbled)
        self.cpuIoData.DirectionA( IOALLINPUT )
        self.cpuIoData.SetA( 0x00 )
        self.cpuIoData.DirectionB( IOALLINPUT )
        self.cpuIoData.SetB( 0x00 )
        # clear control register
        self.cpuControl.Set( 0x00 )
        # clear address register
        self.cpuAddress.DirectionA( IOALLINPUT )
        self.cpuAddress.SetA( 0x00 )
        self.cpuAddress.DirectionB( IOALLINPUT )
        self.cpuAddress.SetB( 0x00 )
##################################
# Low-level commands
##################################
# Package commands
def SupervisorDelay( self ):
delay( 1 )
def Reset( self ):
# RESET = 0
value = 0x00
self.cpuControl.Set( value )
self.SupervisorDelay()
# RESET = 1
value = self.RESET
self.cpuControl.Set( value )
return
    def TakeBus( self ):
        # NOTE: this and the routines below were reconstructed from the fragmentary
        # notes in the original port; treat them as a sketch of the intended bus
        # protocol rather than verified, hardware-tested code.
        # request the bus from the Z80
        value = self.BUSREQ
        self.cpuControl.Set( value )
        # wait for the CPU to acknowledge via BUSACK
        while True:
            value = self.cpuIoData.GetB()
            if (value & self.BUSACK) == 0:
                break
        # drive the address lines (the original note listed these as inputs, but the
        # supervisor must drive them as outputs while it owns the bus)
        self.cpuAddress.DirectionA( IOALLOUTPUT )
        self.cpuAddress.DirectionB( IOALLOUTPUT )
        # keep M1, CLK, INT, BUSACK as inputs; drive MREQ, WR, RD, IORQ and idle them high
        self.cpuIoData.DirectionB( self.M1 | self.CLK | self.INT | self.BUSACK )
        self.cpuIoData.SetB( self.MREQ | self.WR | self.RD | self.IORQ )
        return
    def ReleaseBus( self, reset=False ):
        # float everything the supervisor was driving (all inputs = high-z);
        # the 'reset' flag is an assumption -- the original notes used it undefined
        self.cpuAddress.DirectionA( IOALLINPUT )
        self.cpuAddress.DirectionB( IOALLINPUT )
        self.cpuIoData.DirectionA( IOALLINPUT )
        self.cpuIoData.DirectionB( IOALLINPUT )
        if reset:
            self.SupervisorDelay()
        # deassert BUSREQ (keeping RESET deasserted) and wait for the CPU to reclaim the bus
        self.dataControl = self.BUSREQ | self.RESET
        self.cpuControl.Set( self.dataControl )
        while True:
            value = self.cpuIoData.GetB()
            if (value & self.BUSACK) != 0:
                break
        return
    def SlowClock( self, rate=1.0 ):
        # Bit-bang a slow clock of roughly 'rate' Hz; runs until interrupted.
        # 'rate' as a parameter is an assumption -- the original notes left it undefined.
        period_ms = 1000.0 / float( rate ) / 2.0
        self.dataControl &= ~self.CLKEN        # take the clock away from the on-board oscillator
        while True:
            self.dataControl &= ~self.CLKOUT
            self.cpuControl.Set( self.dataControl )
            delay( period_ms )                 # delay() is assumed to take milliseconds
            self.dataControl |= self.CLKOUT
            self.cpuControl.Set( self.dataControl )
            delay( period_ms )
    def NormalClock( self ):
        # hand the clock back to the on-board oscillator
        self.dataControl |= self.CLKEN
        self.cpuControl.Set( self.dataControl )
        return
    def SetAddress( self, addr ):
        # the address lines are wired bit-reversed, so swap each byte before driving it
        # (which expander port carries the high byte is an assumption)
        self.dataAddress = addr
        self.cpuAddress.SetA( self.bitReverse( (addr >> 8) & 0xff ) )
        self.cpuAddress.SetB( self.bitReverse( addr & 0xff ) )
        return
    ##############################
    def MemRead( self, addr ):
        # memory read cycle: set address, drop MREQ+RD, sample the data bus, release strobes
        self.SetAddress( addr )
        idle = self.MREQ | self.WR | self.RD | self.IORQ
        self.cpuIoData.SetB( idle & ~(self.MREQ | self.RD) )
        result = self.cpuIoData.GetA()
        self.cpuIoData.SetB( idle )
        return result
    def MemWrite( self, addr, data ):
        # memory write cycle: drive the data bus, pulse MREQ+WR, then float it again
        self.SetAddress( addr )
        self.cpuIoData.DirectionA( IOALLOUTPUT )
        self.cpuIoData.SetA( data )
        idle = self.MREQ | self.WR | self.RD | self.IORQ
        self.cpuIoData.SetB( idle & ~(self.MREQ | self.WR) )
        self.cpuIoData.SetB( idle )
        self.cpuIoData.DirectionA( IOALLINPUT )   # data bus back to high-z
        return
    def IORead( self, addr ):
        # I/O read cycle: like MemRead() but strobing IORQ instead of MREQ
        self.SetAddress( addr )
        idle = self.MREQ | self.WR | self.RD | self.IORQ
        self.cpuIoData.SetB( idle & ~(self.IORQ | self.RD) )
        result = self.cpuIoData.GetA()
        self.cpuIoData.SetB( idle )
        return result
    def IOWrite( self, addr, data ):
        # I/O write cycle: drive the data bus, pulse IORQ+WR, then float it again
        self.SetAddress( addr )
        self.cpuIoData.DirectionA( IOALLOUTPUT )
        self.cpuIoData.SetA( data )
        idle = self.MREQ | self.WR | self.RD | self.IORQ
        self.cpuIoData.SetB( idle & ~(self.IORQ | self.WR) )
        self.cpuIoData.SetB( idle )
        self.cpuIoData.DirectionA( IOALLINPUT )   # data bus back to high-z
        return
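# Minimal usage sketch for the supervisor above; the ArdySer constructor
# arguments and the exact I2C base address are assumptions.
from libArdySer import ArdySer
from lib_RC2014_BusSupervisor import RC2014_BusSupervisor
ardy = ArdySer()                        # assumed default constructor
sup = RC2014_BusSupervisor(ardy)        # expanders assumed at 0x21 / 0x22 / 0x23
sup.Reset()                             # pulse the Z80 reset line
sup.TakeBus()                           # request the bus before peeking memory
value = sup.MemRead(0x0000)             # read one byte from address 0x0000
sup.ReleaseBus()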
|
h2oai/h2o-3
|
h2o-py/tests/testdir_javapredict/pyunit_PUBDEV_8330_GLM_mojo_gamma_offset.py
|
Python
|
apache-2.0
| 1,402
| 0.021398
|
import sys, os
sys.path.insert(1, "../../../")
import h2o
from tests import pyunit_utils
from random import randint
import tempfile
def glm_gamma_offset_mojo():
train = h2o.import_file(path=pyunit_utils.locate("smalldata/prostate/prostate_complete.csv.zip"))
y = "DPROS"
x = ["AGE","RACE","CAPSULE","DCAPS","PSA","VOL"]
x_offset = ["AGE","RACE","CAPSULE","DCAPS","PSA","VOL", "C1"]
params = {'family':"gamma", 'offset_column':"C1"}
offset = pyunit_utils.random_dataset_real_only(train.nrow, 1, realR=3, misFrac=0, randSeed=12345)
train = train.cbind(offset)
tmpdir = tempfile.mkdtemp()
    glm_gamma_model = pyunit_utils.build_save_model_generic(params, x, train, y, "glm", tmpdir) # build and save mojo model
MOJONAME = pyunit_utils.getMojoName(glm_gamma_model._id)
h2o.download_csv(train[x_offset], os.path.join(tmpdir, 'in.csv')) # save test file, h2o predict/mojo use same file
pred_h2o, pred_mojo = pyunit_utils.mojo_predict(glm_gamma_model, tmpdir, MOJONAME) # load model and perform predict
h2o.download_csv(pred_h2o, os.path.join(tmpdir, "h2oPred.csv"))
print("Comparing mojo predict and h2o predict...")
pyunit_utils.compare_frames_local(pred_h2o, pred_mojo, 0.1, tol=1e-10) # compare mojo and model predict
if __name__ == "__main__":
pyunit_utils.standalone_test(glm_gamma_offset_mojo)
else:
glm_gamma_offset_mojo()
|
powellc/timberwyck
|
manage.py
|
Python
|
bsd-3-clause
| 313
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "timberwyc
|
k.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Dev")
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
|
fangdingjun/example1
|
python/ssh_c.py
|
Python
|
gpl-3.0
| 1,479
| 0.005409
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import paramiko
import threading
import sys
import re
import time
import os
def start_shell(h, u, p):
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(h, 22, u, p)
s = ssh.invoke_shell()
w = threading.Thread(target=write_chanel, args=(s, ))
# r = threading.Thread(target=read_chanel, args=(s, ))
w.setDaemon(True)
w.start()
# w.start()
read_chanel(s)
# w.join()
try:
s.close()
ssh.close()
except:
pass
def read_chanel(s):
while True:
d = s.recv(4096)
if not d:
break
# for i in ['\x1b.*?m','\x0f','\x1b\[6;1H','\x1b\[K','\x1b25;1H']:
        #     d=re.sub(str(i),"",d)
sys.stdout.write(d)
sys.stdout.flush()
# time.sleep(0.1)
try:
s.close()
except:
pass
# os.kill(os.getpid(), 15)
# sys.exit(0)
def write_chanel(s):
try:
while True:
c = sys.stdin.read(1)
if not c:
s.close()
break
a = s.send(c)
if a == 0:
s.close()
break
except:
pass
if __name__ == '__main__':
import sys
if len(sys.argv) < 4:
print 'usage:%s host user passwd' % sys.argv[0]
sys.exit(1)
(host, user, passwd) = sys.argv[1:4]
start_shell(host, user, passwd)
|
orting/emphysema-estimation
|
Experiments/07-MICCAI/Scripts/PredictTrain.py
|
Python
|
gpl-3.0
| 1,630
| 0.019018
|
#!/usr/bin/python3
import sys, subprocess
def main(argv=None):
if argv is None:
argv = sys.argv
experiments = {
1 : ('Continuous', 'COPD'),
        2 : ('Binary', 'COPD'),
3 : ('Continuous', 'EmphysemaExtentLung'),
4 : ('Binary', 'EmphysemaExtentLung'),
}
try:
experiment = experiments[ int(argv[1]) ]
except Exception as e:
print( 'usage: prog <experiment number>' )
return 1
prog = '../../Build/Classification/PredictClusterModel'
labels = {
'COPD' : '../../Data/Training/Labels/COPD.csv',
'EmphysemaExtentLung' : '../../Data/Training/Labels/EmphysemaExtentLung.csv',
}
instances = '../../Data/Training/Instances.csv'
bagMembership = '../../Data/Training/BagMembership.csv'
modelPattern = "Out/Training/MaxIterations1000/%s_%s_k%s_1.model"
    numberOfClusters = ['5', '10', '20', ]#'15', '20', ]#'25', '30']
params = {
'histograms' : '24',
}
for k in numberOfClusters:
        out = 'Out/Training/MaxIterations1000/%s_%s_k%s_' % (experiment + (k,))
cmd = [
prog,
"--instances", instances,
'--bag-membership', bagMembership,
'--model', modelPattern % (experiment + (k,)),
"--histograms", params['histograms'],
"--output", out,
]
print( ' '.join( cmd ) )
if subprocess.call( cmd ) != 0:
print( 'Error running %s : %s : k = %s' % ( experiment + (k,)) )
return 1
return 0
if __name__ == '__main__':
sys.exit( main() )
|