| repo_name (string, 5–100 chars) | path (string, 4–231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, 0–8.16k chars) | middle (string, 3–512 chars) | suffix (string, 0–8.17k chars) |
|---|---|---|---|---|---|---|---|---|
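Each row pairs repository metadata with a fill-in-the-middle (FIM) split of one Python file: concatenating prefix + middle + suffix recovers the source text, up to the per-column truncation limits above. A minimal sketch of consuming a row, assuming the Hugging Face `datasets` library and a hypothetical dataset name (the sentinel tokens in the prompt are illustrative, not prescribed by this dataset):

from datasets import load_dataset

ds = load_dataset('example-org/python-fim-corpus', split='train')  # hypothetical name
row = ds[0]
# Reassemble the original file text from the three spans.
source = row['prefix'] + row['middle'] + row['suffix']
# Or build a FIM-style training prompt from the same row.
prompt = '<PRE>' + row['prefix'] + '<SUF>' + row['suffix'] + '<MID>'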
| internap/arsenal | releasenotes/source/conf.py | Python | apache-2.0 | 8,940 | 0 |
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Glance Release Notes documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 3 17:40:50 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'oslosphinx',
'reno.sphinxext',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'cellar Release Notes'
copyright = u'2016, OpenStack Foundation'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# The full version, including alpha/beta/rc tags.
release = ''
# The short X.Y version.
version = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'GlanceReleaseNotesdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'GlanceReleaseNotes.tex', u'Glance Release Notes Documentation',
u'Glance Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'glancereleasenotes', u'Glance Release Notes Documentation',
[u'Glance Developers'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -----
| MissionCriticalCloud/marvin | marvin/cloudstackAPI/listIpForwardingRules.py | Python | apache-2.0 | 4,977 | 0.001005 |
"""List the IP forwarding rules"""
from baseCmd import *
from baseResponse import *
class listIpForwardingRulesCmd (baseCmd):
typeInfo = {}
def __init__(self):
self.isAsync = "false"
"""list resources by account. Must be used with the domainId parameter."""
self.account = None
self.typeInfo['account'] = 'string'
"""list only resources belonging to the domain specified"""
self.domainid = None
self.typeInfo['domainid'] = 'uuid'
"""Lists rule with the specified ID."""
self.id = None
self.typeInfo['id'] = 'uuid'
"""list the rule belonging to this public IP address"""
self.ipaddressid = None
self.typeInfo['ipaddressid'] = 'uuid'
"""defaults to false, but if true, lists all resources from the parent specified by the domainId till leaves."""
self.isrecursive = None
self.typeInfo['isrecursive'] = 'boolean'
"""List by keyword"""
self.keyword = None
self.typeInfo['keyword'] = 'string'
"""If set to false, list only resources belonging to the command's caller; if set to true - list resources that the caller is authorized to see. Default value is false"""
self.listall = None
self.typeInfo['listall'] = 'boolean'
""""""
self.page = None
self.typeInfo['page'] = 'integer'
""""""
self.pagesize = None
self.typeInfo['pagesize'] = 'integer'
"""list objects by project"""
self.projectid = None
self.typeInfo['projectid'] = 'uuid'
"""Lists all rules applied to the specified VM."""
self.virtualmachineid = None
self.typeInfo['virtualmachineid'] = 'uuid'
self.required = []
class listIpForwardingRulesResponse (baseResponse):
typeInfo = {}
def __init__(self):
"""the ID of the port forwarding rule"""
self.id = None
self.typeInfo['id'] = 'string'
"""the cidr list to forward traffic from"""
self.cidrlist = None
self.typeInfo['cidrlist'] = 'string'
"""is firewall for display to the regular user"""
self.fordisplay = None
self.typeInfo['fordisplay'] = 'boolean'
"""the public ip address for the port forwarding rule"""
self.ipaddress = None
self.typeInfo['ipaddress'] = 'string'
"""the public ip address id for the port forwarding rule"""
self.ipaddressid = None
self.typeInfo['ipaddressid'] = 'string'
"""the id of the guest network the port forwarding rule belongs to"""
self.networkid = None
self.typeInfo['networkid'] = 'string'
"""the ending port of port forwarding rule's private port range"""
self.privateendport = None
self.typeInfo['privateendport'] = 'string'
"""the starting port of port forwarding rule's private port range"""
self.privateport = None
self.typeInfo['privateport'] = 'string'
"""the protocol of the port forwarding rule"""
self.protocol = None
self.typeInfo['protocol'] = 'string'
"""the ending port of port forwarding rule's private port range"""
self.publicendport = None
self.typeInfo['publicendport'] = 'string'
"""the starting port of port forwarding rule's public port range"""
self.publicport = None
self.typeInfo['publicport'] = 'string'
"""the state of the rule"""
self.state = None
self.typeInfo['state'] = 'string'
"""the VM display name for the port forwarding rule"""
self.virtualmachinedisplayname = None
self.typeInfo['virtualmachinedisplayname'] = 'string'
"""the VM ID for the port forwarding rule"""
self.virtualmachineid = None
self.typeInfo['virtualmachineid'] = 'string'
"""the VM name for the port forwarding rule"""
self.virtualmachinename = None
self.typeInfo['virtualmachinename'] = 'string'
"""the vm ip address for the port forwarding rule"""
self.vmguestip = None
self.typeInfo['vmguestip'] = 'string'
"""the list of resource tags associated with the rule"""
self.tags = []
class tags:
def __init__(self):
""""the account associated with the tag"""
self.account = None
""""customer associated with the tag"""
self.customer = None
""""the domain associated with the tag"""
self.domain = None
""""the ID of the domain associated with
|
the tag"""
self.domainid = None
""""tag key name"""
self.key = None
""""the project name where tag belongs to"""
self.project = None
""""the project id the tag belongs to"""
self.projectid = None
""""id of the resource"""
self.resourceid = None
""""resource type"""
self.resourcetype = None
""""tag value"""
self.value = None
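The generated classes above follow marvin's usual command/response pattern: instantiate the Cmd class, set any optional filter attributes, send it through an API client, and read typed fields off the response objects. A minimal sketch, assuming a connected marvin apiclient obtained elsewhere (e.g. from a cloudstackTestClient); the UUID below is hypothetical:

cmd = listIpForwardingRulesCmd()
cmd.ipaddressid = '6d3c1867-0000-0000-0000-000000000000'  # hypothetical UUID
cmd.listall = True
rules = apiclient.listIpForwardingRules(cmd)  # list of listIpForwardingRulesResponse
for rule in rules or []:
    print rule.ipaddress, rule.protocol, rule.state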
| sergejx/kaleidoscope | tests/test_generator.py | Python | bsd-3-clause | 6,135 | 0.000326 |
import os
from datetime import date
from unittest.mock import MagicMock, call
import pytest
import imagesize
from kaleidoscope import renderer, generator
from kaleidoscope.model import Gallery, Album, Section, Photo
from kaleidoscope.generator import generate, DefaultListener
def test_generate_gallery_index(tmpdir, disable_resize):
"""Generator should generate gallery index file."""
gallery = Gallery("Testing Gallery", "The Tester", [])
generate(gallery, str(tmpdir))
assert tmpdir.join("index.html").check()
def test_gallery_index_context(tmpdir, monkeypatch, disable_resize):
"""Generator should provide the gallery object for index template."""
render_mock = MagicMock()
monkeypatch.setattr(renderer, 'render', render_mock)
gallery = Gallery("Testing Gallery", "The Tester", [])
generate(gallery, str(tmpdir))
render_mock.assert_called_with(
"gallery.html",
str(tmpdir.join("index.html")),
{'gallery': gallery, 'current_year': date.today().year}
)
def test_album_index_generated(tmpdir, gallery_with_one_photo, disable_resize):
"""Generator should create album index file."""
generate(gallery_with_one_photo, str(tmpdir))
assert tmpdir.join("album", "index.html").exists()
def test_album_index_context(tmpdir, monkeypatch, disable_resize):
"""
Generator should provide the correct context to the album template.
"""
render_mock = MagicMock()
monkeypatch.setattr(renderer, 'render', render_mock)
album = Album("album", "The Album", date(2017, 6, 24), [])
gallery = Gallery("Testin Gallery", "The Tester", [album])
generate(gallery, str(tmpdir))
render_mock.assert_called_with(
"album.html",
str(tmpdir.join("album", "index.html")),
{'album': album, 'gallery': gallery, 'current_year': date.today().year}
)
def test_resize_thumbnail(tmpdir, gallery_with_one_photo):
"""Generator should create thumbnail file."""
generate(gallery_with_one_photo, str(tmpdir))
thumb_path = tmpdir.join("album", "thumb", "photo.jpg")
assert thumb_path.exists()
assert imagesize.get(str(thumb_path)) <= (300, 200)
def test_resize_large(tmpdir, gallery_with_one_photo):
"""Generator should create large resized file."""
generate(gallery_with_one_photo, str(tmpdir))
large_path = tmpdir.join("album", "large", "photo.jpg")
assert large_path.exists()
assert imagesize.get(str(large_path)) <= (1500, 1000)
def test_resize_existing(tmpdir, gallery_with_one_photo):
"""When resized image allready exists, do not resize it again."""
thumb_path = tmpdir.join("album", "thumb", "photo.jpg")
large_path = tmpdir.join("album", "large", "photo.jpg")
thumb_path.ensure()
large_path.ensure()
original_thumb_mtime = thumb_path.mtime()
original_large_mtime = large_path.mtime()
generate(gallery_with_one_photo, str(tmpdir))
assert thumb_path.mtime() == original_thumb_mtime
assert large_path.mtime() == original_large_mtime
def test_resized_images_metadata(tmpdir, gallery_with_one_photo):
"""Generator should fill resized images metadata in the Photo."""
generate(gallery_with_one_photo, str(tmpdir))
photo = next(gallery_with_one_photo.albums[0].photos)
assert photo.thumb.url == "thumb/photo.jpg"
assert photo.thumb.size <= (300, 200)
assert photo.large.url == "large/photo.jpg"
assert photo.large.size <= (1500, 1000)
def test_copy_assets(tmpdir, disable_resize):
"""Generator should copy assets directory into output."""
gallery = Gallery("", "", [])
generate(gallery, str(tmpdir))
assert tmpdir.join("assets", "kaleidoscope.js").exists()
assert tmpdir.join("assets", "kaleidoscope.css").exists()
def test_assets_directory_cleaned(tmpdir, disable_resize):
"""Generator should clean up existing assets directory."""
extra_file = tmpdir.join("assets", "existing-file.txt")
extra_file.ensure()
generate(Gallery("", "", []), str(tmpdir))
assert not extra_file.exists()
def test_generator_reporting_events(gallery_with_three_photos, tmpdir,
disable_resize):
"""Generator should report important events using provided reporter."""
listener = MagicMock(spec=DefaultListener)
generate(gallery_with_three_photos, tmpdir, listener)
album = gallery_with_three_photos.albums[0]
assert listener.starting_album.call_args == call(album, 3)
assert listener.finishing_album.called
assert listener.resizing_photo.call_count == 3
def test_counting_photos_to_resize(
gallery_with_three_photos, tmpdir, disable_resize):
"""Listener should receive count of photos that would be really resized."""
# Let's make 1.jpg already resized => 2 photos would remain
tmpdir.join("album", "large", "f1.jpg").ensure()
tmpdir.join("album", "thumb", "f1.jpg").ensure()
listener = MagicMock(spec=DefaultListener)
generate(gallery_with_three_photos, tmpdir, listener)
album = gallery_with_three_photos.albums[0]
assert listener.starting_album.call_args == call(album, 2)
assert listener.resizing_photo.call_count == 2
@pytest.fixture
def gallery_with_one_photo():
photo_path = os.path.join(os.path.dirname(__file__), 'data', 'photo.jpg')
photo = Photo("photo.jpg", "", "", photo_path)
album = Album("album", "The Album", date(2017, 6, 24), [Section("photos", [photo])])
return Gallery("Testin Gallery", "The Tester", [album])
@pytest.fixture
def gallery_with_three_photos():
photo_path = os.path.join(os.path.dirname(__file__), 'data', 'photo.jpg')
photos = [Photo("f%d.jpg" % (i,), "", "", photo_path) for i in range(3)]
album = Album("album", "The Album", date(2017, 6, 24), [Section("photos", photos)])
return Gallery("Testing Gallery", "The Tester", [album])
@pytest.fixture
def disable_resize(monkeypatch):
"""Replace image resize with dummy function and provide constant size."""
monkeypatch.setattr(generator, 'resize', MagicMock())
monkeypatch.setattr(imagesize, 'get', MagicMock(return_value=(42, 42)))
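Taken together, the tests above pin down the generator's public contract: generate(gallery, output_path[, listener]) writes the gallery index, per-album pages, resized images, and static assets under output_path. A minimal usage sketch under that contract (the output path is hypothetical):

from kaleidoscope.generator import generate
from kaleidoscope.model import Gallery

gallery = Gallery("My Gallery", "Author", [])  # empty gallery, for illustration
generate(gallery, "/tmp/site")  # writes /tmp/site/index.html plus assets/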
| rohitdatta/pepper | pepper/routes.py | Python | agpl-3.0 | 7,316 | 0.007791 |
import announcements, users, corporate, api, volunteer, teams, innovation
def configure_routes(app):
app.add_url_rule('/', 'landing', view_func=users.views.landing, methods=['GET'])
# Signing Up/Registration
app.add_url_rule('/register', 'sign-up', view_func=users.views.sign_up, methods=['GET', 'POST'])
app.add_url_rule('/callback', 'callback', view_func=users.views.callback, methods=['GET'])
app.add_url_rule('/complete_mlh_registration', 'complete-mlh-registration',
view_func=users.views.complete_mlh_registration, methods=['GET', 'POST'])
app.add_url_rule('/complete_registration', 'complete-registration', view_func=users.views.complete_registration,
methods=['GET', 'POST'])
app.add_url_rule('/login', 'login', view_func=users.views.login, methods=['GET', 'POST'])
app.add_url_rule('/logout', 'logout', view_func=users.views.logout, methods=['GET'])
app.add_url_rule('/login/reset', 'forgot-password', view_func=users.views.forgot_password,
methods=['GET', 'POST'])
app.add_url_rule('/login/reset/<token>', 'reset-password', view_func=users.views.reset_password,
methods=['GET', 'POST'])
app.add_url_rule('/register/confirm/<token>', 'confirm-account', view_func=users.views.confirm_account,
methods=['GET'])
# User action pages
app.add_url_rule('/edit_profile', 'edit-profile', view_func=users.views.edit_profile,
methods=['GET', 'POST'])
app.add_url_rule('/dashboard', 'dashboard', view_func=users.views.dashboard, methods=['GET'])
app.add_url_rule('/resend_confirmation_email', 'resend-confirmation-email',
view_func=users.views.resend_confirmation, methods=['POST'])
# app.add_url_rule('/profile/resume', 'view-own-resume', view_func=users.views.view_own_resume, methods=['GET'])
# app.add_url_rule('/refresh', 'refresh-mlh-data', view_func=users.views.refresh_from_mlh, methods=['GET'])
app.add_url_rule('/accept', 'accept-invite', view_func=users.views.accept, methods=['GET', 'POST'])
app.add_url_rule('/accept/sign', 'sign', view_func=users.views.sign, methods=['GET', 'POST'])
app.add_url_rule('/additional_status', 'additional-status', view_func=users.views.additional_status,
methods=['GET'])
app.add_url_rule('/accept_travel_reimbursement', 'accept-travel-reimbursement',
view_func=users.views.accept_reimbursement, methods=['POST'])
app.add_url_rule('/view_campus_ambassadors', 'view-campus-ambassadors',
view_func=users.views.view_campus_ambassadors, methods=['GET'])
# Team actions
app.add_url_rule('/team', 'team', view_func=teams.views.team, methods=['GET', 'POST'])
# Admin Pages
app.add_url_rule('/admin', 'admin-dash', view_func=users.admin_views.admin_dashboard,
methods=['GET'])
app.add_url_rule('/admin/create-corp-user', 'create-corp', view_func=users.admin_views.create_corp_user,
methods=['GET', 'POST'])
app.add_url_rule('/admin/debug', 'debug-user', view_func=users.admin_views.debug_user,
methods=['GET', 'POST'])
app.add_url_rule('/admin/initial-create', 'initial-create',
view_func=users.admin_views.initial_create, methods=['GET', 'POST'])
app.add_url_rule('/admin/batch', 'batch-modify', view_func=users.admin_views.batch_modify,
methods=['GET', 'POST'])
app.add_url_rule('/admin/send-email', 'send-email',
view_func=users.admin_views.send_email_to_users, methods=['GET', 'POST'])
app.add_url_rule('/admin/volunteer-list', 'volunteer-list', view_func=volunteer.views.volunteer_list,
methods=['GET'])
app.add_url_rule('/admin/add-volunteer', 'add-volunteer',
view_func=volunteer.views.add_volunteer, methods=['POST'])
app.add_url_rule('/admin/reject', 'reject-users', view_func=users.admin_views.reject_users,
methods=['GET', 'POST'])
app.add_url_rule('/admin/accept-teams', 'accept-teams', view_func=users.admin_views.accept_teams,
methods=['GET', 'POST'])
app.add_url_rule('/admin/check-in', 'manual-check-in',
view_func=users.admin_views.check_in_manual, methods=['GET', 'POST'])
app.add_url_rule('/admin/sign/<user_id>', 'check-in-sign',
view_func=users.admin_views.check_in_sign, methods=['GET', 'POST'])
app.add_url_rule('/admin/check-in-post', 'manual-check-in-post',
view_func=users.admin_views.check_in_post, methods=['POST'])
app.add_url_rule('/admin/set-mlh-id', 'set-mlh-id', view_func=users.admin_views.set_mlh_id,
methods=['GET', 'POST'])
app.add_url_rule('/admin/job/<job_key>', 'worker-jobs', view_func=users.admin_views.job_view,
methods=['GET'])
# API
app.add_url_rule('/api/announcements', 'announcements', view_func=announcements.views.announcement_list,
methods=['GET'])
app.add_url_rule('/api/announcements/create', 'create-announcement',
view_func=announcements.views.create_announcement, methods=['POST'])
app.add_url_rule('/api/partners', 'partners', view_func=api.views.partner_list,
methods=['GET'])
app.add_url_rule('/api/schedule', 'schedule', view_func=api.views.schedule, methods=['GET'])
app.add_url_rule('/api/schedule/<day>', 'day-schedule', view_func=api.views.schedule_day, methods=['GET'])
app.add_url_rule('/api/check-in', 'check-in-api', view_func=api.views.check_in, methods=['GET', 'POST'])
app.add_url_rule('/api/passbook', 'passbook', view_func=api.views.passbook, methods=['POST'])
# Corporate Portal
app.add_url_rule('/corp/login', 'corp-login', view_func=corporate.views.login, methods=['GET', 'POST'])
app.add_url_rule('/corp/login/reset', 'corp-forgot-password', view_func=corporate.views.forgot_password,
methods=['GET', 'POST'])
app.add_url_rule('/corp/login/reset/<token>', 'corp-reset-password', view_func=corporate.views.reset_password,
methods=['GET', 'POST'])
app.add_url_rule('/corp/setup/<token>', 'new-user-setup', view_func=corporate.views.new_user_setup,
methods=['GET', 'POST'])
app.add_url_rule('/corp', 'corp-dash', view_func=corporate.views.corporate_dash, methods=['GET', 'POST'])
app.add_url_rule('/corp/search', 'corp-search', view_func=corporate.views.corporate_search, methods=['GET'])
app.add_url_rule('/corp/search/results', 'search-results', view_func=corporate.views.search_results,
methods=['POST'])
app.add_url_rule('/corp/view/resume', 'resume-view', view_func=corporate.views.view_resume, methods=['GET'])
app.add_url_rule('/corp/download/all-resumes', 'all-resume-download',
view_func=corporate.views.download_all_resumes, methods=['GET'])
app.add_url_rule('/innovation/auth', 'innovation-auth', view_func=innovation.views.auth, methods=['GET'])
app.add_url_rule('/innovation/get-user-info', 'innovation-user-info', view_func=innovation.views.get_user_info, methods=['GET'])
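configure_routes only registers views, so it composes with any standard Flask app factory. A minimal wiring sketch (Flask itself and the import path are assumptions; this module only receives an already-constructed app):

from flask import Flask
from pepper import routes  # hypothetical import path for this module

app = Flask(__name__)
routes.configure_routes(app)

if __name__ == '__main__':
    app.run(debug=True)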
| peeyush-tm/shinken | modules/ws-nocout/module.py | Python | agpl-3.0 | 15,057 | 0.030152 |
#!/usr/bin/env python
"""This Class is an Arbiter module for having a webservice
through which we can have `sync` and `live polling` functionalities
"""
import json
import os
import select
import subprocess
import sys
import tarfile
import time
from shinken.basemodule import BaseModule
from shinken.external_command import ExternalCommand
from shinken.log import logger
from shinken.webui.bottlewebui import (run, route, request,
response, abort, parse_auth)
from nocout_live import main as live_poll_main
properties = {
'daemons': ['arbiter', 'receiver'],
'type': 'ws_nocout',
'external': True,
}
# called by the plugin manager to get a broker
def get_instance(plugin):
# info("[WS_Nocout] get_instance ...")
instance = WsNocout(plugin)
return instance
# Main app var. Will be fill with our running module instance
app = None
# Check_MK home dir
CHECK_MK_CONF_PATH = '/omd/dev_slave/slave_2/etc/check_mk/conf.d/wato/'
CHECK_MK_BIN = '/omd/dev_slave/slave_2/bin/cmk'
OLD_CONFIG = 'old_config.tar.gz'
NEW_CONFIG = 'new_config.tar.gz'
def get_commands(time_stamps, hosts, services, return_codes, outputs):
"""Composing a command list based on the information received in
POST request
"""
commands = []
current_time_stamp = int(time.time())
def _compose_command(t, h, s, r, o):
"""Simple function to create a command from the inputs"""
cmd = ""
if not s or s == "":
cmd = '[%s] PROCESS_HOST_CHECK_RESULT;%s;%s;%s' % (t if t is not None else current_time_stamp, h, r, o)
else:
cmd = '[%s] PROCESS_SERVICE_CHECK_RESULT;%s;%s;%s;%s' % (t if t is not None else current_time_stamp, h, s, r, o)
logger.debug("[WS_Nocout] CMD: %s" % (cmd))
commands.append(cmd)
# Trivial case: empty command list
if (return_codes is None or len(return_codes) == 0):
return commands
# Sanity check: if we get N return codes, we must have N hosts.
# The other values could be None
if (len(return_codes) != len(hosts)):
logger.error("[WS_Nocout] number of return codes (%d) does not match number of hosts (%d)" % (len(return_codes), len(hosts)))
abort(400, "number of return codes does not match number of hosts")
map(_compose_command, time_stamps, hosts, services, return_codes, outputs)
logger.debug("[WS_Nocout] received command: %s" % (str(commands)))
return commands
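# Illustrative sketch (hypothetical data, not part of the module): one host
# result and one service result become two external commands.
def _demo_get_commands():
    cmds = get_commands(
        [1462362917, 1462362917],   # time_stamps
        ['web01', 'web01'],         # hosts (hypothetical name)
        ['', 'HTTP'],               # an empty service selects the host form
        ['0', '2'],                 # return_codes
        ['OK', 'CRITICAL'])         # outputs
    # -> ['[1462362917] PROCESS_HOST_CHECK_RESULT;web01;0;OK',
    #     '[1462362917] PROCESS_SERVICE_CHECK_RESULT;web01;HTTP;2;CRITICAL']
    return cmds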
def get_page():
commands_list = []
try:
# Getting the lists of information for the commands
time_stamp_list = []
host_name_list = []
service_description_list = []
return_code_list = []
output_list = []
time_stamp_list = request.forms.getall(key='time_stamp')
logger.debug("[WS_Nocout] time_stamp_list: %s" % (time_stamp_list))
host_name_list = request.forms.getall(key='host_name')
logger.debug("[WS_Nocout] host_name_list: %s" % (host_name_list))
service_description_list = request.forms.getall(key='service_description')
logger.debug("[WS_Nocout] service_description_list: %s" % (service_description_list))
return_code_list = request.forms.getall(key='return_code')
logger.debug("[WS_Nocout] return_code_list: %s" % (return_code_list))
output_list = request.forms.getall(key='output')
logger.debug("[WS_Nocout] output_list: %s" % (output_list))
commands_list = get_commands(time_stamp_list, host_name_list, service_description_list, return_code_list, output_list)
except Exception, e:
logger.error("[WS_Nocout] failed to get the lists: %s" % str(e))
commands_list = []
#check_auth()
# Adding commands to the main queue()
logger.debug("[WS_Nocout] commands: %s" % str(sorted(commands_list)))
for c in sorted(commands_list):
ext = ExternalCommand(c)
app.from_q.put(ext)
# OK here it's ok, it will return a 200 code
def do_restart():
# Getting the information for the command
time_stamp = request.forms.get('time_stamp', int(time.time()))
command = '[%s] RESTART_PROGRAM\n' % time_stamp
#check_auth()
# Adding commands to the main queue()
logger.warning("[WS_Nocout] command: %s" % str(command))
ext = ExternalCommand(command)
app.from_q.put(ext)
# OK here it's ok, it will return a 200 code
def do_reload():
# Getting the information for the command
time_stamp = request.forms.get('time_stamp', int(time.time()))
command = '[%s] RELOAD_CONFIG\n' % time_stamp
#check_auth()
# Adding commands to the main queue()
logger.warning("[WS_Nocout] command: %s" % str(command))
ext = ExternalCommand(command)
app.from_q.put(ext)
# OK here it's ok, it will return a 200 code
def do_recheck():
# Getting the information for the command
time_stamp = request.forms.get('time_stamp', int(time.time()))
host_name = request.forms.get('host_name', '')
service_description = request.forms.get('service_description', '')
logger.debug("[WS_Nocout] Timestamp '%s' - host: '%s', service: '%s'" % (time_stamp,
host_name,
service_description
)
)
if not host_name:
abort(400, 'Missing parameter host_name')
if service_description:
# SCHEDULE_FORCED_SVC_CHECK;<host_name>;<service_description>;<check_time>
command = '[%s] SCHEDULE_FORCED_SVC_CHECK;%s;%s;%s\n' % (time_stamp,
host_name,
service_description,
time_stamp)
else:
# SCHEDULE_FORCED_HOST_CHECK;<host_name>;<check_time>
command = '[%s] SCHEDULE_FORCED_HOST_CHECK;%s;%s\n' % (time_stamp,
host_name,
time_stamp)
# We check for auth if it's not anonymously allowed
#check_auth()
# Adding commands to the main queue()
logger.debug("[WS_Nocout] command = %s" % command)
ext = ExternalCommand(command)
app.from_q.put(ext)
# OK here it's ok, it will return a 200 code
def do_downtime():
# Getting the information for the command
action = request.forms.get('action', 'add')
time_stamp = request.forms.get('time_stamp', int(time.time()))
host_name = request.forms.get('host_name', '')
service_description = request.forms.get('service_description', '')
start_time = request.forms.get('start_time', int(time.time()))
end_time = request.forms.get('end_time', int(time.time()))
# Fixed is 1 for a period between start and end time
fixed = request.forms.get('fixed', '1')
# Fixed is 0 (flexible) for a period of duration seconds from start time
duration = request.forms.get('duration', int('86400'))
trigger_id = request.forms.get('trigger_id', '0')
author = request.forms.get('author', 'anonymous')
comment = request.forms.get('comment', 'No comment')
logger.debug("[WS_Nocout] Downtime %s - host: '%s', service: '%s', comment: '%s'" % (action, host_name, service_description, comment))
if not host_name:
abort(400, 'Missing parameter host_name')
if action == 'add':
if service_description:
# SCHEDULE_SVC_DOWNTIME;<host_name>;<service_description>;<start_time>;<end_time>;<fixed>;<trigger_id>;<duration>;<author>;<comment>
command = '[%s] SCHEDULE_SVC_DOWNTIME;%s;%s;%s;%s;%s;%s;%s;%s;%s\n' % ( time_stamp,
host_name,
service_description,
start_time,
end_time,
fixed,
trigger_id,
duration,
author,
comment
)
else:
# SCHEDULE_HOST_DOWNTIME;<host_name>;<start_time>;<end_time>;<fixed>;<trigger_id>;<duration>;<author>;<comment>
command = '[%s] SCHEDULE_HOST_DOWNTIME;%s;%s;%s;%s;%s;%s;%s;%s\n' % ( time_stamp,
host_name,
start_time,
end_time,
fixed,
trigger_id,
duration,
author,
comment
)
if action == 'delete':
if service_description:
# DEL_ALL_SVC_DOWNTIMES;<host_name>;<service_description>
command = '[%s] DEL_ALL_SVC_DOWNTIMES;%s;%s\n' % ( time_stamp,
host_name,
service_description)
else:
# DEL_ALL_HOST_DOWNTIMES;<host_name>
command = '[%s] DEL_ALL_HOST_DOWNTIMES;%s\n' % ( time_stamp,
host_name)
| mfsteen/CIQTranslate-Kristian | openpyxl/utils/datetime.py | Python | gpl-3.0 | 3,155 | 0.00412 |
from __future__ import absolute_import
from __future__ import division
# Copyright (c) 2010-2016 openpyxl
"""Manage Excel date weirdness."""
# Python stdlib imports
import datetime
from datetime import timedelta, tzinfo
import re
from jdcal import (
gcal2jd,
jd2gcal,
MJD_0
)
from openpyxl.compat import lru_cache
# constants
MAC_EPOCH = datetime.date(1904, 1, 1)
WINDOWS_EPOCH = datetime.date(1899, 12, 30)
CALENDAR_WINDOWS_1900 = sum(gcal2jd(WINDOWS_EPOCH.year, WINDOWS_EPOCH.month, WINDOWS_EPOCH.day))
CALENDAR_MAC_1904 = sum(gcal2jd(MAC_EPOCH.year, MAC_EPOCH.month, MAC_EPOCH.day))
SECS_PER_DAY = 86400
EPOCH = datetime.datetime.utcfromtimestamp(0)
W3CDTF_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
W3CDTF_REGEX = re.compile('(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(.(\d{2}))?Z?')
def datetime_to_W3CDTF(dt):
"""Convert from a datetime to a timestamp string."""
return datetime.datetime.strftime(dt, W3CDTF_FORMAT)
def W3CDTF_to_datetime(formatted_string):
"""Convert from a timestamp string to a datetime object."""
match = W3CDTF_REGEX.match(formatted_string)
dt = [int(v) for v in match.groups()[:6]]
return datetime.datetime(*dt)
@lru_cache()
def to_excel(dt, offset=CALENDAR_WINDOWS_1900):
jul = sum(gcal2jd(dt.year, dt.month, dt.day)) - offset
if jul <= 60 and offset == CALENDAR_WINDOWS_1900:
jul -= 1
if hasattr(dt, 'time'):
jul += time_to_days(dt)
return jul
@lru_cache()
def from_excel(value, offset=CALENDAR_WINDOWS_1900):
if value is None:
return
if 1 < value < 60 and offset == CALENDAR_WINDOWS_1900:
value += 1
parts = list(jd2gcal(MJD_0, value + offset - MJD_0))
_, fraction = divmod(value, 1)
jumped = (parts[-1] == 0 and fraction > 0)
diff = datetime.timedelta(days=fraction)
if 0 < abs(value) < 1:
return days_to_time(diff)
if not jumped:
return datetime.datetime(*parts[:3]) + diff
else:
return datetime.datetime(*parts[:3] + [0])
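# A small sanity sketch (illustrative, not part of the module): with the
# default Windows 1900 epoch, Excel stores 2017-01-01 as serial 42736, and
# the two helpers above round-trip it.
def _demo_excel_roundtrip():
    assert from_excel(42736) == datetime.datetime(2017, 1, 1)
    assert to_excel(datetime.datetime(2017, 1, 1)) == 42736.0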
class GMT(tzinfo):
def utcoffset(self, dt):
return timedelta(0)
def dst(self, dt):
return timedelta(0)
def tzname(self,dt):
return "GMT"
try:
from datetime import timezone
UTC = timezone(timedelta(0))
except ImportError:
# Python 2.6
UTC = GMT()
@lru_cache()
def time_to_days(value):
"""Convert a time value to fractions of day"""
if value.tzinfo is not None:
value = value.astimezone(UTC)
return (
(value.hour * 3600)
+ (value.minute * 60)
+ value.second
+ value.microsecond / 10**6
) / SECS_PER_DAY
@lru_cache()
def timedelta_to_days(value):
"""Convert a timedelta value to fractions of a day"""
if not hasattr(value, 'total_seconds'):
secs = (value.microseconds +
(value.seconds + value.days * SECS_PER_DAY) * 10**6) / 10**6
else:
secs = value.total_seconds()
return secs / SECS_PER_DAY
@lru_cache()
def days_to_time(value):
mins, seconds = divmod(value.seconds, 60)
hours, mins = divmod(mins, 60)
return datetime.time(hours, mins, seconds, value.microseconds)
| HailStorm32/Q.bo_stacks | qbo_webi/src/voiceRecognition/voiceRecognition.py | Python | lgpl-2.1 | 10,125 | 0.01541 |
#!/usr/bin/env python2.6
# -*- coding: utf-8 -*-
#!/usr/bin/env python
#
# Software License Agreement (GPLv2 License)
#
# Copyright (c) 2012 TheCorpora SL
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
# Authors: Miguel Angel Julian <[email protected]>;
# Daniel Cuadrado <[email protected]>;
# Arturo Bajuelos <[email protected]>;
# Sergio Merino <[email protected]>;
import cherrypy
import os
import gen_grammar
import subprocess
from mako.template import Template
from tabsClass import TabClass
import simplejson
from subprocess import Popen, PIPE, STDOUT
import roslib
import signal
roslib.load_manifest('qbo_webi');
import rospy
import time
from uuid import getnode as get_mac
from poster.encode import multipart_encode
from poster.streaminghttp import register_openers
import urllib2
class VoiceRecognitionManager(TabClass):
def __init__(self,language):
self.ipWavServer = "audio.openqbo.org"
self.portWavServer="8588"
self.language = language
self.juliusPath=roslib.packages.get_pkg_dir("qbo_listen")
self.juliusAMPath="/usr/share/qbo-julius-model/"
self.htmlTemplate = Template(filename='voiceRecognition/templates/voiceRecognitionTemplate.html')
self.jsTemplate = Template(filename='voiceRecognition/templates/voiceRecognitionTemplate.js')
self.tmpdir="/tmp/"
self.LMPaths="/config/LM/"
self.LMFileName="/sentences.conf"
self.PhonemsFileName="/phonems"
self.TiedlistFileName="/tiedlist"
self.languages_names={'en':'English','es':'Spanish','pt':'Português','de':'Deutsch','fr':'Français','it':'Italiano'}
self.path = roslib.packages.get_pkg_dir("qbo_webi")+"/src/voiceRecognition/"
self.lan = self.language["current_language"]
self.mac = get_mac()
self.p = None
@cherrypy.expose
def voiceRecognitionJs(self, parameters=None):
self.lan = self.language["current_language"]
return self.jsTemplate.render(language=self.language)
def getLanguages(self):
try:
dirList=os.listdir(self.juliusPath+self.LMPaths)
dirList.sort()
except:
dirList=-1
return dirList
def isQboListenInstalled(self):
if self.getLanguages()==-1:
return False
else:
return True
def getLanguageModels(self,language):
try:
dirList=os.listdir(self.juliusPath+self.LMPaths+language)
dirList.sort()
except:
dirList=-1
return dirList
def getLMSentences(self,language,model):
try:
f = open(self.juliusPath+self.LMPaths+language+"/"+model+self.LMFileName,'r')
return f.read()
except:
sentences=""
return sentences
@cherrypy.expose
def getModels(self,lang):
modelList=""
try:
dirList=os.listdir(self.juliusPath+self.LMPaths+lang)
dirList.sort()
for model in dirList:
modelList=modelList+model+"::"
modelList=modelList[:-2]
except:
modelList=-1
return modelList
@cherrypy.expose
def test1(self,lang,text):
text=text.encode("utf-8")
f = open(self.tmpdir+'LModel', 'w')
f.write(text)
f.close()
words=gen_grammar.verrors(self.tmpdir+'LModel',self.juliusAMPath+lang+"/"+self.PhonemsFileName)
if words==0:
return ""
else:
wordsList=""
for word in words:
wordsList=wordsList+word+"::"
wordsList=wordsList[:-2]
return wordsList
@cherrypy.expose
def test2(self,lang,text):
errorlist=""
text=text.encode("utf-8")
print text
wordlist=text.split()
print wordlist
for word in wordlist:
if word[0]!="[" and word[0]!="<":
print word
f = open(self.tmpdir+'word', 'w')
f.write("[sentence]\n")
f.write(word)
f.close()
gen_grammar.createvoca(self.tmpdir+'word', self.juliusAMPath+lang+"/"+self.PhonemsFileName, self.tmpdir+'word')
print self.tmpdir+'word'
print self.juliusAMPath+lang+"/"+self.TiedlistFileName
if gen_grammar.perrors(self.tmpdir+'word.voca',self.juliusAMPath+lang+"/"+self.TiedlistFileName)!=0:
errorlist=errorlist+word+"::"
errorlist=errorlist[:-2]
return errorlist.upper()
@cherrypy.expose
def saveToFile(self,lang,text,model):
try:
#print self.juliusPath+self.LMPaths+language+"/"+model+self.LMFileName
text=text.encode("utf-8")
f = open(self.juliusPath+self.LMPaths+lang+"/"+model+self.LMFileName,'w')
f.write(text)
f.close()
gen_grammar.compilegrammar(model,lang)
subprocess.Popen("roslaunch qbo_listen voice_recognizer.launch".split())
except:
return "ERROR: Cant write the file"
return ""
@cherrypy.expose
def getFile(self,lang="",model=""):
if lang=="" or model=="":
return "ERROR: lang:"+lang+"; model:"+model
else:
#print self.getLMSentences(lang,model)
return self.getLMSentences(lang,model)
@cherrypy.expose
def index(self):
tmp=""
if self.isQboListenInstalled():
for lang in self.getLanguages():
for LM in self.getLanguageModels(lang):
text= self.getLMSentences(lang,LM)
break
break
return self.htmlTemplate.render(language=self.language,lannames=self.languages_names,alllanguage=self.getLanguages())
else:
return "Qbo listen not installed"
# return self.htmlTemplate.render(language=self.language)
@cherrypy.expose
def rec(self):
# n = self.getLenght("Arturo","sp")
# print "***** "+n
# Delete the previous recording, if there was one
try:
cmd="rm "+self.path+"tmp/*"
self.p = Popen(cmd.split())
except ValueError:
print "Nothing to delete"
'''
try:
cmd="rm "+self.path+"/*_en"
self.p = Popen(cmd.split())
except ValueError:
print "Nothing to delete"
try:
cmd="rm "+path+"/*sp"
print cmd
self.p = Popen(cmd.split())
except ValueError:
print "Nothing to delete"
'''
self.filename = str(self.mac)+"_"+self.lan
#filename = filename.replace("\"","")
# filename = "tmp.wav"
print "FILENAME == "+self.filename
print "grabnando!!!! "+self.path+"tmp/"+self.filename
cmd="arecord -f S16_LE -r 44100 -c 1 "+self.path+"tmp/"+self.filename
self.p = Popen(cmd.split())
name="oleole"
return name
@cherrypy.expose
def stop(self):
if(self.p==None):
print "P ES NULL!!??"
else:
print "matar grabacin"
self.p.send_signal(signal.SIGINT)
cmd="python "+self.path+"sendWav2Server.py "+self.path+"tmp/"+self.filename+" "+self.ipWavServer+" "+self.portWavServer
print cmd
out = runCmd(cmd)
print out[0]
if o
| romain-dartigues/ansible-modules-core | system/hostname.py | Python | gpl-3.0 | 20,763 | 0.003516 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Hiroaki Nakamura <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: hostname
author:
- "Hiroaki Nakamura (@hnakamur)"
- "Hideki Saito (@saito-hideki)"
version_added: "1.4"
short_description: Manage hostname
requirements: [ hostname ]
description:
- Set system's hostname.
- Currently implemented on Debian, Ubuntu, Fedora, RedHat, openSUSE, Linaro, ScientificLinux, Arch, CentOS, AMI.
- Any distribution that uses systemd as their init system.
- Note, this module does *NOT* modify /etc/hosts. You need to modify it yourself using other modules like template or replace.
options:
name:
required: true
description:
- Name of the host
'''
EXAMPLES = '''
- hostname: name=web01
'''
import socket
from distutils.version import LooseVersion
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.facts import *
from ansible.module_utils._text import to_bytes, to_native
class UnimplementedStrategy(object):
def __init__(self, module):
self.module = module
def get_current_hostname(self):
self.unimplemented_error()
def set_current_hostname(self, name):
self.unimplemented_error()
def get_permanent_hostname(self):
self.unimplemented_error()
def set_permanent_hostname(self, name):
self.unimplemented_error()
def unimplemented_error(self):
platform = get_platform()
distribution = get_distribution()
if distribution is not None:
msg_platform = '%s (%s)' % (platform, distribution)
else:
msg_platform = platform
self.module.fail_json(
msg='hostname module cannot be used on platform %s' % msg_platform)
class Hostname(object):
"""
This is a generic Hostname manipulation class that is subclassed
based on platform.
A subclass may wish to set different strategy instance to self.strategy.
All subclasses MUST define platform and distribution (which may be None).
"""
platform = 'Generic'
distribution = None
strategy_class = UnimplementedStrategy
def __new__(cls, *args, **kwargs):
return load_platform_subclass(Hostname, args, kwargs)
def __init__(self, module):
self.module = module
self.name = module.params['name']
if self.platform == 'Linux' and Facts(module).is_systemd_managed():
self.strategy = SystemdStrategy(module)
else:
self.strategy = self.strategy_class(module)
def get_current_hostname(self):
return self.strategy.get_current_hostname()
def set_current_hostname(self, name):
self.strategy.set_current_hostname(name)
def get_permanent_hostname(self):
return self.strategy.get_permanent_hostname()
def set_permanent_hostname(self, name):
self.strategy.set_permanent_hostname(name)
class GenericStrategy(object):
"""
This is a generic Hostname manipulation strategy class.
A subclass may wish to override some or all of these methods.
- get_current_hostname()
- get_permanent_hostname()
- set_current_hostname(name)
- set_permanent_hostname(name)
"""
def __init__(self, module):
self.module = module
self.hostname_cmd = self.module.get_bin_path('hostname', True)
def get_current_hostname(self):
cmd = [self.hostname_cmd]
rc, out, err = self.module.run_command(cmd)
if rc != 0:
self.module.fail_json(msg="Command failed rc=%d, out=%s, err=%s" %
(rc, out, err))
return to_native(out).strip()
def set_current_hostname(self, name):
cmd = [self.hostname_cmd, name]
rc, out, err = self.module.run_command(cmd)
if rc != 0:
self.module.fail_json(msg="Command failed rc=%d, out=%s, err=%s" %
(rc, out, err))
def get_permanent_hostname(self):
return None
def set_permanent_hostname(self, name):
pass
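# Illustrative sketch (hypothetical, not part of this module): a new platform
# is supported by pairing a Hostname subclass with a strategy class. At
# instantiation, load_platform_subclass() picks the subclass whose platform
# (and optional distribution) matches the running system.
class ExampleHostname(Hostname):
    platform = 'Example'  # hypothetical platform name
    distribution = None
    strategy_class = GenericStrategy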
# ===========================================
class DebianStrategy(GenericStrategy):
"""
This is a Debian family Hostname manipulation strategy class - it edits
the /etc/hostname file.
"""
HOSTNAME_FILE = '/etc/hostname'
def get_permanent_hostname(self):
if not os.path.isfile(self.HOSTNAME_FILE):
try:
open(self.HOSTNAME_FILE, "a").write("")
except IOError:
err = get_exception()
self.module.fail_json(msg="failed to write file: %s" %
str(err))
try:
f = open(self.HOSTNAME_FILE)
try:
return f.read().strip()
finally:
f.close()
except Exception:
err = get_exception()
self.module.fail_json(msg="failed to read hostname: %s" %
str(err))
def set_permanent_hostname(self, name):
try:
f = open(self.HOSTNAME_FILE, 'w+')
try:
f.write("%s\n" % name)
finally:
f.close()
except Exception:
err = get_exception()
self.module.fail_json(msg="failed to update hostname: %s" %
str(err))
# ===========================================
class SLESStrategy(GenericStrategy):
"""
This is a SLES Hostname strategy class - it edits the
/etc/HOSTNAME file.
"""
HOSTNAME_FILE = '/etc/HOSTNAME'
def get_permanent_hostname(self):
if not os.path.isfile(self.HOSTNAME_FILE):
try:
open(self.HOSTNAME_FILE, "a").write("")
except IOError:
err = get_exception()
self.module.fail_json(msg="failed to write file: %s" %
str(err))
try:
f = open(self.HOSTNAME_FILE)
try:
return f.read().strip()
finally:
f.close()
except Exception:
err = get_exception()
self.module.fail_json(msg="failed to read hostname: %s" %
str(err))
def set_permanent_hostname(self, name):
try:
f = open(self.HOSTNAME_FILE, 'w+')
try:
f.write("%s\n" % name)
finally:
f.close()
except Exception:
err = get_exception()
self.module.fail_json(msg="failed to update hostname: %s" %
str(err))
# ===========================================
class RedHatStrategy(GenericStrategy):
"""
This is a Redhat Hostname strategy class - it edits the
/etc/sysconfig/network file.
"""
NETWORK_FILE = '/etc/sysconfig/network'
def get_permanent_hostname(self):
try:
f = open(self.NETWORK_FILE, 'rb')
try:
for line in f.readlines():
if line.startswith('HOSTNAME'):
k, v = line.split('=')
return v.strip()
finally:
f.close()
except Exception:
err = get_exception()
self.module.fail_json(msg="failed to read hostname: %s" %
str(err))
def set_permanent_hostname(self, name):
try:
lines = []
found = False
f = open(self.NETWORK_FILE, 'rb')
try:
for line in f.readlines():
| strogonoff/django-dbmessages | setup.py | Python | apache-2.0 | 1,218 | 0.001642 |
#coding: utf-8
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# Allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-dbmessages',
version='0.2.0a',
packages=['dbmessages'],
include_package_data=True,
license='BSD License',
description='Request-independent messaging for Django on top of contrib.messages',
long_description=README,
author='Upwork, Anton Strogonoff',
author_email='[email protected]',
maintainer='Anton Strogonoff',
maintainer_email='[email protected]',
download_url='http://github.com/strogonoff/django-dbmessages',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| willre/homework | day19/web/app01/forms/home.py | Python | gpl-2.0 | 723 | 0.027027 |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
@author: Will
"""
from django import forms
from app01 import models
class ImportFrom(forms.Form):
HOST_TYPE=((1,"001"),(2,"002")) # to be replaced with a file
host_type = forms.IntegerField(
widget=forms.Select(choices=HOST_TYPE)
)
hostname = forms.CharField()
def __init__(self,*args,**kwargs):
super(ImportFrom,self).__init__(*args,**kwargs)
HOST_TYPE=((1,"001"),(2,"002")) # to be replaced with a file
self.fields['host_type'].widget.choices = models.userInfo.objects.all().values_list("id","name")
models.userInfo.objects.get()
models.userInfo.objects.filter()
| xycfree/py_spider | spider/down_pic_thread.py | Python | gpl-3.0 | 1,358 | 0.021021 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2016-09-01 22:26:01
# @Author : Your Name ([email protected])
# @Link : http://example.org
# @Version : $Id$
import os
import threading
import requests
import lxml
from threading import Thread
from bs4 import BeautifulSoup
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
pic_path = 'pic/' # path for saving files
URL = 'http://www.nanrenwo.net/z/tupian/hashiqitupian/'
URL1 = 'http://www.nanrenwo.net/'
class Worker(threading.Thread):
def __init__(self, url, img, filename):
super(Worker, self).__init__()
self.url = url
self.img = img
self.filename = filename
def run(self):
try:
u = self.url + self.img
r = requests.get(u, stream=True)
with open(self.filename, 'wb') as fd:
for chunk in r.iter_content(4096):
fd.write(chunk)
except Exception, e:
raise
def get_imgs(url):
t = 1
r = requests.get(url, stream=True)
soup = BeautifulSoup(r.text, 'lxml')
myimg = [img.get('src') for img in soup.find(id='brand-waterfall').find_all('img')] # query all img elements under this id
print 'myimg:', myimg
for img in myimg:
pic_name = pic_path + str(t) + '.jpg'
# img_src = img.get('src')
print 'img: ', img
# self.download_pic(URL1,img,pic_name) #request Url,img src,picture name
w = Worker(URL1, img, pic_name)
w.start()
t += 1
get_imgs(URL)
| trachelr/mne-python | tutorials/plot_cluster_stats_spatio_temporal_2samp.py | Python | bsd-3-clause | 4,321 | 0 |
"""
.. _tut_stats_cluster_source_2samp:
=========================================================================
2 samples permutation test on source data with spatio-temporal clustering
=========================================================================
Tests if the source space data are significantly different between
2 groups of subjects (simulated here using one subject's data).
The multiple comparisons problem is addressed with a cluster-level
permutation test across space and time.
"""
# Authors: Alexandre Gramfort <[email protected]>
# Eric Larson <[email protected]>
# License: BSD (3-clause)
import os.path as op
import numpy as np
from scipy import stats as stats
import mne
from mne import spatial_tris_connectivity, grade_to_tris
from mne.stats import spatio_temporal_cluster_test, summarize_clusters_stc
from mne.datasets import sample
print(__doc__)
###############################################################################
# Set parameters
data_path = sample.data_path()
stc_fname = data_path + '/MEG/sample/sample_audvis-meg-lh.stc'
subjects_dir = data_path + '/subjects'
# Load stc to in common cortical space (fsaverage)
stc = mne.read_source_estimate(stc_fname)
stc.resample(50)
stc = mne.morph_data('sample', 'fsaverage', stc, grade=5, smooth=20,
subjects_dir=subjects_dir)
n_vertices_fsave, n_times = stc.data.shape
tstep = stc.tstep
n_subjects1, n_subjects2 = 7, 9
print('Simulating data for %d and %d subjects.' % (n_subjects1, n_subjects2))
# Let's make sure our results replicate, so set the seed.
np.random.seed(0)
X1 = np.random.randn(n_vertices_fsave, n_times, n_subjects1) * 10
X2 = np.random.randn(n_vertices_fsave, n_times, n_subjects2) * 10
X1[:, :, :] += stc.data[:, :, np.newaxis]
# make the activity bigger for the second set of subjects
X2[:, :, :] += 3 * stc.data[:, :, np.newaxis]
# We want to compare the overall activity levels for each subject
X1 = np.abs(X1) # only magnitude
X2 = np.abs(X2) # only magnitude
###############################################################################
# Compute statistic
# To use an algorithm optimized for spatio-temporal clustering, we
# just pass the spatial connectivity matrix (instead of spatio-temporal)
print('Computing connectivity.')
connectivity = spatial_tris_connectivity(grade_to_tris(5))
# Note that X needs to be a list of multi-dimensional array of shape
# samples (subjects_k) x time x space, so we permute dimensions
X1 = np.transpose(X1, [2, 1, 0])
X2 = np.transpose(X2, [2, 1, 0])
X = [X1, X2]
# Now let's actually do the clustering. This can take a long time...
# Here we set the threshold quite high to reduce computation.
p_threshold = 0.0001
f_threshold = stats.distributions.f.ppf(1. - p_threshold / 2.,
n_subjects1 - 1, n_subjects2 - 1)
print('Clustering.')
T_obs, clusters, cluster_p_values, H0 = clu =\
spatio_temporal_cluster_test(X, connectivity=connectivity, n_jobs=2,
threshold=f_threshold)
# Now select the clusters that are sig. at p < 0.05 (note that this value
# is multiple-comparisons corrected).
good_cluster_inds = np.where(cluster_p_values < 0.05)[0]
###############################################################################
# Visualize the clusters
print('Visualizing clusters.')
# Now let's build a convenient representation of each cluster, where each
# cluster becomes a "time point" in the SourceEstimate
fsave_vertices = [np.arange(10242), np.arange(10242)]
stc_all_cluster_vis = summarize_clusters_stc(clu, tstep=tstep,
vertices=fsave_vertices,
subject='fsaverage')
# Let's actually plot the first "time point" in the SourceEstimate, which
# shows all the clusters, weighted by duration
subjects_dir = op.join(data_path, 'subjects')
# blue blobs are for condition A != condition B
brain = stc_all_cluster_vis.plot('fsaverage', hemi='both', colormap='mne',
subjects_dir=subjects_dir,
time_label='Duration significant (ms)')
brain.set_data_time_index(0)
brain.show_view('lateral')
brain.save_image('clusters.png')
| teoreteetik/api-snippets | ip-messaging/rest/messages/list-messages/list-messages.5.x.py | Python | mit | 533 | 0 |
# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest.ip_messaging import TwilioIpMessagingClient
# Your Account Sid and Auth Token from twilio.com/user/account
account = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
token = "your_auth_token"
client = TwilioIpMessagingClient(account, token)
service = client.services.get(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX")
channel = service.channels.get(sid="CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX")
messages = channel.messages.list()
for m in messages:
print(m)
| tommyip/zulip | tools/lib/test_script.py | Python | apache-2.0 | 2,710 | 0.002583 |
from typing import Optional, Tuple
import os
import sys
from distutils.version import LooseVersion
from version import PROVISION_VERSION
from scripts.lib.zulip_tools import get_dev_uuid_var_path
def get_major_version(v):
# type: (str) -> int
return int(v.split('.')[0])
def get_version_file():
# type: () -> str
uuid_var_path = get_dev_uuid_var_path()
return os.path.join(uuid_var_path, 'provision_version')
PREAMBLE = '''
Before we run tests, we make sure your provisioning version
is correct by looking at var/provision_version, which is at
version %s, and we compare it to the version in source
control (version.py), which is %s.
'''
def preamble(version):
# type: (str) -> str
text = PREAMBLE % (version, PROVISION_VERSION)
text += '\n'
return text
NEED_TO_DOWNGRADE = '''
It looks like you checked out a branch that expects an older
version of dependencies than the version you provisioned last.
This may be ok, but it's likely that you either want to rebase
your branch on top of upstream/master or re-provision your VM.
Do this: `./tools/provision`
'''
NEED_TO_UPGRADE = '''
It looks like you checked out a branch that has added
dependencies beyond what you last provisioned. Your command
is likely to fail until you add dependencies by provisioning.
Do this: `./tools/provision`
'''
def get_provisioning_status():
# type: () -> Tuple[bool, Optional[str]]
version_file = get_version_file()
if not os.path.exists(version_file):
# If the developer doesn't have a version_file written by
# a previous provision, then we don't do any safety checks
# here on the assumption that the developer is managing
# their own dependencies and not running provision.
return True, None
with open(version_file, 'r') as f:
version = f.read().strip()
# Normal path for people that provision--we're all good!
if version == PROVISION_VERSION:
return True, None
# We may be more provisioned than the branch we just moved to. As
# long as the major version hasn't changed, then we should be ok.
if LooseVersion(version) > LooseVersion(PROVISION_VERSION):
if get_major_version(version) == get_major_version(PROVISION_VERSION):
return True, None
else:
return False, preamble(version) + NEED_TO_DOWNGRADE
return False, preamble(version) + NEED_TO_UPGRADE
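# A worked illustration of the logic above (version strings are hypothetical):
# if var/provision_version holds '2.3' while version.py expects '2.1', the
# provisioned version is newer but the major version (2) matches, so
# get_provisioning_status() returns (True, None); with '3.0' vs '2.1' the
# major versions differ, so the NEED_TO_DOWNGRADE message is returned.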
def assert_provisioning_status_ok(force):
# type: (bool) -> None
if not force:
ok, msg = get_provisioning_status()
if not ok:
print(msg)
print('If you really know what you are doing, use --force to run anyway.')
sys.exit(1)
|
peacekeeper/indy-sdk
|
wrappers/python/tests/crypto/test_auth_crypt.py
|
Python
|
apache-2.0
| 1,227
| 0.005705
|
import json
import pytest
from indy import crypto, did, error
@pytest.mark.asyncio
async def test_auth_crypt_works_for_created_key(wallet_handle, seed_my1, verkey_my2, message):
verkey = await did.create_key(wallet_handle, json.dumps({'seed': seed_my1}))
await crypto.auth_crypt(wallet_handle, verkey, verkey_my2, message)
@pytest.mark.asyncio
async def test_auth_crypt_works_for_unknown_sender_verkey(wallet_handle, verkey_my1, verkey_my2, message):
with pytest.raises(error.WalletItemNotFound):
await crypto.auth_crypt(wallet_handle, verkey_my1, verkey_my2, message)
@pytest.mark.asyncio
async def test_auth_crypt_works_for_invalid_handle(wallet_handle, verkey_my1, verkey_my2, message):
with pytest.raises(error.WalletInvalidHandle):
invalid_wallet_handle = wallet_handle + 1
await crypto.auth_crypt(invalid_wallet_handle, verkey_my1, verkey_my2, message)
@pytest.mark.asyncio
async def test_auth_crypt_works_for_invalid_recipient_vk(wallet_handle, identity_trustee1, message):
(_, key) = identity_trustee1
with pytest.raises(error.CommonInvalidStructure):
await crypto.auth_crypt(wallet_handle, key, 'CnEDk___MnmiHXEV1WFgbV___eYnPqs___TdcZaNhFVW', message)
|
jaromrax/myservice
|
version2/mysglobal.py
|
Python
|
gpl-2.0
| 4,226
| 0.014908
|
#!/usr/bin/python3
#############
# this is to be loaded by every module.
# I think
#import mysglobal as g # args, logger @every module
#################
import logging
from logzero import setup_logger,LogFormatter,colors
import argparse
import os,sys
import json
from blessings import Terminal
import getpass # lockfile<= getuser
#from threading import Thread # thread: i need accesible thread
import uuid
DEBUG=True
config={} # global config, but not sure
MYSEPATH=os.path.expanduser("~/.myservice")
I_AM_INFINITE=False
BOTTOMLINE_TEXT="no message"
t = Terminal()
ZMQ_REP_PORT=5678
RANDOM_STR = uuid.uuid4()
user_name = os.getenv('USER') # for /var/run/screen/S-user
####################################
# PARSER ARG
######################################
parser=argparse.ArgumentParser(description="""
------------------------------------------------------------------
The tool to run services in userspace
""",usage="""
myservice2 [-d] ... shows the executables in ~/.myservice
myservice2 [-d] infinite ... run infinite (in terminal)
myservice test ... test is without a path inside ~/.myservice
myservice2 [-d] test enable ... introduces into .config.json
myservice2 [-d] test disable
myservice2 [-d] test never ... gray servicename and mode
myservice2 [-d] test undef
myservice2 [-d] test start
myservice2 [-d] test stop ... kills and makes UNDEF
myservice2 [-d] test perm4h ... run every 4 hours (it knows m,h,d)
myservice2 [-d] reconfig ... when a MANUAL edit to .config.json is done
script /dev/null ... do this when an ssh user has no access to screen
------------------------------------------------------------------
VARIOUS TRICKS:
myservice2 ... looks for all executables;
* ... already present in .config.json
E ... attribute enable is there
+ or - ... attribute enable is true or false
p ... attribute perm is ON; also a,x
PATHS:
when ~/.myservice/test/aaa
myservice2 aaa enable : finds a path and adds into the .config.json
myservice2 infinite ... runs the table in the terminal (only 1 instance possible)
OR connects to the screen -x myservice2_infinite
""",
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('-d','--debug', action='store_true' , help='')
#parser.add_argument('-s','--serfmsg', default='',nargs="+" , help='serf message to mmap') # list will come after
#parser.add_argument('count', action="store", type=int)
parser.add_argument('service', action="store", nargs="?") # nargs='+' :
parser.add_argument('command', action="store", nargs="?") # nargs='+'
#parser.add_argument('command', action="store")
# print("""
# USAGE CASES:
# ------------------------------------------------------------------
# ./myservice2.py -d infinite
# ./myservice.py test enable
# ------------------------------------------------------------------
# VARIOUS TRICKS:
# subdir TTT
# myservice2 TTT/aaa enable : adds into the config
# # this was the last time about PATH!; from now on:
# myservice2 aaa sock
# ./myservice2.py -s myservice aaa # send command to mmap to test serf
# # aaa must be status sock
# """)
args=parser.parse_args()
#=========== path must exist
if not os.path.isdir( os.path.expanduser("~/.myservice") ):
#print(" directory exists")
#else:
print(" DIR NOT EXISTS")
os.mkdir( os.path.expanduser("~/.myservice") )
###########################################
# LOGGING - after ARG PARSE
########################################
log_format = '%(color)s%(levelname)1.1s... %(asctime)s%(end_color)s %(message)s' # i... format
LogFormatter.DEFAULT_COLORS[10] = colors.Fore.YELLOW ## debug level=10. default Cyan...
loglevel=1 if args.debug==1 else 11 # all info, but not debug
formatter = LogFormatter(fmt=log_format,datefmt='%Y-%m-%d %H:%M:%S')
logfile=os.path.splitext( os.path.expanduser("~/.myservice/")+os.path.basename(sys.argv[0]) )[0]+'.log'
logger = setup_logger( name="main",logfile=logfile, level=loglevel,formatter=formatter )#to 1-50
lockfile="/tmp/"+"myservice2_"+getpass.getuser()+".lock"
lockfilepid=0
|
StackStorm/mistral
|
mistral/tests/unit/lang/v2/base.py
|
Python
|
apache-2.0
| 3,920
| 0
|
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import yaml
from mistral import exceptions as exc
from mistral.lang import parser as spec_parser
from mistral.tests.unit import base
from mistral import utils
class WorkflowSpecValidationTestCase(base.BaseTest):
def __init__(self, *args, **kwargs):
super(WorkflowSpecValidationTestCase, self).__init__(*args, **kwargs)
# The relative resource path is ./mistral/tests/resources/workbook/v2.
self._resource_path = 'workbook/v2'
self._spec_parser = spec_parser.get_workflow_list_spec_from_yaml
self._dsl_blank = {
'version': '2.0',
'test': {
'type': 'direct'
}
}
self._dsl_tasks = {
'get': {
'action': 'std.http',
'input': {
'url': 'https://www.openstack.org'
}
},
'echo': {
'action': 'std.echo',
'input': {
'output': 'This is a test.'
}
},
'email': {
'action': 'std.email',
'input': {
'from_addr': '[email protected]',
'to_addrs': ['[email protected]'],
'subject': 'Test',
                    'body': 'This is a test.',
'smtp_server': 'localhost',
'smtp_password': 'password'
}
}
}
    def _parse_dsl_spec(self, dsl_file=None, add_tasks=False,
                        changes=None, expect_error=False):
if dsl_file and add_tasks:
raise Exception('The add_tasks option is not a valid '
'combination with the dsl_file option.')
if dsl_file:
dsl_yaml = base.get_resource(self._resource_path + '/' + dsl_file)
if changes:
dsl_dict = yaml.safe_load(dsl_yaml)
utils.merge_dicts(dsl_dict, changes)
dsl_yaml = yaml.safe_dump(dsl_dict, default_flow_style=False)
else:
dsl_dict = copy.deepcopy(self._dsl_blank)
if add_tasks:
dsl_dict['test']['tasks'] = copy.deepcopy(self._dsl_tasks)
if changes:
utils.merge_dicts(dsl_dict, changes)
dsl_yaml = yaml.safe_dump(dsl_dict, default_flow_style=False)
if not expect_error:
return self._spec_parser(dsl_yaml)
else:
return self.assertRaises(
exc.DSLParsingException,
self._spec_parser,
dsl_yaml
)
class WorkbookSpecValidationTestCase(WorkflowSpecValidationTestCase):
def __init__(self, *args, **kwargs):
super(WorkbookSpecValidationTestCase, self).__init__(*args, **kwargs)
self._spec_parser = spec_parser.get_workbook_spec_from_yaml
self._dsl_blank = {
'version': '2.0',
'name': 'test_wb'
}
def _parse_dsl_spec(self, dsl_file=None, add_tasks=False,
changes=None, expect_error=False):
return super(WorkbookSpecValidationTestCase, self)._parse_dsl_spec(
dsl_file=dsl_file, add_tasks=False, changes=changes,
expect_error=expect_error)
|
utek/pydesktime
|
pydesktime/desktime.py
|
Python
|
mit
| 2,182
| 0.000917
|
import requests
import datetime
import calendar
class DeskTime(object):
MAIN_URL = 'https://desktime.com/api/2/json/?{params}'
def __init__(self, app_key, username, password):
self.api_key = self._login(app_key, username, password)
if self.api_key is None:
raise Exception("Authorization error")
pass
def _login(self, app_key, username, password):
auth = 'appkey={appkey}&action={action}&email={email}&password={password}'
auth = auth.format(appkey=app_key, action='authorize',
email=username, password=password)
auth_url = self.MAIN_URL.format(params=auth)
res = requests.get(auth_url)
data = res.json()
if not data.get(u'error', None):
return data.get('api_key', None)
return None
    def getAllDataForDate(self, date=datetime.datetime.now().date()):
        employees = 'apikey={apikey}&action=employees&date={date}'
employees = employees.format(apikey=self.api_key, action='employees',
date=date.isoformat())
url = self.MAIN_URL.format(params=employees)
res = requests.get(url)
data = res.json()
if not data.get('error', None):
return data
return None
def getMonth(self, year, month, with_weekends=False):
monthrange = calendar.monthrange(year, month)
today = datetime.datetime.now().date()
data = []
resdata = {}
for dayindex in range(monthrange[1]):
day = dayindex + 1
date = datetime.date(year, month, day)
if date > today and date.year == today.year and today.month == date.month:
continue
elif date > today:
return None
if not with_weekends and date.weekday() in (5, 6):
continue
data.append(self.getAllDataForDate(date))
for elem in data:
resdata[elem.get('date')] = elem.get('employees')
return data
def getEmployee(self, employee_id):
raise(NotImplementedError)
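# A minimal usage sketch for the class above (the app key and credentials are
# placeholders, not real values):
#
#   dt = DeskTime('my-app-key', 'user@example.com', 'secret')
#   today = dt.getAllDataForDate()   # employee data for today, or None on error
#   august = dt.getMonth(2014, 8)    # per-day responses, weekends skipped by default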
|
adamhaney/airflow
|
tests/contrib/operators/test_gcp_bigtable_operator.py
|
Python
|
apache-2.0
| 29,272
| 0.00164
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import google.api_core.exceptions
from google.cloud.bigtable.column_family import MaxVersionsGCRule
from google.cloud.bigtable.instance import Instance
from google.cloud.bigtable.table import ClusterState
from parameterized import parameterized
from airflow import AirflowException
from airflow.contrib.operators.gcp_bigtable_operator import \
BigtableInstanceDeleteOperator, \
BigtableTableDeleteOperator, \
BigtableTableCreateOperator, \
BigtableTableWaitForReplicationSensor, \
BigtableClusterUpdateOperator, \
BigtableInstanceCreateOperator
try:
# noinspection PyProtectedMember
from unittest import mock
except ImportError:
try:
import mock
except ImportError:
mock = None
PROJECT_ID = 'test_project_id'
INSTANCE_ID = 'test-instance-id'
CLUSTER_ID = 'test-cluster-id'
CLUSTER_ZONE = 'us-central1-f'
NODES = 5
TABLE_ID = 'test-table-id'
INITIAL_SPLIT_KEYS = []
EMPTY_COLUMN_FAMILIES = {}
class BigtableInstanceCreateTest(unittest.TestCase):
@parameterized.expand([
('instance_id', PROJECT_ID, '', CLUSTER_ID, CLUSTER_ZONE),
('main_cluster_id', PROJECT_ID, INSTANCE_ID, '', CLUSTER_ZONE),
('main_cluster_zone', PROJECT_ID, INSTANCE_ID, CLUSTER_ID, ''),
], testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0])
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_empty_attribute(self, missing_attribute, project_id, instance_id,
main_cluster_id,
main_cluster_zone, mock_hook):
with self.assertRaises(AirflowException) as e:
BigtableInstanceCreateOperator(
project_id=project_id,
instance_id=instance_id,
main_cluster_id=main_cluster_id,
main_cluster_zone=main_cluster_zone,
task_id="id"
)
err = e.exception
self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute))
mock_hook.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_create_instance_that_exists(self, mock_hook):
mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
op = BigtableInstanceCreateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
main_cluster_id=CLUSTER_ID,
main_cluster_zone=CLUSTER_ZONE,
task_id="id"
)
op.execute(None)
mock_hook.assert_called_once_with()
mock_hook.return_value.create_instance.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_create_instance_that_exists_empty_project_id(self, mock_hook):
mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
op = BigtableInstanceCreateOperator(
instance_id=INSTANCE_ID,
main_cluster_id=CLUSTER_ID,
main_cluster_zone=CLUSTER_ZONE,
task_id="id"
)
op.execute(None)
mock_hook.assert_called_once_with()
mock_hook.return_value.create_instance.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_different_error_reraised(self, mock_hook):
mock_hook.return_value.get_instance.return_value = None
op = BigtableInstanceCreateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
main_cluster_id=CLUSTER_ID,
main_cluster_zone=CLUSTER_ZONE,
task_id="id"
)
mock_hook.return_value.create_instance.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.GoogleAPICallError('error'))
with self.assertRaises(google.api_core.exceptions.GoogleAPICallError):
op.execute(None)
mock_hook.assert_called_once_with()
mock_hook.return_value.create_instance.assert_called_once_with(
cluster_nodes=None,
cluster_storage_type=None,
instance_display_name=None,
instance_id=INSTANCE_ID,
instance_labels=None,
instance_type=None,
main_cluster_id=CLUSTER_ID,
main_cluster_zone=CLUSTER_ZONE,
project_id=PROJECT_ID,
replica_cluster_id=None,
replica_cluster_zone=None,
timeout=None
)
class BigtableClusterUpdateTest(unittest.TestCase):
@parameterized.expand([
('instance_id', PROJECT_ID, '', CLUSTER_ID, NODES),
('cluster_id', PROJECT_ID, INSTANCE_ID, '', NODES),
('nodes', PROJECT_ID, INSTANCE_ID, CLUSTER_ID, ''),
], testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0])
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_empty_attribute(self, missing_attribute, project_id, instance_id,
cluster_id, nodes, mock_hook):
with self.assertRaises(AirflowException) as e:
BigtableClusterUpdateOperator(
project_id=project_id,
instance_id=instance_id,
cluster_id=cluster_id,
nodes=nodes,
task_id="id"
)
err = e.exception
self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute))
mock_hook.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_updating_cluster_but_instance_does_not_exists(self, mock_hook):
        mock_hook.return_value.get_instance.return_value = None
with self.assertRaises(AirflowException) as e:
op = BigtableClusterUpdateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
cluster_id=CLUSTER_ID,
nodes=NODES,
task_id="id"
)
op.execute(None)
err = e.exception
self.assertEqual(str(err), "Dependency: instance '{}' does not exist.".format(
INSTANCE_ID))
        mock_hook.assert_called_once_with()
mock_hook.return_value.update_cluster.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_updating_cluster_but_instance_does_not_exists_empty_project_id(self,
mock_hook):
mock_hook.return_value.get_instance.return_value = None
with self.assertRaises(AirflowException) as e:
op = BigtableClusterUpdateOperator(
instance_id=INSTANCE_ID,
cluster_id=CLUSTER_ID,
nodes=NODES,
task_id="id"
)
op.execute(None)
err = e.exception
self.assertEqual(str(err), "Dependency: instance '{}' does not exist.".format(
INSTANCE_ID))
mock_hook.assert_called_once_with()
mock_hook.return_value.update_cluster.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_updating_cluster_that_does_not_exists(self, mock_hook):
instance = mock_hook.return_value.get_instance.ret
|
agiovann/Constrained_NMF
|
caiman/source_extraction/cnmf/utilities.py
|
Python
|
gpl-2.0
| 40,754
| 0.001718
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" A set of utilities, mostly for post-processing and visualization
We put arrays on disk as raw bytes, extending along the first dimension.
Alongside each array x we ensure the value x.dtype which stores the string
description of the array's dtype.
See Also:
------------
@url
.. image::
@author epnev
"""
# \package caiman/source_extraction/cnmf
# \version 1.0
# \copyright GNU General Public License v2.0
# \date Created on Sat Sep 12 15:52:53 2015
from builtins import str
from builtins import range
from past.utils import old_div
import cv2
import h5py
import logging
import numpy as np
import os
import pylab as pl
import scipy
from scipy.sparse import spdiags, issparse, csc_matrix, csr_matrix
import scipy.ndimage.morphology as morph
from skimage.feature.peak import _get_high_intensity_peaks
import tifffile
from typing import List
from .initialization import greedyROI
from ...base.rois import com
from ...mmapping import parallel_dot_product, load_memmap
from ...cluster import extract_patch_coordinates
from ...utils.stats import df_percentile
def decimation_matrix(dims, sub):
D = np.prod(dims)
if sub == 2 and D <= 10000: # faster for small matrices
ind = np.arange(D) // 2 - \
np.arange(dims[0], dims[0] + D) // (dims[0] * 2) * (dims[0] // 2) - \
(dims[0] % 2) * (np.arange(D) % (2 * dims[0]) > dims[0]) * (np.arange(1, 1 + D) % 2)
else:
def create_decimation_matrix_bruteforce(dims, sub):
dims_ds = tuple(1 + (np.array(dims) - 1) // sub)
d_ds = np.prod(dims_ds)
ds_matrix = np.eye(d_ds)
ds_matrix = np.repeat(np.repeat(
ds_matrix.reshape((d_ds,) + dims_ds, order='F'), sub, 1),
sub, 2)[:, :dims[0], :dims[1]].reshape((d_ds, -1), order='F')
ds_matrix /= ds_matrix.sum(1)[:, None]
ds_matrix = csc_matrix(ds_matrix, dtype=np.float32)
return ds_matrix
tmp = create_decimation_matrix_bruteforce((dims[0], sub), sub).indices
ind = np.concatenate([tmp] * (dims[1] // sub + 1))[:D] + \
np.arange(D) // (dims[0] * sub) * ((dims[0] - 1) // sub + 1)
data = 1. / np.unique(ind, return_counts=True)[1][ind]
return csc_matrix((data, ind, np.arange(1 + D)), dtype=np.float32)
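# A small usage sketch for decimation_matrix (shapes chosen for illustration):
# downsampling a 64x64 frame by a factor of 2 along each axis.
#
#   D = decimation_matrix((64, 64), 2)   # sparse matrix of shape (32*32, 64*64)
#   y_ds = D.dot(y)                      # y is the frame flattened with order='F'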
def peak_local_max(image, min_distance=1, threshold_abs=None,
threshold_rel=None, exclude_border=True, indices=True,
num_peaks=np.inf, footprint=None):
"""Find peaks in an image as coordinate list or boolean mask.
Adapted from skimage to use opencv for speed.
Replaced scipy.ndimage.maximum_filter by cv2.dilate.
Peaks are the local maxima in a region of `2 * min_distance + 1`
(i.e. peaks are separated by at least `min_distance`).
If peaks are flat (i.e. multiple adjacent pixels have identical
intensities), the coordinates of all such pixels are returned.
If both `threshold_abs` and `threshold_rel` are provided, the maximum
of the two is chosen as the minimum intensity threshold of peaks.
Parameters
----------
image : ndarray
Input image.
min_distance : int, optional
Minimum number of pixels separating peaks in a region of `2 *
min_distance + 1` (i.e. peaks are separated by at least
`min_distance`).
To find the maximum number of peaks, use `min_distance=1`.
threshold_abs : float, optional
Minimum intensity of peaks. By default, the absolute threshold is
the minimum intensity of the image.
threshold_rel : float, optional
Minimum intensity of peaks, calculated as `max(image) * threshold_rel`.
exclude_border : int, optional
If nonzero, `exclude_border` excludes peaks from
within `exclude_border`-pixels of the border of the image.
indices : bool, optional
If True, the output will be an array representing peak
coordinates. If False, the output will be a boolean array shaped as
`image.shape` with peaks present at True elements.
num_peaks : int, optional
Maximum number of peaks. When the number of peaks exceeds `num_peaks`,
return `num_peaks` peaks based on highest peak intensity.
footprint : ndarray of bools, optional
If provided, `footprint == 1` represents the local region within which
to search for peaks at every point in `image`. Overrides
`min_distance` (also for `exclude_border`).
Returns
-------
output : ndarray or ndarray of bools
* If `indices = True` : (row, column, ...) coordinates of peaks.
* If `indices = False` : Boolean array shaped like `image`, with peaks
represented by True values.
Notes
-----
The peak local maximum function returns the coordinates of local peaks
(maxima) in an image. A maximum filter is used for finding local maxima.
This operation dilates the original image. After comparison of the dilated
and original image, this function returns the coordinates or a mask of the
peaks where the dilated image equals the original image.
Examples
--------
>>> img1 = np.zeros((7, 7))
>>> img1[3, 4] = 1
>>> img1[3, 2] = 1.5
>>> img1
array([[ 0. , 0. , 0. , 0. , 0. , 0. , 0. ],
[ 0. , 0. , 0. , 0. , 0. , 0. , 0. ],
[ 0. , 0. , 0. , 0. , 0. , 0. , 0. ],
[ 0. , 0. , 1.5, 0. , 1. , 0. , 0. ],
[ 0. , 0. , 0. , 0. , 0. , 0. , 0. ],
[ 0. , 0. , 0. , 0. , 0. , 0. , 0. ],
[ 0. , 0. , 0. , 0. , 0. , 0. , 0. ]])
>>> peak_local_max(img1, min_distance=1)
array([[3, 4],
[3, 2]])
>>> peak_local_max(img1, min_distance=2)
array([[3, 2]])
>>> img2 = np.zeros((20, 20, 20))
>>> img2[10, 10, 10] = 1
>>> peak_local_max(img2, exclude_border=0)
array([[10, 10, 10]])
"""
if type(exclude_border) == bool:
exclude_border = min_distance if exclude_border else 0
out = np.zeros_like(image, dtype=np.bool)
if np.all(image == image.flat[0]):
if indices is True:
return np.empty((0, 2), np.int)
else:
return out
# Non maximum filter
if footprint is not None:
# image_max = ndi.maximum_filter(image, footprint=footprint,
# mode='constant')
image_max = cv2.dilate(image, footprint=footprint, iterations=1)
else:
size = 2 * min_distance + 1
# image_max = ndi.maximum_filter(image, size=size, mode='constant')
image_max = cv2.dilate(image, cv2.getStructuringElement(
cv2.MORPH_RECT, (size, size)), iterations=1)
mask = image == image_max
if exclude_border:
        # zero out the image borders
for i in range(mask.ndim):
mask = mask.swapaxes(0, i)
remove = (footprint.shape[i] if footprint is not None
else 2 * exclude_border)
mask[:remove // 2] = mask[-remove // 2:] = False
mask = mask.swapaxes(0, i)
# find top peak candidates above a threshold
thresholds = []
if threshold_abs is None:
threshold_abs = image.min()
thresholds.append(threshold_abs)
if threshold_rel is not None:
thresholds.append(threshold_rel * image.max())
if thresholds:
mask &= image > max(thresholds)
# Select highest intensities (num_peaks)
coordinates = _get_high_intensity_peaks(image, mask, num_peaks)
if indices is True:
return coordinates
else:
nd_indices = tuple(coordinates.T)
out[nd_indices] = True
return out
def dict_compare(d1, d2):
d1_keys = set(d1.keys())
d2_keys = set(d2.keys())
intersect_keys = d1_keys.intersection(d2_keys)
added = d1_keys - d2_keys
removed = d2_keys - d1_keys
modified = {o : (d1[o], d2[o]) for o in intersect_keys if np.any(d1[o] != d2[o])}
same = set(o for o in intersect_keys if np.all(d1[o] == d2[o]))
return added, removed, modified, same
def computeDFF_traces(Yr, A, C, bl, quantil
|
jonathanverner/brython
|
www/speed/benchmarks/create_function_complex_args.py
|
Python
|
bsd-3-clause
| 71
| 0.014085
|
for i in range(1000000):
    def f(x, y=1, *args, **kw):
        pass
|
appuio/ansible-role-openshift-zabbix-monitoring
|
vendor/openshift-tools/ansible/roles/lib_gcloud/build/lib/gcpresource.py
|
Python
|
apache-2.0
| 683
| 0
|
# pylint: skip-file
class GCPResource(object):
    '''Object to represent a gcp resource'''
    def __init__(self, rname, rtype, project, zone):
'''constructor for gcp resource'''
self._name = rname
self._type = rtype
self._project = project
self._zone = zone
@property
def name(self):
'''property for name'''
return self._name
@property
def type(self):
'''property for type'''
return self._type
@property
def project(self):
'''property for project'''
return self._project
@property
def zone(self):
'''property for zone'''
return self._zone
|
lydonjake/cs-grad-school-app
|
program/__init__.py
|
Python
|
gpl-3.0
| 211
| 0.004739
|
__author__ = "Jacob Lydon"
__copyright__ = "Copyright 2017"
__credits__ = []
__license__ = "GPLv3"
__version__ = "0.1"
__maintainer__ = "Jacob Lydon"
__email__ = "[email protected]"
__status__ = "Development"
|
polysquare/polysquare-ci-scripts
|
ciscripts/check/python/__init__.py
|
Python
|
mit
| 190
| 0
|
# /ciscripts/check/python/__init__.py
#
# Module loader file for /ciscripts/check/python.
#
# See /LICENCE.md for Copyright information
"""Module loader file for /ciscripts/check/p
|
ython."""
|
peter1010/my_vim
|
vimfiles/py_scripts/build_types/gcc.py
|
Python
|
gpl-2.0
| 1,180
| 0.036441
|
# Build Code
import os
import subprocess
import re
class GCC:
def __init__(self):
self.enter_match = re.compile(r'Entering directory')
self.leave_match = re.compile(r'Leaving directory')
def can_build(self, dirname, ext):
if ext in (".c", ".h", ".cpp", ".hpp"):
files = [f.lower() for f in os.listdir(dirname)]
if "makefile" in files:
self.makefile_dir = dirname
return True
return False
def run(self, action, output):
        args = ["make"]
if action:
args.append(action)
print(args)
proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
errorLines = []
while True:
line = proc.stdout.readline().decode("utf-8")
if len(line) == 0:
break
output.write(line)
if line.startswith("In file included from"):
errorLines.append(line)
else:
idx = line.find("Entering directory")
if idx >= 0:
                    errorLines.append(line)
else:
idx = line.find("Leaving directory")
if idx >= 0:
errorLines.append(line)
else:
idx = line.find("warning:")
if idx >= 0:
errorLines.append(line)
output.write(line)
return errorLines
def get_plugin():
return GCC()
|
jszokoli/jsTK
|
jsRenamer/field_replacer.py
|
Python
|
gpl-3.0
| 6,998
| 0.018148
|
import maya.cmds as cmds
from . import renamer_settings as settings
class FieldReplacer(object):
def __init__(self):
print 'Initializing jsRenamer FieldReplacer...'
#replaceMaterial = self.replaceMaterial
def checkTemplate(self,node):
#availPos = ['C','L','R','LF','RF','LB','RB','U','B']
#availSuf=['GES','GEP','PLY','NRB']
#sel = cmds.ls(sl=1)
#for node in sel:
splitNode = node.split('_')
#print splitNode
#print splitNode[0][-3:]
#check if correct amount of fields
if len(splitNode) == 5:
return True
else:
return False
##########################################
#####REPLACE FIELD########################
##########################################
def replaceMaterial(self, args=None):
ReplaceSel = cmds.ls(sl=1)
prefixReplace = cmds.textField('materialField',query=True,tx=1)
if prefixReplace == '':
pass
else:
for each in ReplaceSel:
if self.checkTemplate(each) == True:
if '|' in each:
replacerOldName=each.split('|')[-1]
else:
replacerOldName = each
prefixSplit = replacerOldName.split('_',1)
prefixReplaceName = prefixReplace+ '_' +str(prefixSplit[1])
#print prefixReplaceName
cmds.rename(each,prefixReplaceName)
else:
cmds.error(each+' does not match naming Template (default_C_default_0000_???)')
def replacePosition(self, args=None):
ReplaceSel = cmds.ls(sl=1)
positionReplace = cmds.optionMenu('positionField',query=True,v=1)
for each in ReplaceSel:
if self.checkTemplate(each) == True:
#print each
if '|' in each:
replacerOldName=each.split('|')[-1]
else:
replacerOldName = each
positionSplit = replacerOldName.split('_')
newPosName = positionSplit[0]+'_'+positionReplace+'_'+positionSplit[2]+'_'+positionSplit[3]+'_'+positionSplit[4]
#print newPosName
cmds.rename(each,newPosName)
else:
cmds.error(each+' does not match naming Template (default_C_default_0000_???)')
def replaceBody(self, args=None):
ReplaceSel = cmds.ls(sl=1)
bodyReplace = cmds.textField('bodyField',query=True,tx=1)
for each in ReplaceSel:
if self.checkTemplate(each) == True:
#print each
if '|' in each:
replacerOldName=each.split('|')[-1]
else:
replacerOldName = each
bodySplit = replacerOldName.split('_')
newBodyName = bodySplit[0]+'_'+bodySplit[1]+'_'+bodyReplace+'_'+bodySplit[3]+'_'+bodySplit[4]
#print newBodyName
cmds.rename(each,newBodyName)
else:
cmds.error(each+' does not match naming Template (default_C_default_0000_???)')
###Replace GEO_Suffix
def replaceGeoSuffix(self, args=None):
ReplaceSel = cmds.ls(sl=1)
suffixReplace = cmds.optionMenu('suffixField',query=True,v=1)
for each in ReplaceSel:
if self.checkTemplate(each) == True:
#print each
if '|' in each:
replacerOldName=each.split('|')[-1]
else:
replacerOldName = each
suffixSplit = replacerOldName.rsplit('_',1)
suffixReplaceName = suffixSplit[0] + '_' +suffixReplace
#print suffixReplaceName
cmds.rename(each,suffixReplaceName)
else:
cmds.error(each+' does not match naming Template (default_C_default_0000_???)')
###Replacer
def replacer(self, args=None):
replacerSel = cmds.ls(sl=1)
replacerOld = cmds.textField('replacerOldField',query = True,text=True)
        replacerNew = cmds.textField('replacerNewField',query = True,text=True)
for each in replacerSel:
if '|' in each:
replacerOldName=each.split('|')[-1]
else:
replacerOldName = each
replacerNewName = replacerOldName.replace(replacerOld,replacerNew)
print replacerNewName
cmds.rename(each, replacerNewName)
###PrefixAdd
def addPrefix(self, args=None):
prefixSel = cmds.ls(sl=1)
prefixAddition = cmds.textField('addPrefixField',query = True,text=True)
for each in prefixSel:
newPrefixName = prefixAddition+each
print newPrefixName
cmds.rename(each,newPrefixName)
###Suffix Add
def addSuffix(self, args=None):
suffixSel = cmds.ls(sl=1)
suffixAddition = cmds.textField('addSuffixField',query = True,text=True)
for each in suffixSel:
newSuffixName = each+suffixAddition
print newSuffixName
cmds.rename(each,newSuffixName)
###Replace Prefix
def replacePrefix(self, args=None):
prefixReplaceSel = cmds.ls(sl=1)
prefixReplace = cmds.textField('replacePrefixField',query = True,text=True)
if prefixReplace == '':
pass
else:
for each in prefixReplaceSel:
try:
if '|' in each:
replacerOldName=each.split('|')[-1]
else:
replacerOldName = each
prefixSplit = replacerOldName.split('_',1)
prefixReplaceName = prefixReplace+ '_' +str(prefixSplit[1])
print prefixReplaceName
cmds.rename(each,prefixReplaceName)
except:
pass
###Replace Geo Suffix
def replaceSuffix(self, args=None):
suffixReplaceSel = cmds.ls(sl=1)
suffixReplace = cmds.textField('replaceSuffixField',query = True,text=True)
if suffixReplace == '':
pass
else:
for each in suffixReplaceSel:
try:
if '|' in each:
replacerOldName=each.split('|')[-1]
else:
replacerOldName = each
suffixSplit = replacerOldName.rsplit('_',1)
suffixReplaceName = suffixSplit[0] + '_' +suffixReplace
print suffixReplaceName
cmds.rename(each,suffixReplaceName)
except:
pass
|
joopert/home-assistant
|
homeassistant/components/cloud/http_api.py
|
Python
|
apache-2.0
| 19,172
| 0.000939
|
"""The HTTP api to control the cloud integration."""
import asyncio
from functools import wraps
import logging
import aiohttp
import async_timeout
import attr
from hass_nabucasa import Cloud, auth, thingtalk
from hass_nabucasa.const import STATE_DISCONNECTED
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.components.alexa import (
entities as alexa_entities,
errors as alexa_errors,
)
from homeassistant.components.google_assistant import helpers as google_helpers
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.components.websocket_api import const as ws_const
from homeassistant.core import callback
from .const import (
DOMAIN,
PREF_ALEXA_REPORT_STATE,
PREF_ENABLE_ALEXA,
PREF_ENABLE_GOOGLE,
PREF_GOOGLE_REPORT_STATE,
PREF_GOOGLE_SECURE_DEVICES_PIN,
REQUEST_TIMEOUT,
InvalidTrustedNetworks,
InvalidTrustedProxies,
RequireRelink,
)
_LOGGER = logging.getLogger(__name__)
WS_TYPE_STATUS = "cloud/status"
SCHEMA_WS_STATUS = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{vol.Required("type"): WS_TYPE_STATUS}
)
WS_TYPE_SUBSCRIPTION = "cloud/subscription"
SCHEMA_WS_SUBSCRIPTION = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{vol.Required("type"): WS_TYPE_SUBSCRIPTION}
)
WS_TYPE_HOOK_CREATE = "cloud/cloudhook/create"
SCHEMA_WS_HOOK_CREATE = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{vol.Required("type"): WS_TYPE_HOOK_CREATE, vol.Required("webhook_id"): str}
)
WS_TYPE_HOOK_DELETE = "cloud/cloudhook/delete"
SCHEMA_WS_HOOK_DELETE = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{vol.Required("type"): WS_TYPE_HOOK_DELETE, vol.Required("webhook_id"): str}
)
_CLOUD_ERRORS = {
InvalidTrustedNetworks: (
500,
"Remote UI not compatible with 127.0.0.1/::1 as a trusted network.",
),
InvalidTrustedProxies: (
500,
"Remote UI not compatible with 127.0.0.1/::1 as trusted proxies.",
),
}
async def async_setup(hass):
"""Initialize the HTTP API."""
async_register_command = hass.components.websocket_api.async_register_command
async_register_command(WS_TYPE_STATUS, websocket_cloud_status, SCHEMA_WS_STATUS)
async_register_command(
WS_TYPE_SUBSCRIPTION, websocket_subscription, SCHEMA_WS_SUBSCRIPTION
)
async_register_command(websocket_update_prefs)
async_register_command(
WS_TYPE_HOOK_CREATE, websocket_hook_create, SCHEMA_WS_HOOK_CREATE
)
async_register_command(
WS_TYPE_HOOK_DELETE, websocket_hook_delete, SCHEMA_WS_HOOK_DELETE
)
async_register_command(websocket_remote_connect)
async_register_command(websocket_remote_disconnect)
async_register_command(google_assistant_list)
async_register_command(google_assistant_update)
async_register_command(alexa_list)
async_register_command(alexa_update)
async_register_command(alexa_sync)
async_register_command(thingtalk_convert)
hass.http.register_view(GoogleActionsSyncView)
hass.http.register_view(CloudLoginView)
hass.http.register_view(CloudLogoutView)
hass.http.register_view(CloudRegisterView)
hass.http.register_view(CloudResendConfirmView)
hass.http.register_view(CloudForgotPasswordView)
_CLOUD_ERRORS.update(
{
auth.UserNotFound: (400, "User does not exist."),
auth.UserNotConfirmed: (400, "Email not confirmed."),
auth.UserExists: (400, "An account with the given email already exists."),
auth.Unauthenticated: (401, "Authentication failed."),
auth.PasswordChangeRequired: (400, "Password change required."),
asyncio.TimeoutError: (502, "Unable to reach the Home Assistant cloud."),
aiohttp.ClientError: (500, "Error making internal request"),
}
)
def _handle_cloud_errors(handler):
"""Webview decorator to handle auth errors."""
@wraps(handler)
async def error_handler(view, request, *args, **kwargs):
"""Handle exceptions that raise from the wrapped reque
|
st handler."""
try:
result = await handler(view, request, *args, **kwargs)
            return result
except Exception as err: # pylint: disable=broad-except
status, msg = _process_cloud_exception(err, request.path)
return view.json_message(
msg, status_code=status, message_code=err.__class__.__name__.lower()
)
return error_handler
def _ws_handle_cloud_errors(handler):
"""Websocket decorator to handle auth errors."""
@wraps(handler)
async def error_handler(hass, connection, msg):
"""Handle exceptions that raise from the wrapped handler."""
try:
return await handler(hass, connection, msg)
except Exception as err: # pylint: disable=broad-except
err_status, err_msg = _process_cloud_exception(err, msg["type"])
connection.send_error(msg["id"], err_status, err_msg)
return error_handler
def _process_cloud_exception(exc, where):
"""Process a cloud exception."""
err_info = _CLOUD_ERRORS.get(exc.__class__)
if err_info is None:
_LOGGER.exception("Unexpected error processing request for %s", where)
err_info = (502, f"Unexpected error: {exc}")
return err_info
class GoogleActionsSyncView(HomeAssistantView):
"""Trigger a Google Actions Smart Home Sync."""
url = "/api/cloud/google_actions/sync"
name = "api:cloud:google_actions/sync"
@_handle_cloud_errors
async def post(self, request):
"""Trigger a Google Actions sync."""
hass = request.app["hass"]
cloud: Cloud = hass.data[DOMAIN]
gconf = await cloud.client.get_google_config()
status = await gconf.async_sync_entities(gconf.agent_user_id)
return self.json({}, status_code=status)
class CloudLoginView(HomeAssistantView):
"""Login to Home Assistant cloud."""
url = "/api/cloud/login"
name = "api:cloud:login"
@_handle_cloud_errors
@RequestDataValidator(
vol.Schema({vol.Required("email"): str, vol.Required("password"): str})
)
async def post(self, request, data):
"""Handle login request."""
hass = request.app["hass"]
cloud = hass.data[DOMAIN]
await cloud.login(data["email"], data["password"])
return self.json({"success": True})
class CloudLogoutView(HomeAssistantView):
"""Log out of the Home Assistant cloud."""
url = "/api/cloud/logout"
name = "api:cloud:logout"
@_handle_cloud_errors
async def post(self, request):
"""Handle logout request."""
hass = request.app["hass"]
cloud = hass.data[DOMAIN]
with async_timeout.timeout(REQUEST_TIMEOUT):
await cloud.logout()
return self.json_message("ok")
class CloudRegisterView(HomeAssistantView):
"""Register on the Home Assistant cloud."""
url = "/api/cloud/register"
name = "api:cloud:register"
@_handle_cloud_errors
@RequestDataValidator(
vol.Schema(
{
vol.Required("email"): str,
vol.Required("password"): vol.All(str, vol.Length(min=6)),
}
)
)
async def post(self, request, data):
"""Handle registration request."""
hass = request.app["hass"]
cloud = hass.data[DOMAIN]
with async_timeout.timeout(REQUEST_TIMEOUT):
await hass.async_add_job(
cloud.auth.register, data["email"], data["password"]
)
return self.json_message("ok")
class CloudResendConfirmView(HomeAssistantView):
"""Resend email confirmation code."""
url = "/api/cloud/resend_confirm"
name = "api:cloud:resend_confirm"
@_handle_cloud_errors
@RequestDataValidator(vol.Schema({vol.Required("email"): str}))
async def post(self, request, data):
"""Handle resending confirm email code request."""
hass = request.app["hass"]
cloud = hass.data[DOMAIN]
with asyn
|
gcobos/rft
|
scripts/get_all_images.py
|
Python
|
agpl-3.0
| 207
| 0
|
import web
db = web.database(dbn='mysql', db='googlemodules', user='ale', passwd='3babes')
for url in db.select('function', what='screenshot'):
    print 'http://www.googlemodules.com/image/screenshot'
|
bnorthan/projects
|
Scripts/Jython/Psfs/PSFModel.py
|
Python
|
gpl-2.0
| 1,817
| 0.053935
|
class PSFModel(object):
def __init__(self, scopeType, psfModel, xySpace, zSpace, emissionWavelength, numericalAperture, designImmersionOilRefractiveIndex, \
designSpecimenLayerRefractiveIndex, actualImmersionOilRefractiveIndex, \
actualSpecimenLayerRefractiveIndex, actualPointSourceDepthInSpecimenLayer, homeDirectory):
self.scopeType=scopeType
self.psfModel=psfModel
self.xySpace=xySpace
self.zSpace=zSpace
self.emissionWavelength=emissionWavelength
self.numericalAperture=numericalAperture
self.designImmersionOilRefractiveIndex=designImmersionOilRefractiveIndex
self.designSpecimenLayerRefractiveIndex=designSpecimenLayerRefractiveIndex
        self.actualImmersionOilRefractiveIndex=actualImmersionOilRefractiveIndex
self.actualSpecimenLayerRefractiveIndex=actualSpecimenLayerRefractiveIndex
        self.actualPointSourceDepthInSpecimenLayer=actualPointSourceDepthInSpecimenLayer
def CreatePsf(self, command, psfCommandName, xSize, ySize, zSize):
module=command.run(psfCommandName, True, \
"xSize", xSize, \
"ySize", ySize, \
"zSize", zSize, \
"fftType", "none", \
"scopeType", self.scopeType, \
"psfModel", self.psfModel, \
"xySpace", self.xySpace, \
"zSpace", self.zSpace, \
"emissionWavelength", self.emissionWavelength, \
"numericalAperture", self.numericalAperture, \
"designImmersionOilRefractiveIndex", self.designImmersionOilRefractiveIndex, \
"designSpecimenLayerRefractiveIndex", self.designSpecimenLayerRefractiveIndex, \
"actualImmersionOilRefractiveIndex", self.actualImmersionOilRefractiveIndex, \
"actualSpecimenLayerRefractiveIndex", self.actualSpecimenLayerRefractiveIndex, \
"actualPointSourceDepthInSpecimenLayer", self.actualPointSourceDepthInSpecimenLayer, \
"centerPsf", True).get()
return module.getOutputs().get("output");
|
lqe/EconomyCompensation
|
code.py
|
Python
|
gpl-3.0
| 2,064
| 0.036126
|
# -*- coding:utf-8 -*-
'''Created on 2014-8-7 @author: Administrator '''
from sys import path as sys_path
if not '..' in sys_path:sys_path.append("..") # so that modules in the parent directory can be imported
import web
# early version: one file was split into several files, and the classes are imported back here
from login.login import (index,login,loginCheck,In,reset,register,find_password)
from blog.blog import (write_blog,upload,blog_content_manage,Get,Del,blog_single_self,blog_single_other)
from admin.admin import (adminAdd,adminGet,adminDel,adminEdit)
# later parts use web.py sub-applications
from wiki.view import wiki_app
from download.download import download_app
from meeting.meeting import meeting_app
from bbs.bbs import bbs_app
urls=(
'/','index',
'/login','login',
'/loginCheck','loginCheck',
'/(admin|user_blog)','In',
'/reset/(.*)','reset',
'/register','register',
'/find_password','find_password',
'/write_blog','write_blog',
'/upload','upload',
'/blog_content_manage','blog_content_manage',
'/Get/classification','Get',
'/Del/blog_content','Del',
'/blog_single_self','blog_single_self',
'/blog_single_other','blog_single_other',
'/admin/add','adminAdd',
'/admin/get','adminGet',
'/admin/del','adminDel',
'/admin/edit','adminEdit',
'/wiki',wiki_app,
'/download',download_app,
'/meeting',meeting_app,
'/bbs',bbs_app,
)
app = web.application(urls ,locals())
# session is only usable when web.config.debug = False; the workaround below handles this. In production web.config.debug is usually set to False
web.config.debug = True
if web.config.get('_session') is None:
session = web.session.Session(app,web.session.DiskStore('sessions'))
    web.config._session=session
else:
session=web.config._session
# the following approach solves the problem of passing the session between multiple files
def session_hook():web.ctx.session=session
app.add_processor(web.loadhook(session_hook))
if __name__=='__main__':
app.run()
|
foobarbazblarg/stayclean
|
stayclean-2015-january/display-on-last-day-before-participants-must-check-in.py
|
Python
|
mit
| 1,018
| 0.006876
|
#!/usr/bin/python
import participantCollection
participantCollection = participantCollection.ParticipantCollection()
numberStillIn = participantCollection.sizeOfParticipantsWhoAreStillIn()
initialNumber = participantCollection.size()
print "There are currently **" + str(numberStillIn) + " out of " + str(initialNumber) +"** original participants. That's **" + str(int(round(100*numberStillIn/initialNum
|
ber,0))) + "%**."
print "These participants have checked in at least once in the last 15 days:"
print ""
for participant in participantCollection.participantsWhoAreStillInAndHaveCheckedIn():
print "/u/" + participant.name
print ""
print "These participants have not reported a relapse, so they are still in the running, but **if they do not check in by the end of today, they will be removed from the list, and will not be considered victorious**:"
print ""
for participant in participantCollection.participantsWhoAreStillInAndHaveNotCheckedIn():
print "/u/" + participant.name + " ~"
print ""
|
smarkets/smk_python_sdk
|
smarkets/statsd.py
|
Python
|
mit
| 2,824
| 0.000354
|
from __future__ import absolute_import, division, print_function, unicode_literals
# Statsd client. Loosely based on the version by Steve Ivy <[email protected]>
import logging
import random
import socket
import time
from contextlib import contextmanager
log = logging.getLogger(__name__)
class StatsD(object):
def __init__(self, host='localhost', port=8125, enabled=True, prefix=''):
self.addr = None
self.enabled = enabled
if enabled:
self.set_address(host, port)
self.prefix = prefix
self.udp_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def set_address(self, host, port=8125):
try:
self.addr = (socket.gethostbyname(host), port)
except socket.gaierror:
self.addr = None
self.enabled = False
@contextmanager
def timed(self, stat, sample_rate=1):
log.debug('Entering timed context for %r' % (stat,))
start = time.time()
yield
duration = int((time.time() - start) * 1000)
log.debug('Exiting timed context for %r' % (stat,))
self.timing(stat, duration, sample_rate)
def timing(self, stats, time, sample_rate=1):
"""
Log timing information
"""
unit = 'ms'
log.debug('%r took %s %s' % (stats, time, unit))
self.update_stats(stats, "%s|%s" % (time, unit), sample_rate)
def increment(self, stats, sample_rate=1):
"""
Increments one or more stats counters
"""
self.update_stats(stats, 1, sample_rate)
def decrement(self, stats, sample_rate=1):
"""
Decrements one or more stats counters
"""
self.update_stats(stats, -1, sample_rate)
def update_stats(self, stats, delta=1, sampleRate=1):
"""
Updates one or more stats counters by arbitrary amounts
"""
if not self.enabled or self.addr is None:
return
if type(stats) is not list:
stats = [stats]
data = {}
for stat in stats:
data["%s%s" % (self.prefix, stat)] = "%s|c" % delta
self.send(data, sampleRate)
def send(self, data, sample_rate):
sampled_data = {}
if sample_rate < 1:
if random.random() <= sample_rate:
for stat, value in data.items():
sampled_data[stat] = "%s|@%s" % (value, sample_rate)
else:
sampled_data = data
try:
for stat, value in sampled_data.items():
self.udp_sock.sendto("%s:%s" % (stat, value), self.addr)
except Exception as e:
log.exception('Failed to send data to the server: %r', e)
if __name__ == '__main__':
sd = StatsD()
    for i in range(1, 100):
        sd.increment('test')
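# A sketch of the timed() context manager above (the stat name and the timed
# call are illustrative, not part of this module):
#
#   with sd.timed('db.query'):
#       run_query()   # duration in ms is reported when the block exits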
|
jgrillo/zoonomia
|
zoonomia/_version.py
|
Python
|
mit
| 22
| 0
|
__version__ = '0.0.1'
|
transparenciahackday/dar-scripts
|
scripts/raspadar/txt2taggedtext.py
|
Python
|
gpl-3.0
| 23,048
| 0.008054
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import locale
locale.setlocale(locale.LC_ALL, 'pt_PT.UTF8')
import logging
logging.basicConfig(level=logging.DEBUG)
import os
import string
import datetime
from pprint import pprint
from html2text import add_item
### Constants ###
MP_STATEMENT = 'deputado_intervencao'
PM_STATEMENT = 'pm_intervencao'
MINISTER_STATEMENT = 'ministro_intervencao'
STATE_SECRETARY_STATEMENT = 'secestado_intervencao'
PRESIDENT_STATEMENT = 'presidente'
SECRETARY_STATEMENT = 'secretario'
STATEMENT = 'intervencao'
MP_INTERRUPTION = 'deputado_interrupcao'
INTERRUPTION = 'vozes_aparte'
APPLAUSE = 'aplauso'
PROTEST = 'protesto'
LAUGHTER = 'riso'
NOTE = 'nota'
PAUSE = 'pausa'
VOTE = 'voto'
TIME = 'hora'
OTHER = 'outro'
INTRO = 'intro'
SUMMARY = 'sumario'
ROLLCALL = 'chamada'
ROLLCALL_PRESENT = 'chamada_presentes'
ROLLCALL_ABSENT = 'chamada_ausentes'
ROLLCALL_LATE = 'chamada_atrasados'
ROLLCALL_MISSION = 'chamada_missao'
SECTION = 'seccao'
END = 'fim'
MP_CONT = 'continuacao'
MP_ASIDE = 'deputado_aparte'
OTHER_START = 'outro_inicio'
OTHER_CONT = 'outro_cont'
PRESIDENT_ASIDE = 'presidente_aparte'
PRESIDENT_NEWSPEAKER = 'presidente_temapalavra'
PRESIDENT_ROLLCALL = 'presidente_chamada'
PRESIDENT_OPEN = 'presidente_aberta'
PRESIDENT_CLOSE = 'presidente_encerrada'
PRESIDENT_SUSPEND = 'presidente_suspensa'
PRESIDENT_REOPEN = 'presidente_reaberta'
PRESIDENT_SWITCH = 'presidente_troca'
ORPHAN = 'orfao'
### Regexes ###
re_hora = (re.compile(ur'^Eram (?P<hours>[0-9]{1,2}) horas e (?P<minutes>[0-9]{1,2}) minutos.$', re.UNICODE), '')
# Separator between speaker and statement (a few typos and inconsistencies force
# us to be fairly permissive when defining the expression)
# Note that this regex is unicode, because of the hyphens (Python won't
# find them otherwise)
re_separador = (re.compile(ur'\:?[ \.]?[\–\–\—\-] ', re.LOCALE|re.UNICODE), ': -')
re_separador_estrito = (re.compile(ur'\: [\–\–\—\-] ', re.LOCALE|re.UNICODE), ': - ')
re_mauseparador = (re.compile(ur'(?P<prevchar>[\)a-z])\:[ \.][\–\–\—\-](?P<firstword>[\w\»])', re.LOCALE|re.UNICODE), '\g<prevchar>: - \g<firstword>')
re_titulo = (re.compile(ur'((O Sr[\.:])|(A Sr\.?(ª)?))(?!( Deputad))'), '')
re_ministro = (re.compile(ur'^Ministr'), '')
re_secestado = (re.compile(ur'^Secretári[oa] de Estado.*:'), '')
re_palavra = (re.compile(ur'(concedo(-lhe)?|dou|tem|vou dar)(,?[\w ^,]+,?)? a palavra|(faça favor(?! de terminar))', re.UNICODE|re.IGNORECASE), '')
re_concluir = (re.compile(ur'(tempo esgotou-se)|(esgotou-se o( seu)? tempo)|((tem (mesmo )?de|queira) (terminar|concluir))|((ultrapassou|esgotou|terminou)[\w ,]* o( seu)? tempo)|((peço|solicito)(-lhe)? que (termine|conclua))|(atenção ao tempo)|(remate o seu pensamento)|(atenção para o tempo de que dispõe)|(peço desculpa mas quero inform)|(deixem ouvir o orador)|(faça favor de prosseguir a sua)|(favor de (concluir|terminar))|(poder prosseguir a sua intervenção)|(faça( o)? favor de continuar|(queira[\w ,]* concluir))', re.UNICODE|re.IGNORECASE), '')
re_president = (re.compile(ur'O Sr\.?|A Sr\.?ª? Presidente\ ?(?P<nome>\([\w ]+\))?(?P<sep>\:[ \.]?[\–\–\—\-])'), '')
re_cont = (re.compile(ur'O Orador|A Oradora(?P<sep>\:[ \.]?[\–\–\—\-\-])', re.UNICODE), '')
re_voto = (re.compile(ur'^Submetid[oa]s? à votação', re.UNICODE), '')
re_interv = (re.compile(ur'^(?P<titulo>O Sr[\.:]?|A Sr[\.:]?(ª)?)\ (?P<nome>[\w ,’-]+)\ ?(?P<partido>\([\w -]+\))?(?P<sep>\:?[ \.]?[\–\–\—\-]? ?)', re.UNICODE), '')
#re_interv_semquebra = (re.compile(ur'(?P<titulo>O Sr\.?|A Sr(\.)?(ª)?)\ (?P<nome>[\w ,’-]{1,30})\ ?(?P<partido>\([\w -]+\))?(?P<sep>\:[ \.]?[\–\–\—\-])', re.UNICODE), '')
re_interv_semquebra = (re.compile(ur'(?P<titulo>O Sr\.?|A Sr(\.)?(ª)?)\ (?P<nome>[\w ,’-]{1,50})\ ?(?P<partido>\([\w -]+\))?(?P<sep>\:[ \.]?[\–\–\—\-] )', re.UNICODE), '')
re_interv_simples = (re.compile(ur'^(?P<nome>[\w ,’-]+)\ ?(?P<partido>\([\w -]+\))?\ ?(?P<sep>\:?[ \.]?[\–\–\—\-]? )', re.UNICODE), '')
def change_type(p, newtype):
    stype, text = p.split(']', 1)
text = text.strip()
return '[%s] %s' % (newtype, text)
def get_type(p):
stype, text = p.split(']', 1)
stype = stype.strip('[] ')
return stype
def get_speaker(p):
stype, text = p.split(']', 1)
text = text.strip()
try:
speaker, text = re.split(re_separador[0], text, 1)
except ValueError:
        print 'Could not determine the speaker. Returning empty.'
print ' ' + p
print
raise
return ''
return speaker
def get_text(p):
stype, text = p.split(']', 1)
text = text.strip()
if ': -' in text:
speaker, text = text.split(':', 1)
else:
pass
return text
def strip_type(p):
stype, text = p.split(']', 1)
text = text.strip()
return text
def check_and_split_para(p):
    # check whether it matches the statement regex
    # if not, return None
    # if so, split and regroup
pass
class RaspadarTagger:
def __init__(self):
self.contents = []
        # cache to record government posts and names
self.gov_posts = {}
def parse_txt_file(self, txtfile):
buffer = open(txtfile, 'r').read()
paragraphs = buffer.split('\n\n')
for para in paragraphs:
self.parse_paragraph(para)
self.process_orphans()
def parse_paragraph(self, p):
p = p.decode('utf-8')
p = p.strip(' \n')
if not p:
return
        # FIXME: monkeypatch here: the separators need to be fixed. This should
        # happen in html2txt, but I don't have time to re-process
        # the HTMLs right now. Sorry about that.
if re.search(re_mauseparador[0], p):
p = re.sub(re_mauseparador[0], re_mauseparador[1], p, count=1)
        # does it match the statement regex?
        if re.search(re_interv[0], p):
            # it's a statement
            self.parse_statement(p)
        elif re.search(re_cont[0], p):
            # it's the continuation of a statement ("O Orador")
            self.parse_statement(p, cont=True)
        else:
            # it's something else
            self.parse_other(p)
def parse_statement(self, p, cont=False):
if cont:
p = re.sub(re_cont[0], re_cont[1], p, 1)
p = re.sub(re_separador[0], '', p, 1).strip()
stype = MP_CONT
else:
if not (re.match(re_titulo[0], p) and re.search(re_separador[0], p)):
stype = ORPHAN
else:
speaker, text = re.split(re_separador[0], p, 1)
speaker = re.sub(re_titulo[0], re_titulo[1], speaker, count=1).strip(u'ª \n')
p = speaker + ': - ' + text.strip()
if p.startswith('Presidente'):
return self.parse_president(p)
elif re.match(re_ministro[0], p) or re.match(re_secestado[0], p):
return self.parse_government(p)
elif p.startswith(u'Secretári') and not 'Estado' in re.split(re_separador[0], p)[0]:
return self.parse_secretary(p)
elif re.match(re_interv_simples[0], p):
stype = MP_STATEMENT
else:
stype = STATEMENT
output = '[%s] %s' % (stype, p)
        # find statements where there is no line break
        # TODO: this check has to be done in parse_paragraph
if re.search(re_interv_semquebra[0], output):
            #print '### Found a condensed one: ###'
result = re.split(re_interv_semquebra[0], output)
new_p = ''
for part in result[1:]:
if part and part != u'ª':
if part.endswith(('.', u'ª')):
new_p += part + ' '
else:
new_p += part
            # tidy up the first part
            # print 'First part: ' + result[0]
            # print 'Second part: ' + new_p
            # print
            self.contents.append(result[0])
            # process the second part
try:
self.parse_statement(new_p)
except RuntimeError:
|
irinabov/debian-qpid-dispatch
|
python/qpid_dispatch_internal/router/__init__.py
|
Python
|
apache-2.0
| 1,041
| 0
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS
|
IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from .engine import RouterEngine
from .address import Address
__all__ = ["RouterEngine", "Address"]
|
google/embedding-tests
|
thought/image_text_model.py
|
Python
|
apache-2.0
| 11,824
| 0.008373
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from __future__ import print_function
from transformer_layers import TransformerBlock
import tensorflow as tf
def mean_pool(x, m):
m = tf.cast(m, tf.float32)
x = tf.multiply(x, tf.expand_dims(m, 2))
x = tf.reduce_sum(x, 1) / tf.reduce_sum(m, 1, keepdims=True)
return x
class RNN(object):
def __init__(self, num_units):
self.rnn_fw = tf.keras.layers.CuDNNLSTM(units=num_units // 2,
return_sequences=True,
go_backwards=False,
name='rnn_fw')
self.rnn_bw = tf.keras.layers.CuDNNLSTM(units=num_units // 2,
return_sequences=True,
go_backwards=False,
name='rnn_bw')
def forward(self, inputs, masks):
def rnn_fn(x, m, rnn):
x = rnn(x)
# x = tf.reduce_max(x, 1) # max pooling
# x = mean_pool(x, m) # mean pooling
indices = tf.reduce_sum(m, 1, keepdims=True) - 1
x = tf.gather_nd(x, tf.cast(indices, tf.int32), batch_dims=1)
return x
lengths = tf.reduce_sum(tf.cast(masks, tf.int32), axis=1)
masks = tf.cast(masks, tf.float32)
inputs = tf.multiply(inputs, tf.expand_dims(masks, 2))
inputs_bw = tf.reverse_sequence(inputs, lengths, 1, 0)
outputs_fw = rnn_fn(inputs, masks, self.rnn_fw)
outputs_bw = rnn_fn(inputs_bw, masks, self.rnn_bw)
outputs = tf.concat([outputs_fw, outputs_bw], axis=1)
return outputs
class Transformer(object):
def __init__(self, num_units):
self.hidden = tf.keras.layers.Dense(num_units)
self.transformer = TransformerBlock(num_units, num_units * 4,
num_layer=2)
def forward(self, inputs, masks):
masks = tf.cast(masks, tf.float32)
    inputs = tf.multiply(inputs, tf.expand_dims(masks, 2))
inputs = self.hidden(inputs)
return self.transformer.forward(inputs, masks)
class DAN(object):
def __init__(self, num_units):
self.hidden = tf.keras.layers.Dense(num_units, activation=tf.nn.relu)
def forward(self, inputs, masks):
masks = tf.cast(masks, tf.float32)
    inputs = tf.multiply(inputs, tf.expand_dims(masks, 2))
inputs = tf.reduce_sum(inputs, 1) / tf.reduce_sum(masks, 1, keepdims=True)
return self.hidden(inputs)
def get_text_encoder(encoder_type='rnn'):
if encoder_type == 'rnn':
return RNN
elif encoder_type == 'trans':
return Transformer
elif encoder_type == 'dan':
return DAN
else:
raise ValueError(encoder_type)
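# Illustrative usage sketch (names and sizes below are assumptions, not from
# this file):
#   encoder = get_text_encoder('rnn')(512)
#   feats = encoder.forward(token_embs, token_masks)  # -> (batch, 512)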
class ImageTextEmbedding(object):
def __init__(self, word_emb, encoder_dim, encoder_type='rnn', norm=True,
drop_p=0.25, contrastive=False, margin=0.5, num_neg_sample=10,
lambda1=1.0, lambda2=1.0, internal=True):
self.word_emb = tf.Variable(tf.convert_to_tensor(word_emb), name="emb",
trainable=True)
self.text_encoder = get_text_encoder(encoder_type)(encoder_dim)
self.text_feat_proj = tf.keras.layers.Dense(encoder_dim)
self.img_feat_proj = tf.keras.layers.Dense(encoder_dim)
self.dropout = tf.keras.layers.Dropout(drop_p)
self.margin = margin
self.num_neg_sample = num_neg_sample
self.lambda1 = lambda1
self.lambda2 = lambda2
self.contrastive = contrastive
self.internal = internal
self.norm = norm # normalize the embedding
self.text_outputs = []
def forward_img(self, img_inputs, training):
x = self.img_feat_proj(img_inputs)
if self.norm:
x = tf.nn.l2_normalize(x, axis=-1)
return self.dropout(x, training=training)
def forward_text(self, text_inputs, text_masks, training):
if len(text_inputs.get_shape()) == 2:
x = tf.nn.embedding_lookup(self.word_emb, text_inputs)
else:
x = text_inputs
self.text_outputs.append(mean_pool(x, text_masks))
x = self.text_encoder.forward(x, text_masks)
self.text_outputs.append(x)
x = self.text_feat_proj(x)
if self.norm:
x = tf.nn.l2_normalize(x, axis=-1)
return self.dropout(x, training=training)
def encode(self, img_inputs, text_inputs, text_masks, training):
img_feats = self.forward_img(img_inputs, training)
text_feats = self.forward_text(text_inputs, text_masks, training)
return img_feats, text_feats
def forward(self, img_inputs, text_inputs, text_masks, labels, training):
img_feats, text_feats = self.encode(img_inputs, text_inputs,
text_masks, training)
if self.contrastive:
loss = contrastive_loss(img_feats, text_feats, self.margin)
sent_im_dist = - similarity_fn(text_feats, img_feats)
elif self.internal:
loss = internal_loss(img_feats, text_feats, labels)
sent_im_dist = - similarity_fn(text_feats, img_feats)
else:
loss = embedding_loss(img_feats, text_feats, labels, self.margin,
self.num_neg_sample, self.lambda1, self.lambda2)
sent_im_dist = pdist(text_feats, img_feats)
rec = recall_k(sent_im_dist, labels, ks=[1, 5, 10])
return loss, rec
def order_sim(im, s):
im = tf.expand_dims(im, 0)
s = tf.expand_dims(s, 1)
diff = tf.clip_by_value(s - im, 0, 1e6)
dist = tf.sqrt(tf.reduce_sum(diff ** 2, 2))
scores = -tf.transpose(dist)
return scores
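# This resembles the order-embedding penalty of Vendrov et al. (2016): only
# the positive part of (s - im) is penalized, and its L2 norm is negated so
# larger violations yield lower similarity scores.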
def similarity_fn(im, s, order=False):
if order:
return order_sim(im, s)
return tf.matmul(im, s, transpose_b=True)
def internal_loss(im_embeds, sent_embeds, im_labels):
logits_s = tf.matmul(sent_embeds, im_embeds, transpose_b=True)
cost_s = tf.nn.softmax_cross_entropy_with_logits_v2(im_labels, logits_s)
logits_im = tf.matmul(im_embeds, sent_embeds, transpose_b=True)
cost_im = tf.nn.softmax_cross_entropy_with_logits_v2(tf.transpose(im_labels),
logits_im)
return tf.reduce_mean(cost_s) + tf.reduce_mean(cost_im)
def contrastive_loss(im_embeds, sent_embeds, margin, max_violation=True):
""" modified https://github.com/fartashf/vsepp/blob/master/model.py#L260 """
scores = similarity_fn(im_embeds, sent_embeds)
batch_size = tf.shape(im_embeds)[0]
diagonal = tf.diag_part(scores)
d1 = tf.reshape(diagonal, (batch_size, 1))
d2 = tf.reshape(diagonal, (1, batch_size))
cost_s = tf.clip_by_value(margin + scores - d1, 0, 1e6)
cost_im = tf.clip_by_value(margin + scores - d2, 0, 1e6)
zeros = tf.zeros(batch_size)
cost_s = tf.matrix_set_diag(cost_s, zeros)
cost_im = tf.matrix_set_diag(cost_im, zeros)
if max_violation:
cost_s = tf.reduce_max(cost_s, 1)
cost_im = tf.reduce_max(cost_im, 0)
return tf.reduce_sum(cost_s) + tf.reduce_sum(cost_im)
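# With max_violation=True only the hardest negative per row/column contributes
# to the hinge loss (VSE++-style hard-negative mining); otherwise every margin
# violation in the batch is summed.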
def pdist(x1, x2):
"""
x1: Tensor of shape (h1, w)
x2: Tensor of shape (h2, w)
Return pairwise distance for each row vector in x1, x2 as
a Tensor of shape (h1, h2)
"""
x1_square = tf.reshape(tf.reduce_sum(x1 * x1, axis=1), [-1, 1])
x2_square = tf.reshape(tf.reduce_sum(x2 * x2, axis=1), [1, -1])
return tf.sqrt(x1_square - 2 * tf.matmul(x1, tf.transpose(x2)) + x2_square +
1e-4)
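# pdist uses the expansion ||a - b||^2 = ||a||^2 - 2*a.b + ||b||^2 row-wise;
# the 1e-4 term keeps the argument of tf.sqrt positive when floating-point
# error makes the expansion slightly negative for near-identical rows.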
def embedding_loss(im_embeds, sent_embeds, im_labels, margin, num_neg_sample,
lambda1, lambda2):
"""
im_embeds: (b, 512) image embedding tensors
sent_embeds: (sample_size * b, 512) sentence embedding tensors
    where the order of sentences corresponds to the order of images and
    sentences for the same image are next to each other
im_labels: (sample_size * b, b)
|
steakunderscore/Bandwidth-Monitoring
|
src/webInterface.py
|
Python
|
gpl-3.0
| 3,059
| 0.010134
|
'''
Created on 11/02/2010
@author: [email protected]
'''
class webInterface(object):
'''
classdocs
'''
writeFile = None
def __init__(self):
pass
def __openFile(self, fileName):
self.writeFile = open(fileName, 'w')
def closeFile(self):
self.writeFile.close()
def writeHeader(self, title = 'Henry\'s iptables data accounting'):
self.writeFile.write('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">\n')
self.writeFile.write('<HTML>\n')
self.writeFile.write('<HEAD>\n')
self.writeFile.write('<TITLE>' + title + '</TITLE>\n')
self.writeFile.write('</HEAD>\n')
def writeBody(self, users):
self.writeFile.write('<BODY>\n')
self.writeFile.write('<table border="1">')
self.writeFile.write('<tr>')
self.writeFile.write('<td>IP address</td>')
self.writeFile.write('<td>On-peak Packets</td>')
self.writeFile.write('<td>On-peak Data</td>')
self.writeFile.write('<td>Off-peak Packets</td>')
self.writeFile.write('<td>Off-peak Data</td>')
self.writeFile.write('<td>Total Packets</td>')
self.writeFile.write('<td>Total Data</td>')
self.writeFile.write('</tr>')
usersList = users.keys()
usersList.sort()
for user in usersList:
self.writeFile.write('<tr>')
self.writeFile.write('<td>' + user + '</td>')
self.writeFile.write('<td>' + str(users[user].getUpData('pkts', date=None, peak='other')) + '</td>')
self.writeFile.write('<td>' + self.humanizeNumber(users[user].getUpData('data', date=None, peak='other')) + '</td>')
self.writeFile.write('<td>' + str(users[user].getDownData('pkts', date=None, peak='other')) + '</td>')
self.writeFile.write('<td>' + self.humanizeNumber(users[user].getDownData('data', date=None, peak='other')) + '</td>')
self.writeFile.write('<td>' + str(users[user].getData(type = 'pkts')) + '</td>')
self.writeFile.write('<td>' + self.humanizeNumber(users[user].getData(type = 'data')) + '</td>')
self.writeFile.write('</tr>')
self.writeFile.write('</table>')
self.writeFile.write('</BODY>\n')
def writeFooter(self):
self.writeFile.write('</HTML>\n')
def humanizeNumber(self,number = 0):
if number > 1024*1024*1024:
number = number/(1024*1024*1024)
number = str(number) + ' GBytes'
elif number > 1024*1024:
            number = number/(1024*1024)
number = str(number) + ' MBytes'
elif number > 1024:
number = number/1024
number = str(number) + ' KBytes'
else:
number = str(number) + ' Bytes'
return number
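    # e.g. humanizeNumber(3*1024*1024) returns '3 MBytes'; note that the
    # divisions truncate under Python 2 integer arithmetic.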
def outputIndex(self,file,users = None):
self.__openFile(file)
self.writeHeader()
self.writeBody(users)
self.writeFooter()
self.closeFile()
|
ddemidov/ev3dev-lang-python-1
|
ev3dev/auto.py
|
Python
|
mit
| 497
| 0.004024
|
import platform
# -----------------------------------------------------------------------------
# Guess platform we are running on
def current_platform():
machine = platform.machine()
if machine == 'armv5tejl':
return 'ev3'
elif machine == 'armv6l':
return 'brickpi'
else:
return 'unsupported'
if current_platform() == 'brickpi':
from .brickpi import *
else:
    # Import ev3 by default, so that it is covered by documentation.
from .ev3 import *
|
vileopratama/vitech
|
src/openerp/cli/deploy.py
|
Python
|
mit
| 4,038
| 0.003219
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import os
import requests
import sys
import tempfile
import zipfile
from . import Command
class Deploy(Command):
"""Deploy a module on an Odoo instance"""
def __init__(self):
super(Deploy, self).__init__()
self.session = requests.session()
def deploy_module(self, module_path, url, login, password, db='', force=False):
url = url.rstrip('/')
csrf_token = self.authenticate(url, login, password, db)
module_file = self.zip_module(module_path)
try:
return self.upload_module(url, module_file, force=force, csrf_token=csrf_token)
finally:
os.remove(module_file)
def upload_module(self, server, module_file, force=False, csrf_token=None):
print("Uploading module file...")
url = server + '/base_import_module/upload'
post_data = {'force': '1' if force else ''}
if csrf_token: post_data['csrf_token'] = csrf_token
with open(module_file, 'rb') as f:
res = self.session.post(url, files={'mod_file': f}, data=post_data)
res.raise_for_status()
return res.text
def authenticate(self, server, login, password, db=''):
print("Authenticating on server '%s' ..." % server)
# Fixate session with a given db if any
self.session.get(server + '/web/login', params=dict(db=db))
args = dict(login=login, password=password, db=db)
res = self.session.post(server + '/base_import_module/login', args)
if res.status_code == 404:
raise Exception("The server '%s' does not have the 'base_import_module' installed." % server)
elif res.status_code != 200:
raise Exception(res.text)
return res.headers.get('x-csrf-token')
def zip_module(self, path):
path = os.path.abspath(path)
if not os.path.isdir(path):
raise Exception("Could not find module directory '%s'" % path)
container, module_name = os.path.split(path)
temp = tempfile.mktemp(suffix='.zip')
try:
print("Zipping module directory...")
with zipfile.ZipFile(temp, 'w') as zfile:
for root, dirs, files in os.walk(path):
for file in files:
file_path = os.path.join(root, file)
zfile.write(file_path, file_path.split(container).pop())
return temp
except Exception:
os.remove(temp)
raise
def run(self, cmdargs):
parser = argparse.ArgumentParser(
prog="%s deploy" % sys.argv[0].split(os.path.sep)[-1],
description=self.__doc__
)
parser.add_argument('path', help="Path of the module to deploy")
        parser.add_argument('url', nargs='?', help='Url of the server (default=http://localhost:8069)', default="http://localhost:8069")
parser.add_argument('--db', dest='db', help='Database to use if server does not use db-filter.')
parser.add_argument('--login', dest='login', default="admin", help='Login (default=admin)')
parser.add_argument('--password', dest='password', default="admin", help='Password (default=admin)')
parser.add_argument('--verify-ssl', action='store_true', help='Verify SSL certificate')
parser.add_argument('--force', action='store_true', help='Force init even if module is already installed. (will update `noupdate="1"` records)')
if not cmdargs:
sys.exit(parser.print_help())
args = parser.parse_args(args=cmdargs)
if not args.verify_ssl:
self.session.verify = False
try:
if not args.url.startswith(('http://', 'https://')):
args.url = 'https://%s' % args.url
result = self.deploy_module(args.path, args.url, args.login, args.password, args.db, force=args.force)
print(result)
except Exception, e:
sys.exit("ERROR: %s" % e)
|
suizokukan/urwid
|
urwid/html_fragment.py
|
Python
|
lgpl-2.1
| 8,175
| 0.005505
|
#!/usr/bin/python
#
# Urwid html fragment output wrapper for "screen shots"
# Copyright (C) 2004-2007 Ian Ward
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
"""
HTML PRE-based UI implementation
"""
from urwid import util
from urwid.main_loop import ExitMainLoop
from urwid.display_common import AttrSpec, BaseScreen
# replace control characters with ?'s
_trans_table = "?" * 32 + "".join([chr(x) for x in range(32, 256)])
_default_foreground = 'black'
_default_background = 'light gray'
class HtmlGeneratorSimulationError(Exception):
pass
class HtmlGenerator(BaseScreen):
# class variables
fragments = []
sizes = []
keys = []
started = True
def __init__(self):
super(HtmlGenerator, self).__init__()
self.colors = 16
self.bright_is_bold = False # ignored
self.has_underline = True # ignored
self.register_palette_entry(None,
_default_foreground, _default_background)
def set_terminal_properties(self, colors=None, bright_is_bold=None,
has_underline=None):
if colors is None:
colors = self.colors
if bright_is_bold is None:
bright_is_bold = self.bright_is_bold
if has_underline is None:
has_underline = self.has_underline
self.colors = colors
self.bright_is_bold = bright_is_bold
self.has_underline = has_underline
def set_mouse_tracking(self, enable=True):
"""Not yet implemented"""
pass
def start(self):
pass
def stop(self):
pass
def set_input_timeouts(self, *args):
pass
def reset_default_terminal_palette(self, *args):
pass
def run_wrapper(self,fn):
"""Call fn."""
return fn()
def draw_screen(self, (cols, rows), r ):
"""Create an html fragment from the render object.
Append it to HtmlGenerator.fragments list.
"""
# collect output in l
l = []
assert r.rows() == rows
if r.cursor is not None:
cx, cy = r.cursor
else:
cx = cy = None
y = -1
for row in r.content():
y += 1
col = 0
for a, cs, run in row:
run = run.translate(_trans_table)
if isinstance(a, AttrSpec):
aspec = a
else:
aspec = self._palette[a][
{1: 1, 16: 0, 88:2, 256:3}[self.colors]]
if y == cy and col <= cx:
run_width = util.calc_width(run, 0,
len(run))
if col+run_width > cx:
l.append(html_span(run,
aspec, cx-col))
else:
l.append(html_span(run, aspec))
col += run_width
else:
l.append(html_span(run, aspec))
l.append("\n")
# add the fragment to the list
self.fragments.append( "<pre>%s</pre>" % "".join(l) )
def clear(self):
"""
Force the screen to be completely repainted on the next
call to draw_screen().
(does nothing for html_fragment)
"""
pass
def get_cols_rows(self):
"""Return the next screen size in HtmlGenerator.sizes."""
if not self.sizes:
raise HtmlGeneratorSimulationError, "Ran out of screen sizes to return!"
return self.sizes.pop(0)
def get_input(self, raw_keys=False):
"""Return the next list of keypresses in HtmlGenerator.keys."""
if not self.keys:
raise ExitMainLoop()
if raw_keys:
return (self.keys.pop(0), [])
return self.keys.pop(0)
_default_aspec = AttrSpec(_default_foreground, _default_background)
(_d_fg_r, _d_fg_g, _d_fg_b, _d_bg_r, _d_bg_g, _d_bg_b) = (
_default_aspec.get_rgb_values())
def html_span(s, aspec, cursor = -1):
fg_r, fg_g, fg_b, bg_r, bg_g, bg_b = aspec.get_rgb_values()
# use real colours instead of default fg/bg
if fg_r is None:
fg_r, fg_g, fg_b = _d_fg_r, _d_fg_g, _d_fg_b
if bg_r is None:
bg_r, bg_g, bg_b = _d_bg_r, _d_bg_g, _d_bg_b
html_fg = "#%02x%02x%02x" % (fg_r, fg_g, fg_b)
html_bg = "#%02x%02x%02x" % (bg_r, bg_g, bg_b)
if aspec.standout:
html_fg, html_bg = html_bg, html_fg
    extra = (";text-decoration:underline" * aspec.underline +
";font-weight:bold" * aspec.bold)
def html_span(fg, bg, s):
if not s: return ""
return ('<span style="color:%s;'
'background:%s%s">%s</span>' %
(fg, bg, extra, html_escape(s)))
if cursor >= 0:
c_off, _ign = util.calc_text_pos(s, 0, len(s), cursor)
c2_off = util.move_next_char(s, c_off, len(s))
return (html_span(html_fg, html_bg, s[:c_off]) +
html_span(html_bg, html_fg, s[c_off:c2_off]) +
html_span(html_fg, html_bg, s[c2_off:]))
else:
return html_span(html_fg, html_bg, s)
def html_escape(text):
"""Escape text so that it will be displayed safely within HTML"""
text = text.replace('&','&')
text = text.replace('<','<')
text = text.replace('>','>')
return text
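# e.g. html_escape('<b> & </b>') == '&lt;b&gt; &amp; &lt;/b&gt;'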
def screenshot_init( sizes, keys ):
"""
Replace curses_display.Screen and raw_display.Screen class with
HtmlGenerator.
Call this function before executing an application that uses
curses_display.Screen to have that code use HtmlGenerator instead.
sizes -- list of ( columns, rows ) tuples to be returned by each call
to HtmlGenerator.get_cols_rows()
keys -- list of lists of keys to be returned by each call to
HtmlGenerator.get_input()
Lists of keys may include "window resize" to force the application to
call get_cols_rows and read a new screen size.
For example, the following call will prepare an application to:
1. start in 80x25 with its first call to get_cols_rows()
2. take a screenshot when it calls draw_screen(..)
3. simulate 5 "down" keys from get_input()
4. take a screenshot when it calls draw_screen(..)
5. simulate keys "a", "b", "c" and a "window resize"
6. resize to 20x10 on its second call to get_cols_rows()
7. take a screenshot when it calls draw_screen(..)
8. simulate a "Q" keypress to quit the application
screenshot_init( [ (80,25), (20,10) ],
[ ["down"]*5, ["a","b","c","window resize"], ["Q"] ] )
"""
try:
for (row,col) in sizes:
assert type(row) == int
assert row>0 and col>0
except (AssertionError, ValueError):
raise Exception, "sizes must be in the form [ (col1,row1), (col2,row2), ...]"
try:
for l in keys:
assert type(l) == list
for k in l:
assert type(k) == str
except (AssertionError, ValueError):
raise Exception, "keys must be in the form [ [keyA1, keyA2, ..], [keyB1, ..], ...]"
import curses_display
curses_display.Screen = HtmlGenerator
import raw_display
raw_display.Screen = HtmlGenerator
HtmlGenerator.sizes = sizes
HtmlGenerator.keys = keys
def screenshot_collect():
"""Return screenshots as a list of HTML fragments."""
l = HtmlGenerator.fragments
HtmlGenerator.fragments = []
return l
|
popara/jonny-api
|
matching/admin.py
|
Python
|
mit
| 786
| 0.001272
|
from django.contrib import admin
# from models import Agent, ReCa, Accomodation, Beach, Activity, Contact
#
# @admin.register(ReCa, Activity)
# class VenueAdmin(admin.ModelAdmin):
# list_display = ('name', 'internal_rating', 'ready', 'description',)
# list_filter = ('ready', 'internal_rating',)
# search_fields = ['name', 'description', 'address']
# ordering = ['id']
# save_on_top = True
#
#
# @admin.register(Accomodation)
# class AccomodAdmin(VenueAdmin):
#     list_display = ('name', 'stars', 'ready', 'description',)
# list_filter = ('ready', 'stars',)
#
#
# @admin.register(Beach)
# class BeachAdmin(admin.ModelAdmin):
#     list_display = ('name', 'type', 'description',)
# list_filter = ('name',)
#
#
# admin.site.register(Agent)
# admin.site.register(Contact)
#
#
|
samvarankashyap/googlecloudutility2
|
lib/simplejson/simplejson/encoder.py
|
Python
|
apache-2.0
| 25,806
| 0.001589
|
"""Implementation of JSONEncoder
"""
from __future__ import absolute_import
import re
from operator import itemgetter
# Do not import Decimal directly to avoid reload issues
import decimal
from .compat import u, unichr, binary_type, string_types, integer_types, PY3
def _import_speedups():
try:
from . import _speedups
return _speedups.encode_basestring_ascii, _speedups.make_encoder
except ImportError:
return None, None
c_encode_basestring_ascii, c_make_encoder = _import_speedups()
from simplejson.decoder import PosInf
#ESCAPE = re.compile(ur'[\x00-\x1f\\"\b\f\n\r\t\u2028\u2029]')
# This is required because u() will mangle the string and ur'' isn't valid
# python3 syntax
ESCAPE = re.compile(u'[\\x00-\\x1f\\\\"\\b\\f\\n\\r\\t\u2028\u2029]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(r'[\x80-\xff]')
ESCAPE_DCT = {
'\\': '\\\\',
'"': '\\"',
'\b': '\\b',
'\f': '\\f',
'\n': '\\n',
'\r': '\\r',
'\t': '\\t',
}
for i in range(0x20):
#ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
for i in [0x2028, 0x2029]:
ESCAPE_DCT.setdefault(unichr(i), '\\u%04x' % (i,))
FLOAT_REPR = repr
def encode_basestring(s, _PY3=PY3, _q=u('"')):
"""Return a JSON representation of a Python string
"""
if _PY3:
if isinstance(s, binary_type):
s = s.decode('utf-8')
else:
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
s = s.decode('utf-8')
def replace(match):
return ESCAPE_DCT[match.group(0)]
return _q + ESCAPE.sub(replace, s) + _q
def py_encode_basestring_ascii(s, _PY3=PY3):
"""Return an ASCII-only JSON representation of a Python string
"""
if _PY3:
if isinstance(s, binary_type):
s = s.decode('utf-8')
else:
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
s = s.decode('utf-8')
def replace(match):
s = match.group(0)
try:
return ESCAPE_DCT[s]
except KeyError:
n = ord(s)
if n < 0x10000:
#return '\\u{0:04x}'.format(n)
return '\\u%04x' % (n,)
else:
# surrogate pair
n -= 0x10000
s1 = 0xd800 | ((n >> 10) & 0x3ff)
s2 = 0xdc00 | (n & 0x3ff)
#return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
return '\\u%04x\\u%04x' % (s1, s2)
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
encode_basestring_ascii = (
c_encode_basestring_ascii or py_encode_basestring_ascii)
class JSONEncoder(object):
"""Extensible JSON <http://json.org> encoder for Python data structures.
    Supports the following objects and types by default:
+-------------------+---------------+
| Python | JSON |
+===================+===============+
    | dict, namedtuple  | object        |
+-------------------+---------------+
| list, tuple | array |
+-------------------+---------------+
| str, unicode | string |
+-------------------+---------------+
| int, long, float | number |
+-------------------+---------------+
| True | true |
+-------------------+---------------+
| False | false |
+-------------------+---------------+
| None | null |
+-------------------+---------------+
To extend this to recognize other objects, subclass and implement a
``.default()`` method with another method that returns a serializable
object for ``o`` if possible, otherwise it should call the superclass
implementation (to raise ``TypeError``).
"""
item_separator = ', '
key_separator = ': '
def __init__(self, skipkeys=False, ensure_ascii=True,
check_circular=True, allow_nan=True, sort_keys=False,
indent=None, separators=None, encoding='utf-8', default=None,
use_decimal=True, namedtuple_as_object=True,
tuple_as_array=True, bigint_as_string=False,
item_sort_key=None, for_json=False, ignore_nan=False,
int_as_string_bitcount=None):
"""Constructor for JSONEncoder, with sensible defaults.
If skipkeys is false, then it is a TypeError to attempt
encoding of keys that are not str, int, long, float or None. If
skipkeys is True, such items are simply skipped.
If ensure_ascii is true, the output is guaranteed to be str
objects with all incoming unicode characters escaped. If
ensure_ascii is false, the output will be unicode object.
If check_circular is true, then lists, dicts, and custom encoded
objects will be checked for circular references during encoding to
prevent an infinite recursion (which would cause an OverflowError).
Otherwise, no such check takes place.
If allow_nan is true, then NaN, Infinity, and -Infinity will be
encoded as such. This behavior is not JSON specification compliant,
but is consistent with most JavaScript based encoders and decoders.
Otherwise, it will be a ValueError to encode such floats.
If sort_keys is true, then the output of dictionaries will be
sorted by key; this is useful for regression tests to ensure
that JSON serializations can be compared on a day-to-day basis.
If indent is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
If specified, separators should be an (item_separator, key_separator)
tuple. The default is (', ', ': ') if *indent* is ``None`` and
(',', ': ') otherwise. To get the most compact JSON representation,
you should specify (',', ':') to eliminate whitespace.
If specified, default is a function that gets called for objects
that can't otherwise be serialized. It should return a JSON encodable
version of the object or raise a ``TypeError``.
If encoding is not None, then all input strings will be
transformed into unicode using that encoding prior to JSON-encoding.
The default is UTF-8.
If use_decimal is true (not the default), ``decimal.Decimal`` will
be supported directly by the encoder. For the inverse, decode JSON
with ``parse_float=decimal.Decimal``.
If namedtuple_as_object is true (the default), objects with
``_asdict()`` methods will be encoded as JSON objects.
If tuple_as_array is true (the default), tuple (and subclasses) will
be encoded as JSON arrays.
If bigint_as_string is true (not the default), ints 2**53 and higher
or lower than -2**53 will be encoded as strings. This is to avoid the
rounding that happens in Javascript otherwise.
If int_as_string_bitcount is a positive number (n), then int of size
greater than or equal to 2**n or lower than or equal to -2**n will be
encoded as strings.
If specified, item_sort_key is a callable used to sort the items in
each dictionary. This is useful if you want to sort items other than
in alphabetical order by key.
If for_json is true (not the default), objects with a ``for_json()``
method will use the return value of that method for encoding as JSON
instead of the object.
If *ignore_nan* is true (default: ``False``), then out of range
:class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized
as ``null`` in compliance with the ECMA-262 specification. If true,
this will override *allow_nan*.
"""
|
jabooth/menpo-archive
|
menpo/image/boolean.py
|
Python
|
bsd-3-clause
| 11,453
| 0
|
from copy import deepcopy
import numpy as np
from menpo.image.base import Image
from skimage.transform import pyramid_gaussian
class BooleanImage(Image):
r"""
A mask image made from binary pixels. The region of the image that is
left exposed by the mask is referred to as the 'masked region'. The
set of 'masked' pixels is those pixels corresponding to a True value in
the mask.
Parameters
-----------
mask_data : (M, N, ..., L) ndarray
The binary mask data. Note that there is no channel axis - a 2D Mask
Image is built from just a 2D numpy array of mask_data.
        Automatically coerced into boolean values.
"""
def __init__(self, mask_data):
# Enforce boolean pixels, and add a channel dim
        mask_data = np.asarray(mask_data[..., None], dtype=np.bool)
super(BooleanImage, self).__init__(mask_data)
@classmethod
def _init_with_channel(cls, image_data_with_channel):
r"""
Constructor that always requires the image has a
channel on the last axis. Only used by from_vector. By default,
just calls the constructor. Subclasses with constructors that don't
require channel axes need to overwrite this.
"""
return cls(image_data_with_channel[..., 0])
@classmethod
def blank(cls, shape, fill=True, round='ceil', **kwargs):
r"""
Returns a blank :class:`BooleanImage` of the requested shape
Parameters
----------
shape : tuple or list
The shape of the image. Any floating point values are rounded
according to the ``round`` kwarg.
fill : True or False, optional
The mask value to be set everywhere
Default: True (masked region is the whole image - meaning the whole
image is exposed)
round: {'ceil', 'floor', 'round'}
Rounding function to be applied to floating point shapes.
Default: 'ceil'
Returns
-------
blank_image : :class:`BooleanImage`
A blank mask of the requested size
"""
if round not in ['ceil', 'round', 'floor']:
raise ValueError('round must be either ceil, round or floor')
# Ensure that the '+' operator means concatenate tuples
shape = tuple(getattr(np, round)(shape))
if fill:
mask = np.ones(shape, dtype=np.bool)
else:
mask = np.zeros(shape, dtype=np.bool)
return cls(mask)
@property
def mask(self):
r"""
Returns the pixels of the mask with no channel axis. This is what
should be used to mask any k-dimensional image.
:type: (M, N, ..., L), np.bool ndarray
"""
return self.pixels[..., 0]
@property
def n_true(self):
r"""
The number of ``True`` values in the mask
:type: int
"""
return np.sum(self.pixels)
@property
def n_false(self):
r"""
The number of ``False`` values in the mask
:type: int
"""
return self.n_pixels - self.n_true
@property
def proportion_true(self):
r"""
The proportion of the mask which is ``True``
:type: double
"""
return (self.n_true * 1.0) / self.n_pixels
@property
def proportion_false(self):
r"""
The proportion of the mask which is ``False``
:type: double
"""
return (self.n_false * 1.0) / self.n_pixels
@property
def true_indices(self):
r"""
The indices of pixels that are true.
:type: (``n_dims``, ``n_true``) ndarray
"""
# Ignore the channel axis
return np.vstack(np.nonzero(self.pixels[..., 0])).T
@property
def false_indices(self):
r"""
The indices of pixels that are false.
:type: (``n_dims``, ``n_false``) ndarray
"""
# Ignore the channel axis
return np.vstack(np.nonzero(~self.pixels[..., 0])).T
@property
def all_indices(self):
r"""
Indices into all pixels of the mask, as consistent with
true_indices and false_indices
:type: (``n_dims``, ``n_pixels``) ndarray
"""
return np.indices(self.shape).reshape([self.n_dims, -1]).T
def __str__(self):
return ('{} {}D mask, {:.1%} '
'of which is True '.format(self._str_shape, self.n_dims,
self.proportion_true))
def from_vector(self, flattened):
r"""
Takes a flattened vector and returns a new
:class:`BooleanImage` formed by
reshaping the vector to the correct dimensions. Note that this is
rebuilding a boolean image **itself** from boolean values. The mask
is in no way interpreted in performing the operation, in contrast to
MaskedImage, where only the masked region is used in from_vector()
and as_vector(). Any image landmarks are transferred in the process.
Parameters
----------
flattened : (``n_pixels``,) np.bool ndarray
A flattened vector of all the pixels of a BooleanImage.
Returns
-------
image : :class:`BooleanImage`
New BooleanImage of same shape as this image
"""
mask = BooleanImage(flattened.reshape(self.shape))
mask.landmarks = self.landmarks
return mask
def invert(self):
r"""
Inverts the current mask in place, setting all True values to False,
and all False values to True.
"""
self.pixels = ~self.pixels
def inverted_copy(self):
r"""
Returns a copy of this Boolean image, which is inverted.
Returns
-------
inverted_image: :class:`BooleanNSImage`
An inverted copy of this boolean image.
"""
inverse = deepcopy(self)
inverse.invert()
return inverse
def bounds_true(self, boundary=0, constrain_to_bounds=True):
r"""
Returns the minimum to maximum indices along all dimensions that the
mask includes which fully surround the True mask values. In the case
of a 2D Image for instance, the min and max define two corners of a
rectangle bounding the True pixel values.
Parameters
----------
boundary : int, optional
A number of pixels that should be added to the extent. A
negative value can be used to shrink the bounds in.
Default: 0
constrain_to_bounds: bool, optional
If True, the bounding extent is snapped to not go beyond
the edge of the image. If False, the bounds are left unchanged.
Default: True
Returns
--------
min_b : (D,) ndarray
The minimum extent of the True mask region with the boundary
along each dimension. If constrain_to_bounds was True,
is clipped to legal image bounds.
max_b : (D,) ndarray
The maximum extent of the True mask region with the boundary
along each dimension. If constrain_to_bounds was True,
is clipped to legal image bounds.
"""
mpi = self.true_indices
maxes = np.max(mpi, axis=0) + boundary
mins = np.min(mpi, axis=0) - boundary
if constrain_to_bounds:
maxes = self.constrain_points_to_bounds(maxes)
mins = self.constrain_points_to_bounds(mins)
return mins, maxes
def bounds_false(self, boundary=0, constrain_to_bounds=True):
r"""
Returns the minimum to maximum indices along all dimensions that the
mask includes which fully surround the False mask values. In the case
of a 2D Image for instance, the min and max define two corners of a
rectangle bounding the False pixel values.
Parameters
----------
boundary : int >= 0, optional
A number of pixels that should be added to the extent. A
negative value can be used to shrink the bounds in.
|
gward/buildbot
|
buildbot/process/step_twisted2.py
|
Python
|
gpl-2.0
| 6,316
| 0.004275
|
from buildbot.status import tests
from buildbot.process.step import SUCCESS, FAILURE, BuildStep
from buildbot.process.step_twisted import RunUnitTests
from zope.interface import implements
from twisted.python import log, failure
from twisted.spread import jelly
from twisted.pb.tokens import BananaError
from twisted.web.html import PRE
from twisted.web.error import NoResource
class Null: pass
ResultTypes = Null()
ResultTypeNames = ["SKIP",
"EXPECTED_FAILURE", "FAILURE", "ERROR",
"UNEXPECTED_SUCCESS", "SUCCESS"]
try:
from twisted.trial import reporter # introduced in Twisted-1.0.5
# extract the individual result types
for name in ResultTypeNames:
setattr(ResultTypes, name, getattr(reporter, name))
except ImportError:
from twisted.trial import unittest # Twisted-1.0.4 has them here
for name in ResultTypeNames:
setattr(ResultTypes, name, getattr(unittest, name))
log._keepErrors = 0
from twisted.trial import remote # for trial/jelly parsing
import StringIO
class OneJellyTest(tests.OneTest):
def html(self, request):
tpl = "<HTML><BODY>\n\n%s\n\n</body></html>\n"
pptpl = "<HTML><BODY>\n\n<pre>%s</pre>\n\n</body></html>\n"
t = request.postpath[0] # one of 'short', 'long' #, or 'html'
if isinstance(self.results, failure.Failure):
# it would be nice to remove unittest functions from the
# traceback like unittest.format_exception() does.
if t == 'short':
s = StringIO.StringIO()
self.results.printTraceback(s)
return pptpl % PRE(s.getvalue())
elif t == 'long':
s = StringIO.StringIO()
self.results.printDetailedTraceback(s)
return pptpl % PRE(s.getvalue())
#elif t == 'html':
# return tpl % formatFailure(self.results)
# ACK! source lines aren't stored in the Failure, rather,
# formatFailure pulls them (by filename) from the local
# disk. Feh. Even printTraceback() won't work. Double feh.
return NoResource("No such mode '%s'" % t)
if self.results == None:
return tpl % "No results to show: test probably passed."
# maybe results are plain text?
return pptpl % PRE(self.results)
class TwistedJellyTestResults(tests.TestResults):
oneTestClass = OneJellyTest
def describeOneTest(self, testname):
return "%s: %s\n" % (testname, self.tests[testname][0])
class RunUnitTestsJelly(RunUnitTests):
"""I run the unit tests with the --jelly option, which generates
machine-parseable results as the tests are run.
"""
trialMode = "--jelly"
implements(remote.IRemoteReporter)
ourtypes = { ResultTypes.SKIP: tests.SKIP,
ResultTypes.EXPECTED_FAILURE: tests.EXPECTED_FAILURE,
ResultTypes.FAILURE: tests.FAILURE,
ResultTypes.ERROR: tests.ERROR,
ResultTypes.UNEXPECTED_SUCCESS: tests.UNEXPECTED_SUCCESS,
ResultTypes.SUCCESS: tests.SUCCESS,
}
def __getstate__(self):
#d = RunUnitTests.__getstate__(self)
d = self.__dict__.copy()
# Banana subclasses are Ephemeral
if d.has_key("decoder"):
del d['decoder']
return d
def start(self):
self.decoder = remote.DecodeReport(self)
# don't accept anything unpleasant from the (untrusted) build slave
# The jellied stream may have Failures, but everything inside should
# be a string
security = jelly.SecurityOptions()
security.allowBasicTypes()
security.allowInstancesOf(failure.Failure)
self.decoder.taster = security
self.results = TwistedJellyTestResults()
RunUnitTests.start(self)
def logProgress(self, progress):
# XXX: track number of tests
BuildStep.logProgress(self, progress)
def addStdout(self, data):
if not self.decoder:
return
try:
self.decoder.dataReceived(data)
except BananaError:
self.decoder = None
log.msg("trial --jelly output unparseable, traceback follows")
log.deferr()
def remote_start(self, expectedTests, times=None):
print "remote_start", expectedTests
def remote_reportImportError(self, name, aFailure, times=None):
pass
def remote_reportStart(self, testClass, method, times=None):
print "reportStart", testClass, method
def remote_reportResults(self, testClass, method, resultType, results,
times=None):
print "reportResults", testClass, method, resultType
which = testClass + "." + method
self.results.addTest(which,
self.ourtypes.get(resultType, tests.UNKNOWN),
results)
def finished(self, rc):
# give self.results to our Build object
self.build.testsFinished(self.results)
        total = self.results.countTests()
count = self.results.countFailures()
result = SUCCESS
if total == None:
result = (FAILURE, ['tests%s' % self.rtext(' (%s)')])
if count:
result = (FAILURE, ["%d tes%s%s" % (count,
(count == 1 and 't' or 'ts'),
self.rtext(' (%s)'))])
        return self.stepComplete(result)
def finishStatus(self, result):
total = self.results.countTests()
count = self.results.countFailures()
color = "green"
text = []
if count == 0:
text.extend(["%d %s" % \
(total,
total == 1 and "test" or "tests"),
"passed"])
else:
text.append("tests")
text.append("%d %s" % \
(count,
count == 1 and "failure" or "failures"))
color = "red"
self.updateCurrentActivity(color=color, text=text)
self.addFileToCurrentActivity("tests", self.results)
#self.finishStatusSummary()
self.finishCurrentActivity()
|
wavefancy/BIDMC-PYTHON
|
Exome/MinorReadsCoverage/MinorReadsCoverage.py
|
Python
|
mit
| 3,687
| 0.006509
|
#!/usr/bin/env python3
"""
Calculate minor reads coverage.
Minor-read ratio (MRR), which was defined as the ratio of reads for the less
covered allele (reference or variant allele) over the total number of reads
covering the position at which the variant was called. (Only applied to hetero sites.)
@Author: [email protected]
Usage:
MinorReadsCoverage.py (-o| -f cutoff)
MinorReadsCoverage.py -h | --help | -v | --version
Notes:
1. Read vcf file from stdin.
2. MinorReadsCoverage only calculated from hetero sites.
3. Output results to stdout.
Options:
-o Output MinorReadsCoverage statistics.
-f cutoff Filter out sites if MRC < cutoff.
-t tags Comma separated tag list.
-h --help Show this screen.
-v --version Show version.
"""
import sys
from docopt import docopt
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)
def ShowFormat():
'''Input File format example:'''
print('''
''');
if __name__ == '__main__':
args = docopt(__doc__, version='1.0')
#print(args)
# if(args['--format']):
# ShowFormat()
# sys.exit(-1)
from pysam import VariantFile
    vcfMetaCols=9 #number of columns for vcf meta information.
tags = ['GT','AD'] #GATK, AD: reads depth for ref and alt allele.
cutoff = 1
if args['-f']:
cutoff = float(args['-f'])
# def depth(geno):
# '''reformat a genotype record'''
# ss = geno.split(':')
# if ss[outGenoArrayIndex[0]][0] != '.' and :
#
#
# try:
# out = [ss[x] for x in outGenoArrayIndex]
# return out
# except IndexError:
# sys.stderr.write('ERROR: Index out of range. geno: %s, out index: %s\n'%(geno, str(outGenoArrayIndex)))
# sys.exit(-1)
outGenoArrayIndex = []
def setoutGenoArrayIndex(oldFormatTags):
outGenoArrayIndex.clear()
ss = oldFormatTags.upper().split(':')
for x in tags:
try:
y = ss.index(x)
outGenoArrayIndex.append(y)
except ValueError:
sys.stderr.write('ERROR: can not find tag: "%s", from input vcf FORMAT field.\n'%(x))
sys.exit(-1)
infile = VariantFile('-', 'r')
if args['-f']:
sys.stdout.write(str(infile.header))
if args['-o']:
sys.stdout.write('#CHROM\tPOS\tREF\tALT\tMRR\n')
for line in infile:
ss = str(line).strip().split()
setoutGenoArrayIndex(ss[8]) #Check format line by line.
ref = 0
alt = 0
for x in ss[vcfMetaCols:]:
#if not outGenoArrayIndex:
# setoutGenoArrayIndex(ss[8])
#out.append(reformat(x))
temp = x.split(':')
if temp[outGenoArrayIndex[0]][0] != '.' and temp[outGenoArrayIndex[0]][0] != temp[outGenoArrayIndex[0]][2]:
ad =[int(y) for y in temp[outGenoArrayIndex[1]].split(',')]
ref += ad[0]
alt += sum(ad[1:])
out = ss[:2] + ss[3:5]
mrc = 1
if ref == 0 and alt == 0:
mrc = 1
else:
minor = min(alt*1.0/(alt + ref), ref*1.0/(alt + ref))
mrc = minor
if args['-o']:
out = ss[:2] + ss[3:5] + ['%.4f'%(mrc)]
sys.stdout.write('%s\n'%('\t'.join(out)))
if args['-f']:
if mrc >= cutoff:
sys.stdout.write('%s'%(str(line)))
infile.close()
sys.stdout.flush()
sys.stdout.close()
sys.stderr.flush()
sys.stderr.close()
|
cailloumajor/home-web
|
backend/home_web/settings.py
|
Python
|
gpl-3.0
| 7,131
| 0
|
# -*- coding: utf-8 -*-
# pylint: disable=no-init
"""
Django settings for home_web project.
Generated by 'django-admin startproject' using Django 1.10.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
import re
from celery.schedules import crontab
from configurations import Configuration, values
class CeleryBrokerURLValue(values.Value):
"""
Value subclass that converts 'unix://' scheme to 'redis+socket://'.
"""
def to_python(self, value):
return re.sub(
r'^unix://', 'redis+socket://', super().to_python(value)
)
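    # e.g. 'unix:///var/run/redis.sock' becomes
    # 'redis+socket:///var/run/redis.sock', the scheme Celery expects for a
    # Redis broker over a Unix domain socket.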
class Common(Configuration):
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '5w$77!lmo&g)e5j6uhl4i2=nffnnj0y1y07(9@-f)@b7*g%+sd'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
INTERNAL_IPS = [
'127.0.0.1',
]
# Application definition
INSTALLED_APPS = [
'core.apps.CoreConfig',
'heating.apps.HeatingConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_filters',
'rest_framework',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'home_web.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'home_web.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = values.DatabaseURLValue(
'sqlite:///{}'.format(os.path.join(BASE_DIR, 'db.sqlite3'))
)
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME':
'django.contrib.auth.password_validation.'
'UserAttributeSimilarityValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.'
'MinimumLengthValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.'
'CommonPasswordValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.'
'NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'fr-FR'
TIME_ZONE = 'Europe/Paris'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
REDIS_URL = values.Value()
CELERY_BROKER_URL = CeleryBrokerURLValue(environ_name='REDIS_URL')
CELERY_TASK_ROUTES = {
'heating.tasks.*': {'queue': 'celery', 'delivery_mode': 'transient'},
}
CELERY_BEAT_SCHEDULE = {
'update-pilotwire-status': {
'task': 'heating.pilotwire.update_status',
'schedule': 60,
},
'set-pilotwire-modes': {
'task': 'heating.pilotwire.set_modes',
'schedule': crontab(minute='*/15'),
},
'weekly-clear-old-derogations': {
'task': 'heating.tasks.clearoldderogations',
'schedule': crontab(minute=0, hour=0, day_of_week='mon'),
'args': (7,),
},
}
CELERY_TIME_ZONE = TIME_ZONE
PILOTWIRE_IP = values.IPValue()
PILOTWIRE_PORT = values.IntegerValue()
class Dev(Common):
"""
The in-development settings and the default configuration
"""
INSTALLED_APPS = Common.INSTALLED_APPS + [
'debug_toolbar',
]
MIDDLEWARE = [
'debug_toolbar.middleware.DebugToolbarMiddleware',
] + Common.MIDDLEWARE
class Test(Common):
"""
The testing settings
"""
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'pilotwire_testing_handler': {
'level': 'INFO',
'class': 'heating.log.PilotwireHandler',
'logLength': 5,
},
},
'loggers': {
'pilotwire_testing_logger': {
'handlers': ['pilotwire_testing_handler'],
'level': 'INFO',
},
},
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
ADMINS = [('Test', '[email protected]')]
class Prod(Common):
"""
The in-production settings
"""
DEBUG = False
SECRET_KEY = values.SecretValue()
ADMINS = values.SingleNestedTupleValue()
ALLOWED_HOSTS = values.ListValue()
DATABASES = values.DatabaseURLValue()
EMAIL = values.EmailURLValue()
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
),
'DEFAULT_PARSER_CLASSES': (
'rest_framework.parsers.JSONParser',
),
}
STATIC_ROOT = values.PathValue()
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'pilotwire_handler': {
'level': 'INFO',
'class': 'heating.log.PilotwireHandler',
'logLength': 500,
},
},
'loggers': {
'heating.pilotwire': {
                'handlers': ['pilotwire_handler'],
'level': 'INFO',
},
},
}
# Authentication
AUTHENTICATION_BACKENDS = [
'core.auth.backends.SettingsBackend',
] + Common.AUTHENTICATION_BACKENDS # pylint: disable=no-member
ADMIN_LOGIN = values.Value()
ADMIN_PASSWORD = values.SecretValue()
|
tplavcic/percona-xtradb-cluster
|
mysql-test/suite/tokudb/t/change_column_char.py
|
Python
|
gpl-2.0
| 1,383
| 0.006508
|
#!/usr/bin/env python
import sys
def gen_test(n):
print "CREATE TABLE t (a CHAR(%d));" % (n)
for v in [ 'hi', 'there', 'people' ]:
print "INSERT INTO t VALUES ('%s');" % (v)
for i in range(2,256):
if i < n:
print "--replace_regex /MariaDB/XYZ/ /MySQL/XYZ/"
print "--error ER_UNSUPPORTED_EXTENSION"
else:
print "CREATE TABLE ti LIKE t;"
print "ALTER TABLE ti ENGINE=myisam;"
print "INSERT INTO ti SELECT * FROM t;"
print "ALTER TABLE ti CHANGE COLUMN a a CHAR(%d);" % (i)
print "ALTER TABLE t CHANGE COLUMN a a CHAR(%d);" % (i)
if i >= n:
print "let $diff_tables=test.t, test.ti;"
print "source inclu
|
de/diff_tables.inc;"
print "DROP TABLE ti;"
print "DROP TABLE t;"
def main():
print "source include/have_tokudb.inc;"
print "# this test is generated by change_char.py"
print "# test char expansion"
print "--disable_warnings"
print "DROP TABLE IF EXISTS t,ti;"
print "--enable_warnings"
print "SET SESSION DEFAULT_STORAGE_ENGINE=\"TokuDB\";"
print "SET SESSION TOKUDB_DISABLE_SLOW_ALTER=1;"
    # all n takes too long to run, so here is a subset of tests
for n in [ 1, 2, 3, 4, 5, 6, 7, 8, 16, 31, 32, 63, 64, 127, 128, 254, 255 ]:
gen_test(n)
return 0
sys.exit(main())
|
devilry/devilry-django
|
devilry/devilry_dbcache/admin.py
|
Python
|
bsd-3-clause
| 1,516
| 0
|
from django.contrib import admin
from devilry.devilry_dbcache.models import AssignmentGroupCachedData
@admin.register(AssignmentGroupCachedData)
class AssignmentGroupCachedDataAdmin(admin.ModelAdmin):
list_display = [
'id',
'group',
        'first_feedbackset',
'last_feedbackset',
'last_published_feedbackset',
'new_attempt_count',
'public_total_comment_count',
'public_student_comment_count',
'public_examiner_comment_count',
'public_admin_comment_count',
'public_student_file_upload_count',
'examiner_count',
'candidate_count'
]
search_fields = [
'id',
'group__id',
'group__parentnode__id',
'group__parentnode__short_name',
'group__parentnode__long_name',
'group__parentnode__parentnode__id',
'group__parentnode__parentnode__short_name',
'group__parentnode__parentnode__long_name',
'group__parentnode__parentnode__parentnode__id',
'group__parentnode__parentnode__parentnode__short_name',
'group__parentnode__parentnode__parentnode__long_name',
        'group__candidates__relatedstudent__candidate_id',
'group__candidates__relatedstudent__user__shortname',
'group__candidates__relatedstudent__user__fullname',
'group__examiners__relatedexaminer__user__shortname',
'group__examiners__relatedexaminer__user__fullname',
]
|
willprice/arduino-sphere-project
|
scripts/example_direction_finder/temboo/Library/Utilities/Dates/GetTimestamp.py
|
Python
|
gpl-2.0
| 6,772
| 0.005472
|
# -*- coding: utf-8 -*-
###############################################################################
#
# GetTimestamp
# Returns the current date and time, expressed as seconds or milliseconds since January 1, 1970 (epoch time).
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetTimestamp(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the GetTimestamp Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(GetTimestamp, self).__init__(temboo_session, '/Library/Utilities/Dates/GetTimestamp')
def new_input_set(self):
return GetTimestampInputSet()
def _make_result_set(self, result, path):
return GetTimestampResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return GetTimestampChoreographyExecution(session, exec_id, path)
class GetTimestampInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the GetTimestamp
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AddDays(self, value):
"""
Set the value of the AddDays input for this Choreo. ((optional, integer) Adds the specified number of days to the specified date serial number. A negative number will subtract.)
"""
super(GetTimestampInputSet, self)._set_input('AddDays', value)
def set_AddHours(self, value):
"""
        Set the value of the AddHours input for this Choreo. ((optional, integer) Adds the specified number of hours to the specified date serial number. A negative number will subtract.)
"""
super(GetTimestampInputSet, self)._set_input('AddHours', value)
def set_AddMinutes(self, value):
"""
Set the value of the AddMinutes input for this Choreo. ((optional, integer) Adds the specified number of minutes to the specified date serial number. A negative number will subtract.)
"""
super(GetTimestampInputSet, self)._set_input('AddMinutes', value)
def set_AddMonths(self, value):
"""
Set the value of the AddMonths input for this Choreo. ((optional, integer) Adds the specified number of months to the specified date serial number. A negative number will subtract.)
"""
super(GetTimestampInputSet, self)._set_input('AddMonths', value)
def set_AddSeconds(self, value):
"""
Set the value of the AddSeconds input for this Choreo. ((optional, integer) Adds the specified number of seconds to the specified date serial number. A negative number will subtract.)
"""
super(GetTimestampInputSet, self)._set_input('AddSeconds', value)
def set_AddYears(self, value):
"""
Set the value of the AddYears input for this Choreo. ((optional, integer) Adds the specified number of years to the specified date serial number. A negative number will subtract.)
"""
super(GetTimestampInputSet, self)._set_input('AddYears', value)
def set_Granularity(self, value):
"""
Set the value of the Granularity input for this Choreo. ((optional, string) Set to "seconds" to return the number of seconds since the epoch. Defaults to "milliseconds".)
"""
super(GetTimestampInputSet, self)._set_input('Granularity', value)
def set_SetDay(self, value):
"""
Set the value of the SetDay input for this Choreo. ((optional, integer) Sets the day of month (1–31) of the specified date serial number.)
"""
super(GetTimestampInputSet, self)._set_input('SetDay', value)
def set_SetHour(self, value):
"""
Set the value of the SetHour input for this Choreo. ((optional, integer) Sets the hours (0–23) of the specified date serial number.)
"""
super(GetTimestampInputSet, self)._set_input('SetHour', value)
def set_SetMinute(self, value):
"""
Set the value of the SetMinute input for this Choreo. ((optional, integer) Sets the minutes (0–59) of the specified date serial number.)
"""
super(GetTimestampInputSet, self)._set_input('SetMinute', value)
def set_SetMonth(self, value):
"""
Set the value of the SetMonth input for this Choreo. ((optional, integer) Sets the month (1–12) of the specified date serial number.)
"""
super(GetTimestampInputSet, self)._set_input('SetMonth', value)
def set_SetSecond(self, value):
"""
Set the value of the SetSecond input for this Choreo. ((optional, integer) Sets the seconds (0–59) of the specified date serial number.)
"""
super(GetTimestampInputSet, self)._set_input('SetSecond', value)
def set_SetYear(self, value):
"""
Set the value of the SetYear input for this Choreo. ((optional, integer) Sets the year (such as 1989) of the specified date serial number.)
"""
super(GetTimestampInputSet, self)._set_input('SetYear', value)
class GetTimestampResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the GetTimestamp Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Timestamp(self):
"""
Retrieve the value for the "Timestamp" output from this Choreo execution. ((date) A the current timestamp, expressed as the number of seconds or milliseconds since January 1, 1970 (epoch time). The Granularity input is used to indicate seconds or milliseconds.)
"""
return self._output.get('Timestamp', None)
class GetTimestampChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return GetTimestampResultSet(response, path)
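# A minimal usage sketch, assuming the Temboo Python SDK's usual generated-code
# pattern (a GetTimestamp choreo class plus new_input_set()/execute_with_results());
# the account name and app key values below are placeholders:
#
#   from temboo.core.session import TembooSession
#
#   session = TembooSession('ACCOUNT', 'APP_KEY_NAME', 'APP_KEY_VALUE')
#   choreo = GetTimestamp(session)
#   inputs = choreo.new_input_set()
#   inputs.set_Granularity('seconds')   # epoch seconds instead of milliseconds
#   inputs.set_AddHours(2)              # shift the timestamp forward two hours
#   results = choreo.execute_with_results(inputs)
#   print(results.get_Timestamp())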
|
modcracker/Tork
|
tork/core/manage/stop.py
|
Python
|
mit
| 347
| 0.054755
|
import sys, os
def stop(argv):
    pwd = os.getcwd()
# if argv given, folders = [argv]
# else, folders = pwd
### for each folder in folders
##### check pwd/folder/temp/pids for existing pid files
####### kill -15 & rm files
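    # A sketch of what the comments above describe, under the assumption that
    # pid files live in <folder>/temp/pids and hold one PID per file; the
    # layout and the kill -15 (SIGTERM) choice come straight from the pseudocode:
    #
    #   folders = [argv] if argv else [pwd]
    #   for folder in folders:
    #       pid_dir = os.path.join(pwd, folder, 'temp', 'pids')
    #       if not os.path.isdir(pid_dir):
    #           continue
    #       for pid_file in os.listdir(pid_dir):
    #           path = os.path.join(pid_dir, pid_file)
    #           with open(path) as f:
    #               os.kill(int(f.read().strip()), 15)  # SIGTERM
    #           os.remove(path)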
def main():
print "Please don't t
|
ry to run this script separately."
if __name__ == '__main__':
main()
|
oxc/Flexget
|
flexget/tests/test_list_interface.py
|
Python
|
mit
| 9,931
| 0.000503
|
from __future__ import unicode_literals, division, absolute_import
from builtins import * # pylint: disable=unused-import, redefined-builtin
class TestListInterface(object):
config = """
templates:
global:
disable: [seen]
tasks:
list_get:
entry_list: test_list
list_1_get:
entry_list: list 1
list_2_get:
entry_list: list 2
test_list_add:
mock:
- {title: 'title 1', url: "http://mock.url/file1.torrent"}
- {title: 'title 2', url: "http://mock.url/file2.torrent"}
accept_all: yes
list_add:
- entry_list: test_list
list_1_add:
mock:
- {title: 'title 1', url: "http://mock.url/file1.torrent"}
- {title: 'title 2', url: "http://mock.url/file2.torrent"}
accept_all: yes
list_add:
- entry_list: list 1
list_2_add:
mock:
- {title: 'title 3', url: "http://mock.url/file3.torrent"}
accept_all: yes
list_add:
- entry_list: list 2
test_multiple_list_add:
mock:
- {title: 'title 1', url: "http://mock.url/file1.torrent"}
- {title: 'title 2', url: "http://mock.url/file2.torrent"}
accept_all: yes
list_add:
- entry_list: list 1
- entry_list: list 2
test_list_accept_with_remove:
mock:
- {title: 'title 1', url: "http://mock.url/file1.torrent"}
- {title: 'title 2', url: "http://mock.url/file2.torrent"}
- {title: 'title 3', url: "http://mock.url/file3.torrent"}
list_match:
from:
- entry_list: test_list
test_list_accept_without_remove:
mock:
            - {title: 'title 1', url: "http://mock.url/file1.torrent"}
            - {title: 'title 2', url: "http://mock.url/file2.torrent"}
- {title: 'title 3', url: "http://mock.url/file3.torrent"}
list_match:
from:
- entry_list: test_list
remove_on_match: no
test_multiple_list_accept_with_remove:
mock:
- {title: 'title 1', url: "http://mock.url/file1.torrent"}
- {title: 'title 2', url: "http://mock.url/file2.torrent"}
- {title: 'title 3', url: "http://mock.url/file3.torrent"}
list_match:
from:
- entry_list: list 1
- entry_list: list 2
test_multiple_list_accept_without_remove:
mock:
- {title: 'title 1', url: "http://mock.url/file1.torrent"}
- {title: 'title 2', url: "http://mock.url/file2.torrent"}
- {title: 'title 3', url: "http://mock.url/file3.torrent"}
list_match:
from:
- entry_list: list 1
- entry_list: list 2
remove_on_match: no
test_list_remove:
mock:
- {title: 'title 1', url: "http://mock.url/file1.torrent"}
accept_all: yes
list_remove:
- entry_list: test_list
test_list_reject:
mock:
- {title: 'title 1', url: "http://mock.url/file1.torrent"}
- {title: 'title 3', url: "http://mock.url/file3.torrent"}
list_match:
from:
- entry_list: test_list
action: reject
add_for_list_queue:
mock:
- {title: 'The 5th Wave', url: "", imdb_id: "tt2304933"}
- {title: 'Drumline', url: "", imdb_id: "tt0303933"}
accept_all: yes
list_add:
- movie_list: test_list_queue
test_list_queue:
mock:
- {title: 'Drumline 2002 1080p BluRay DTS-HD MA 5 1 x264-FuzerHD', url: "http://mock.url/Drumline 2002 1080p BluRay DTS-HD MA 5 1 x264-FuzerHD.torrent", imdb_id: "tt0303933"}
- {title: 'Drumline 2002 720p BluRay DTS-HD MA 5 1 x264-FuzerHD', url: "http://mock.url/Drumline 2002 720p BluRay DTS-HD MA 5 1 x264-FuzerHD.torrent", imdb_id: "tt0303933"}
- {title: 'Drumline 2002 DVDRip x264-FuzerHD', url: "http://mock.url/Drumline 2002 DVDRip x264-FuzerHD.torrent", imdb_id: "tt0303933"}
list_match:
from:
- movie_list: test_list_queue
single_match: yes
get_for_list_queue:
movie_list: test_list_queue
test_list_clear_start:
entry_list: test_list
list_clear:
what:
- entry_list: test_list
test_list_clear_exit:
entry_list: test_list
list_clear:
what:
- entry_list: test_list
phase: exit
test_list_clear_input:
entry_list: test_list
list_clear:
what:
- entry_list: test_list
phase: input
"""
def test_list_add(self, execute_task):
task = execute_task('test_list_add')
assert len(task.entries) == 2
task = execute_task('list_get')
assert len(task.entries) == 2
def test_multiple_list_add(self, execute_task):
task = execute_task('test_multiple_list_add')
assert len(task.entries) == 2
task = execute_task('list_1_get')
assert len(task.entries) == 2
task = execute_task('list_2_get')
assert len(task.entries) == 2
def test_list_accept_with_remove(self, execute_task):
task = execute_task('test_list_add')
assert len(task.entries) == 2
task = execute_task('list_get')
assert len(task.entries) == 2
task = execute_task('test_list_accept_with_remove')
assert len(task.all_entries) == 3
assert len(task.accepted) == 2
task = execute_task('list_get')
assert len(task.entries) == 0
def test_list_accept_without_remove(self, execute_task):
task = execute_task('test_list_add')
assert len(task.entries) == 2
task = execute_task('list_get')
assert len(task.entries) == 2
task = execute_task('test_list_accept_without_remove')
assert len(task.all_entries) == 3
assert len(task.accepted) == 2
task = execute_task('list_get')
assert len(task.entries) == 2
def test_multiple_list_accept_with_remove(self, execute_task):
task = execute_task('list_1_add')
assert len(task.entries) == 2
task = execute_task('list_2_add')
assert len(task.entries) == 1
task = execute_task('list_1_get')
assert len(task.entries) == 2
task = execute_task('list_2_get')
assert len(task.entries) == 1
task = execute_task('test_multiple_list_accept_with_remove')
assert len(task.accepted) == 3
task = execute_task('list_1_get')
assert len(task.entries) == 0
task = execute_task('list_2_get')
assert len(task.entries) == 0
def test_multiple_list_accept_without_remove(self, execute_task):
task = execute_task('list_1_add')
assert len(task.entries) == 2
task = execute_task('list_2_add')
assert len(task.entries) == 1
task = execute_task('list_1_get')
assert len(task.entries) == 2
task = execute_task('list_2_get')
assert len(task.entries) == 1
task = execute_task('test_multiple_list_accept_without_remove')
assert len(task.accepted) == 3
task = execute_task('list_1_get')
assert len(task.entries) == 2
task = execute_task('list_2_get')
assert len(task.entries) == 1
def test_list_remove(self, execute_task):
task = execute_task('test_list_add')
assert len(task.entries) == 2
task = execute_task('list_get')
assert len(task.entries) == 2
task = execute_task('test_list_remove')
|
epifanio/ecoop
|
ecooputil.py
|
Python
|
lgpl-3.0
| 10,832
| 0.003139
|
#!/usr/bin/python
###############################################################################
#
#
# Project: ECOOP, sponsored by The National Science Foundation
# Purpose: this code is part of the Cyberinfrastructure developed for the ECOOP project
# http://tw.rpi.edu/web/project/ECOOP
# from the TWC - Tetherless World Constellation
# at RPI - Rensselaer Polytechnic Institute
# founded by NSF
#
# Author: Massimo Di Stefano , [email protected] -
# http://tw.rpi.edu/web/person/MassimoDiStefano
#
###############################################################################
# Copyright (c) 2008-2014 Tetherless World Constellation at Rensselaer Polytechnic Institute
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import os
import sys
from zipfile import ZipFile, ZIP_DEFLATED
from contextlib import closing
import paramiko
import qrcode
from IPython.core.display import HTML, Image
from IPython.display import display, Javascript
import envoy
from datetime import datetime
class shareUtil():
def zipdir(self, basedir, archivename, rm='no'):
"""
utility function to zip a single file or a directory
usage : zipdir(input, output)
@param basedir: input file or directory
@param archivename: output file.zip
@param rm: [yes, no], remove source file (optional, default=no)
"""
assert os.path.isdir(basedir)
with closing(ZipFile(archivename, "w", ZIP_DEFLATED)) as z:
for root, dirs, files in os.walk(basedir):
#NOTE: ignore empty directories
for fn in files:
#print fn
absfn = os.path.join(root, fn)
zfn = absfn[len(basedir) + len(os.sep):] #XXX: relative path
z.write(absfn, zfn)
if rm != 'no':
instruction = 'rm -rf %s' % basedir
os.system(instruction)
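    # Quick usage sketch (the paths are placeholders, not from the original
    # file); with the default rm='no' the source directory is kept:
    #
    #   util = shareUtil()
    #   util.zipdir('results', 'results.zip')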
def uploadfile(self, username='epi', password='epi', hostname='localhost', port=22,
inputfile=None, outputfile=None, link=False, apacheroot='/var/www/', zip=False, qr=False):
'''
        utility to upload file on remote server using sftp protocol
        usage : uploadfile(inputfile, outputfile)
        @rtype : str
        @param username: str - username on remote server
@param password: str - password to access remote server
@param hostname: str - hostname of remote server (default: localhost)
@param port: port number on remote server (default: 22)
@param inputfile: str - local path to the file to uploaded
@param outputfile: remote path to the file to upload
        @param link: boolean [True, False], default False; print a link to download the file
                     (remote path needs to be in a web available directory)
        @param apacheroot: path to apache root, default '/var/www/'; required if link == True
        @param zip: boolean, default False; zip the output
        @param qr: boolean, default False; return qrcode as image
        @return: link to uploaded file if link=True, or qr image if qr=True & link=True; None if link is set to False
'''
if zip:
#print 'add zipfile'
zipfile = str(inputfile + '.zip')
self.zipdir(inputfile, zipfile)
inputfile = zipfile
#paramiko.util.log_to_file('/var/www/esr/paramiko.log')
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(hostname, username=username, password=password)
transport = paramiko.Transport((hostname, port))
transport.connect(username=username, password=password)
sftp = paramiko.SFTPClient.from_transport(transport)
parts = outputfile.split('/')
for n in range(2, len(parts)):
path = '/'.join(parts[:n])
#print 'Path:', path,
sys.stdout.flush()
try:
s = sftp.stat(path)
#print 'mode =', oct(s.st_mode)
except IOError as e:
#print e
#print 'adding dir: ', path
sftp.mkdir(path)
try:
sftp.put(remotepath=outputfile, localpath=inputfile)
sftp.close()
transport.close()
print 'file uploaded'
if qr:
if link:
pass
if not link:
                print 'WARNING: qrcode not generated, set the option link to True'
if link:
filelink = outputfile.replace(apacheroot, '')
link = 'http://' + os.path.normpath(hostname + '/' + filelink)
raw_html = '<a href="%s" target="_blank">ESR results</a>' % link
print 'results are now available for download at : ', link
image = None
if qr:
imagefile = parts[-1].split('.')[0] + '.jpeg'
qr = qrcode.QRCode(version=1, error_correction=qrcode.constants.ERROR_CORRECT_L, box_size=10, border=4)
qr.add_data(link)
qr.make(fit=True)
img = qr.make_image()
img.save(imagefile, "JPEG")
print 'alive'
image = Image(imagefile)
return image
if not qr:
return HTML(raw_html)
except IOError:
print "Error: can\'t find file or read data check if input file exist and or remote location is writable"
def gistit(self, filename, jist='/usr/local/bin/jist', type='notebook'):
'''
use the jist utility to paste a txt file on github as gist and return a link to it
usage : gistit(notebookfile)
@param filename: str - path to the a text file or notebook file (.json)
@param jist: str - path to the executable jist (default=/usr/local/bin/jist)
@param type: str - notebook, text
@return: return a link to gist if type=text, link to nbviewer if type=notebook
'''
try:
with open(filename):
link = None
jist = self.which(jist)
if jist:
try:
r = envoy.run('%s -p %s' % (jist, filename))
if type == 'notebook':
link = r.std_out.replace('\n', '').replace('https://gist.github.com',
'http://nbviewer.ipython.org')
if type == 'text':
link = r.std_out.replace('\n', '')
return link
except:
print "can't generate gist, check if jist works bycommand line with: jist -p filename"
if not jist:
print 'cannot find jist utility, check if it is in your path'
        except IOError:
            print "Error: can't find file %s, check that the input file exists" % filename
|
scraperwiki/xypath
|
xypath/xypath.py
|
Python
|
bsd-2-clause
| 31,182
| 0.000994
|
#!/usr/bin/env python
""" musings on order of variables, x/y vs. col/row
Everyone agrees that col 2, row 1 is (2,1) which is xy ordered.
This works well with the name.
Remember that the usual iterators (over a list-of-lists)
is outer loop y first."""
from __future__ import absolute_import
import re
import messytables
import os
import six
from six.moves import range
from six.moves import zip
try:
import hamcrest
have_ham = True
except ImportError:
have_ham = False
import sys
if sys.version_info >= (3, 6):
import typing
REGEX_PATTERN_TYPE = typing.Pattern
else:
REGEX_PATTERN_TYPE = re._pattern_type
from collections import defaultdict
from copy import copy
from itertools import product, takewhile
from xypath.contrib import excel as contrib_excel
UP = (0, -1)
RIGHT = (1, 0)
DOWN = (0, 1)
LEFT = (-1, 0)
UP_RIGHT = (1, -1)
DOWN_RIGHT = (1, 1)
UP_LEFT = (-1, -1)
DOWN_LEFT = (-1, 1)
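# Python 3 removed the builtin cmp(); the local reimplementation below keeps
# the -1/0/1 contract that junction_coord relies on.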
def cmp(x, y):
if x<y:
return -1
if x>y:
return 1
return 0
class XYPathError(Exception):
"""Problems with spreadsheet layouts should raise this or a descendant."""
pass
class JunctionError(RuntimeError, XYPathError):
"""Raised if paranoid _XYCell.junction finds it is returning one of the
input cells - i.e. the input cells are in the same row or column"""
pass
class NoCellsAssertionError(AssertionError, XYPathError):
"""Raised by Bag.assert_one() if the bag contains zero cells."""
pass
class MultipleCellsAssertionError(AssertionError, XYPathError):
"""Raised by Bag.assert_one() if the bag contains multiple cells."""
pass
class LookupConfusionError(AssertionError, XYPathError):
"""Lookup found multiple equally-close headers"""
pass
class NoLookupError(AssertionError, XYPathError):
"""Lookup found no valid header"""
pass
def describe_filter_method(filter_by):
if callable(filter_by):
return "matching a function called {}".format(filter_by.__name__)
if isinstance(filter_by, six.string_types):
return "containing the string {!r}".format(filter_by)
if have_ham and isinstance(filter_by, hamcrest.matcher.Matcher):
return "containing "+str(filter_by)
if isinstance(filter_by, REGEX_PATTERN_TYPE):
return "matching the regex {!r}".format(filter_by.pattern)
else:
return "which we're surprised we found at all"
class _XYCell(object):
"""needs to contain: value, position (x,y), parent bag"""
__slots__ = ['value', 'x', 'y', 'table', 'properties']
def __init__(self, value, x, y, table, properties=None):
        self.value = value  # of appropriate type
self.x = x # column number
self.y = y # row number
self.table = table
if properties is None:
self.properties = {}
else:
self.properties = properties
def __hash__(self):
"""
In order to make a set of cells (used in Bag), they *must* be hashable.
An _XYCell is uniquely identified (by sets, etc) through its position,
content, and parent table.
Note that `properties` is ignored since dicts are unhashable, and
value may be redundant.
"""
return hash((self.value, self.x, self.y, self.table))
def __eq__(self, rhs):
"""See _XYCell.__hash__ for equality conditions"""
return hash(self) == hash(rhs)
def copy(self, new_table=None):
"""Make a copy of the cell.
Its table will be new_table, if specified"""
if new_table is None:
new_table = self.table
return _XYCell(self.value, self.x, self.y,
new_table, self.properties)
def __repr__(self):
return "_XYCell(%r, %r, %r)" % \
(self.value, self.x, self.y)
def __unicode__(self):
return six.text_type(self.value)
def lookup(self, header_bag, direction, strict=False):
"""
Given a single cell (usually a value), a bag containing the headers
of a particular type for that cell, and the direction in which to
search for the relevant header
e.g. for value cell V, searching up:
[ ] [ ]
[O]
[ ]
---> [ ]
V
[ ]
[ ]
the cell with the arrow will be returned.
Strict restricts the selection to cells in the same row/column as
the value, so O is selected instead."""
def mult(cell):
return cell.x * direction[0] + cell.y * direction[1]
def same_row_col(a, b, direction):
return (a.x - b.x == 0 and direction[0] == 0) or \
(a.y - b.y == 0 and direction[1] == 0)
best_cell = None
second_best_cell = None
for target_cell in header_bag.unordered_cells:
if mult(self) <= mult(target_cell):
if not best_cell or mult(target_cell) <= mult(best_cell):
if not strict or same_row_col(self, target_cell, direction):
second_best_cell = best_cell
best_cell = target_cell
if second_best_cell and mult(best_cell) == mult(second_best_cell):
raise LookupConfusionError("{!r} is as good as {!r} for {!r}".format(
best_cell, second_best_cell, self))
if best_cell is None:
raise NoLookupError("No lookup for {!r}".format(self))
return best_cell
def junction(self, other, direction=DOWN, paranoid=True):
""" gets the lower-right intersection of the row of one, and the
column of the other.
paranoid: should we panic if we're hitting one of our input cells?"""
def junction_coord(cells, direction=DOWN):
"""
Under the hood: given two cells and a favoured direction, get the
position of the cell with the column of one and the row of the
other:
A---->+
| ^
| |
| |
v |
*<----B
Both + and * are candidates for the junction of A and B - we take
the one furthest down by default (specified by direction)
>>> cells_dr = (_XYCell(0,1,2,None), _XYCell(0,3,4,None))
>>> junction_coord(cells_dr, DOWN)
(1, 4)
>>> junction_coord(cells_dr, UP)
(3, 2)
>>> junction_coord(cells_dr, LEFT)
(1, 4)
>>> junction_coord(cells_dr, RIGHT)
(3, 2)
>>> cells_tr = (_XYCell(0,1,4,None), _XYCell(0,3,2,None))
>>> junction_coord(cells_tr, DOWN)
(3, 4)
>>> junction_coord(cells_tr, UP)
(1, 2)
>>> junction_coord(cells_tr, LEFT)
(1, 2)
>>> junction_coord(cells_tr, RIGHT)
(3, 4)
"""
new_cells = (
(cells[0].x, cells[1].y),
(cells[1].x, cells[0].y)
)
for index, value in enumerate(direction):
if value == 0:
continue
if cmp(new_cells[0][index], new_cells[1][index]) == value:
return new_cells[0]
else:
return new_cells[1]
(x, y) = junction_coord((self, other), direction)
        if paranoid and ((x, y) == (self.x, self.y) or
                         (x, y) == (other.x, other.y)):
raise JunctionError(
"_XYCell.junction(_XYCell) resulted in a cell which is equal"
" to one of the input cells.\n"
" self: {}\n other: {}\n x: {}\n y: {}".format(
self, other, x, y))
junction_bag = self.table.get_at(x, y)
if len(junction_bag) == 0:
return
self_bag = Bag(self.table)
self_bag.add(self)
other_bag = Bag(self.table)
|
bpain2010/kgecweb
|
hostels/models.py
|
Python
|
gpl-2.0
| 2,053
| 0.032148
|
from django.db import models
from stdimage import StdImageField
from django.core.validators import RegexValidator
import datetime
YEAR_CHOICES = []
for r in range(1980, (datetime.datetime.now().year+1)):
YEAR_CHOICES.append((r,r))
S_CHOICE = [('1stYear','1stYear'),('2ndYear','2ndYear'),('3rdYear','3rdYear'),('4thYear','4thYear')]
# Create your models here.
class Hostel(models.Model):
HostelName = models.CharField(max_length=100, primary_key=True)
HostelType = models.CharField(max_length=10)
HostelSeat = models.IntegerField()
HostelImage = StdImageField(upload_to='Hostels/logo/',variations={'large': (675, 300,True)})
HostelAddress = models.CharField(max_length=200)
HostelDescription = models.TextField()
HostelEmail = models.EmailField()
phone_regex = RegexValidator(regex=r'^\+?1?\d{10,13}$', message="Phone number must be entered in the format: '+999999999'. Up to 13 digits allowed.")
HostelPhoneNo = models.CharField(max_length=13,validators=[phone_regex], blank=True)
def __str__(self):
return self.HostelName
class HostelEvents(models.Model):
HostelName = models.ForeignKey(Hostel)
HostelEventsName = models.CharField(max_length=100)
HostelEventDescription = models.TextField()
def __str__(self):
return self.HostelEventsName
class HostelPictureGalary(models.Model):
HostelName = models.ForeignKey(Hostel)
PictureName = models.CharField(max_length=100)
PictureLocation = StdImageField(upload_to='Hostels/galary/',variations={'large': (675, 300,True)})
def __str__(self):
return self.PictureName
class HostelBody(models.Model):
HostelName = models.ForeignKey(Hostel)
HostelbodyRole = models.CharField(max_length=100)
    HostelbodyRoleYear = models.IntegerField(choices=YEAR_CHOICES, default=datetime.datetime.now().year)
    PersonName = models.CharField(max_length=10)
    PersonYear = models.CharField(max_length=7, choices=S_CHOICE, default='NA')
PersonImage = StdImageField(upload_to='Hostels/gb/',variations={'thumbnail': (300, 200,True)})
def __str__(self):
return self.HostelbodyRole
|
wangcy6/storm_app
|
frame/c++/webrtc-master/modules/audio_coding/audio_network_adaptor/parse_ana_dump.py
|
Python
|
apache-2.0
| 4,718
| 0.010386
|
#!/usr/bin/python2
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
# To run this script please copy "out/<build_name>//pyproto/webrtc/modules/
# audio_coding/audio_network_adaptor/debug_dump_pb2.py" to this folder.
# Then you can run this script with:
# "python parse_ana_dump.py -m uplink_bandwidth_bps -f dump_file.dat"
# You can add as many metrics or decisions to the plot as you like.
# For more information call:
# "python parse_ana_dump.py --help"
import struct
from optparse import OptionParser
import matplotlib.pyplot as plt
import debug_dump_pb2
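# The dump file is a sequence of records, each a 4-byte little-endian size
# prefix followed by a serialized debug_dump_pb2.Event of that size; the two
# helpers below walk that framing.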
def GetNextMessageSize(file_to_parse):
data = file_to_parse.read(4)
if data == '':
return 0
return struct.unpack('<I', data)[0]
def GetNextMessageFromFile(file_to_parse):
message_size = GetNextMessageSize(file_to_parse)
if message_size == 0:
return None
try:
event = debug_dump_pb2.Event()
event.ParseFromString(file_to_parse.read(message_size))
except IOError:
print 'Invalid message in file'
return None
return event
def InitMetrics():
metrics = {}
event = debug_dump_pb2.Event()
for metric in event.network_metrics.DESCRIPTOR.fields:
metrics[metric.name] = {'time': [], 'value': []}
return metrics
def InitDecisions():
decisions = {}
event = debug_dump_pb2.Event()
for decision in event.encoder_runtime_config.DESCRIPTOR.fields:
decisions[decision.name] = {'time': [], 'value': []}
return decisions
def ParseAnaDump(dump_file_to_parse):
with open(dump_file_to_parse, 'rb') as file_to_parse:
metrics = InitMetrics()
decisions = InitDecisions()
first_time_stamp = None
while True:
event = GetNextMessageFromFile(file_to_parse)
if event == None:
break
if first_time_stamp == None:
first_time_stamp = event.timestamp
if event.type == debug_dump_pb2.Event.ENCODER_RUNTIME_CONFIG:
for decision in event.encoder_runtime_config.DESCRIPTOR.fields:
if event.encoder_runtime_config.HasField(decision.name):
decisions[decision.name]['time'].append(event.timestamp -
first_time_stamp)
decisions[decision.name]['value'].append(
getattr(event.encoder_runtime_config, decision.name))
if event.type == debug_dump_pb2.Event.NETWORK_METRICS:
for metric in event.network_metrics.DESCRIPTOR.fields:
if event.network_metrics.HasField(metric.name):
metrics[metric.name]['time'].append(event.timestamp -
first_time_stamp)
metrics[metric.name]['value'].append(
getattr(event.network_metrics, metric.name))
return (metrics, decisions)
def main():
parser = OptionParser()
parser.add_option(
"-f", "--dump_file", dest="dump_file_to_parse", help="dump file to parse")
parser.add_option(
'-m',
'--metric_plot',
default=[],
type=str,
help='metric key (name of the metric) to plot',
dest='metric_keys',
action='append')
parser.add_option(
'-d',
'--decision_plot',
default=[],
type=str,
help='decision key (name of the decision) to plot',
dest='decision_keys',
action='append')
options = parser.parse_args()[0]
if options.dump_file_to_parse == None:
print "No dump file to parse is set.\n"
parser.print_help()
exit()
(metrics, decisions) = ParseAnaDump(options.dump_file_to_parse)
metric_keys = options.metric_keys
decision_keys = options.decision_keys
plot_count = len(metric_keys) + len(decision_keys)
if plot_count == 0:
print "You have to set at least one metric or decision to plot.\n"
parser.print_help()
exit()
plots = []
if plot_count == 1:
f, mp_plot = plt.subplots()
plots.append(mp_plot)
else:
f, mp_plots = plt.subplots(plot_count, sharex=True)
plots.extend(mp_plots.tolist())
for key in metric_keys:
plot = plots.pop()
plot.grid(True)
plot.set_title(key + " (metric)")
plot.plot(metrics[key]['time'], metrics[key]['value'])
for key in decision_keys:
plot = plots.pop()
plot.grid(True)
plot.set_title(key + " (decision)")
plot.plot(decisions[key]['time'], decisions[key]['value'])
f.subplots_adjust(hspace=0.3)
plt.show()
if __name__ == "__main__":
main()
|
deyvedvm/cederj
|
fund-prog/2017-2/ap1/questao3.py
|
Python
|
gpl-3.0
| 2,277
| 0.001778
|
# coding=utf-8
"""
Faça um programa, contendo subprogramas, que:
a) Leia da entrada padrão as dimensões,
|
quantidade de linhas e quantidade de colunas de uma matriz bidimensional;
b) Gere uma matriz, onde cada célula é um número inteiro gerado aleatoriamente no intervalo 0 a 9;
c) Mostre a matriz, linha a linha
|
na tela;
d) Calcule e escreva a média de todos os valores na matriz;
e) Escreva o conteúdo de todas as linhas que possuam todos os seus valores acima da média calculada em (d).
Dica
Utilize a função random.randint(a, b), disponível na API, que retorna um número
randômico inteiro entre a e b, inclusive.
Restrição
Não serão aceitos na correção programas que utilizam o módulo numpy.
Entrada
Dois números inteiros positivos são lidos, representando respectivamente: a quantidade linhas L e quantidade de colunas
C da matriz a ser gerada.
Saída
Seu programa deverá emitir:
L linhas, com C inteiros cada linha, contendo valores no intervalo 0 e 9;
Uma linha em branco;
Uma linha com um número de ponto flutuante, representando a média solicitada;
Uma linha em branco;
Zero ou mais linhas contendo C inteiros, de cada linha com a propriedade pedida
"""
from random import randint
def gera_matriz(linhas, colunas):
matrix = []
    for _ in range(linhas):
        linha = []
for coluna in range(colunas):
linha.append(randint(0, 9))
matrix.append(linha)
return matrix
def imprime_matriz(matriz):
for linha in matriz:
for coluna in linha:
print(coluna, end=" ")
print()
print()
def media_da_matriz(matriz):
total = 0.0
for linha in matriz:
for coluna in linha:
total += coluna
return total / (len(matriz) * len(matriz[0]))
def imprive_valores_acima_da_media(matriz, media):
    # Per item (e) of the spec: print every row whose values are ALL above the mean.
    for linha in matriz:
        if all(coluna > media for coluna in linha):
            for coluna in linha:
                print(coluna, end=" ")
            print()
quantidade_linhas, quantidade_colunas = input().split()
matriz_gerada = gera_matriz(int(quantidade_linhas), int(quantidade_colunas))
imprime_matriz(matriz_gerada)
media = media_da_matriz(matriz_gerada)
print(media)
print()
imprive_valores_acima_da_media(matriz_gerada, media)
|
sharbison3/python-docs-samples
|
bigquery/api/getting_started.py
|
Python
|
apache-2.0
| 2,344
| 0
|
#!/usr/bin/env python
# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command-line application that demonstrates basic BigQuery API usage.
This sample queries a public shakespeare dataset and displays the 10 of
Shakespeare's works with the greatest number of distinct words.
This sample is used on this page:
https://cloud.google.com/bigquery/bigquery-api-quickstart
For more information, see the README.md under /bigquery.
"""
# [START all]
import argparse
import googleapiclient.discovery
from googleapiclient.errors import HttpError
def main(project_id):
# [START build_service]
# Construct the service object for interacting with the BigQuery API.
    bigquery_service = googleapiclient.discovery.build('bigquery', 'v2')
# [END build_service]
try:
# [START run_query]
query_request = bigquery_service.jobs()
query_data = {
'query': (
                'SELECT TOP(corpus, 10) as title, '
'COUNT(*) as unique_words '
'FROM [publicdata:samples.shakespeare];')
}
query_response = query_request.query(
projectId=project_id,
body=query_data).execute()
# [END run_query]
# [START print_results]
print('Query Results:')
for row in query_response['rows']:
print('\t'.join(field['v'] for field in row['f']))
# [END print_results]
except HttpError as err:
print('Error: {}'.format(err.content))
raise err
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('project_id', help='Your Google Cloud Project ID.')
args = parser.parse_args()
main(args.project_id)
# [END all]
|
CCI-MOC/GUI-Backend
|
core/mixins.py
|
Python
|
apache-2.0
| 1,993
| 0
|
"""
core.mixins - Mixins available to use with models
"""
from django.db.models.signals import post_save
def on_changed(sender, **kwargs):
"""
Calls the `model_changed` method and then resets the state.
"""
instance = kwargs.get("instance")
is_new = kwargs.get("created")
dirty_fields = instance.get_dirty_fields()
instance.model_changed(instance.original_state, dirty_fields, is_new)
instance.original_state = instance.to_dict()
class ModelChangedMixin(object):
"""
Mixin for detecting changes to a model
"""
def __init__(self, *args, **kwargs):
super(ModelChangedMixin, self).__init__(*args, **kwargs)
self.original_state = self.to_dict()
identifier = "{0}_model_changed".format(self.__class__.__name__)
post_save.connect(
on_changed, sender=self.__class__, dispatch_uid=identifier)
def to_dict(self):
"""
|
Returns the model as a dict
"""
# Get all the field names that are not relations
keys = (f.nam
|
e for f in self._meta.local_fields if not f.rel)
return {field: getattr(self, field) for field in keys}
def get_dirty_fields(self):
"""
Returns the fields dirty on the model
"""
dirty_fields = {}
current_state = self.to_dict()
for key, value in current_state.items():
if self.original_state[key] != value:
dirty_fields[key] = value
return dirty_fields
def is_dirty(self):
"""
Return whether the model is dirty
An unsaved model is dirty when it has no primary key
or has at least one dirty field.
"""
if not self.pk:
return True
return {} != self.get_dirty_fields()
def model_changed(self, old_fields, new_fields, is_new):
"""
Post-hook for all fields that have been changed.
"""
raise NotImplementedError("Missing method `model_changed`")
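# A minimal usage sketch (hypothetical model, not part of this module): mix
# ModelChangedMixin into a Django model and implement model_changed to react
# to the dirty fields.
#
#   class Instance(ModelChangedMixin, models.Model):
#       name = models.CharField(max_length=100)
#
#       def model_changed(self, old_fields, new_fields, is_new):
#           print("created" if is_new else "changed: %s" % new_fields)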
|
mbedmicro/pyOCD
|
pyocd/probe/pydapaccess/interface/pywinusb_backend.py
|
Python
|
apache-2.0
| 6,277
| 0.001912
|
# pyOCD debugger
# Copyright (c) 2006-2020 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import platform
import collections
from time import sleep
import six
from .interface import Interface
from .common import filter_device_by_usage_page
from ..dap_access_api import DAPAccessIntf
from ....utility.timeout import Timeout
OPEN_TIMEOUT_S = 60.0
LOG = logging.getLogger(__name__)
try:
import pywinusb.hid as hid
except:
    if platform.system() == "Windows":
LOG.error("PyWinUSB is required on a Windows Machine")
IS_AVAILABLE = False
else:
IS_AVAILABLE = True
class PyWinUSB(Interface):
"""! @brief CMSIS-DAP USB interface class using pyWinUSB for the backend.
"""
isAvailable = IS_AVAILABLE
def __init__(self):
super(PyWinUSB, self).__init__()
# Vendor page and usage_id = 2
self.report = None
# deque used here instead of synchronized Queue
# since read speeds are ~10-30% faster and are
# comparable to a list based implementation.
self.rcv_data = collections.deque()
self.device = None
# handler called when a report is received
def rx_handler(self, data):
# LOG.debug("rcv<(%d) %s" % (len(data), ' '.join(['%02x' % i for i in data])))
self.rcv_data.append(data[1:])
def open(self):
self.device.set_raw_data_handler(self.rx_handler)
# Attempt to open the device.
# Note - this operation must be retried since
# other instances of pyOCD listing board can prevent
# opening this device with exclusive access.
with Timeout(OPEN_TIMEOUT_S) as t_o:
while t_o.check():
# Attempt to open the device
try:
self.device.open(shared=False)
break
except hid.HIDError:
pass
# Attempt to open the device in shared mode to make
# sure it is still there
try:
self.device.open(shared=True)
self.device.close()
except hid.HIDError as exc:
# If the device could not be opened in read only mode
# Then it either has been disconnected or is in use
# by another thread/process
raise six.raise_from(DAPAccessIntf.DeviceError("Unable to open device %s"
% self.serial_number), exc)
else:
# If this timeout has elapsed then another process
# has locked this device in shared mode. This should
# not happen.
raise DAPAccessIntf.DeviceError("timed out attempting to open device %s" % self.serial_number)
@staticmethod
def get_all_connected_interfaces():
"""! @brief Returns all the connected CMSIS-DAP devices
"""
all_devices = hid.find_all_hid_devices()
# find devices with good vid/pid
all_mbed_devices = []
for d in all_devices:
if (d.product_name.find("CMSIS-DAP") >= 0):
all_mbed_devices.append(d)
boards = []
for dev in all_mbed_devices:
try:
dev.open(shared=True)
# Perform device-specific filtering.
if filter_device_by_usage_page(dev.vendor_id, dev.product_id, dev.hid_caps.usage_page):
dev.close()
continue
report = dev.find_output_reports()
if len(report) != 1:
dev.close()
continue
new_board = PyWinUSB()
new_board.report = report[0]
new_board.packet_size = len(new_board.report.get_raw_data()) - 1
new_board.vendor_name = dev.vendor_name
new_board.product_name = dev.product_name
new_board.serial_number = dev.serial_number
new_board.vid = dev.vendor_id
new_board.pid = dev.product_id
new_board.device = dev
dev.close()
boards.append(new_board)
except Exception as e:
if (str(e) != "Failure to get HID pre parsed data"):
LOG.error("Receiving Exception: %s", e)
dev.close()
return boards
def write(self, data):
"""! @brief Write data on the OUT endpoint associated to the HID interface
"""
data.extend([0] * (self.packet_size - len(data)))
# LOG.debug("snd>(%d) %s" % (len(data), ' '.join(['%02x' % i for i in data])))
self.report.send([0] + data)
def read(self, timeout=20.0):
"""! @brief Read data on the IN endpoint associated to the HID interface
"""
with Timeout(timeout) as t_o:
while t_o.check():
if len(self.rcv_data):
break
sleep(0)
else:
# Read operations should typically take ~1-2ms.
# If this exception occurs, then it could indicate
# a problem in one of the following areas:
# 1. Bad usb driver causing either a dropped read or write
# 2. CMSIS-DAP firmware problem cause a dropped read or write
# 3. CMSIS-DAP is performing a long operation or is being
# halted in a debugger
raise DAPAccessIntf.DeviceError("Read timed out")
return self.rcv_data.popleft()
def close(self):
"""! @brief Close the interface
"""
LOG.debug("closing interface")
self.device.close()
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2018_01_01/models/connection_monitor_parameters.py
|
Python
|
mit
| 2,077
| 0.001444
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ConnectionMonitorParameters(Model):
"""Parameters that define the operation to create a connection monitor.
All required parameters must be populated in order to send to Azure.
:param source: Required.
:type source:
~azure.mgmt.network.v2018_01_01.models.ConnectionMonitorSource
:param destination: Required.
:type destination:
~azure.mgmt.network.v2018_01_01.models.ConnectionMonitorDestination
:param auto_start: Determines if the connection monitor will start
automatically once created. Default value: True .
:type auto_start: bool
:param monitoring_interval_in_seconds: Monitoring interval in seconds.
Default value: 60 .
:type monitoring_interval_in_seconds: int
"""
_validation = {
'source': {'required': True},
'destination': {'required': True},
}
_attribute_map = {
'source': {'key': 'source', 'type': 'ConnectionMonitorSource'},
'destination': {'key': 'destination', 'type': 'ConnectionMonitorDestination'},
'auto_start': {'key': 'autoStart', 'type': 'bool'},
'monitoring_interval_in_seconds': {'key': 'monitoringIntervalInSeconds', 'type': 'int'},
}
def __init__(self, **kwargs):
super(ConnectionMonitorParameters, self).__init__(**kwargs)
self.source = kwargs.get('source', None)
self.destination = kwargs.get('destination', None)
self.auto_start = kwargs.get('auto_start', True)
self.monitoring_interval_in_seconds = kwargs.get('monitoring_interval_in_seconds', 60)
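# A hedged construction sketch (ConnectionMonitorSource/Destination live in the
# same models package; the resource ID and address below are placeholders):
#
#   params = ConnectionMonitorParameters(
#       source=ConnectionMonitorSource(resource_id='<vm-resource-id>'),
#       destination=ConnectionMonitorDestination(address='example.com', port=80),
#       monitoring_interval_in_seconds=30)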
|
integree/hello-world
|
manage.py
|
Python
|
mit
| 648
| 0
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'project.settings')
    try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
|
calvinhklui/Schedulize
|
GenEdLookup.py
|
Python
|
mit
| 931
| 0.015038
|
import numpy as np
import pandas as pd
import pickle
# Return True or False based on whether a course fulfills a General Education
# Requirement (the college argument is currently unused).
def lookupGenEd(cNum, college):
    fileName = "data/Dietrich Gen Eds.csv"
    picklepath = "data/dietrich_gen_eds.p"
try:
with open(picklepath,'rb') as file:
            gen_eds = pickle.load(file)
except:
df = pd.read_csv(fileName,names=['Dept','Num','Title','1','2'])
gen_eds = set(df['Dept'].values)
with open(picklepath,'wb') as file:
pickle.dump(gen_eds,file)
return cNum in gen_eds
'''
genEdubility = lookupGenEd(73100, "dietrich")
print("73100")
print('Is Gen Ed?:', genEdubility)
print()
genEdubility = lookupGenEd(70100, "tepper")
print("70
|
100")
print('Is Gen Ed?:', genEdubility)
print()
genEdubility = lookupGenEd(15322, "scs")
print("15322")
print('Is Gen Ed?:', genEdubility)
print()
'''
|
immersinn/ncga
|
ncga/extract_billpage_content.py
|
Python
|
mit
| 2,219
| 0.00721
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 7 21:16:09 2017
@author: immersinn
"""
import regex as re
def billsummaryaref_matcher(tag):
return tag.name =='a' and hasattr(tag, 'text') and tag.text == 'View Available Bill Summaries'
def extract_links(soup):
"""Extract Bill Text Links from Bill Page"""
billtext_links = []
target_a = soup.find_all(billsummaryaref_matcher)
if len(target_a) == 1:
target_a = target_a[0]
content_table = target_a.parent.parent.parent
for row in content_table.find_all('tr')[2:]:
row_info = {}
arefs = row.find_all('td')[0].find_all('a')
for a in arefs:
if a.text == 'HTML':
row_info['html'] = a['href']
else:
row_info['label'] = a.text.encode('utf8').replace(b'\xc2\xa0', b' ').decode('utf8')
row_info['pdf'] = a['href']
billtext_links.append(row_info)
return billtext_links
def extract_meta(soup):
"""Extract the(select) a bout the Bill Info Page"""
chamber_re = re.compile(r"(?:(?<=Chamber=))(H|S)")
userid_re = re.compile(r"(?:(?<=UserID=))([0-9]+)")
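    # Illustration (hypothetical link shape, not taken from the source): for an
    # href such as "...?Chamber=H&UserID=123", chamber_re captures 'H' and
    # userid_re captures '123'.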
meta = {}
for kw in ["Sponsors","Counties", "Statutes", "Keywords"]:
tr = soup.find('th', text=kw + ':').parent
content = tr.find('td')
if kw=='Sponsors':
spons = content.find_all('a')
spons_list = []
for a in spons:
                hr = a['href']
spons_list.append({'userid' : userid_re.findall(hr)[0],
'chamber' : chamber_re.findall(hr)[0]})
            meta[kw] = spons_list
elif kw in ['Counties', 'Keywords', 'Statutes']:
meta[kw] = content.text.split(', ')
else:
meta[kw] = content.text
if kw == 'Counties' and \
meta[kw][0].lower().strip() == 'no counties specifically cited':
meta[kw] = None
if kw == 'Statutes' and \
meta[kw][0].lower().strip() == 'no affected general statutes':
meta[kw] = None
return meta
|
Azure/azure-linux-extensions
|
VMEncryption/main/oscrypto/ubuntu_1604/Ubuntu1604EncryptionStateMachine.py
|
Python
|
apache-2.0
| 7,381
| 0.003252
|
#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
import inspect
import os
import sys
import traceback
from time import sleep
scriptdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
maindir = os.path.abspath(os.path.join(scriptdir, '../../'))
sys.path.append(maindir)
transitionsdir = os.path.abspath(os.path.join(scriptdir, '../../transitions'))
sys.path.append(transitionsdir)
from oscrypto import *
from encryptstates import *
from Common import *
from CommandExecutor import *
from DiskUtil import *
from transitions import *
class Ubuntu1604EncryptionStateMachine(OSEncryptionStateMachine):
states = [
State(name='uninitialized'),
State(name='prereq', on_enter='on_enter_state'),
State(name='stripdown', on_enter='on_enter_state'),
State(name='unmount_oldroot', on_enter='on_enter_state'),
State(name='split_root_partition', on_enter='on_enter_state'),
State(name='encrypt_block_device', on_enter='on_enter_state'),
State(name='patch_boot_system', on_enter='on_enter_state'),
State(name='completed'),
]
transitions = [
{
'trigger': 'skip_encryption',
'source': 'uninitialized',
'dest': 'completed'
},
{
'trigger': 'enter_prereq',
'source': 'uninitialized',
'dest': 'prereq'
},
{
'trigger': 'enter_stripdown',
'source': 'prereq',
'dest': 'stripdown',
'before': 'on_enter_state',
'conditions': 'should_exit_previous_state'
},
{
'trigger': 'enter_unmount_oldroot',
'source': 'stripdown',
'dest': 'unmount_oldroot',
'before': 'on_enter_state',
'conditions': 'should_exit_previous_state'
},
{
'trigger': 'retry_unmount_oldroot',
'source': 'unmount_oldroot',
'dest': 'unmount_oldroot',
'before': 'on_enter_state'
},
{
'trigger': 'enter_split_root_partition',
'source': 'unmount_oldroot',
'dest': 'split_root_partition',
'before': 'on_enter_state',
'conditions': 'should_exit_previous_state'
},
{
'trigger': 'enter_encrypt_block_device',
'source': 'split_root_partition',
'dest': 'encrypt_block_device',
'before': 'on_enter_state',
'conditions': 'should_exit_previous_state'
},
{
'trigger': 'enter_patch_boot_system',
'source': 'encrypt_block_device',
'dest': 'patch_boot_system',
'before': 'on_enter_state',
'conditions': 'should_exit_previous_state'
},
{
'trigger': 'stop_machine',
'source': 'patch_boot_system',
'dest': 'completed',
'conditions': 'should_exit_previous_state'
},
]
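    # Happy path through the machine, per the transitions above:
    # uninitialized -> prereq -> stripdown -> unmount_oldroot (retried as
    # needed) -> split_root_partition -> encrypt_block_device ->
    # patch_boot_system -> completed.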
def on_enter_state(self):
super(Ubuntu1604EncryptionStateMachine, self).on_enter_state()
def should_exit_previous_state(self):
# when this is called, self.state is still the "source" state in the transition
return super(Ubuntu1604EncryptionStateMachine, self).should_exit_previous_state()
def __init__(self, hutil, distro_patcher, logger, encryption_environment):
super(Ubuntu1604EncryptionStateMachine, self).__init__(hutil, distro_patcher, logger, encryption_environment)
self.state_objs = {
'prereq': PrereqState(self.context),
'stripdown': StripdownState(self.context),
'unmount_oldroot': UnmountOldrootState(self.context),
'split_root_partition': SplitRootPartitionState(self.context),
'encrypt_block_device': EncryptBlockDeviceState(self.context),
'patch_boot_system': PatchBootSystemState(self.context),
}
self.state_machine = Machine(model=self,
states=Ubuntu1604EncryptionStateMachine.states,
transitions=Ubuntu1604EncryptionStateMachine.transitions,
initial='uninitialized')
def start_encryption(self):
proc_comm = ProcessCommunicator()
self.command_executor.Execute(command_to_execute="mount",
raise_exception_on_failure=True,
communicator=proc_comm)
        if '/dev/mapper/osencrypt' in proc_comm.stdout:
            self.logger.log("OS volume is already encrypted")
self.skip_encryption()
self.log_machine_state()
return
self.log_machine_state()
self.enter_prereq()
        self.log_machine_state()
self.enter_stripdown()
self.log_machine_state()
oldroot_unmounted_successfully = False
attempt = 1
while not oldroot_unmounted_successfully:
self.logger.log("Attempt #{0} to unmount /oldroot".format(attempt))
try:
if attempt == 1:
self.enter_unmount_oldroot()
elif attempt > 10:
raise Exception("Could not unmount /oldroot in 10 attempts")
else:
self.retry_unmount_oldroot()
self.log_machine_state()
except Exception as e:
message = "Attempt #{0} to unmount /oldroot failed with error: {1}, stack trace: {2}".format(attempt,
e,
traceback.format_exc())
self.logger.log(msg=message)
self.hutil.do_status_report(operation='EnableEncryptionOSVolume',
status=CommonVariables.extension_error_status,
status_code=str(CommonVariables.unmount_oldroot_error),
message=message)
sleep(10)
if attempt > 10:
raise Exception(message)
else:
oldroot_unmounted_successfully = True
finally:
attempt += 1
self.enter_split_root_partition()
self.log_machine_state()
self.enter_encrypt_block_device()
self.log_machine_state()
self.enter_patch_boot_system()
self.log_machine_state()
self.stop_machine()
self.log_machine_state()
self._reboot()
|
hryamzik/ansible
|
lib/ansible/modules/network/cumulus/_cl_ports.py
|
Python
|
gpl-3.0
| 2,580
| 0.00155
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Cumulus Networks <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cl_ports
version_added: "2.1"
author: "
|
Cumulus Networks (@CumulusNetworks)"
short_description: Configure Cumulus Switch port attributes (ports.conf)
deprecated:
removed_in: "2.5"
why: The M(nclu) module is designed to be easier to use for individuals who are new to Cumulus Linux by exposing the NCLU interface in an automatable way.
alternative: Use M(nclu) instead.
description:
    - Set the initial port attribute defined in the Cumulus Linux ports.conf
      file. This module does not do any error checking at the moment. Be careful
to not include ports that do not exist on the switch. Carefully read the
original ports.conf file for any exceptions or limitations.
For more details go the Configure Switch Port Attribute Documentation at
U(http://docs.cumulusnetworks.com).
options:
speed_10g:
description:
- List of ports to run initial run at 10G.
speed_40g:
description:
- List of ports to run initial run at 40G.
speed_4_by_10g:
description:
- List of 40G ports that will be unganged to run as 4 10G ports.
speed_40g_div_4:
description:
- List of 10G ports that will be ganged to form a 40G port.
'''
EXAMPLES = '''
# Use cl_ports module to manage the switch attributes defined in the
# ports.conf file on Cumulus Linux
## Unganged port configuration on certain ports
- name: configure ports.conf setup
cl_ports:
speed_4_by_10g:
- swp1
- swp32
speed_40g:
- swp2-31
## Unganged port configuration on certain ports
- name: configure ports.conf setup
cl_ports:
speed_4_by_10g:
- swp1-3
- swp6
speed_40g:
- swp4-5
- swp7-32
'''
RETURN = '''
changed:
description: whether the interface was changed
returned: changed
type: bool
sample: True
msg:
description: human-readable report of success or failure
returned: always
type: string
sample: "interface bond0 config updated"
'''
from ansible.module_utils.common.removed import removed_module
if __name__ == '__main__':
removed_module()
|
pranka02/image_processing_py
|
gaussian.py
|
Python
|
mit
| 652
| 0.019939
|
import numpy as np
def gauss(win, sigma):
    # 2-D Gaussian: peaks at the window centre, normalised by 1/(2*pi*sigma^2).
    x = np.arange(0, win, 1, float)
    y = x[:, np.newaxis]
    x0 = y0 = win // 2
    g = 1 / (2 * np.pi * sigma ** 2) * np.exp(-((x - x0) ** 2 + (y - y0) ** 2) / (2 * sigma ** 2))
    return g
def gaussx(win, sigma):
    # Partial derivative of the Gaussian with respect to x.
    x = np.arange(0, win, 1, float)
    y = x[:, np.newaxis]
    x0 = y0 = win // 2
    gx = -(x - x0) / (2 * np.pi * sigma ** 4) * np.exp(-((x - x0) ** 2 + (y - y0) ** 2) / (2 * sigma ** 2))
    return gx
def gaussy(win, sigma):
    # Partial derivative of the Gaussian with respect to y.
    x = np.arange(0, win, 1, float)
    y = x[:, np.newaxis]
    x0 = y0 = win // 2
    gy = -(y - y0) / (2 * np.pi * sigma ** 4) * np.exp(-((x - x0) ** 2 + (y - y0) ** 2) / (2 * sigma ** 2))
    return gy
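# Quick sanity check (a sketch, not from the original file): the kernel should
# peak at the centre and sum to roughly 1 once win is large relative to sigma.
#
#   k = gauss(7, 1.0)
#   print(k[3, 3] == k.max(), abs(k.sum() - 1.0) < 1e-2)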
|
cnheitman/barf-project
|
tests/core/smt/test_smtsolver.py
|
Python
|
bsd-2-clause
| 13,055
| 0.000153
|
# Copyright (c) 2014, Fundacion Dr. Manuel Sadosky
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from barf.core.reil import ReilParser
from barf.core.smt.smtsymbol import BitVec
from barf.core.smt.smtsymbol import Bool
from barf.core.smt.smtsolver import Z3Solver as SmtSolver
# from barf.core.smt.smtsolver import CVC4Solver as SmtSolver
class SmtSolverBitVecTests(unittest.TestCase):
def setUp(self):
self._address_size = 32
self._parser = ReilParser()
self._solver = SmtSolver()
# Arithmetic operations.
def test_add(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x + y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue(x_val + y_val == z_val)
def test_sub(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x - y == z)
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
# Add constraints to avoid trivial solutions.
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue((x_val - y_val) & 0xffffffff == z_val)
def test_mul(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x * y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue((x_val * y_val) & 0xffffffff == z_val)
def test_div(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x / y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue(x_val / y_val == z_val)
def test_mod(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x % y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue(x_val % y_val == z_val)
def test_neg(self):
x = BitVec(32, "x")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("z", z)
self._solver.add(-x == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
z_val = self._solver.get_value(z)
self.assertTrue(-x_val & 0xffffffff == z_val)
# Bitwise operations.
def test_and(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x & y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue(x_val & y_val == z_val)
def test_xor(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x ^ y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue(x_val ^ y_val == z_val)
def test_or(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
        self._solver.declare_fun("z", z)
self._solver.add(x | y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
        self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue(x_val | y_val == z_val)
def test_lshift(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x << y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
        z_val = self._solver.get_value(z)
        self.assertTrue((x_val << y_val) & 0xffffffff == z_val)
|
nextgis-extra/tests
|
lib_gdal/gdrivers/sgi.py
|
Python
|
gpl-2.0
| 2,443
| 0.006959
|
#!/usr/bin/env python
###############################################################################
# $Id: sgi.py 31335 2015-11-04 00:17:39Z goatbar $
#
# Project: GDAL/OGR Test Suite
# Purpose: SGI Testing.
# Author: Frank Warmerdam <[email protected]>
#
###############################################################################
# Copyright (c) 2007, Frank Warmerdam <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import sys
sys.path.append( '../pymod' )
import gdaltest
###############################################################################
# Read existing simple 1 band SGI file.
def sgi_1():
tst = gdaltest.GDALTest( 'SGI', 'byte.sgi', 1, 4672 )
return tst.testOpen()
###############################################################################
# Write Test grayscale
def sgi_2():
tst = gdaltest.GDALTest( 'SGI', 'byte.tif', 1, 4672 )
return tst.testCreate()
###############################################################################
# Write Test rgb
def sgi_3():
tst = gdaltest.GDALTest( 'SGI', 'rgbsmall.tif', 2, 21053 )
return tst.testCreate()
gdaltest_list = [
sgi_1,
sgi_2,
sgi_3,
]
if __name__ == '__main__':
gdaltest.setup_run( 'SGI' )
gdaltest.run_tests( gdaltest_list )
gdaltest.summarize()
|
ava-project/ava-website
|
website/apps/plugins/forms.py
|
Python
|
mit
| 2,440
| 0
|
import json
import os
import avasdk
from zipfile import ZipFile, BadZipFile
from avasdk.plugins.manifest import validate_manifest
from avasdk.plugins.hasher import hash_plugin
from django import forms
from django.core.validators import ValidationError
from .validators import ZipArchiveValidator
class PluginArchiveField(forms.FileField):
default_validators = [ZipArchiveValidator()]
label = 'Plugin .zip'
def get_prefix(self, archive):
files = archive.namelist()
return os.path.commonpath(files)
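    # e.g. a namelist() of ['myplugin/manifest.json', 'myplugin/main.py']
    # yields the prefix 'myplugin' (file names here are illustrative)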
def get_manifest(self, archive):
try:
with ZipFile(archive.temporary_file_path()) as plugin:
prefix = self.get_prefix(plugin)
prefix = prefix + '/' if len(prefix) else ''
with plugin.open('{}manifest.json'.format(prefix)) as myfile:
manifest = json.loads(myfile.read())
validate_manifest(manifest)
return manifest
except BadZipFile:
raise ValidationError('Bad .zip format')
        except FileNotFoundError:
            raise ValidationError('Error with upload, please try again')
except KeyError:
            raise ValidationError('No manifest.json found in archive')
except json.JSONDecodeError:
            raise ValidationError('Error with manifest.json, bad JSON format')
except avasdk.exceptions.ValidationError as e:
raise ValidationError('Error in manifest.json ({})'.format(e))
def get_readme(self, archive):
try:
with ZipFile(archive.temporary_file_path()) as plugin:
prefix = self.get_prefix(plugin)
prefix = prefix + '/' if len(prefix) else ''
                with plugin.open('{}README.md'.format(prefix)) as myfile:
readme = myfile.read()
return readme
except FileNotFoundError:
raise ValidationError('Error with upload, please try again')
except KeyError:
return None
def clean(self, data, initial=None):
f = super().clean(data, initial)
manifest = self.get_manifest(f)
readme = self.get_readme(f)
return {
'zipfile': f,
'manifest': manifest,
'readme': readme,
'checksum': hash_plugin(f.temporary_file_path()),
}
class UploadPluginForm(forms.Form):
archive = PluginArchiveField()
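# Usage sketch (hypothetical view code, not part of this module):
#   form = UploadPluginForm(request.POST, request.FILES)
#   if form.is_valid():
#       data = form.cleaned_data['archive']
#       # -> dict with 'zipfile', 'manifest', 'readme' and 'checksum' keys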
|
AntonelliLab/seqcap_processor
|
secapr/remove_uninformative_seqs.py
|
Python
|
mit
| 2,167
| 0.032764
|
#author: Tobias Andermann, [email protected]
import os
import sys
import re
import glob
import shutil
import argparse
from Bio import SeqIO
from .utils import CompletePath
# Get arguments
def get_args():
parser = argparse.ArgumentParser(
description="Set the maximum fraction of missing data that you want to allow in an alignment and drop all sequences above this threshold.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument(
'--alignment',
required=True,
action=CompletePath,
default=None,
        help='The alignment in fasta format.'
)
parser.add_argument(
'--maximum_missing',
type=float,
default=0.8,
help='Define the maximal fraction of missing data that you want to allow. All sequences below this threshold will be exported into a new alignment.'
)
parser.add_argument(
'--output',
required=True,
action=CompletePath,
default=None,
        help='The output directory where results will be saved.'
)
return parser.parse_args()
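# Example invocation (file names are illustrative):
#   python remove_uninformative_seqs.py --alignment aln.fasta \
#     --maximum_missing 0.8 --output cleaned/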
args = get_args()
# Set working directory
out_dir = args.output
if not os.path.exists(out_dir):
os.makedirs(out_dir)
# Get other input variables
alignment = args.alignment
max_mis = args.maximum_missing
def manage_homozygous_samples(fasta, threshold, output):
fasta_alignment = SeqIO.parse(open(fasta),'fasta')
with open('%s/cleaned_alignment_all_sequences_less_than_%f_missing_data.fasta' %(output,threshold), 'w') as outfile:
final_seqs = {}
for sample in fasta_alignment:
header = sample.description
sequence = sample.seq
chars = list(sequence)
bad_chars = []
for char in chars:
if char not in ['A','C','T','G','a','c','t','g']:
bad_chars.append(char)
sequence_length = float(len(chars))
count_bad_chars = float(len(bad_chars))
fraction = float(count_bad_chars/sequence_length)
if fraction <= threshold:
final_seqs.setdefault(header,[]).append(sequence)
else:
print("Dropped sequence for", header)
for seqname, seq in final_seqs.items():
sequence = str(seq[0])
outfile.write(">"+seqname+"\n")
outfile.write(sequence+"\n")
manage_homozygous_samples(alignment, max_mis, out_dir)
|
d-plaindoux/fluent-rest
|
tests/runall.py
|
Python
|
lgpl-2.1
| 828
| 0
|
# Copyright (C)2016 D. Plaindoux.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2, or (at your option) any
# later version.
import unittest
import path_parse_test
import path_match_test
import provider_test
import verb_test
import mime_test
import inspection_test
import wsgi_test
if __name__ == '__main__':
suite = unittest.TestSuite()
suite.addTest(path_parse_test.suite())
suite.addTest(path_match_test.suite())
suite.addTest(verb_test.suite())
suite.addTest(mime_test.suite())
suite.addTest(provider_test.suite())
suite.addTest(inspection_test.suite())
    suite.addTest(wsgi_test.suite())
    unittest.TextTestRunner(verbosity=2).run(suite)
|
gzqichang/wa
|
qsite/qsite/apps.py
|
Python
|
mit
| 119
| 0.009009
|
from django.apps import AppConfig
class QsiteConfig(AppConfig):
    name = 'qsite'
verbose_name = '站点管理'
|
GDGLima/contentbox
|
third_party/social/tests/backends/test_yahoo.py
|
Python
|
apache-2.0
| 1,798
| 0
|
import json
from httpretty import HTTPretty
from social.p3 import urlencode
from social.tests.backends.oauth import OAuth1Test
class YahooOAuth1Test(OAuth1Test):
backend_path = 'social.backends.yahoo.YahooOAuth'
user_data_url = 'https://social.yahooapis.com/v1/user/a-guid/profile?' \
'format=json'
expected_username = 'foobar'
access_token_body = json.dumps({
'access_token': 'foobar',
'token_type': 'bearer'
})
request_token_body = urlencode({
'oauth_token_secret': 'foobar-secret',
'oauth_token': 'foobar',
        'oauth_callback_confirmed': 'true'
})
guid_body = json.dumps({
'guid': {
'uri': 'https://social.yahooapis.com/v1/me/guid',
            'value': 'a-guid'
}
})
user_data_body = json.dumps({
'profile': {
'bdRestricted': True,
'memberSince': '2007-12-11T14:40:30Z',
'image': {
'width': 192,
'imageUrl': 'http://l.yimg.com/dh/ap/social/profile/'
'profile_b192.png',
'size': '192x192',
'height': 192
},
'created': '2013-03-18T04:15:08Z',
'uri': 'https://social.yahooapis.com/v1/user/a-guid/profile',
'isConnected': False,
'profileUrl': 'http://profile.yahoo.com/a-guid',
'guid': 'a-guid',
'nickname': 'foobar'
}
})
def test_login(self):
HTTPretty.register_uri(
HTTPretty.GET,
'https://social.yahooapis.com/v1/me/guid?format=json',
status=200,
body=self.guid_body
)
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline()
|
sghai/robottelo
|
tests/robottelo/ui/test_location.py
|
Python
|
gpl-3.0
| 2,887
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import six
import unittest2
from robottelo.ui.location import Location
from robottelo.ui.locators import common_locators
from robottelo.ui.locators import locators
if six.PY2:
import mock
else:
from unittest import mock
class LocationTestCase(unittest2.TestCase):
def test_creation_without_parent_and_without_unassigned_host(self):
location = Location(None)
location.click = mock.Mock()
location.assign_value = mock.Mock()
location.wait_until_element = mock.Mock(return_value=None)
location._configure_location = mock.Mock()
location.select = mock.Mock()
location.create('foo')
click_calls = [
mock.call(locators['location.new']),
            mock.call(common_locators['submit']),
            mock.call(common_locators['submit'])
        ]
        self.assertEqual(3, location.click.call_count)
location.click.assert_has_calls(click_calls, any_order=False)
location.assign_value.assert_called_once_with(
locators['location.name'], 'foo')
# not called if parent is None
location.select.assert_not_called()
location._configure_location.assert_called_once_with(
capsules=None, all_capsules=None, domains=None, envs=None,
hostgroups=None, medias=None, organizations=None, ptables=None,
resources=None, select=True, subnets=None, templates=None,
users=None, params=None
)
def test_creation_with_parent_and_unassigned_host(self):
location = Location(None)
location.click = mock.Mock()
location.assign_value = mock.Mock()
location.wait_until_element = mock.Mock()
location._configure_location = mock.Mock()
location.select = mock.Mock()
configure_arguments = {
arg: arg for arg in
'capsules all_capsules domains hostgroups medias organizations '
'envs ptables resources select subnets templates users params '
'select'.split()
}
location.create('foo', 'parent', **configure_arguments)
click_calls = [
mock.call(locators['location.new']),
mock.call(common_locators['submit']),
mock.call(locators['location.proceed_to_edit']),
mock.call(common_locators['submit'])
]
self.assertEqual(4, location.click.call_count)
location.click.assert_has_calls(click_calls, any_order=False)
location.assign_value.assert_called_once_with(
locators['location.name'], 'foo')
# called only if parent is not None
location.select.assert_called_once_with(
locators['location.parent'], 'parent'
)
location._configure_location.assert_called_once_with(
**configure_arguments)
|
forslund/mycroft-core
|
test/integrationtests/voight_kampff/generate_feature.py
|
Python
|
apache-2.0
| 2,177
| 0
|
# Copyright 2020 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from glob import glob
import json
from pathlib import Path
import sys
"""Convert existing intent tests to behave tests."""
TEMPLATE = """
Scenario: {scenario}
Given an english speaking user
When the user says "{utterance}"
Then "{skill}" should reply with dialog from "{dialog_file}.dialog"
"""
def json_files(path):
"""Generator function returning paths of all json files in a folder."""
for json_file in sorted(glob(str(Path(path, '*.json')))):
yield Path(json_file)
def generate_feature(skill, skill_path):
"""Generate a feature file provided a skill name and a path to the skill.
"""
test_path = Path(skill_path, 'test', 'intent')
case = []
if test_path.exists() and test_path.is_dir():
for json_file in json_files(test_path):
with open(str(json_file)) as test_file:
test = json.load(test_file)
                if 'utterance' in test and 'expected_dialog' in test:
utt = test['utterance']
dialog = test['expected_dialog']
# Simple handling of multiple accepted dialogfiles
if isinstance(dialog, list):
dialog = dialog[0]
case.append((json_file.name, utt, dialog))
output = ''
if case:
output += 'Feature: {}\n'.format(skill)
for c in case:
output += TEMPLATE.format(skill=skill, scenario=c[0],
utterance=c[1], dialog_file=c[2])
return output
if __name__ == '__main__':
print(generate_feature(*sys.argv[1:]))
|
ChantyTaguan/zds-site
|
zds/utils/templatetags/seconds_to_duration.py
|
Python
|
gpl-3.0
| 787
| 0
|
from django import template
import datetime
register = template.Library()
# https://stackoverflow.com/a/8907269/2226755
def strfdelta(tdelta, fmt):
d = {"days": tdelta.days}
d["hours"], rem = divmod(tdelta.seconds, 3600)
d["minutes"], d["seconds"] = divmod(rem, 60)
return fmt.format(**d)
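# Worked example: 3903 seconds is 1h 5m 3s, so
# strfdelta(datetime.timedelta(seconds=3903), "{hours}h{minutes}m{seconds}s")
# returns "1h5m3s"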
# TODO add unit test
@register.filter("seconds_to_duration")
def seconds_to_duration(value):
"""
    Display a human-readable reading-time (or any other duration)
from a duration in seconds.
"""
if value <= 0:
return ""
duration = datetime.timedelta(seconds=value)
    if datetime.timedelta(hours=1) > duration:
return strfdelta(duration, "{minutes}m{seconds}s")
else:
return strfdelta(duration, "{hours}h{minutes}m{seconds}s")
|
olympiag3/olypy
|
tests/unit/test_olymap_skill.py
|
Python
|
apache-2.0
| 1,098
| 0.005464
|
import olymap.skill
def test_learn_time():
tests = (
({}, None),
({'SK': {'tl': ['14']}}, '14'),
({'SK': {'an': ['0']}}, None),
({'IT': {'tl': ['1']}}, None),
({'SK': {'an': ['1']}}, None),
)
for box, answer in tests:
assert olymap.skill.get_learn_time(box) == answer
def test_get_required_skill():
tests = (
({}, None),
        ({'SK': {'rs': ['632']}}, {'id': '632', 'oid': '632', 'name': 'Determine inventory of character'}),
({'SK': {'rs': ['630']}}, {'id': '630', 'oid': '630', 'name': 'Stealth'}),
({'SK': {'re': ['632']}}, None),
({'SL': {'rs': ['632']}}, None),
)
data = {'630': {'firstline': ['630 skill 0'], 'na': ['Stealth'], 'SK': {'tl': ['28'], 'of': ['631', '632', '633', '634', '635'], 're': ['636', '637', '638', '639']}},
            '632': {'firstline': ['632 skill 0'], 'na': ['Determine inventory of character'], 'SK': {'tl': ['14'], 'rs': ['630']}}}
for box, answer in tests:
assert olymap.skill.get_required_skill(box, data) == answer
|
joshrule/LOTlib
|
LOTlib/Examples/RationalRules/__init__.py
|
Python
|
gpl-3.0
| 157
| 0.006369
|
from Model import *
import MemoryDecay
# Note we cannot import TwoConcepts here because that ends up modifying the grammar,
# ruining it for example loaders.
|
AmineChikhaoui/nixops
|
nixops/deployment.py
|
Python
|
lgpl-3.0
| 53,483
| 0.004463
|
# -*- coding: utf-8 -*-
import sys
import os.path
import subprocess
import json
import string
import tempfile
import shutil
import threading
import exceptions
import errno
from collections import defaultdict
from xml.etree import ElementTree
import nixops.statefile
import nixops.backends
import nixops.logger
import nixops.parallel
from nixops.nix_expr import RawValue, Function, Call, nixmerge, py2nix
import re
from datetime import datetime, timedelta
import getpass
import traceback
import glob
import fcntl
import itertools
import platform
from nixops.util import ansi_success
import inspect
import time
class NixEvalError(Exception):
pass
class UnknownBackend(Exception):
pass
debug = False
class Deployment(object):
"""NixOps top-level deployment manager."""
default_description = "Unnamed NixOps network"
name = nixops.util.attr_property("name", None)
nix_exprs = nixops.util.attr_property("nixExprs", [], 'json')
nix_path = nixops.util.attr_property("nixPath", [], 'json')
args = nixops.util.attr_property("args", {}, 'json')
description = nixops.util.attr_property("description", default_description)
configs_path = nixops.util.attr_property("configsPath", None)
rollback_enabled = nixops.util.attr_property("rollbackEnabled", False)
datadog_notify = nixops.util.attr_property("datadogNotify", False, bool)
datadog_event_info = nixops.util.attr_property("datadogEventInfo", "")
datadog_tags = nixops.util.attr_property("datadogTags", [], 'json')
    # internal flag marking whether the deployment's network attribute has been evaluated (separately)
network_attr_eval = False
def __init__(self, statefile, uuid, log_file=sys.stderr):
self._statefile = statefile
self._db = statefile._db
self.uuid = uuid
self._last_log_prefix = None
self.extra_nix_path = []
self.extra_nix_flags = []
self.extra_nix_eval_flags = []
self.nixos_version_suffix = None
self._tempdir = None
self.logger = nixops.logger.Logger(log_file)
self._lock_file_path = None
self.expr_path = os.path.realpath(os.path.dirname(__file__) + "/../../../../share/nix/nixops")
if not os.path.exists(self.expr_path):
self.expr_path = os.path.realpath(os.path.dirname(__file__) + "/../../../../../share/nix/nixops")
if not os.path.exists(self.expr_path):
self.expr_path = os.path.dirname(__file__) + "/../nix"
self.resources = {}
with self._db:
c = self._db.cursor()
c.execute("select id, name, type from Resources where deployment = ?", (self.uuid,))
for (id, name, type) in c.fetchall():
r = _create_state(self, type, name, id)
self.resources[name] = r
self.logger.update_log_prefixes()
self.definitions = None
@property
def tempdir(self):
if not self._tempdir:
self._tempdir = nixops.util.SelfDeletingDir(tempfile.mkdtemp(prefix="nixops-tmp"))
return self._tempdir
@property
def machines(self):
return {n: r for n, r in self.resources.items() if is_machine(r)}
@property
def active(self): # FIXME: rename to "active_machines"
return {n: r for n, r in self.resources.items() if is_machine(r) and not r.obsolete}
@property
def active_resources(self):
return {n: r for n, r in self.resources.items() if not r.obsolete}
def get_typed_resource(self, name, type):
res = self.active_resources.get(name, None)
if not res:
raise Exception("resource ‘{0}’ does not exist".format(name))
if res.get_type() != type:
raise Exception("resource ‘{0}’ is not of type ‘{1}’".format(name, type))
return res
def get_machine(self, name):
res = self.active_resources.get(name, None)
if not res:
raise Exception("machine ‘{0}’ does not exist".format(name))
if not is_machine(res):
raise Exception("resource ‘{0}’ is not a machine".format(name))
return res
def _set_attrs(self, attrs):
"""Update deployment attributes in the state file."""
with self._db:
c = self._db.cursor()
for n, v in attrs.iteritems():
if v == None:
c.execute("delete from DeploymentAttrs where deployment = ? and name = ?", (self.uuid, n))
else:
c.execute("insert or replace into DeploymentAttrs(deployment, name, value) values (?, ?, ?)",
(self.uuid, n, v))
def _set_attr(self, name, value):
"""Update one deployment attribute in the state file."""
self._set_attrs({name: value})
def _del_attr(self, name):
"""Delete a deployment attribute from the state file."""
with self._db:
self._db.execute("delete from DeploymentAttrs where deployment = ? and name = ?", (self.uuid, name))
def _get_attr(self, name, default=nixops.util.undefined):
"""Get a deployment attribute from the state file."""
with self._db:
c = self._db.cursor()
c.execute("select value from DeploymentAttrs where deployment = ? and name = ?", (self.uuid, name))
row = c.fetchone()
            if row != None: return row[0]
            return nixops.util.undefined
def _create_resource(self, name, type):
c = self._db.cursor()
c.execute("select 1 from Resources where deployment = ? and name = ?", (self.uuid, name))
if len(c.fetchall()) != 0:
raise Exception("resource already exists in database!")
c.execute("insert into Resources(deployment, name, type) v
|
alues (?, ?, ?)",
(self.uuid, name, type))
id = c.lastrowid
r = _create_state(self, type, name, id)
self.resources[name] = r
return r
def export(self):
with self._db:
c = self._db.cursor()
c.execute("select name, value from DeploymentAttrs where deployment = ?", (self.uuid,))
rows = c.fetchall()
res = {row[0]: row[1] for row in rows}
res['resources'] = {r.name: r.export() for r in self.resources.itervalues()}
return res
def import_(self, attrs):
with self._db:
for k, v in attrs.iteritems():
if k == 'resources': continue
self._set_attr(k, v)
for k, v in attrs['resources'].iteritems():
if 'type' not in v: raise Exception("imported resource lacks a type")
r = self._create_resource(k, v['type'])
r.import_(v)
def clone(self):
with self._db:
new = self._statefile.create_deployment()
self._db.execute("insert into DeploymentAttrs (deployment, name, value) " +
"select ?, name, value from DeploymentAttrs where deployment = ?",
(new.uuid, self.uuid))
new.configs_path = None
return new
def _get_deployment_lock(self):
if self._lock_file_path is None:
lock_dir = os.environ.get("HOME", "") + "/.nixops/locks"
if not os.path.exists(lock_dir): os.makedirs(lock_dir, 0700)
self._lock_file_path = lock_dir + "/" + self.uuid
class DeploymentLock(object):
def __init__(self, depl):
self._lock_file_path = depl._lock_file_path
self._logger = depl.logger
self._lock_file = None
def __enter__(self):
self._lock_file = open(self._lock_file_path, "w")
fcntl.fcntl(self._lock_file, fcntl.F_SETFD, fcntl.FD_CLOEXEC)
try:
fcntl.flock(self._lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
self._logger.log(
"waiting for exclusive deployment lock..."
)
fcntl.flock(self._lock_file, fcntl.LOCK_EX)
def __exit__(self, excepti
|
Sonicbids/django
|
django/contrib/admin/filters.py
|
Python
|
bsd-3-clause
| 17,360
| 0.00121
|
"""
This encapsulates the logic for displaying filters in the Django admin.
Filters are specified in models with the "list_filter" option.
Each filter subclass knows how to display a filter for a field that passes a
certain test -- e.g. being a DateField or ForeignKey.
"""
import datetime
from django.db import models
from django.db.models.fields.related import ForeignObjectRel, ManyToManyField
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.utils.encoding import smart_text, force_text
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from django.contrib.admin.utils import (get_model_from_relation,
reverse_field_path, get_limit_choices_to_from_path, prepare_lookup_value)
from django.contrib.admin.options import IncorrectLookupParameters
class ListFilter(object):
title = None # Human-readable title to appear in the right sidebar.
template = 'admin/filter.html'
def __init__(self, request, params, model, model_admin):
# This dictionary will eventually contain the request's query string
# parameters actually used by this filter.
self.used_parameters = {}
if self.title is None:
raise ImproperlyConfigured(
"The list filter '%s' does not specify "
"a 'title'." % self.__class__.__name__)
def has_output(self):
"""
Returns True if some choices would be output for this filter.
"""
raise NotImplementedError('subclasses of ListFilter must provide a has_output() method')
def choices(self, cl):
"""
Returns choices ready to be output in the template.
"""
raise NotImplementedError('subclasses of ListFilter must provide a choices() method')
def queryset(self, request, queryset):
"""
Returns the filtered queryset.
"""
raise NotImplementedError('subclasses of ListFilter must provide a queryset() method')
def expected_parameters(self):
"""
Returns the list of parameter names that are expected from the
request's query string and that will be used by this filter.
"""
raise NotImplementedError('subclasses of ListFilter must provide an expected_parameters() method')
class SimpleListFilter(ListFilter):
# The parameter that should be used in the query string for that filter.
parameter_name = None
def __init__(self, request, params, model, model_admin):
super(SimpleListFilter, self).__init__(
request, params, model, model_admin)
if self.parameter_name is None:
raise ImproperlyConfigured(
"The list filter '%s' does not specify "
"a 'parameter_name'." % self.__class__.__name__)
if self.parameter_name in params:
value = params.pop(self.parameter_name)
self.used_parameters[self.parameter_name] = value
lookup_choices = self.lookups(request, model_admin)
if lookup_choices is None:
lookup_choices = ()
self.lookup_choices = list(lookup_choices)
def has_output(self):
return len(self.lookup_choices) > 0
def value(self):
"""
Returns the value (in string format) provided in the request's
query string for this filter, if any. If the value wasn't provided then
returns None.
"""
return self.used_parameters.get(self.parameter_name, None)
def lookups(self, request, model_admin):
"""
Must be overridden to return a list of tuples (value, verbose value)
"""
raise NotImplementedError(
'The SimpleListFilter.lookups() method must be overridden to '
'return a list of tuples (value, verbose value)')
def expected_parameters(self):
return [self.parameter_name]
def choices(self, cl):
yield {
'selected': self.value() is None,
'query_string': cl.get_query_string({}, [self.parameter_name]),
'display': _('All'),
}
for lookup, title in self.lookup_choices:
yield {
'selected': self.value() == force_text(lookup),
'query_string': cl.get_query_string({
self.parameter_name: lookup,
}, []),
'display': title,
}
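# A minimal subclass sketch (illustrative, not part of this module): filtering a
# hypothetical 'year' field by decade via the lookups()/queryset() API documented
# above. The names "DecadeListFilter" and "year" are assumptions for the example.
#
# class DecadeListFilter(SimpleListFilter):
#     title = _('decade')
#     parameter_name = 'decade'
#
#     def lookups(self, request, model_admin):
#         return [('1990s', _('the nineties')), ('2000s', _('the aughts'))]
#
#     def queryset(self, request, queryset):
#         if self.value() == '1990s':
#             return queryset.filter(year__gte=1990, year__lte=1999)
#         if self.value() == '2000s':
#             return queryset.filter(year__gte=2000, year__lte=2009)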
class FieldListFilter(ListFilter):
_field_list_filters = []
_take_priority_index = 0
def __init__(self, field, request, params, model, model_admin, field_path):
self.field = field
self.field_path = field_path
self.title = getattr(field, 'verbose_name', field_path)
super(FieldListFilter, self).__init__(
request, params, model, model_admin)
for p in self.expected_parameters():
if p in params:
value = params.pop(p)
self.used_parameters[p] = prepare_lookup_value(p, value)
def has_output(self):
return True
def queryset(self, request, queryset):
try:
return queryset.filter(**self.used_parameters)
except ValidationError as e:
raise IncorrectLookupParameters(e)
@classmethod
def register(cls, test, list_filter_class, take_priority=False):
if take_priority:
# This is to allow overriding the default filters for certain types
# of fields with some custom filters. The first found in the list
# is used in priority.
cls._field_list_filters.insert(
cls._take_priority_index, (test, list_filter_class))
cls._take_priority_index += 1
else:
cls._field_list_filters.append((test, list_filter_class))
@classmethod
def create(cls, field, request, params, model, model_admin, field_path):
for test, list_filter_class in cls._field_list_filters:
if not test(field):
                continue
            return list_filter_class(field, request, params,
model, model_admin, field_path=field_path)
class RelatedFieldListFilter(FieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
other_model = get_model_from_relation(field)
if hasattr(field, 'rel'):
rel_name = field.rel.get_related_field().name
else:
rel_name = other_model._meta.pk.name
self.lookup_kwarg = '%s__%s__exact' % (field_path, rel_name)
self.lookup_kwarg_isnull = '%s__isnull' % field_path
self.lookup_val = request.GET.get(self.lookup_kwarg)
self.lookup_val_isnull = request.GET.get(self.lookup_kwarg_isnull)
self.lookup_choices = self.field_choices(field, request, model_admin)
super(RelatedFieldListFilter, self).__init__(
field, request, params, model, model_admin, field_path)
if hasattr(field, 'verbose_name'):
self.lookup_title = field.verbose_name
else:
self.lookup_title = other_model._meta.verbose_name
self.title = self.lookup_title
def has_output(self):
if (isinstance(self.field, ForeignObjectRel) and
self.field.field.null or hasattr(self.field, 'rel') and
self.field.null):
extra = 1
else:
extra = 0
return len(self.lookup_choices) + extra > 1
def expected_parameters(self):
return [self.lookup_kwarg, self.lookup_kwarg_isnull]
def field_choices(self, field, request, model_admin):
return field.get_choices(include_blank=False)
def choices(self, cl):
from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE
yield {
'selected': self.lookup_val is None and not self.lookup_val_isnull,
'query_string': cl.get_query_string({},
[self.lookup_kwarg, self.lookup_kwarg_isnull]),
'display': _('All'),
}
for pk_val, val in self.lookup_choices:
yield {
'selected': self.lookup_val == smart_text(pk_val),
'query_string': cl.get_quer
|
anantag/flightsearch
|
backend/flightsearch/rawParser/admin.py
|
Python
|
gpl-2.0
| 139
| 0
|
from django.contrib import admin
from rawParser.models import flightSearch
# Register your models here.
admin.site.register(flightSearch)
|
Ircam-Web/mezzanine-organization
|
organization/shop/management/commands/__init__.py
|
Python
|
agpl-3.0
| 846
| 0
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016-2017 Ircam
# Copyright (c) 2016-2017 Guillaume Pellerin
# Copyright (c) 2016-2017 Emilie Zawadzki
# This file is part of mezzanine-organization.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
IronCountySchoolDistrict/powerschool_apps
|
docs/conf.py
|
Python
|
mit
| 8,001
| 0.001125
|
# -*- coding: utf-8 -*-
#
# powerschool_apps documentation build configuration file, created by
# sphinx-quickstart.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from __future__ import unicode_literals
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'powerschool_apps'
copyright = """2017, Iron County School District"""
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'powerschool_appsdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index',
'powerschool_apps.tex',
'powerschool_apps Documentation',
"""Iron County School District""", 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'powerschool_apps', 'powerschool_apps Documentation',
["""Iron County School District"""], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'powerschool_apps', 'powerschool_apps Documentation',
"""Iron County School District""", 'powerschool_apps',
"""PowerSchool customizations written in Django""", 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
|
looooo/panel-method
|
examples/tests/test_linear_2d_doublet.py
|
Python
|
gpl-3.0
| 276
| 0.003623
|
from parabem.pan2d import doublet_2_1
import parabem
import numpy as np
v1 = parabem.PanelVector2(-1, 0)
v2 = parabem.PanelVector2(1, 0)
panel = parabem.Panel2([v2, v1])
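# Probe the linear 2D doublet influence along y = 0; the panel spans
# x in [-1, 1], so the sample points fall both on and off the panel.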
vals = ([doublet_2_1(parabem.Vector2(x, 0), panel, True) for x in np.linspace(-2, 2, 20)])
print(vals)
|
rlsharpton/byte-of-Python
|
oop_objvar.py
|
Python
|
gpl-2.0
| 1,127
| 0.003549
|
__author__ = 'rls'
class Robot:
"""Represents a robot, with a name."""
# A class variable, counting the number of robots
population = 0
def __init__(self, name):
"""Initializes the data."""
self.name = name
print('(Initializing {})'.format(self.name))
        # When this robot is created, it adds to the population
Robot.population += 1
def die(self):
"""I am dying."""
print("{} is being destroyed!".format(self.name))
Robot.population -= 1
if Robot.population == 0:
print("{} was the last one.".format(self.name))
        else:
            print("There are still {:d} robots working.".format(Robot.population))
def say_hi(self):
"""Greeting by the robot
Long doc statement."""
print("Greetings, my masters have called me {}".format(self.name))
@classmethod
def how_many(cls):
"""Prints the current population."""
print("We have {:d} robots.".format(cls.population))
droid1 = Robot("R2-D2")
droid1.say_hi()
Robot.how_many()
__version__ = 0.2
|
fujicoin/electrum-fjc
|
electrum/gui/kivy/main_window.py
|
Python
|
mit
| 44,226
| 0.003346
|
import re
import os
import sys
import time
import datetime
import traceback
from decimal import Decimal
import threading
import asyncio
from electrum.bitcoin import TYPE_ADDRESS
from electrum.storage import WalletStorage
from electrum.wallet import Wallet, InternalAddressCorruption
from electrum.paymentrequest import InvoiceStore
from electrum.util import profiler, InvalidPassword, send_exception_to_crash_reporter
from electrum.plugin import run_hook
from electrum.util import format_satoshis, format_satoshis_plain, format_fee_satoshis
from electrum.paymentrequest import PR_UNPAID, PR_PAID, PR_UNKNOWN, PR_EXPIRED
from electrum import blockchain
from electrum.network import Network, TxBroadcastError, BestEffortRequestFailed
from .i18n import _
from kivy.app import App
from kivy.core.window import Window
from kivy.logger import Logger
from kivy.utils import platform
from kivy.properties import (OptionProperty, AliasProperty, ObjectProperty,
StringProperty, ListProperty, BooleanProperty, NumericProperty)
from kivy.cache import Cache
from kivy.clock import Clock
from kivy.factory import Factory
from kivy.metrics import inch
from kivy.lang import Builder
## lazy imports for factory so that widgets can be used in kv
#Factory.register('InstallWizard', module='electrum.gui.kivy.uix.dialogs.installwizard')
#Factory.register('InfoBubble', module='electrum.gui.kivy.uix.dialogs')
#Factory.register('OutputList', module='electrum.gui.kivy.uix.dialogs')
#Factory.register('OutputItem', module='electrum.gui.kivy.uix.dialogs')
from .uix.dialogs.installwizard import InstallWizard
from .uix.dialogs import InfoBubble, crash_reporter
from .uix.dialogs import OutputList, OutputItem
from .uix.dialogs import TopLabel, RefLabel
#from kivy.core.window import Window
#Window.softinput_mode = 'below_target'
# delayed imports: for startup speed on android
notification = app = ref = None
util = False
# Register a widget cache to keep memory use down; a timeout of 0 caches
# the data forever.
Cache.register('electrum_widgets', timeout=0)
from kivy.uix.screenmanager import Screen
from kivy.uix.tabbedpanel import TabbedPanel
from kivy.uix.label import Label
from kivy.core.clipboard import Clipboard
Factory.register('TabbedCarousel', module='electrum.gui.kivy.uix.screens')
# Register fonts; without this you won't be able to use bold/italic
# inside markup.
from kivy.core.text import Label
Label.register('Roboto',
'electrum/gui/kivy/data/fonts/Roboto.ttf',
'electrum/gui/kivy/data/fonts/Roboto.ttf',
'electrum/gui/kivy/data/fonts/Roboto-Bold.ttf',
'electrum/gui/kivy/data/fonts/Roboto-Bold.ttf')
from electrum.util import (base_units, NoDynamicFeeEstimates, decimal_point_to_base_unit_name,
base_unit_name_to_decimal_point, NotEnoughFunds, UnknownBaseUnit,
DECIMAL_POINT_DEFAULT)
class ElectrumWindow(App):
electrum_config = ObjectProperty(None)
language = StringProperty('en')
# properties might be updated by the network
num_blocks = NumericProperty(0)
num_nodes = NumericProperty(0)
server_host = StringProperty('')
server_port = StringProperty('')
num_chains = NumericProperty(0)
blockchain_name = StringProperty('')
fee_status = StringProperty('Fee')
balance = StringProperty('')
fiat_balance = StringProperty('')
is_fiat = BooleanProperty(False)
blockchain_forkpoint = NumericProperty(0)
auto_connect = BooleanProperty(False)
def on_auto_connect(self, instance, x):
net_params = self.network.get_parameters()
net_params = net_params._replace(auto_connect=self.auto_connect)
self.network.run_from_another_thread(self.network.set_parameters(net_params))
def toggle_auto_connect(self, x):
self.auto_connect = not self.auto_connect
oneserver = BooleanProperty(False)
def on_oneserver(self, instance, x):
net_params = self.network.get_parameters()
net_params = net_params._replace(oneserver=self.oneserver)
self.network.run_from_another_thread(self.network.set_parameters(net_params))
def toggle_oneserver(self, x):
self.oneserver = not self.oneserver
proxy_str = StringProperty('')
def update_proxy_str(self, proxy: dict):
mode = proxy.get('mode')
host = proxy.get('host')
port = proxy.get('port')
self.proxy_str = (host + ':' + port) if mode else _('None')
def choose_server_dialog(self, popup):
from .uix.dialogs.choice_dialog import ChoiceDialog
protocol = 's'
def cb2(host):
from electrum import constants
pp = servers.get(host, constants.net.DEFAULT_PORTS)
port = pp.get(protocol, '')
popup.ids.host.text = host
popup.ids.port.text = port
servers = self.network.get_servers()
ChoiceDialog(_('Choose a server'), sorted(servers), popup.ids.host.text, cb2).open()
def choose_blockchain_dialog(self, dt):
from .uix.dialogs.choice_dialog import ChoiceDialog
chains = self.network.get_blockchains()
def cb(name):
with blockchain.blockchains_lock: blockchain_items = list(blockchain.blockchains.items())
for chain_id, b in blockchain_items:
if name == b.get_name():
self.network.run_from_another_thread(self.network.follow_chain_given_id(chain_id))
chain_objects = [blockchain.blockchains.get(chain_id) for chain_id in chains]
chain_objects = filter(lambda b: b is not None, chain_objects)
names = [b.get_name() for b in chain_objects]
if len(names) > 1:
cur_chain = self.network.blockchain().get_name()
ChoiceDialog(_('Choose your chain'), names, cur_chain, cb).open()
use_rbf = BooleanProperty(False)
def on_use_rbf(self, instance, x):
self.electrum_config.set_key('use_rbf', self.use_rbf, True)
use_change = BooleanProperty(False)
def on_use_change(self, instance, x):
self.electrum_config.set_key('use_change', self.use_change, True)
use_unconfirmed = BooleanProperty(False)
def on_use_unconfirmed(self, instance, x):
self.electrum_config.set_key('confirmed_only', not self.use_unconfirmed, True)
def set_URI(self, uri):
self.switch_to('send')
self.send_screen.set_URI(uri)
def on_new_intent(self, intent):
if intent.getScheme() != 'fujicoin':
return
uri = intent.getDataString()
self.set_URI(uri)
def on_language(self, instance, language):
Logger.info('language: {}'.format(language))
_.switch_lang(language)
def update_history(self, *dt):
if self.history_screen:
self.history_screen.update()
def on_quotes(self, d):
Logger.info("on_quotes")
self._trigger_update_status()
self._trigger_update_history()
    def on_history(self, d):
Logger.info("on_history")
if self.wallet:
self.wallet.clear_coin_price_cache()
self._trigger_update_history()
def on_fee_histogram(self, *args):
self._trigger_update_history()
def _get_bu(self):
        decimal_point = self.electrum_config.get('decimal_point', DECIMAL_POINT_DEFAULT)
try:
return decimal_point_to_base_unit_name(decimal_point)
except UnknownBaseUnit:
return decimal_point_to_base_unit_name(DECIMAL_POINT_DEFAULT)
def _set_bu(self, value):
assert value in base_units.keys()
decimal_point = base_unit_name_to_decimal_point(value)
self.electrum_config.set_key('decimal_point', decimal_point, True)
self._trigger_update_status()
self._trigger_update_history()
wallet_name = StringProperty(_('No Wallet'))
base_unit = AliasProperty(_get_bu, _set_bu)
fiat_unit = StringProperty('')
def on_fiat_unit(self, a, b):
self._trigger_update_history()
def decimal_point(self):
return base_units[self.base_unit]
def btc_to_fiat(self, amount_str):
if not amount_str:
|
befair/gasistafelice
|
gasistafelice/gf/gas/forms/order/__init__.py
|
Python
|
agpl-3.0
| 5,005
| 0.006194
|
"""
Order module has been split for its complexity.
Proposed clean hierarchy for GASSupplierOrder that
can be used in many contexts such as:
DES: ChooseSupplier ChooseGAS ChooseReferrer
GAS: ChooseSupplier OneGAS ChooseReferrer
Supplier: OneSupplier ChooseGAS ChooseReferrer
Solidal Pact: OneSupplier OneGAS ChooseReferrer
* BaseOrderForm: base for add and edit
|
|---* AddOrderForm: encapsulate Add logic.
| Just this class is enough if the Resource API encapsulates
| the logic behind the specific resource. Otherwise we need to write
| subclasses XAddOrderForm where X is one of DES, GAS, Supplier, Pact.
|
| It manages:
| * common attributes
| * setting of withdrawal and deliveries
|
----* EditOrderForm
* PlannedAddOrderForm: mix-in class to add planning facilities
#TODO LEFT OUT NOW InterGASAddOrderForm: it requires some considerations and
#TODO LEFT OUT NOW so probably it should be managed as a separated module.
#TODO LEFT OUT NOW P.e: deliveries and withdrawals MUST be always specified.
#TODO LEFT OUT NOW It also would need multiple delivery and withdrawal places,
#TODO LEFT OUT NOW but this will be a FUTURE module update
Factory function `form_class_factory_for_request` is there for:
* composition of final classes
(XAddOrderForm, PlannedAddOrderForm, InterGASAddOrderForm)
* follows GAS configuration options and prepare delivery and withdrawal fields
Where can you find above classes:
* base.BaseOrderForm
* base.AddOrderForm
* base.EditOrderForm
* X.XAddOrderForm (where X can be des,gas,supplier,pact)
* __init__.form_class_factory_for_request
* extra.PlannedAddOrderForm
#TODO LEFT OUT NOW * intergas.InterGASAddOrderForm
There are also some other classes that support order interactions:
* gmo.SingleGASMemberOrderForm
* gmo.BasketGASMemberOrderForm
* gsop.GASSupplierOrderProductForm
"""
from django import forms
from django.utils.translation import ugettext, ugettext_lazy as _
from gf.base.models import Place, Person
from lib.widgets import SplitDateTimeFormatAwareWidget
from gf.gas.forms.order.base import AddOrderForm, EditOrderForm
from gf.gas.forms.order.plan import AddPlannedOrderForm
from gf.gas.forms.order.intergas import AddInterGASOrderForm, AddInterGASPlannedOrderForm
from gf.gas.models import GASSupplierOrder
import copy
import logging
log = logging.getLogger(__name__)
def form_class_factory_for_request(request, base):
"""Return appropriate form class basing on GAS configuration
and other request parameters if needed"""
#log.debug("OrderForm--> form_class_factory_for_request")
fields = copy.deepcopy(base.Meta.fields)
gf_fieldsets = copy.deepcopy(base.Meta.gf_fieldsets)
attrs = {}
gas = request.resource.gas
if gas:
if gas.config.use_withdrawal_place:
gf_fieldsets[0][1]['fields'].append('withdrawal_referrer_person')
attrs.update({
'withdrawal_referrer' : forms.ModelChoiceField(
queryset=Person.objects.none(),
required=False
),
})
if gas.config.can_change_delivery_place_on_each_order:
gf_fieldsets[0][1]['fields'].append(('delivery_city', 'delivery_addr_or_place'))
attrs.update({
'delivery_city' : forms.CharField(required=True,
label=_('Delivery city'),
initial=gas.city
),
'delivery_addr_or_place': forms.CharField(
required=True, label=_('Delivery address or place'),
initial=gas.headquarter
),
})
if gas.config.use_withdrawal_place:
if gas.config.can_change_withdrawal_place_on_each_order:
gf_fieldsets[0][1]['fields'].append((
'withdrawal_datetime', 'withdrawal_city',
'withdrawal_addr_or_place')
)
attrs.update({
'withdrawal_datetime' : forms.SplitDateTimeField(
required=False, label=_('Withdrawal on/at'),
widget=SplitDateTimeFormatAwareWidget
),
'withdrawal_city' : forms.CharField(
required=True, label=_('Withdrawal city'),
initial=gas.city
),
'withdrawal_addr_or_place': forms.CharField(required=True,
label=_('Withdrawal address or place'),
initial=gas.headquarter
),
})
attrs.update(Meta=type('Meta', (), {
'model' : GASSupplierOrder,
'fields' : fields,
'gf_fieldsets' : gf_fieldsets
}))
return type('Custom%s' % base.__name__, (base,), attrs)
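# Usage sketch (hypothetical view code): build the concrete form class for
# the current request, then instantiate it as usual:
#   form_class = form_class_factory_for_request(request, AddOrderForm)
#   form = form_class(request.POST or None)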
|
tim-janik/testbit-tools
|
buglist.py
|
Python
|
gpl-3.0
| 7,933
| 0.024707
|
#!/usr/bin/env python
# Copyright (C) 2008,2011 Lanedo GmbH
#
# Author: Tim Janik
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys, os, re, urllib, csv
pkginstall_configvars = {
'PACKAGE' : 'dummy', 'PACKAGE_NAME' : 'dummy', 'VERSION' : '0.0', 'REVISION' : 'uninstalled',
#@PKGINSTALL_CONFIGVARS_IN24LINES@ # configvars are substituted upon script installation
}
# TODO:
# - support mixing in comments.txt which has "bug# person: task"
bugurls = (
('gb', 'http://bugzilla.gnome.org/buglist.cgi?bug_id='),
  ('gnome', 'http://bugzilla.gnome.org/buglist.cgi?bug_id='),
('fd', 'https://bugs.freedesktop.org/buglist.cgi?bug_id='),
('freedesktop', 'https://bugs.freedesktop.org/buglist.cgi?bug_id='),
('mb', 'https://bugs.maemo.org/buglist.cgi?bug_id='),
('maemo', 'https://bugs.maemo.org/buglist.cgi?bug_id='),
('nb', 'https://projects.maemo.org/bugzilla/buglist.cgi?bug_id='),
  ('nokia', 'https://projects.maemo.org/bugzilla/buglist.cgi?bug_id='),
('gcc', 'http://gcc.gnu.org/bugzilla/buglist.cgi?bug_id='),
('libc', 'http://sources.redhat.com/bugzilla/buglist.cgi?bug_id='),
('moz', 'https://bugzilla.mozilla.org/buglist.cgi?bug_id='),
('mozilla', 'https://bugzilla.mozilla.org/buglist.cgi?bug_id='),
('xm', 'http://bugzilla.xamarin.com/buglist.cgi?id='),
('xamarin', 'http://bugzilla.xamarin.com/buglist.cgi?id='),
)
# URL authentication handling
def auth_urls():
import ConfigParser, os, re
cp = ConfigParser.SafeConfigParser()
cp.add_section ('authentication-urls')
cp.set ('authentication-urls', 'urls', '')
cp.read (os.path.expanduser ('~/.urlrc'))
urlstr = cp.get ('authentication-urls', 'urls') # space separated url list
urls = re.split ("\s*", urlstr.strip()) # list urls
urls = [u for u in urls if u] # strip empty urls
global auth_urls; auth_urls = lambda : urls # cache result for the future
return urls
def add_auth (url):
for ai in auth_urls():
prefix = re.sub ('//[^:/@]*:[^:/@]*@', '//', ai)
if url.startswith (prefix):
pl = len (prefix)
return ai + url[pl:]
return url
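# Example: if ~/.urlrc lists 'https://user:[email protected]', then
# add_auth('https://bugs.example.org/buglist.cgi?bug_id=1') returns
# 'https://user:[email protected]/buglist.cgi?bug_id=1' (host is illustrative).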
# carry out online bug queries
def bug_summaries (buglisturl):
if not buglisturl:
return []
# Bugzilla query to use
query = buglisturl + '&ctype=csv' # buglisturl.replace (',', '%2c')
query = add_auth (query)
f = urllib.urlopen (query)
csvdata = f.read()
f.close()
# read CSV lines
reader = csv.reader (csvdata.splitlines (1))
# parse head to interpret columns
col_bug_id = -1
col_description = -1
header = reader.next()
i = 0
for col in header:
col = col.strip()
if col == 'bug_id':
col_bug_id = i
if col == 'short_short_desc':
col_description = i
elif col_description < 0 and col == 'short_desc':
col_description = i
i = i + 1
if col_bug_id < 0:
print >>sys.stderr, 'Failed to identify bug_id from CSV data'
sys.exit (11)
if col_description < 0:
print >>sys.stderr, 'Failed to identify description columns from CSV data'
sys.exit (12)
# parse bug list
result = []
summary = ''
for row in reader:
bug_number = row[col_bug_id]
description = row[col_description]
result += [ (bug_number, description) ]
return result
# parse bug numbers and list bugs
def read_handle_bugs (config, url):
lines = sys.stdin.read()
# print >>sys.stderr, 'Using bugzilla URL: %s' % (bz, url)
for line in [ lines ]:
# find all bug numbers
bugs = re.findall (r'\b[0-9]+\b', line)
# int-convert, dedup and sort bug numbers
ibugs = []
if bugs:
bught = {}
for b in bugs:
b = int (b)
if not b or bught.has_key (b): continue
bught[b] = True
ibugs += [ b ]
del bugs
if config.get ('sort', False):
ibugs.sort()
# construct full query URL
fullurl = url + ','.join ([str (b) for b in ibugs])
# print fullurl
if len (ibugs) and config.get ('show-query', False):
print fullurl
# print bug summaries
if len (ibugs) and config.get ('show-list', False):
bught = {}
for bug in bug_summaries (fullurl):
bught[int (bug[0])] = bug[1] # bug summaries can have random order
for bugid in ibugs: # print bugs in user provided order
iid = int (bugid)
if bught.has_key (iid):
desc = bught[iid]
if len (desc) >= 70:
desc = desc[:67].rstrip() + '...'
print "% 7u - %s" % (iid, desc)
else:
print "% 7u (NOBUG)" % iid
def help (version = False, verbose = False):
print "buglist %s (%s, %s)" % (pkginstall_configvars['VERSION'],
pkginstall_configvars['PACKAGE_NAME'], pkginstall_configvars['REVISION'])
print "Redistributable under GNU GPLv3 or later: http://gnu.org/licenses/gpl.html"
if version: # version *only*
return
print "Usage: %s [options] <BUG-TRACKER> " % os.path.basename (sys.argv[0])
print "List or download bugs from a bug tracker. Bug numbers are read from stdin."
if not verbose:
print "Use the --help option for verbose usage information."
return
# 12345678911234567892123456789312345678941234567895123456789612345678971234567898
print "Options:"
print " -h, --help Print verbose help message."
print " -v, --version Print version information."
print " -U Keep bug list unsorted."
print " --bug-tracker-list List supported bug trackers."
print "Authentication:"
print " An INI-style config file is used to associate bugzilla URLs with account"
print " authentication for secured installations. The file should be unreadable"
print " by others to keep passwords secret, e.g. with: chmod 0600 ~/.urlrc"
print " A sample ~/.urlrc might look like this:"
print "\t# INI-style config file for URLs"
print "\t[authentication-urls]"
print "\turls =\thttps://USERNAME:[email protected]/bugzilla"
print "\t\thttp://BLOGGER:[email protected]/BLOGGER/xmlrpc.php"
def main ():
import getopt
# default configuration
config = {
'sort' : True,
'show-query' : True,
'show-list' : True,
}
# parse options
try:
options, args = getopt.gnu_getopt (sys.argv[1:], 'vhU', [ 'help', 'version', 'bug-tracker-list' ])
except getopt.GetoptError, err:
print >>sys.stderr, "%s: %s" % (os.path.basename (sys.argv[0]), str (err))
help()
sys.exit (126)
for arg, val in options:
if arg == '-h' or arg == '--help': help (verbose=True); sys.exit (0)
if arg == '-v' or arg == '--version': help (version=True); sys.exit (0)
if arg == '-U': config['sort'] = False
if arg == '--bug-tracker-list':
print "Bug Tracker:"
for kv in bugurls:
print " %-20s %s" % kv
sys.exit (0)
if len (args) < 1:
print >>sys.stderr, "%s: Missing bug tracker argument" % os.path.basename (sys.argv[0])
help()
sys.exit (126)
trackerdict = dict (bugurls)
if not trackerdict.has_key (args[0]):
print >>sys.stderr, "%s: Unknown bug tracker: %s" % (os.path.basename (sys.argv[0]), args[0])
sys.exit (10)
# handle bugs
read_handle_bugs (config, trackerdict[args[0]])
if __name__ == '__main__':
main()
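# Usage sketch (assuming this script is saved as buglist.py; bug numbers come
# from stdin, the tracker key from argv):
#   echo '100 200 300' | python buglist.py moz
# With the default config this prints the combined query URL, then one
# "<bug-id> - <summary>" line per bug.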
|
qiudebo/13learn
|
code/python/myPython.py
|
Python
|
mit
| 1,435
| 0.0445
|
# -*- coding: utf-8 -*-
from functools import partial
# good readability
# range()
print range(0,9,2) # an increasing list
for i in xrange(0,9,2): # only usable inside a for loop
print i
albums = ("Poe","Gaudi","Freud","Poe2")
years = (1976,1987,1990,2003)
for album in sorted(albums):
print album
for album in reversed(albums):
print album
for i,album in enumerate(albums):
print i,album
for album,yr in zip(albums,years):
print album,yr
# list comprehensions
# 8.12 list comprehensions: a comprehension is more efficient than map + lambda
def func(a):
return a**2
x = range(1,10,1)
print x
print map(func,x)
# map applies an operation to every element of a list
# lambda creates a one-line function -- used once, not shared
print map(lambda x:x**2,range(6))
print [x**2 for x in range(6) if x>3]
print filter(lambda x:x%2,range(10))
print [x for x in range(10) if x%2]
# 11.7.2 函数式编程
print range(6)
print reduce(lambda x,y:x+y,range(6)) # cumulative sum
# partial functions simplify code and speed it up
int2 = partial(int,base=2)
print int2('1000')
# closures
# generator expressions
g = (x for x in range(10))
print g.next()
print "------"
for n in g:
print n
# anonymous function: no name, assigned to a variable
f = lambda x:x*x
print f(2)
# decorators
def log():
print 'log'
def now():
print 'time is:','2017-09-14'
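# A minimal working decorator (illustration; log_call and now2 are names added
# here, not from the original file):
def log_call(func):
    def wrapper(*args, **kwargs):
        print 'log: calling', func.__name__
        return func(*args, **kwargs)
    return wrapper
@log_call
def now2():
    print 'time is:', '2017-09-14'
now2() # prints the log line, then the time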
# dictionaries
|
funkring/fdoo
|
addons/payment_paypal/models/paypal.py
|
Python
|
agpl-3.0
| 19,377
| 0.003716
|
# -*- coding: utf-'8' "-*-"
import base64
try:
import simplejson as json
except ImportError:
import json
import logging
import urlparse
import werkzeug.urls
import urllib2
from openerp.addons.payment.models.payment_acquirer import ValidationError
from openerp.addons.payment_paypal.controllers.main import PaypalController
from openerp.osv import osv, fields
from openerp.tools.float_utils import float_compare
from openerp import SUPERUSER_ID, tools
_logger = logging.getLogger(__name__)
class AcquirerPaypal(osv.Model):
_inherit = 'payment.acquirer'
def _get_paypal_urls(self, cr, uid, environment, context=None):
""" Paypal URLS """
if environment == 'prod':
return {
'paypal_form_url': 'https://www.paypal.com/cgi-bin/webscr',
'paypal_rest_url': 'https://api.paypal.com/v1/oauth2/token',
}
else:
return {
'paypal_form_url': 'https://www.sandbox.paypal.com/cgi-bin/webscr',
'paypal_rest_url': 'https://api.sandbox.paypal.com/v1/oauth2/token',
}
def _get_providers(self, cr, uid, context=None):
providers = super(AcquirerPaypal, self)._get_providers(cr, uid, context=context)
providers.append(['paypal', 'Paypal'])
return providers
_columns = {
'paypal_email_account': fields.char('Paypal Email ID', required_if_provider='paypal', groups='base.group_user'),
'paypal_seller_account': fields.char(
'Paypal Merchant ID', groups='base.group_user',
help='The Merchant ID is used to ensure communications coming from Paypal are valid and secured.'),
'paypal_use_ipn': fields.boolean('Use IPN', help='Paypal Instant Payment Notification', groups='base.group_user'),
# Server 2 server
'paypal_api_enabled': fields.boolean('Use Rest API'),
'paypal_api_username': fields.char('Rest API Username', groups='base.group_user'),
'paypal_api_password': fields.char('Rest API Password', groups='base.group_user'),
'paypal_api_access_token': fields.char('Access Token', groups='base.group_user'),
'paypal_api_access_token_validity': fields.datetime('Access Token Validity', groups='base.group_user'),
}
_defaults = {
'paypal_use_ipn': True,
'fees_active': False,
'fees_dom_fixed': 0.35,
'fees_dom_var': 3.4,
'fees_int_fixed': 0.35,
'fees_int_var': 3.9,
'paypal_api_enabled': False,
}
def _migrate_paypal_account(self, cr, uid, context=None):
""" COMPLETE ME """
cr.execute('SELECT id, paypal_account FROM res_company')
res = cr.fetchall()
for (company_id, company_paypal_account) in res:
if company_paypal_account:
company_paypal_ids = self.search(cr, uid, [('company_id', '=', company_id), ('provider', '=', 'paypal')], limit=1, context=context)
if company_paypal_ids:
self.write(cr, uid, company_paypal_ids, {'paypal_email_account': company_paypal_account}, context=context)
else:
paypal_view = self.pool['ir.model.data'].get_object(cr, uid, 'payment_paypal', 'paypal_acquirer_button')
self.create(cr, uid, {
'name': 'Paypal',
'provider': 'paypal',
'paypal_email_account': company_paypal_account,
'view_template_id': paypal_view.id,
}, context=context)
return True
def paypal_compute_fees(self, cr, uid, id, amount, currency_id, country_id, context=None):
""" Compute paypal fees.
:param float amount: the amount to pay
:param integer country_id: an ID of a res.country, or None. This is
the customer's country, to be compared to
the acquirer company country.
:return float fees: computed fees
"""
acquirer = self.browse(cr, uid, id, context=context)
if not acquirer.fees_active:
return 0.0
country = self.pool['res.country'].browse(cr, uid, country_id, context=context)
if country and acquirer.company_id.country_id.id == country.id:
percentage = acquirer.fees_dom_var
fixed = acquirer.fees_dom_fixed
else:
percentage = acquirer.fees_int_var
fixed = acquirer.fees_int_fixed
fees = (percentage / 100.0 * amount + fixed ) / (1 - percentage / 100.0)
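# Worked example: with fees_dom_var=3.4 and fees_dom_fixed=0.35, a 100.00
# payment gives (0.034 * 100.00 + 0.35) / (1 - 0.034) ~= 3.88; the fee is
# grossed up so the merchant still nets the original amount after Paypal's cut.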
return fees
def paypal_form_generate_values(self, cr, uid, id, partner_values, tx_values, context=None):
base_url = self.pool['ir.config_parameter'].get_param(cr, SUPERUSER_ID, 'web.base.url')
acquirer = self.browse(cr, uid, id, context=context)
paypal_tx_values = dict(tx_values)
paypal_tx_values.update({
'cmd': '_xclick',
'business': acquirer.paypal_email_account,
'item_name': '%s: %s' % (acquirer.company_id.name, tx_values['reference']),
'item_number': tx_values['reference'],
'amount': tx_values['amount'],
'currency_code': tx_values['currency'] and tx_values['currency'].name or '',
'address1': partner_values['address'],
'city': partner_values['city'],
'country': partner_values['country'] and partner_values['country'].code or '',
'state': partner_values['state'] and (partner_values['state'].code or partner_values['state'].name) or '',
'email': partner_values['email'],
'zip': partner_values['zip'],
'first_name': partner_values['first_name'],
'last_name': partner_values['last_name'],
'return': '%s' % urlparse.urljoin(base_url, PaypalController._return_url),
'notify_url': '%s' % urlparse.urljoin(base_url, PaypalController._notify_url),
'cancel_return': '%s' % urlparse.urljoin(base_url, PaypalController._cancel_url),
})
if acquirer.fees_active:
paypal_tx_values['handling'] = '%.2f' % paypal_tx_values.pop('fees', 0.0)
if paypal_tx_values.get('return_url'):
paypal_tx_values['custom'] = json.dumps({'return_url': '%s' % paypal_tx_values.pop('return_url')})
return partner_values, paypal_tx_values
def paypal_get_form_action_url(self, cr, uid, id, context=None):
acquirer = self.browse(cr, uid, id, context=context)
return self._get_paypal_urls(cr, uid, acquirer.environment, context=context)['paypal_form_url']
def _paypal_s2s_get_access_token(self, cr, uid, ids, context=None):
"""
Note: see http://stackoverflow.com/questions/2407126/python-urllib2-basic-auth-problem
for an explanation of why we use an Authorization header instead of urllib2's
password manager
"""
res = dict.fromkeys(ids, False)
parameters = werkzeug.urls.url_encode({'grant_type': 'client_credentials'})
for acquirer in self.browse(cr, uid, ids, context=context):
tx_url = self._get_paypal_urls(cr, uid, acquirer.environment)['paypal_rest_url']
request = urllib2.Request(tx_url, parameters)
# add other headers (https://developer.paypal.com/webapps/developer/docs/integration/direct/make-your-first-call/)
request.add_header('Accept', 'application/json')
request.add_header('Accept-Language', tools.config.defaultLang)
# add authorization header
base64string = base64.encodestring('%s:%s' % (
acquirer.paypal_api_username,
acquirer.paypal_api_password)
).replace('\n', '')
request.add_header("Authorization", "Basic %s" % base64string)
request = urllib2.urlopen(request)
result = request.read()
res[acquirer.id] = json.loads(result).get('access_token')
request.close()
return res
class TxPaypal(osv.Model):
_inherit = 'payment.transaction'
_columns = {
'paypal_txn_id': fields
|
rauburtin/pysftpserver
|
pysftpserver/tests/stub_sftp.py
|
Python
|
mit
| 6,888
| 0.000145
|
# Copyright (C) 2003-2009 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
A stub SFTP server for loopback SFTP testing.
"""
import os
from paramiko import ServerInterface, SFTPServerInterface, SFTPServer, SFTPAttributes, \
SFTPHandle, SFTP_OK, AUTH_SUCCESSFUL, AUTH_FAILED, OPEN_SUCCEEDED, RSAKey
from paramiko.common import o666
from pysftpserver.tests.utils import t_path
USERNAME = "test"
PASSWORD = "secret"
RSA_KEY = t_path("id_rsa")
SERVER_ROOT = "server_root"
class StubServer (ServerInterface):
good_pub_key = RSAKey(filename=RSA_KEY)
def check_auth_password(self, username, password):
if username == USERNAME and password == PASSWORD:
return AUTH_SUCCESSFUL
return AUTH_FAILED
def check_auth_publickey(self, username, key):
if username == USERNAME and key == self.good_pub_key:
return AUTH_SUCCESSFUL
return AUTH_FAILED
def check_channel_request(self, kind, chanid):
return OPEN_SUCCEEDED
class StubSFTPHandle (SFTPHandle):
def stat(self):
try:
return SFTPAttributes.from_stat(os.fstat(self.readfile.fileno()))
except OSError as e:
return SFTPServer.convert_errno(e.errno)
def chattr(self, attr):
# python doesn't have equivalents to fchown or fchmod, so we have to
# use the stored filename
try:
SFTPServer.set_file_attr(self.filename, attr)
return SFTP_OK
except OSError as e:
return SFTPServer.convert_errno(e.errno)
class StubSFTPServer (SFTPServerInterface):
ROOT = t_path(SERVER_ROOT)
de
|
f _realpath(s
|
elf, path):
return self.ROOT + self.canonicalize(path)
def list_folder(self, path):
path = self._realpath(path)
try:
out = []
flist = os.listdir(path)
for fname in flist:
attr = SFTPAttributes.from_stat(
os.lstat(os.path.join(path, fname)))
attr.filename = fname
out.append(attr)
return out
except OSError as e:
return SFTPServer.convert_errno(e.errno)
def stat(self, path):
path = self._realpath(path)
try:
return SFTPAttributes.from_stat(os.stat(path))
except OSError as e:
return SFTPServer.convert_errno(e.errno)
def lstat(self, path):
path = self._realpath(path)
try:
return SFTPAttributes.from_stat(os.lstat(path))
except OSError as e:
return SFTPServer.convert_errno(e.errno)
def open(self, path, flags, attr):
path = self._realpath(path)
try:
binary_flag = getattr(os, 'O_BINARY', 0)
flags |= binary_flag
mode = getattr(attr, 'st_mode', None)
if mode is not None:
fd = os.open(path, flags, mode)
else:
# os.open() defaults to 0777 which is
# an odd default mode for files
fd = os.open(path, flags, o666)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
if (flags & os.O_CREAT) and (attr is not None):
attr._flags &= ~attr.FLAG_PERMISSIONS
SFTPServer.set_file_attr(path, attr)
if flags & os.O_WRONLY:
if flags & os.O_APPEND:
fstr = 'ab'
else:
fstr = 'wb'
elif flags & os.O_RDWR:
if flags & os.O_APPEND:
fstr = 'a+b'
else:
fstr = 'r+b'
else:
# O_RDONLY (== 0)
fstr = 'rb'
try:
f = os.fdopen(fd, fstr)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
fobj = StubSFTPHandle(flags)
fobj.filename = path
fobj.readfile = f
fobj.writefile = f
return fobj
def remove(self, path):
path = self._realpath(path)
try:
os.remove(path)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
return SFTP_OK
def rename(self, oldpath, newpath):
oldpath = self._realpath(oldpath)
newpath = self._realpath(newpath)
try:
os.rename(oldpath, newpath)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
return SFTP_OK
def mkdir(self, path, attr):
path = self._realpath(path)
try:
os.mkdir(path)
if attr is not None:
SFTPServer.set_file_attr(path, attr)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
return SFTP_OK
def rmdir(self, path):
path = self._realpath(path)
try:
os.rmdir(path)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
return SFTP_OK
def chattr(self, path, attr):
path = self._realpath(path)
try:
SFTPServer.set_file_attr(path, attr)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
return SFTP_OK
def symlink(self, target_path, path):
path = self._realpath(path)
if (len(target_path) > 0) and (target_path[0] == '/'):
# absolute symlink
target_path = os.path.join(self.ROOT, target_path[1:])
try:
os.symlink(target_path, path)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
return SFTP_OK
def readlink(self, path):
path = self._realpath(path)
try:
symlink = os.readlink(path)
except OSError as e:
return SFTPServer.convert_errno(e.errno)
# if it's absolute, remove the root
if os.path.isabs(symlink):
if symlink[:len(self.ROOT)] == self.ROOT:
symlink = symlink[len(self.ROOT):]
if (len(symlink) == 0) or (symlink[0] != '/'):
symlink = '/' + symlink
else:
symlink = '<error>'
return symlink
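# Wiring sketch (not part of the original file; `sock` is an assumed connected
# socket). The calls below are standard paramiko Transport APIs:
# import paramiko
# transport = paramiko.Transport(sock)
# transport.add_server_key(RSAKey(filename=RSA_KEY))
# transport.set_subsystem_handler('sftp', SFTPServer, StubSFTPServer)
# transport.start_server(server=StubServer())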
|
dhgarcia/babelModules
|
pynnModules/Network/spike_file_to_spike_array.py
|
Python
|
gpl-3.0
| 8,559
| 0.015189
|
import numpy
import itertools
import random
import math
def convert_spike_list_to_timed_spikes(spike_list, min_idx, max_idx, tmin, tmax, tstep):
times = numpy.array(range(tmin, tmax, tstep))
spike_ids = sorted(spike_list)
possible_neurons = range(min_idx, max_idx)
spikeArray = dict([(neuron, times) for neuron in spike_ids if neuron in possible_neurons])
return spikeArray
def convert_file_to_spikes(input_file_name, min_idx=None, max_idx=None, tmin=None, tmax=None, compatible_input=True):
data = numpy.array(numpy.loadtxt(fname=input_file_name), dtype=int) # get the array from the original text file
if compatible_input: data = numpy.roll(data, 1, axis=1) # swap neuron ID and time if necessary
if min_idx is None: min_idx = numpy.fmin.reduce(data[:,0], 0)
if max_idx is None: max_idx = numpy.fmax.reduce(data[:,0], 0) + 1
if tmin is None: tmin = numpy.fmin.reduce(data[:,1], 0)
if tmax is None: tmax = numpy.fmax.reduce(data[:,1], 0)
data = data[(data[:,1]>=tmin) & (data[:,1]<tmax) & (data[:,0]>=min_idx) & (data[:,0]<max_idx),:] # filter by mins and maxes
if data.shape[0] == 0: return {} # nothing left: return an empty dict.
sort_keys = numpy.lexsort((data[:,1], data[:,0])) # otherwise sort, grouping by neuron ID then time.
data = data[sort_keys,:]
spiking_neurons = itertools.groupby(data, lambda x: x[0]) # and taking one group at a time#,
spikeArray = dict([(neuron[0], numpy.array([spike_time[1] for spike_time in neuron[1]])) for neuron in spiking_neurons]) # create a dictionary indexed by neuron number of the spike times.
return spikeArray
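# Usage sketch ('input.spikes' is a hypothetical file with one "time neuron_id"
# pair per line; compatible_input=True swaps the columns to neuron_id/time):
#   spikes = convert_file_to_spikes('input.spikes', tmin=0, tmax=1000)
#   # -> {neuron_id: numpy.array([t0, t1, ...]), ...}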
def loop_array(input_array, runtime=0, num_repeats=1, sampletime=0):
spikeArray = {}
for neuron in input_array:
if not sampletime:
sampletime = int(numpy.fmax.reduce(input_array[neuron],0))
last_array = numpy.array([])
if sampletime*num_repeats < runtime or (runtime > 0 and sampletime*num_repeats > runtime):
num_repeats = runtime/sampletime
last_array = input_array[neuron][input_array[neuron] <= (runtime%sampletime)]
spikeArray[neuron] = numpy.concatenate([input_array[neuron]+repeat*sampletime for repeat in range(num_repeats)])
if len(last_array): spikeArray[neuron] = numpy.concatenate([spikeArray[neuron], last_array])
return spikeArray
def splice_arrays(input_arrays, input_times=None, input_neurons=None):
spikeArray = {}
if input_neurons is None: input_neurons = [None]*len(input_arrays)
if input_times is None: input_times = [[(reduce(lambda x, y: min(x, numpy.fmin.reduce(y,0)), input_group.values(), 0), reduce(lambda x, y: max(x, numpy.fmax.reduce(y,0)), input_group.values(), 0))] for input_group in input_arrays]
for in_idx in range(len(input_arrays)):
for neuron in input_arrays[in_idx].items():
if input_neurons[in_idx] is None or neuron[0] in input_neurons[in_idx]:
for time_range in input_times[in_idx]:
if time_range is None: time_range = (reduce(lambda x, y: min(x, numpy.fmin.reduce(y,0)), input_arrays[in_idx].values(), 0), reduce(lambda x, y: max(x, numpy.fmax.reduce(y,0)), input_arrays[in_idx].values(), 0))
if neuron[0] in spikeArray:
spikeArray[neuron[0]].extend([time for time in neuron[1] if time >= time_range[0] and time < time_range[1]])
else:
spikeArray[neuron[0]] = [time for time in neuron[1] if time >= time_range[0] and time < time_range[1]]
for neuron in spikeArray.items():
spikeArray[neuron[0]] = numpy.sort(numpy.unique(numpy.array(neuron[1])))
return spikeArray
def splice_files(input_files, input_times=None, input_neurons=None, compatible_input=True):
# splice_files expects a list of files, a list of lists, one for each file, giving the onset
# and offset times for each file, and a list of neurons relevant to each file, which will be
# spliced together into a single spike list.
spikeArray = {}
if input_times is None: input_times = [[(None, None)] for file_idx in range(len(input_files))]
for file_idx in range(len(input_files)):
if input_neurons is None or input_neurons[file_idx] is None:
max_neuron_id = numpy.fmax.reduce(input_files[file_idx].keys(), 0) + 1
min_neuron_id = numpy.fmin.reduce(input_files[file_idx].keys(), 0)
else:
max_neuron_id = numpy.fmax.reduce(input_neurons[file_idx], 0) + 1
min_neuron_id = numpy.fmin.reduce(input_neurons[file_idx], 0)
for time_range in input_times[file_idx]:
for neuron in convert_file_to_spikes(input_file_name=input_files[file_idx], min_idx=min_neuron_id, max_idx=max_neuron_id, tmin=time_range[0], tmax=time_range[1], compatible_input=compatible_input).items():
if neuron[0] in spikeArray:
spikeArray[neuron[0]].append(neuron[1])
else:
spikeArray[neuron[0]] = neuron[1]
for neuron in spikeArray.items():
spikeArray[neuron[0]] = numpy.sort(numpy.unique(numpy.array(neuron[1])))
return spikeArray
def subsample_spikes_by_time(spikeArray, start, stop, step):
subsampledArray = {}
for neuron in spikeArray:
times = numpy.sort(spikeArray[neuron][(spikeArray[neuron] >= start) & (spikeArray[neuron] < stop)])
interval = step/2 + step%2
t_now = times[0]
t_start = times[0]
t_last = len(times)
t_index = 0
subsampled_times = []
while t_index < t_last:
spikes_in_interval = 0
while t_index < t_last and times[t_index] <= t_start + interval:
spikes_in_interval += 1
if spikes_in_interval >= interval:
t_start = times[t_index] + interval
subsampled_times.append(times[t_index])
try:
t_index = next(i for i in range(t_index, t_last) if times[i] >= t_start)
except StopIteration:
t_index = t_last
break
t_index += 1
else:
if t_index < t_last:
t_start = times[t_index]
subsampledArray[neuron] = numpy.array(subsampled_times)
return subsampledArray
def random_skew_times(spikeArray, skewtime, seed=3425670):
random.seed(seed)
#return dict([(neuron, [int(abs(t+random.uniform(-skewtime, skewtime))) for t in spikeArray[neuron]]) for neuron in spikeArray])
spikeDict = dict([(neuron, numpy.array(numpy.fabs(spikeArray[neuron]+numpy.random.uniform(-skewtime, skewtime, len(spikeArray[neuron]))), dtype=int)) for neuron in spikeArray])
#test_out = open('spikeArray.txt', 'w')
#test_out.write('%s' % spikeDict)
#test_out.close()
return spikeDict
def generate_shadow_spikes(spikeArray, dim_x, dim_y, move_velocity):
"""
generate a second set of spikes as if coming from a DVS retina.
imagines that the offset pixels perfectly register with perfect
timing precision.
args: spikeArray, in the format above for an array of Spikes: a
dict of {id: [times]}
dim_x, size of the field in the x-dimension
dim_y, size of the field in the y-dimension
move_velocity: an (s, theta) tuple, where s is the speed
measured in pixels/ms, theta the angle of virtual movement
measured in radians anticlockwise (0 is horizontal movement
to the right). The function will displace the shadow by
s pixels in the reverse of direction indicated for each time
point where spikes are registered. It will add one last set
of spikes at time tmax+1, at position of the source spikes at
time tmax.
"""
motion_x = -move_velocity[0]*math.cos(move_velocity[1])
motion_y = -move_velocity[0]*math.sin(move_velocity[1])
spikeArray_out = dict([(int(motion_x+spike[0]%dim_x)+dim_x*int(motion_y+spike[0]/dim_x), spike
|
jeremy-ma/gmmmc
|
gmmmc/fastgmm/setup_fast_likelihood.py
|
Python
|
mit
| 597
| 0.025126
|
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy as np
import os
sourcefiles = [ 'fast_likelihood.pyx']
ext_modules = [Extension("fast_likelihood",
sourcefiles,
include_dirs = [np.get_include()],
extra_compile_args=['-O3', '-fopenmp', '-lc++'],
extra_link_args=['-fopenmp'],
language='c++')]
setup(
name = 'fastgmm',
cmdclass = {'build_ext': build_ext},
ext_modules = ext_modules
)
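# Build sketch: compile the extension in place (requires Cython, NumPy and an
# OpenMP-capable compiler, matching the -fopenmp flags above):
#   python setup.py build_ext --inplace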
|
EntropyFactory/creativechain-core
|
qa/rpc-tests/p2p-fullblocktest.py
|
Python
|
mit
| 52,732
| 0.003926
|
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.comptool import TestManager, TestInstance, RejectResult
from test_framework.blocktools import *
import time
from test_framework.key import CECKey
from test_framework.script import *
import struct
import copy
class PreviousSpendableOutput(object):
def __init__(self, tx = CTransaction(), n = -1):
self.tx = tx
self.n = n # the output we're spending
'''
This reimplements tests from the bitcoinj/FullBlockTestGenerator used
by the pull-tester.
We use the testing framework in which we expect a particular answer from
each test.
'''
# Use this class for tests that require behavior other than normal "mininode" behavior.
# For now, it is used to serialize a bloated varint (b64).
class CBrokenBlock(CBlock):
def __init__(self, header=None):
super(CBrokenBlock, self).__init__(header)
def initialize(self, base_block):
self.vtx = copy.deepcopy(base_block.vtx)
self.hashMerkleRoot = self.calc_merkle_root()
def serialize(self):
r = b""
r += super(CBlock, self).serialize()
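# 0xFF prefix + 8-byte little-endian count: a deliberately bloated,
# non-canonical CompactSize encoding of the transaction count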
r += struct.pack("<BQ", 255, len(self.vtx))
for tx in self.vtx:
r += tx.serialize()
return r
def normal_serialize(self):
r = b""
r += super(CBrokenBlock, self).serialize()
return r
class FullBlockTest(ComparisonTestFramework):
# Can either run this test as 1 node with expected answers, or two and compare them.
# Change the "outcome" variable from each TestInstance object to only do the comparison.
def __init__(self):
super().__init__()
self.num_nodes = 1
self.block_heights = {}
self.coinbase_key = CECKey()
self.coinbase_key.set_secretbytes(b"horsebattery")
self.coinbase_pubkey = self.coinbase_key.get_pubkey()
self.tip = None
self.blocks = {}
def add_options(self, parser):
super().add_options(parser)
parser.add_option("--runbarelyexpensive", dest="runbarelyexpensive", default=True)
def run_test(self):
self.test = TestManager(self, self.options.tmpdir)
self.test.add_all_connections(self.nodes)
NetworkThread().start() # Start up network handling in another thread
self.test.run()
def add_transactions_to_block(self, block, tx_list):
[ tx.rehash() for tx in tx_list ]
block.vtx.extend(tx_list)
# this is a little handier to use than the version in blocktools.py
def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])):
tx = create_transaction(spend_tx, n, b"", value, script)
return tx
# sign a transaction, using the key we know about
# this signs input 0 in tx, which is assumed to be spending output n in spend_tx
def sign_tx(self, tx, spend_tx, n):
scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
if (scriptPubKey[0] == OP_TRUE): # an anyone-can-spend
tx.vin[0].scriptSig = CScript()
return
(sighash, err) = SignatureHash(spend_tx.vout[n].scriptPubKey, tx, 0, SIGHASH_ALL)
tx.vin[0].scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))])
def create_and_sign_transaction(self, spend_tx, n, value, script=CScript([OP_TRUE])):
tx = self.create_tx(spend_tx, n, value, script)
self.sign_tx(tx, spend_tx, n)
tx.rehash()
return tx
def next_block(self, number, spend=None, additional_coinbase_value=0, script=CScript([OP_TRUE]), solve=True):
if self.tip == None:
base_block_hash = self.genesis_hash
block_time = int(time.time())+1
else:
base_block_hash = self.tip.sha256
block_time = self.tip.nTime + 1
# First create the coinbase
height = self.block_heights[base_block_hash] + 1
coinbase = create_coinbase(height, self.coinbase_pubkey)
coinbase.vout[0].nValue += additional_coinbase_value
coinbase.rehash()
if spend == None:
block = create_block(base_block_hash, coinbase, block_time)
else:
coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees
coinbase.rehash()
block = create_block(base_block_hash, coinbase, block_time)
tx = create_transaction(spend.tx, spend.n, b"", 1, script) # spend 1 satoshi
self.sign_tx(tx, spend.tx, spend.n)
self.add_transactions_to_block(block, [tx])
block.hashMerkleRoot = block.calc_merkle_root()
if solve:
block.solve()
self.tip = block
self.block_heights[block.sha256] = height
assert number not in self.blocks
self.blocks[number] = block
return block
def get_tests(self):
self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
self.block_heights[self.genesis_hash] = 0
spendable_outputs = []
# save the current tip so it can be spent by a later block
def save_spendable_output():
spendable_outputs.append(self.tip)
# get an output that we previously marked as spendable
def get_spendable_output():
return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)
# returns a test case that asserts that the current tip was accepted
def accepted():
return TestInstance([[self.tip, True]])
# returns a test case that asserts that the current tip was rejected
def rejected(reject = None):
if reject is None:
return TestInstance([[self.tip, False]])
else:
return TestInstance([[self.tip, reject]])
# move the tip back to a previous block
def tip(number):
self.tip = self.blocks[number]
# adds transactions to the block and updates state
def update_block(block_number, new_transactions):
block = self.blocks[block_number]
self.add_transactions_to_block(block, new_transactions)
old_sha256 = block.sha256
block.hashMerkleRoot = block.calc_merkle_root()
block.solve()
# Update the internal state just like in next_block
self.tip = block
if block.sha256 != old_sha256:
self.block_heights[block.sha256] = self.block_heights[old_sha256]
del self.block_heights[old_sha256]
self.blocks[block_number] = block
return block
# shorthand for functions
block = self.next_block
create_tx = self.create_tx
create_and_sign_tx = self.create_and_sign_transaction
# these must be updated if consensus changes
MAX_BLOCK_SIGOPS = 20000
# Create a new block
block(0)
save_spendable_output()
yield accepted()
# Now we need that block to mature so we can spend the coinbase.
test = TestInstance(sync_every_block=False)
for i in range(99):
block(5000 + i)
test.blocks_and_transactions.append([self.tip, True])
save_spendable_output()
yield test
# collect spendable outputs now to avoid cluttering the code later on
out = []
for i in range(33):
out.append(get_spendable_output())
# Start by building a couple of blocks on top (which output is spent is
# in parentheses):
# genesis -> b1 (0) -> b2 (1)
block(1, spend=out[0])
save_spendable_output()
yield accepted()
block(2, spend=out[1])
yield accepted()
save_spendable_output()
# so fork like this:
#
# genesis -> b1 (0) -> b2 (1)
# \-> b3 (1)
#
# Nothing should happen at this point
|
mark-adams/django-waffle
|
waffle/south_migrations/0001_initial.py
|
Python
|
bsd-3-clause
| 7,674
| 0.007297
|
# encoding: utf-8
import datetime
import django
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
# Django 1.5+ compatibility
if django.VERSION >= (1, 5):
from django.contrib.auth import get_user_model
else:
from django.contrib.auth.models import User
def get_user_model():
return User
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Flag'
db.create_table('waffle_flag', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)),
('everyone', self.gf('django.db.models.fields.NullBooleanField')(null=True, blank=True)),
('percent', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=3, decimal_places=1, blank=True)),
('superusers', self.gf('django.db.models.fields.BooleanField')(default=True)),
('staff', self.gf('django.db.models.fields.BooleanField')(default=False)),
('authenticated', self.gf('django.db.models.fields.BooleanField')(default=False)),
('rollout', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal('waffle', ['Flag'])
# Adding M2M table for field groups on 'Flag'
db.create_table('waffle_flag_groups', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('flag', models.ForeignKey(orm['waffle.flag'], null=False)),
('group', models.ForeignKey(orm['auth.group'], null=False))
))
db.create_unique('waffle_flag_groups', ['flag_id', 'group_id'])
# Adding M2M table for field users on 'Flag'
db.create_table('waffle_flag_users', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('flag', models.ForeignKey(orm['waffle.flag'], null=False)),
('user', models.ForeignKey(get_user_model(), null=False))
))
db.create_unique('waffle_flag_users', ['flag_id', 'user_id'])
# Adding model 'Switch'
db.create_table('waffle_switch', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)),
('active', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal('waffle', ['Switch'])
def backwards(self, orm):
# Deleting model 'Flag'
db.delete_table('waffle_flag')
# Removing M2M table for field groups on 'Flag'
db.delete_table('waffle_flag_groups')
# Removing M2M table for field users on 'Flag'
db.delete_table('waffle_flag_users')
# Deleting model 'Switch'
db.delete_table('waffle_switch')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'waffle.flag': {
'Meta': {'object_name': 'Flag'},
'authenticated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'everyone': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'percent': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '3', 'decimal_places': '1', 'blank': 'True'}),
'rollout': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'superusers': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False', 'blank': 'True'})
},
'waffle.switch': {
'Meta': {'object_name': 'Switch'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
}
}
complete_apps = ['waffle']
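# Apply sketch (standard South workflow): python manage.py migrate waffle 0001_initial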
|
davidmalcolm/pygobject
|
gi/_glib/__init__.py
|
Python
|
lgpl-2.1
| 4,827
| 0
|
# -*- Mode: Python; py-indent-offset: 4 -*-
# pygobject - Python bindings for the GObject library
# Copyright (C) 2006-2012 Johan Dahlin
#
# glib/__init__.py: initialisation file for glib module
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
# USA
from . import _glib
# Internal API
_PyGLib_API = _glib._PyGLib_API
# Types
GError = _glib.GError
IOChannel = _glib.IOChannel
Idle = _glib.Idle
MainContext = _glib.MainContext
MainLoop = _glib.MainLoop
OptionContext = _glib.OptionContext
OptionGroup = _glib.OptionGroup
Pid = _glib.Pid
PollFD = _glib.PollFD
Source = _glib.Source
Timeout = _glib.Timeout
# Constants
IO_ERR = _glib.IO_ERR
IO_FLAG_APPEND = _glib.IO_FLAG_APPEND
IO_FLAG_GET_MASK = _glib.IO_FLAG_GET_MASK
IO_FLAG_IS_READABLE = _glib.IO_FLAG_IS_READABLE
IO_FLAG_IS_SEEKABLE = _glib.IO_FLAG_IS_SEEKABLE
IO_FLAG_IS_WRITEABLE = _glib.IO_FLAG_IS_WRITEABLE
IO_FLAG_MASK = _glib.IO_FLAG_MASK
IO_FLAG_NONBLOCK = _glib.IO_FLAG_NONBLOCK
IO_FLAG_SET_MASK = _glib.IO_FLAG_SET_MASK
IO_HUP = _glib.IO_HUP
IO_IN = _glib.IO_IN
IO_NVAL = _glib.IO_NVAL
IO_OUT = _glib.IO_OUT
IO_PRI = _glib.IO_PRI
IO_STATUS_AGAIN = _glib.IO_STATUS_AGAIN
IO_STATUS_EOF = _glib.IO_STATUS_EOF
IO_STATUS_ERROR = _glib.IO_STATUS_ERROR
IO_STATUS_NORMAL = _glib.IO_STATUS_NORMAL
OPTION_ERROR = _glib.OPTION_ERROR
OPTION_ERROR_BAD_VALUE = _glib.OPTION_ERROR_BAD_VALUE
OPTION_ERROR_FAILED = _glib.OPTION_ERROR_FAILED
OPTION_ERROR_UNKNOWN_OPTION = _glib.OPTION_ERROR_UNKNOWN_OPTION
OPTION_FLAG_FILENAME = _glib.OPTION_FLAG_FILENAME
OPTION_FLAG_HIDDEN = _glib.OPTION_FLAG_HIDDEN
OPTION_FLAG_IN_MAIN = _glib.OPTION_FLAG_IN_MAIN
OPTION_FLAG_NOALIAS = _glib.OPTION_FLAG_NOALIAS
OPTION_FLAG_NO_ARG = _glib.OPTION_FLAG_NO_ARG
OPTION_FLAG_OPTIONAL_ARG = _glib.OPTION_FLAG_OPTIONAL_ARG
OPTION_FLAG_REVERSE = _glib.OPTION_FLAG_REVERSE
OPTION_REMAINING = _glib.OPTION_REMAINING
PRIORITY_DEFAULT = _glib.PRIORITY_DEFAULT
PRIORITY_DEFAULT_IDLE = _glib.PRIORITY_DEFAULT_IDLE
PRIORITY_HIGH = _glib.PRIORITY_HIGH
PRIORITY_HIGH_IDLE = _glib.PRIORITY_HIGH_IDLE
PRIORITY_LOW = _glib.PRIORITY_LOW
SPAWN_CHILD_INHERITS_STDIN = _glib.SPAWN_CHILD_INHERITS_STDIN
SPAWN_DO_NOT_REAP_CHILD = _glib.SPAWN_DO_NOT_REAP_CHILD
SPAWN_FILE_AND_ARGV_ZERO = _glib.SPAWN_FILE_AND_ARGV_ZERO
SPAWN_LEAVE_DESCRIPTORS_OPEN = _glib.SPAWN_LEAVE_DESCRIPTORS_OPEN
SPAWN_SEARCH_PATH = _glib.SPAWN_SEARCH_PATH
SPAWN_STDERR_TO_DEV_NULL = _glib.SPAWN_STDERR_TO_DEV_NULL
SPAWN_STDOUT_TO_DEV_NULL = _glib.SPAWN_STDOUT_TO_DEV_NULL
USER_DIRECTORY_DESKTOP = _glib.USER_DIRECTORY_DESKTOP
USER_DIRECTORY_DOCUMENTS = _glib.USER_DIRECTORY_DOCUMENTS
USER_DIRECTORY_DOWNLOAD = _glib.USER_DIRECTORY_DOWNLOAD
USER_DIRECTORY_MUSIC = _glib.USER_DIRECTORY_MUSIC
USER_DIRECTORY_PICTURES = _glib.USER_DIRECTORY_PICTURES
USER_DIRECTORY_PUBLIC_SHARE = _glib.USER_DIRECTORY_PUBLIC_SHARE
USER_DIRECTORY_TEMPLATES = _glib.USER_DIRECTORY_TEMPLATES
USER_DIRECTORY_VIDEOS = _glib.USER_DIRECTORY_VIDEOS
# Functions
child_watch_add = _glib.child_watch_add
filename_display_basename = _glib.filename_display_basename
filename_display_name = _glib.filename_display_name
filename_from_utf8 = _glib.filename_from_utf8
find_program_in_path = _glib.find_program_in_path
get_application_name = _glib.get_application_name
get_current_time = _glib.get_current_time
get_prgname = _glib.get_prgname
get_system_config_dirs = _glib.get_system_config_dirs
get_system_data_dirs = _glib.get_system_data_dirs
get_user_cache_dir = _glib.get_user_cache_dir
get_user_config_dir = _glib.get_user_config_dir
get_user_data_dir = _glib.get_user_data_dir
get_user_special_dir = _glib.get_user_special_dir
glib_version = _glib.glib_version
idle_add = _glib.idle_add
io_add_watch = _glib.io_add_watch
main_context_default = _glib.main_context_default
main_depth = _glib.main_depth
markup_escape_text = _glib.markup_escape_text
pyglib_version = _glib.pyglib_version
set_application_name = _glib.set_application_name
set_prgname = _glib.set_prgname
source_remove = _glib.source_remove
spawn_async = _glib.spawn_async
threads_init = _glib.threads_init
timeout_add = _glib.timeout_add
timeout_add_seconds = _glib.timeout_add_seconds
uri_list_extract_uris = _glib.uri_list_extract_uris
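# Usage sketch: run one callback on the default main loop, then quit
# (a timeout_add callback repeats until it returns False):
# loop = MainLoop()
# def quit_once():
#     loop.quit()
#     return False
# timeout_add(100, quit_once)
# loop.run()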
|
mzdaniel/oh-mainline
|
vendor/packages/scrapy/scrapy/contrib/linkextractors/htmlparser.py
|
Python
|
agpl-3.0
| 2,447
| 0.001635
|
"""
HTMLParser-based link extractor
"""
from HTMLParser import HTMLParser
from urlparse import urljoin
from w3lib.url import safe_url_string
from scrapy.link import Link
from scrapy.utils.python import unique as unique_list
class HtmlParserLinkExtractor(HTMLParser):
def __init__(self, tag="a", attr="href", process=None, unique=False):
HTMLParser.__init__(self)
self.scan_tag = tag if callable(tag) else lambda t: t == tag
self.scan_attr = attr if callable(attr) else lambda a: a == attr
self.process_attr = process if callable(process) else lambda v: v
self.unique = unique
def _extract_links(self, response_text, response_url, response_encoding):
self.reset()
self.feed(response_text)
self.close()
links = unique_list(self.links, key=lambda link: link.url) if self.unique else self.links
ret = []
base_url = urljoin(response_url, self.base_url) if self.base_url else response_url
for link in links:
if isinstance(link.url, unicode):
link.url = link.url.encode(response_encoding)
link.url = urljoin(base_url, link.url)
link.url = safe_url_string(link.url, response_encoding)
link.text = link.text.decode(response_encoding)
ret.append(link)
return ret
def extract_links(self, response):
# wrapper needed to allow to work directly with text
return self._extract_links(response.body, response.url, response.encoding)
def reset(self):
HTMLParser.reset(self)
self.base_url = None
self.current_link = None
self.links = []
def handle_starttag(self, tag, attrs):
if tag == 'base':
self.base_url = dict(attrs).get('href')
if self.scan_tag(tag):
for attr, value in attrs:
if self.scan_attr(attr):
url = self.process_attr(value)
link = Link(url=url)
self.links.append(link)
self.current_link = link
def handle_endtag(self, tag):
self.current_link = None
def handle_data(self, data):
if self.current_link and not self.current_link.text:
self.current_link.text = data.strip()
def matches(self, url):
"""This extractor matches with any url, since
it doesn't contain any patterns"""
return True
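# Usage sketch (hypothetical markup and URL, calling the text-level helper
# directly instead of going through a scrapy Response):
# lx = HtmlParserLinkExtractor(unique=True)
# links = lx._extract_links('<a href="/about">About</a>', 'http://example.com', 'utf-8')
# # -> [Link(url='http://example.com/about', text=u'About')]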
|
digitalocean/netbox
|
netbox/dcim/models/device_components.py
|
Python
|
apache-2.0
| 27,423
| 0.002079
|
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from django.db.models import Sum
from django.urls import reverse
from mptt.models import MPTTModel, TreeForeignKey
from taggit.managers import TaggableManager
from dcim.choices import *
from dcim.constants import *
from dcim.fields import MACAddressField
from extras.models import ObjectChange, TaggedItem
from extras.utils import extras_features
from utilities.fields import NaturalOrderingField
from utilities.mptt import TreeManager
from utilities.ordering import naturalize_interface
from utilities.querysets import RestrictedQuerySet
from utilities.query_functions import CollateAsChar
from utilities.utils import serialize_object
__all__ = (
'BaseInterface',
'CableTermination',
'ConsolePort',
'ConsoleServerPort',
'DeviceBay',
'FrontPort',
'Interface',
'InventoryItem',
'PathEndpoint',
'PowerOutlet',
'PowerPort',
'RearPort',
)
class ComponentModel(models.Model):
"""
An abstract model inherited by any model which has a parent Device.
"""
device = models.ForeignKey(
to='dcim.Device',
on_delete=models.CASCADE,
related_name='%(class)ss'
)
name = models.CharField(
max_length=64
)
_name = NaturalOrderingField(
target_field='name',
max_length=100,
blank=True
)
label = models.CharField(
max_length=64,
blank=True,
help_text="Physical label"
)
description = models.CharField(
max_length=200,
blank=True
)
objects = RestrictedQuerySet.as_manager()
class Meta:
abstract = True
def __str__(self):
if self.label:
return f"{self.name} ({self.label})"
return self.name
def to_objectchange(self, action):
# Annotate the parent Device
try:
device = self.device
except ObjectDoesNotExist:
# The parent Device has already been deleted
device = None
return ObjectChange(
changed_object=self,
object_repr=str(self),
action=action,
related_object=device,
object_data=serialize_object(self)
)
@property
def parent(self):
return getattr(self, 'device', None)
class CableTermination(models.Model):
"""
An abstract model inherited by all models to which a Cable can terminate (certain device components, PowerFeed, and
CircuitTermination instances). The `cable` field indicates the Cable instance which is terminated to this instance.
`_cable_peer` is a GenericForeignKey used to cache the far-end CableTermination on the local instance; this is a
shortcut to referencing `cable.termination_b`, for example. `_cable_peer` is set or cleared by the receivers in
dcim.signals when a Cable instance is created or deleted, respectively.
"""
cable = models.ForeignKey(
to='dcim.Cable',
on_delete=models.SET_NULL,
related_name='+',
blank=True,
null=True
)
_cable_peer_type = models.ForeignKey(
to=ContentType,
on_delete=models.SET_NULL,
related_name='+',
blank=True,
null=True
)
_cable_peer_id = models.PositiveIntegerField(
blank=True,
null=True
)
_cable_peer = GenericForeignKey(
ct_field='_cable_peer_type',
fk_field='_cable_peer_id'
)
# Generic relations to Cable. These ensure that an attached Cable is deleted if the terminated object is deleted.
_cabled_as_a = GenericRelation(
to='dcim.Cable',
content_type_field='termination_a_type',
object_id_field='termination_a_id'
)
_cabled_as_b = GenericRelation(
to='dcim.Cable',
content_type_field='termination_b_type',
object_id_field='termination_b_id'
)
class Meta:
abstract = True
def get_cable_peer(self):
return self._cable_peer
class PathEndpoint(models.Model):
"""
An abstract model inherited by any CableTermination subclass which represents the end of a CablePath; specifically,
these include ConsolePort, ConsoleServerPort, PowerPort, PowerOutlet, Interface, PowerFeed, and CircuitTermination.
`_path` references the CablePath originating from this instance, if any. It is set or cleared by the receivers in
dcim.signals in response to changes in the cable path, and complements the `origin` GenericForeignKey field on the
CablePath model. `_path` should not be accessed directly; rather, use the `path` property.
`connected_endpoint()` is a convenience method for returning the destination of the associated CablePath, if any.
"""
_path = models.ForeignKey(
to='dcim.CablePath',
on_delete=models.SET_NULL,
null=True,
blank=True
)
class Meta:
abstract = True
def trace(self):
if self._path is None:
return []
# Construct the complete path
path = [self, *self._path.get_path()]
while (len(path) + 1) % 3:
# Pad to ensure we have complete three-tuples (e.g. for paths that end at a RearPort)
path.append(None)
path.append(self._path.destination)
# Return the path as a list of three-tuples (A termination, cable, B termination)
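# zip(*[iter(path)] * 3) advances one shared iterator three items at a time,
# e.g. [a, c1, b, x, c2, None] -> [(a, c1, b), (x, c2, None)]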
return list(zip(*[iter(path)] * 3))
@property
def path(self):
return self._path
@property
def connected_endpoint(self):
"""
Caching accessor for the attached CablePath's destination (if any)
"""
if not hasattr(self, '_connected_endpoint'):
self._connected_endpoint = self._path.destination if self._path else None
return self._connected_endpoint
#
# Console ports
#
@extras_features('export_templates', 'webhooks', 'custom_links')
class ConsolePort(CableTermination, PathEndpoint, ComponentModel):
"""
A physical console port within a Device. ConsolePorts connect to ConsoleServerPorts.
"""
type = models.CharField(
max_length=50,
choices=ConsolePortTypeChoices,
blank=True,
help_text='Physical port type'
)
tags = TaggableManager(through=TaggedItem)
csv_headers = ['device', 'name', 'label', 'type', 'description']
class Meta:
ordering = ('device', '_name')
unique_together = ('device', 'name')
def get_absolute_url(self):
return reverse('dcim:consoleport', kwargs={'pk': self.pk})
def to_csv(self):
return (
self.device.identifier,
self.name,
self.label,
self.type,
self.description,
)
#
# Console server ports
#
@extras_features('webhooks', 'custom_links')
class ConsoleServerPort(CableTermination, PathEndpoint, ComponentModel):
"""
A physical port within a Device (typically a designated console server) which provides access to ConsolePorts.
"""
type = models.CharField(
max_length=50,
choices=ConsolePortTypeChoices,
blank=True,
help_text='Physical port type'
)
tags = TaggableManager(through=TaggedItem)
csv_headers = ['device', 'name', 'label', 'type', 'description']
class Meta:
ordering = ('device', '_name')
unique_together = ('device', 'name')
def get_absolute_url(self):
return reverse('dcim:consoleserverport', kwargs={'pk': self.pk})
def to_csv(self):
return (
self.device.identifier,
self.name,
self.label,
self.type,
self.description,
)
#
# Power ports
#
@extras_features('export_templates', 'webhooks', 'custom_links')
class PowerPort(CableTermination, PathEndpoint, ComponentModel):
"""
A physical power supply (intake) port within a Device. PowerPorts connect to
|
rz4/SpaceManBash
|
src/GameData.py
|
Python
|
mit
| 8,056
| 0.002731
|
'''
GameData.py
Last Updated: 3/16/17
'''
import json, os
import numpy as np
import pygame as pg
from GameAssets import GameAssets as ga
class GameData():
"""
GameData class is used to stores game state information.
"""
def __init__(self):
'''
Method initiates game state variables.
'''
self.debug = False
self.game_name = "SpaceManBash"
self.delta_sum = 0
self.running = True
# GameFrame Data
self.frames = []
self.frame_current = None
# Configs
self.screen_dim = (800, 600)
self.controls = {
'LEFT' : pg.K_a,
'RIGHT' : pg.K_d,
'UP' : pg.K_w,
'DOWN' : pg.K_s,
'CROUCH' : pg.K_LALT,
'ATTACK' : pg.K_j,
'ALTATTACK' : pg.K_k,
'JUMP' : pg.K_SPACE,
'SPRINT' : pg.K_LSHIFT,
'PAUSE' : pg.K_ESCAPE,
'ENTER' : pg.K_RETURN,
'HOME' : pg.K_h
}
# Save Data
self.saves = []
self.save_index = None
# Level Data
self.levels = []
self.level_index = 0
self.level_background = None
self.level_midground = None
self.camera_pos = np.array([0.0, 0.0, 0.0, 0.0])
self.camera_limits = [0.0, 0.0, 0.0, 0.0]
self.game_objects = []
self.collisions = {}
self.level_scripts = []
self.script_vars = {}
# Player Data
self.player_pos = np.array([0.0, 0.0])
self.player_health = 100
def switch_frame(self, frame):
'''
Method switches current frame to desired frame. Instantiates desired
frame if not found.
Param:
frame ;GameFrame new current frame
'''
for f in self.frames:
if f.__class__.__name__ == frame:
self.frame_current = f
return
module = __import__("GameFrames")
class_ = getattr(module, frame)
instance = class_(self)
self.frames.append(instance)
self.frame_current = self.frames[-1]
def save_config(self, filename):
'''
Method saves game data configurations to file.
Param:
filename ;str config filename
'''
try:
with open("../data/" + filename, "w") as f:
data = {}
data['controls'] = self.controls
data['screen_dim'] = self.screen_dim
json_dump = json.dumps(data)
f.write(json_dump)
except Exception as e:
print("Could Save Config:", filename)
print(e)
def load_config(self, filename):
'''
Method loads game data configurations to file.
Param:
filename ;str config filename
'''
try:
with open("../data/" + filename, "r") as f:
for json_dump in f:
data = json.loads(json_dump)
self.controls = data['controls']
self.screen_dim = data['screen_dim']
except Exception as e:
print("Could Load Config:", filename)
print(e)
def save_save(self, filename):
'''
Method saves game data state to save file.
Param:
filename ;str save filename
'''
try:
with open("../data/saves/" + filename, "w") as f:
data = {}
data["level_index"] = self.level_index
json_dump = json.dumps(data)
f.write(json_dump + '\n')
except Exception as e:
print("Could Save Save Data:", filename)
print(e)
def load_save(self, filename):
'''
Method loads game data state from save file.
Param:
filename ;str save filename
'''
try:
with open("../data/saves/" + filename, "r") as f:
for json_dump in f:
data = json.loads(json_dump)
self.level_index = data["level_index"]
except Exception as e:
print("Could Load Save Data:", filename)
print(e)
def load_game_data(self):
'''
Method loads all game level data from file.
'''
for filename in sorted(os.listdir("../data/levels/")):
if filename.endswith(".lev"):
try:
with open("../data/levels/" + filename, "r") as f:
self.levels.append(f.read())
except Exception as e:
print("Could Load Game Data:", filename)
print(e)
def load_level(self):
'''
Method loads current level.
'''
try:
data = json.loads(self.levels[self.level_index])
self.camera_pos = np.array(data['camera_pos'])
self.camera_limits = np.array(data['camera_limits'])
for go in data['game_objects']:
module = __import__("GameObjects")
class_ = getattr(module, go[0])
instance = class_(go[1:])
self.add_game_object(instance)
pg.mixer.music.load(".
|
./data/music/"+data['music'])
pg.mixer.music.set_volume(0.15)
pg.mixer.music.play(loops=3)
self.level_background = getattr(ga, data['background'])
self.level_midground = getattr(ga, data['midground'])
for script in data['scripts']: self.add_level_script(script)
except Exception as e:
print("Couldn't Load Level:", self.level_index)
print(e)
def reset_level(self):
'''
Method resets current level.
'''
self.frame_current.level_loaded = False
self.game_objects = []
self.collisions = {}
self.load_level()
def switch_level(self, index):
'''
Method switches level.
Param:
index ;int index of desired level
'''
self.level_index = index
self.frame_current.level_loaded = False
self.game_objects = []
self.collisions = {}
self.save_save("save_0.sav")
self.load_level()
def add_game_object(self, game_object):
'''
Method adds game object.
Param:
game_object ;GameObject
'''
self.game_objects.append(game_object)
def remove_game_object(self, game_object):
'''
Method removes game object.
Param:
game_object ;GameObject
'''
self.game_objects.remove(game_object)
def add_level_script(self, script):
'''
'''
self.level_scripts.append(script)
def remove_level_script(self, script):
'''
'''
self.level_scripts.remove(script)
def update_collisions(self):
'''
Method calculates collisions of game objects at current game state.
Collisions are stored in self.collisions dictionary object.
'''
self.collisions = {}
for go in self.game_objects:
temp = []
for goo in self.game_objects:
if go != goo and go.check_collision(goo.rect):
temp.append(goo)
self.collisions[go] = temp
def center_camera_on_game_object(self, game_object):
'''
Method updates camera position to be centered on desired game object while
remaining in the self.camera_limits boundaries.
Param:
game_object ;GameObject
'''
x = -(game_object.rect[0] + (game_object.rect[2]/2.0)) + (self.screen_dim[0]/2.0)
y = -(game_object.rect[1] + (game_object.rect[3]/2.0)) + (self.screen_dim[1]/2.0)
if x < self.camera_limits[2] and x > self.camera_limits[0]: self.camera_pos[0] = x
if y < self.camera_limits[3] and y > self.camera_limits[1]: self.camera_pos[1] = y
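# Usage sketch ("MenuFrame" is a hypothetical GameFrames class name;
# switch_frame imports and instantiates it by name):
# gd = GameData()
# gd.load_config("config.json")
# gd.load_game_data()
# gd.switch_frame("MenuFrame")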
|
aniketmaithani/kimani-adserver
|
Adserver/base/api/pagination.py
|
Python
|
gpl-2.0
| 640
| 0.001563
|
# -*- coding: utf-8 -*-
# Third Party Stuff
from rest_framework.pagination import PageNumberPagination as DrfPageNumberPagination
class PageNumberPagination(DrfPageNumberPagination):
# Client can control the page using this query parameter.
page_query_param = 'page'
# Client can control the page size using this query parameter.
# Default is 'None'. Set to e.g. 'page_size' to enable usage.
page_size_query_param = 'per_page'
# Set to an integer to limit the maximum page size the client may request.
# Only relevant if 'page_size_query_param' has also been set.
max_page_size = 1000
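# Usage sketch (project settings; the dotted path is an assumption based on
# this file's location):
# REST_FRAMEWORK = {
#     'DEFAULT_PAGINATION_CLASS': 'base.api.pagination.PageNumberPagination',
#     'PAGE_SIZE': 30,
# }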
|