| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
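Each row stores one source file split into fill-in-the-middle (FIM) spans: concatenating prefix + middle + suffix reconstructs the original file text, which is how the rows below are rendered. A minimal reassembly sketch, assuming rows are plain dicts keyed by the column names above (the row literal is a shortened, hypothetical example):

def reassemble(row):
    """Concatenate the three FIM spans back into the original file text."""
    return row["prefix"] + row["middle"] + row["suffix"]

# Shortened, hypothetical row for illustration only.
row = {
    "repo_name": "lem8r/cofair-addons",
    "path": "l10n_ch_payment_slip/__manifest__.py",
    "prefix": "{'name': 'Switzerland - Payment Slip (BVR/ESR)',\n 'dep",
    "middle": "ends': ['base'],",
    "suffix": "\n}",
}
print(reassemble(row))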
| lem8r/cofair-addons | l10n_ch_payment_slip/__manifest__.py | Python | lgpl-3.0 | 847 | 0 |
# -*- coding: utf-8 -*-
# © 2012-2016 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{'name': 'Switzerland - Payment Slip (BVR/ESR)',
'summary': 'Print ESR/BVR payment slip with your invoices',
'version': '10.0.2.1.1',
'author': "Camptocamp,Odoo Community Association (OCA)",
'category': 'Localization',
'website': 'http://www.camptocamp.com',
'license': 'AGPL-3',
 'depends': [
'base',
'account',
'report',
'l10n_ch_base_bank',
'base_transaction_id', # OCA/bank-statement-reconcile
],
'data': [
"views/company.xml",
"views/bank.xml",
"views/account_invoice.xml",
"wizard/bvr_import_view.xml",
"report/report_declaration.xml",
"security/ir.model.access.csv"
],
'demo': [],
'test': [],
'auto_install': False,
'installable': True,
'images': []
}
| leVirve/NTHU-Library | nthu_library/static_urls.py | Python | gpl-2.0 | 363 | 0.002755 |
info_system = 'http://webpac.lib.nthu.edu.tw/F/'
top_circulations = 'http://www.lib.nthu.edu.tw/guide/topcirculations/index.htm'
top_circulations_bc2007 = 'http://www.lib.nthu.edu.tw/guide/topcirculations/bc2007.htm'
rss_recent_books = 'http://webpac.lib.nthu.edu.tw:8080/nbr/reader/rbn_rss.jsp'
lost_found_url = 'http://adage.lib.nthu.edu.tw/find/search_it.php'
| davidam/python-examples | pyglet/keyboard.py | Python | gpl-3.0 | 446 | 0.006726 |
import pyglet
from pyglet.window import key
window = pyglet.window.Window()
@window.event
def on_key_press(symbol, modifiers):
print('A key was pressed')
if symbol == key.A:
print('The "A" key was pressed.')
elif symbol == key.LEFT:
print('The left arrow key was pressed.')
elif symbol == key.ENTER:
print('The enter key was pressed.')
@window.event
def on_draw():
    window.clear()
pyglet.app.run()
| zoidbergwill/lint-review | tests/test_review.py | Python | mit | 15,108 | 0 |
from . import load_fixture
from lintreview.config import load_config
from lintreview.diff import DiffCollection
from lintreview.review import Review, Problems, Comment
from lintreview.repo import GithubRepository, GithubPullRequest
from mock import Mock, call
from nose.tools import eq_
from github3.issues.comment import IssueComment as GhIssueComment
from github3.pulls import PullFile
from unittest import TestCase
import json
config = load_config()
class TestReview(TestCase):
def setUp(self):
repo = Mock(spec=GithubRepository)
pr = Mock(spec=GithubPullRequest,
head='abc123',
display_name='markstory/lint-review#1',
number=2)
repo.pull_request.return_value = pr
self.repo, self.pr = repo, pr
self.review = Review(repo, pr)
def test_load_comments__none_active(self):
fixture_data = load_fixture('comments_none_current.json')
self.pr.review_comments.return_value = map(
lambda f: GhIssueComment(f),
json.loads(fixture_data))
review = Review(self.repo, self.pr)
review.load_comments()
eq_(0, len(review.comments("View/Helper/AssetCompressHelper.php")))
def test_load_comments__loads_comments(self):
fixture_data = load_fixture('comments_current.json')
self.pr.review_comments.return_value = map(
lambda f: GhIssueComment(f),
json.loads(fixture_data))
review = Review(self.repo, self.pr)
review.load_comments()
filename = "Routing/Filter/AssetCompressor.php"
res = review.comments(filename)
eq_(1, len(res))
expected = Comment(filename, None, 87, "A pithy remark")
eq_(expected, res[0])
filename = "View/Helper/AssetCompressHelper.php"
res = review.comments(filename)
eq_(2, len(res))
expected = Comment(filename, None, 40, "Some witty comment.")
eq_(expected, res[0])
expected = Comment(filename, None, 89, "Not such a good comment")
eq_(expected, res[1])
def test_filter_existing__removes_duplicates(self):
fixture_data = load_fixture('comments_current.json')
self.pr.review_comments.return_value = map(
lambda f: GhIssueComment(f),
json.loads(fixture_data))
problems = Problems()
review = Review(self.repo, self.pr)
filename_1 = "Routing/Filter/AssetCompressor.php"
filename_2 = "View/Helper/AssetCompressHelper.php"
problems.add(filename_1, 87, 'A pithy remark')
problems.add(filename_1, 87, 'Something different')
problems.add(filename_2, 88, 'I <3 it')
problems.add(filename_2, 89, 'Not such a good comment')
review.load_comments()
review.remove_existing(problems)
res = problems.all(filename_1)
eq_(1, len(res))
expected = Comment(filename_1,
87,
87,
'A pithy remark\nSomething different')
eq_(res[0], expected)
res = problems.all(filename_2)
eq_(1, len(res))
expected = Comment(filename_2, 88, 88, 'I <3 it')
eq_(res[0], expected)
def test_publish_problems(self):
problems = Problems()
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something bad'),
)
problems.add_many(errors)
sha = 'abc123'
review = Review(self.repo, self.pr)
review.publish_problems(problems, sha)
assert self.pr.create_review_comment.called
eq_(2, self.pr.create_review_comment.call_count)
assert_review_comments_created(
self.pr.create_review_comment.call_args_list,
errors,
sha)
def test_publish_status__ok_no_comment_label_or_status(self):
config = {
'OK_COMMENT': None,
'OK_LABEL': None,
'PULLREQUEST_STATUS': False,
}
review = Review(self.repo, self.pr, config)
review.publish_status(0)
assert not self.repo.create_status.called, 'Create status called'
assert not self.pr.create_comment.called, 'Comment not created'
assert not self.pr.add_label.called, 'Label added created'
def test_publish_status__ok_with_comment_label_and_status(self):
config = {
'OK_COMMENT': 'Great job!',
'OK_LABEL': 'No lint errors',
'PULLREQUEST_STATUS': True,
}
review = Review(self.repo, self.pr, config)
review.publish_status(0)
assert self.repo.create_status.called, 'Create status not called'
self.repo.create_status.assert_called_with(
self.pr.head,
'success',
'No lint errors found.')
assert self.pr.create_comment.called, 'Issue comment created'
self.pr.create_comment.assert_called_with('Great job!')
assert self.pr.add_label.called, 'Label added created'
self.pr.add_label.assert_called_with('No lint errors')
def test_publish_status__has_errors(self):
config = {
'OK_COMMENT': 'Great job!',
'OK_LABEL': 'No lint errors',
'APP_NAME': 'custom-name'
}
review = Review(self.repo, self.pr, config)
review.publish_status(1)
assert self.repo.create_status.called, 'Create status not called'
self.repo.create_status.assert_called_with(
self.pr.head,
'failure',
'Lint errors found, see pull request comments.')
        assert not self.pr.create_comment.called, 'Comment not created'
assert not self.pr.add_label.called, 'Label added created'
def test_publish_problems_remove_ok_label(self):
problems = Problems()
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something bad'),
)
problems.add_many(errors)
sha = 'abc123'
config = {'OK_LABEL': 'No lint'}
review = Review(self.repo, self.pr, config)
sha = 'abc123'
review.publish_problems(problems, sha)
assert self.pr.remove_label.called, 'Label should be removed'
assert self.pr.create_review_comment.called, 'Comments should be added'
eq_(2, self.pr.create_review_comment.call_count)
self.pr.remove_label.assert_called_with(config['OK_LABEL'])
assert_review_comments_created(
self.pr.create_review_comment.call_args_list,
errors,
sha)
def test_publish_empty_comment(self):
problems = Problems(changes=[])
review = Review(self.repo, self.pr)
sha = 'abc123'
review.publish(problems, sha)
assert self.pr.create_comment.called, 'Should create a comment'
msg = ('Could not review pull request. '
'It may be too large, or contain no reviewable changes.')
self.pr.create_comment.assert_called_with(msg)
def test_publish_empty_comment_add_ok_label(self):
problems = Problems(changes=[])
config = {'OK_LABEL': 'No lint'}
review = Review(self.repo, self.pr, config)
sha = 'abc123'
review.publish(problems, sha)
assert self.pr.create_comment.called, 'ok comment should be added.'
assert self.pr.remove_label.called, 'label should be removed.'
self.pr.remove_label.assert_called_with(config['OK_LABEL'])
msg = ('Could not review pull request. '
'It may be too large, or contain no reviewable changes.')
self.pr.create_comment.assert_called_with(msg)
def test_publish_empty_comment_with_comment_status(self):
config = {
'PULLREQUEST_STATUS': True,
}
problems = Problems(changes=[])
review = Review(self.repo, self.pr, config)
sha = 'abc123'
review.publish(problems, sha)
        assert self.pr.create_comment.called, 'Should create a comment'
| allotria/intellij-community | python/testData/inspections/PyTypeCheckerInspection/UnresolvedReceiverGeneric.py | Python | apache-2.0 | 165 | 0.006061 |
from typing import TypeVar, Dict, Iterable, Any
T = TypeVar("T")
def foo(values: Dict[T, Iterable[Any]]):
for e in []:
        values.setdefault(e, undefined)
| mhogg/bonemapy | setup.py | Python | mit | 2,237 | 0.021904 |
# -*- coding: utf-8 -*-
# Copyright (C) 2013 Michael Hogg
#
# This file is part of bonemapy - See LICENSE.txt for information on usage and redistribution
import bonemapy
from distutils.core import setup
setup(
name = 'bonemapy',
version = bonemapy.__version__,
description = 'An ABAQUS plug-in to map bone properties from CT scans to 3D finite element bone/implant models',
license = 'MIT license',
keywords = ["ABAQUS", "plug-in","CT","finite","element","bone","properties","python"],
author = 'Michael Hogg',
author_email = '[email protected]',
url = "https://github.com/mhogg/bonemapy",
download_url = "https://github.com/mhogg/bonemapy/releases",
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Environment :: Plugins",
"Intended Audience :: Healthcare Industry",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering :: Medical Science Apps.",
"Topic :: Scientific/Engineering :: Visualization",
],
long_description = """
bonemapy is an ABAQUS plug-in that is used to extract bone density, or Hounsfield Unit (HU) values, from CT scans. The bone density can then be used to setup heterogeneous
material properties for a 3D finite element bone/implant model.
The HU values are extracted at the element integration points. Tri-linear interpolation is used to calculate the HU values at the location of the integration points.
bonemapy produces a text file containing the HU values that is formatted so that it can easily be read using ABAQUS user subroutines that are required to apply the bone properties. An
ABAQUS odb file is also created containing a fieldoutput representing HU so that the user can quickly visualise the mapped HU values.
""",
)
| mikeckennedy/cookiecutter-course | src/ch8_sharing_your_template/show_off_web_app/show_off_web_app/controllers/base_controller.py | Python | gpl-2.0 | 1,456 | 0.000687 |
import logbook
import show_off_web_app.infrastructure.static_cache as static_cache
import pyramid.httpexceptions as exc
from show_off_web_app.infrastructure.supressor import suppress
import show_off_web_app.infrastructure.cookie_auth as cookie_auth
from show_off_web_app.services.account_service import AccountService
class BaseController:
def __init__(self, request):
self.request = request
self.build_cache_id = static_cache.build_cache_id
        log_name = 'Ctrls/' + type(self).__name__.replace("Controller", "")
self.log = logbook.Logger(log_name)
@property
def is_logged_in(self):
return cookie_auth.get_user_id_via_auth_cookie(self.request) is not None
# noinspection PyMethodMayBeStatic
@suppress()
    def redirect(self, to_url, permanent=False):
if permanent:
raise exc.HTTPMovedPermanently(to_url)
raise exc.HTTPFound(to_url)
@property
def merged_dicts(self):
data = dict()
data.update(self.request.GET)
data.update(self.request.POST)
data.update(self.request.matchdict)
return data
@property
def logged_in_user_id(self):
user_id = cookie_auth.get_user_id_via_auth_cookie(self.request)
return user_id
@property
def logged_in_user(self):
uid = self.logged_in_user_id
if not uid:
return None
return AccountService.find_account_by_id(uid)
| Obijuan/protocoder-apps | servos/python-client/Servo.py | Python | gpl-2.0 | 1,675 | 0.027463 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------
#-- Servo class
#-- Juan Gonzalez-Gomez (obijuan). May-2013
#-----------------------------------------------------------------
#-- Controlling the position of servos from the PC
#-- The Arduino / skymega or another arduino compatible board
#-- should have the firmware FingerServer uploaded
#-----------------------------------------------------------------
import time
class IncorrectAngle(Exception):
    pass
class Servo(object):
"""Servo class. For accessing to all the Servos"""
def __init__(self, sp, dir = 0):
"""Arguments: serial port and servo number"""
self.sp = sp #-- Serial device
self.dir = dir #-- Servo number
self._pos = 0; #-- Current pos
def __str__(self):
str1 = "Servo: {0}\n".format(self.dir)
str2 = "Serial port: {0}".format(self.sp.name)
return str1 + str2
def set_pos(self, pos):
"""Set the angular servo pos. The pos is an integer number
in the range [-90 ,90] """
#-- Check that the pos in the range [-90,90]
if not (-90 <= pos <= 90):
raise IncorrectAngle()
return
#-- Convert the pos to an integer value
pos = int(round(pos))
#-- Build the frame
        frame = str(self.dir) + str(pos) + "\r"
#-- Debug
print (frame)
#-- Send the frame
self.sp.write(frame)
#-- Store the current servo pos
self._pos = pos
@property
def pos(self):
"""Read the current servo pos"""
return self._pos
@pos.setter
def pos(self, value):
"""Set the sero pos"""
self.set_pos(value)
| rosarior/rua | rua/apps/common/compressed_files.py | Python | gpl-3.0 | 2,263 | 0.001326 |
import zipfile
try:
import zlib
COMPRESSION = zipfile.ZIP_DEFLATED
except:
COMPRESSION = zipfile.ZIP_STORED
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from django.core.files.uploadedfile import SimpleUploadedFile
class NotACompressedFile(Exception):
pass
class CompressedFile(object):
def __init__(self, file_input=None):
if file_input:
self._open(file_input)
else:
self._create()
def _create(self):
self.descriptor = StringIO()
self.zf = zipfile.ZipFile(self.descriptor, mode='w')
def _open(self, file_input):
try:
# Is it a file like object?
file_input.seek(0)
except AttributeError:
# If not, try open it.
self.descriptor = open(file_input, 'r+b')
else:
self.descriptor = file_input
try:
            test = zipfile.ZipFile(self.descriptor, mode='r')
except zipfile.BadZipfile:
raise NotACompressedFile
else:
test.close()
self.descriptor.seek(0)
        self.zf = zipfile.ZipFile(self.descriptor, mode='a')
def add_file(self, file_input, arcname=None):
try:
# Is it a file like object?
file_input.seek(0)
except AttributeError:
# If not, keep it
self.zf.write(file_input, arcname=arcname, compress_type=COMPRESSION)
else:
self.zf.writestr(arcname, file_input.read())
def contents(self):
return [filename for filename in self.zf.namelist() if not filename.endswith('/')]
def get_content(self, filename):
return self.zf.read(filename)
def write(self, filename=None):
# fix for Linux zip files read in Windows
for file in self.zf.filelist:
file.create_system = 0
self.descriptor.seek(0)
if filename:
descriptor = open(filename, 'w')
descriptor.write(self.descriptor.read())
else:
return self.descriptor
def as_file(self, filename):
return SimpleUploadedFile(name=filename, content=self.write().read())
def close(self):
self.zf.close()
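A brief usage sketch for the CompressedFile class above; the file names are hypothetical, and this assumes the Python 2 environment the module targets (the cStringIO/StringIO imports):

# Create an in-memory zip, add a file from disk, and wrap it for Django.
cf = CompressedFile()
cf.add_file('notes.txt', arcname='notes.txt')  # path string: falls back to zf.write()
upload = cf.as_file('bundle.zip')  # SimpleUploadedFile wrapping the zip bytes
cf.close()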
| geo-poland/frappe | frappe/patches/v4_0/set_todo_checked_as_closed.py | Python | mit | 191 | 0.041885 |
import frappe
def execute():
frappe.reload_doc("core", "doctype", "todo")
try:
        frappe.db.sql("""update tabToDo set status = if(ifnull(checked,0)=0, 'Open', 'Closed')""")
except:
pass
| sbidoul/pip | tests/data/src/simplewheel-2.0/simplewheel/__init__.py | Python | mit | 20 | 0 |
__version__ = "2.0"
| WPI-ARC/deformable_planners | deformable_astar/src/deformable_astar/robot_marker_debug_pub.py | Python | bsd-2-clause | 2,259 | 0.001771 |
#!/usr/bin/env python
# Calder Phillips-Grafflin - WPI/ARC Lab
import rospy
import math
import tf
from tf.transformations import *
from visualization_msgs.msg import *
from geometry_msgs.msg import *
class RobotMarkerPublisher:
def __init__(self, root_frame, rate):
self.root_frame = root_frame
self.rate = rate
self.marker_pub = rospy.Publisher("robot_markers_debug", Marker)
rate = rospy.Rate(self.rate)
while not rospy.is_shutdown():
self.display_table()
rate.sleep()
def display_table(self):
# Make table top
marker_msg = Marker()
marker_msg.type = Marker.CUBE_LIST
marker_msg.ns = "robot"
marker_msg.id = 1
marker_msg.action = Marker.ADD
marker_msg.lifetime = rospy.Duration(0.0)
marker_msg.header.stamp = rospy.Time.now()
marker_msg.header.frame_id = self.root_frame
marker_msg.scale.x = 0.04
marker_msg.scale.y = 0.04
marker_msg.scale.z = 0.02
marker_msg.color.a = 1.0
marker_msg.color.r = 1.0
marker_msg.color.b = 0.0
marker_msg.color.g = 1.0
marker_msg.pose.position.x = 0.0
marker_msg.pose.position.y = 0.0
marker_msg.pose.position.z = 0.0
marker_msg.pose.orientation.x = 0.0
marker_msg.pose.orientation.y = 0.0
marker_msg.pose.orientation.z = 0.0
marker_msg.pose.orientation.w = 1.0
# Make the individual points
p1 = Point()
p1.x = 0.0025
p1.y = 0.0025
p1.z = -0.01
p2 = Point()
p2.x = p1.x
p2.y = p1.y + 0.04
p2.z = p1.z
p3 = Point()
p3.x = p1.x - 0.04
p3.y = p1.y
p3.z = p1.z
marker_msg.points = [p1, p2, p3]
marker_msg.colors = [marker_msg.color, marker_msg.color, marker_msg.color]
self.marker_pub.publish(marker_msg)
if __name__ == "__main__":
rospy.init_node("robot_marker_debug_publisher")
    rospy.loginfo("Starting the robot marker broadcaster...")
#Get the parameters from the server
root_frame = rospy.get_param("~root_frame", "test_robot_frame")
rate = rospy.get_param("~rate", 10.0)
RobotMarkerPublisher(root_frame, rate)
| CLVsol/clvsol_odoo_addons | clv_summary/__manifest__.py | Python | agpl-3.0 | 940 | 0 |
# -*- coding: utf-8 -*-
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Summary',
'summary': 'Summary Module used by CLVsol Solutions.',
'version': '12.0.4.0',
'author': 'Carlos Eduardo Vercelino - CLVsol',
'category': 'CLVsol Solutions',
'license': 'AGPL-3',
'website': 'https://github.com/CLVsol',
'images': [],
'depends': [
'clv_base',
'clv_global_log',
],
'data': [
'security/summary_security.xml',
'security/ir.model.access.csv',
'views/summary_template_view.xml',
'views/summary_view.xml',
'views/summary_log_view.xml',
'views/file_system_view.xml',
],
'demo': [],
'test': [],
'init_xml': [],
'update_xml': [],
'installable': True,
'application': False,
'active': False,
'css': [],
}
| tensorflow/model-remediation | tensorflow_model_remediation/min_diff/keras/utils/input_utils.py | Python | apache-2.0 | 18,202 | 0.003846 |
# coding=utf-8
# Copyright 2022 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Input utils module for MinDiff Keras integration.
This module provides default implementations for packing and unpacking min_diff
data into or from an input dataset.
"""
import collections
import tensorflow as tf
from tensorflow_model_remediation.min_diff.keras.utils import structure_utils
# Convenience class to help with packing and unpacking.
class MinDiffPackedInputs(
collections.namedtuple("MinDiffPackedInputs",
["original_inputs", "min_diff_data"])):
"""Named tuple containing both `original_inputs` and `min_diff_data`.
`MinDiffModel` default implementations and `utils.(un)pack_*` functions use
this class to pack and unpack the separate components required for MinDiff
and regular training.
Attributes:
original_inputs: Batch of inputs that would originally (i.e. without
applying MinDiff) be passed in to a model's `Model.call` method. This
corresponds to the `x` component described in `tf.keras.Model.fit`.
min_diff_data: Batch of supplemental data to be used to calculate the
`min_diff_loss`.
"""
def pack_min_diff_data(original_dataset: tf.data.Dataset,
sensitive_group_dataset=None,
nonsensitive_group_dataset=None,
min_diff_dataset=None) -> tf.data.Dataset:
# pyformat: disable
"""Packs `min_diff_data` with the `x` component of the original dataset.
  Arguments:
original_dataset: `tf.data.Dataset` that was used before applying min
diff. The output should conform to the format used in
`tf.keras.Model.fit`.
sensitive_group_dataset: `tf.data.Dataset` or valid MinDiff structure
(unnested dict) of `tf.data.Dataset`s containing only examples that
belong to the sensitive group.
This must be passed in if `nonsensitive_group_dataset` is passed in.
Furthermore, the `x`
component for every batch should have the same
structure as that of the `original_dataset` batches' `x` components.
nonsensitive_group_dataset: `tf.data.Dataset` or valid MinDiff structure
(unnested dict) of `tf.data.Dataset`s containing only examples that do
**not** belong to the sensitive group.
This must be passed in if `sensitive_group_dataset` is passed in.
Furthermore, the `x` component for every batch should have the same
structure as that of the `original_dataset` batches' `x` components.
min_diff_dataset: `tf.data.Dataset` or valid MinDiff structure (unnested
dict) of `tf.data.Dataset`s containing only examples to be used to
calculate the `min_diff_loss`.
      This should only be set if neither `sensitive_group_dataset` nor
      `nonsensitive_group_dataset` is passed in.
Furthermore, the `x` component for every batch should have the same
structure as that of the `original_dataset` batches' `x` components.
This function should be used to create the dataset that will be passed to
`min_diff.keras.MinDiffModel` during training and, optionally, during
evaluation.
The inputs should either have both `sensitive_group_dataset` and
`nonsensitive_group_dataset` passed in and `min_diff_dataset` left unset or
vice versa. In the case of the former, `min_diff_data` will be built using
`utils.build_min_diff_dataset`.
Warning: All input datasets should be batched **before** being passed in.
Each input dataset must output a tuple in the format used in
`tf.keras.Model.fit`. Specifically the output must be a tuple of
length 1, 2 or 3 in the form `(x, y, sample_weight)`.
This output will be parsed internally in the following way:
```
batch = ... # Batch from any one of the input datasets.
x, y, sample_weight = tf.keras.utils.unpack_x_y_sample_weight(batch)
```
Every batch from the returned `tf.data.Dataset` will contain one batch from
each of the input datasets. Each returned batch will be a tuple of
`(packed_inputs, original_y, original_sample_weight)` matching the length of
`original_dataset` batches where:
- `packed_inputs`: is an instance of `utils.MinDiffPackedInputs` containing:
- `original_inputs`: `x` component taken directly from the
`original_dataset` batch.
- `min_diff_data`: batch of data formed from `sensitive_group_dataset` and
`nonsensitive_group_dataset` (as described in
`utils.build_min_diff_dataset`) or taken directly from `min_diff_dataset`.
- `original_y`: is the `y` component taken directly from the
`original_dataset` batch.
- `original_sample_weight`: is the `sample_weight` component taken directly
from the `original_dataset` batch.
`min_diff_data` will be used in `min_diff.keras.MinDiffModel` when calculating
the `min_diff_loss`. It is a tuple or structure (matching the structure of the
inputs) of `(min_diff_x, min_diff_membership, min_diff_sample_weight)`.
Caution: If you are passing in `min_diff_dataset` make sure that each
`min_diff_data` batch contains about the same number of sensitive and
nonsensitive examples as indicated by `min_diff_membership` (when passing in
`sensitive_group_dataset` and `nonsensitive_group_dataset` this is determined
by their batch sizes).
Returns:
A `tf.data.Dataset` whose output is a tuple of (`packed_inputs`,
`original_y`, `original_sample_weight`) matching the output length
of `original_dataset`.
"""
# pyformat: enable
# Either sensitive_group_dataset and nonsensitive_group_dataset are both set
# and min_diff_dataset is not or vice versa.
min_diff_dataset_present = min_diff_dataset is not None
sensitive_dataset_present = sensitive_group_dataset is not None
nonsensitive_dataset_present = nonsensitive_group_dataset is not None
# Case where min_diff_dataset is set and the others are not.
set_to_use_min_diff_dataset = (
min_diff_dataset_present and
not (sensitive_dataset_present or nonsensitive_dataset_present))
# Case where sensitive_group_dataset and nonsensitive_group_dataset are both
# set and min_diff_dataset is not.
set_to_construct_min_diff_dataset = ((sensitive_dataset_present and
nonsensitive_dataset_present) and
not min_diff_dataset_present)
if not (set_to_use_min_diff_dataset or set_to_construct_min_diff_dataset):
raise ValueError(
"Invalid arguments: You must either pass in only the `min_diff_dataset`"
" (and leave `sensitive_group_dataset` and `nonsensitive_group_dataset`"
" as None) or set both `sensitive_group_dataset` and "
"`nonsensitive_group_dataset` (and leave `min_diff_dataset` as None), "
"given: \n"
"\n`sensitive_group_dataset`: {}"
"\n`nonsensitive_group_dataset`: {}"
"\n`min_diff_dataset`: {}".format(sensitive_group_dataset,
nonsensitive_group_dataset,
min_diff_dataset))
# First construct the min_diff_dataset if need be.
if set_to_construct_min_diff_dataset:
min_diff_dataset = build_min_diff_dataset(sensitive_group_dataset,
nonsensitive_group_dataset)
else:
# validate min_diff_dataset since it was passed in.
structure_utils.validate_min_diff_structure(
min_diff_dataset,
struct_name="min_diff_dataset",
element_type=tf.data.Dataset)
dataset = tf.data.Dataset.zip((original_dataset, min_diff_dataset))
def _map_fn(original_batch, min_diff_batch):
# Unpack original batch.
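The sample above is cut off at the dataset's span-length limit, but the docstring fully specifies the call signature, so here is a hedged usage sketch of pack_min_diff_data with tiny, hypothetical tensors (shapes and dataset contents invented for illustration; assumes utils re-exports the function as the docstring suggests):

import tensorflow as tf
from tensorflow_model_remediation.min_diff.keras import utils

# Hypothetical batched datasets; each batch parses as (x, y, sample_weight).
original = tf.data.Dataset.from_tensor_slices(
    (tf.random.uniform((8, 4)), tf.ones((8, 1)))).batch(4)
sensitive = tf.data.Dataset.from_tensor_slices(tf.random.uniform((8, 4))).batch(4)
nonsensitive = tf.data.Dataset.from_tensor_slices(tf.random.uniform((8, 4))).batch(4)

packed = utils.pack_min_diff_data(
    original_dataset=original,
    sensitive_group_dataset=sensitive,
    nonsensitive_group_dataset=nonsensitive)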
| Ikaguia/LWBR-WarForge | module_scene_props.py | Python | unlicense | 133,561 | 0.056558 |
# -*- coding: cp1252 -*-
from compiler import *
####################################################################################################################
# Each scene prop record contains the following fields:
# 1) Scene prop id: used for referencing scene props in other files. The prefix spr_ is automatically added before each scene prop id.
# 2) Scene prop flags. See header_scene_props.py for a list of available flags
# 3) Mesh name: Name of the mesh.
# 4) Physics object name:
# 5) Triggers: Simple triggers that are associated with the scene prop
####################################################################################################################
check_item_use_trigger = (ti_on_scene_prop_use,
[
(store_trigger_param_1, ":agent_id"),
(store_trigger_param_2, ":instance_id"),
#for only server itself-----------------------------------------------------------------------------------------------
(call_script, "script_use_item", ":instance_id", ":agent_id"),
#for only server itself-----------------------------------------------------------------------------------------------
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 1, ":num_players"), #0 is server so starting from 1
(player_is_active, ":player_no"),
(multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_use_item, ":instance_id", ":agent_id"),
(try_end),
])
check_sally_door_use_trigger_double = (ti_on_scene_prop_use,
[
(store_trigger_param_1, ":agent_id"),
(store_trigger_param_2, ":instance_id"),
(agent_get_position, pos1, ":agent_id"),
(prop_instance_get_starting_position, pos2, ":instance_id"),
(scene_prop_get_slot, ":opened_or_closed", ":instance_id", scene_prop_open_or_close_slot),
(try_begin),
#out doors like castle sally door can be opened only from inside, if door coordinate is behind your coordinate. Also it can be closed from both sides.
(prop_instance_get_scene_prop_kind, ":scene_prop_id", ":instance_id"),
(assign, ":can_open_door", 0),
(try_begin),
(neg|eq, ":scene_prop_id", "spr_viking_keep_destroy_sally_door_right"),
(neg|eq, ":scene_prop_id", "spr_viking_keep_destroy_sally_door_left"),
(neg|eq, ":scene_prop_id", "spr_earth_sally_gate_right"),
(neg|eq, ":scene_prop_id", "spr_earth_sally_gate_left"),
(position_is_behind_position, pos1, pos2),
(assign, ":can_open_door", 1),
(else_try),
(this_or_next|eq, ":scene_prop_id", "spr_viking_keep_destroy_sally_door_right"),
(this_or_next|eq, ":scene_prop_id", "spr_viking_keep_destroy_sally_door_left"),
(this_or_next|eq, ":scene_prop_id", "spr_earth_sally_gate_right"),
(eq, ":scene_prop_id", "spr_earth_sally_gate_left"),
(neg|position_is_behind_position, pos1, pos2),
(assign, ":can_open_door", 1),
(try_end),
(this_or_next|eq, ":can_open_door", 1),
(eq, ":opened_or_closed", 1),
(try_begin),
#for only server itself-----------------------------------------------------------------------------------------------
(call_script, "script_use_item", ":instance_id", ":agent_id"),
#for only server itself-----------------------------------------------------------------------------------------------
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 1, ":num_players"), #0 is server so starting from 1
(player_is_active, ":player_no"),
(multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_use_item, ":instance_id", ":agent_id"),
(try_end),
(try_end),
(try_end),
])
check_sally_door_use_trigger = (ti_on_scene_prop_use,
[
(store_trigger_param_1, ":agent_id"),
(store_trigger_param_2, ":instance_id"),
(agent_get_position, pos1, ":agent_id"),
(prop_instance_get_starting_position, pos2, ":instance_id"),
(scene_prop_get_slot, ":opened_or_closed", ":instance_id", scene_prop_open_or_close_slot),
(try_begin),
#out doors like castle sally door can be opened only from inside, if door coordinate is behind your coordinate. Also it can be closed from both sides.
(this_or_next|position_is_behind_position, pos1, pos2),
(eq, ":opened_or_closed", 1),
(try_begin),
#for only server itself-----------------------------------------------------------------------------------------------
(call_script, "script_use_item", ":instance_id", ":agent_id"),
#for only server itself-----------------------------------------------------------------------------------------------
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 1, ":num_players"), #0 is server so starting from 1
(player_is_active, ":player_no"),
(multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_use_item, ":instance_id", ":agent_id"),
(try_end),
(try_end),
(try_end),
])
check_castle_door_use_trigger = (ti_on_scene_prop_use,
[
(store_trigger_param_1, ":agent_id"),
(store_trigger_param_2, ":instance_id"),
(agent_get_position, pos1, ":agent_id"),
(prop_instance_get_starting_position, pos2, ":instance_id"),
(scene_prop_get_slot, ":opened_or_closed", ":instance_id", scene_prop_open_or_close_slot),
(try_begin),
(ge, ":agent_id", 0),
(agent_get_team, ":agent_team", ":agent_id"),
#in doors like castle room doors can be opened from both sides, but only defenders can open these doors. Also it can be closed from both sides.
(this_or_next|eq, ":agent_team", 0),
(eq, ":opened_or_closed", 1),
(try_begin),
#for only server itself-----------------------------------------------------------------------------------------------
(call_script, "script_use_item", ":instance_id", ":agent_id"),
#for only server itself-----------------------------------------------------------------------------------------------
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 1, ":num_players"), #0 is server so starting from 1
(player_is_active, ":player_no"),
(multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_use_item, ":instance_id", ":agent_id"),
(try_end),
(try_end),
(try_end),
])
check_ladder_animate_trigger = (ti_on_scene_prop_is_animating,
[
(store_trigger_param_1, ":instance_id"),
(store_trigger_param_2, ":remaining_time"),
(call_script, "script_check_creating_ladder_dust_effect", ":instance_id", ":remaining_time"),
])
check_ladder_animation_finish_trigger = (ti_on_scene_prop_animation_finished,
[
(store_trigger_param_1, ":instance_id"),
(prop_instance_enable_physics, ":instance_id", 1),
])
scene_props = [
("invalid_object",0,"question_mark","0", []),
("inventory",sokf_type_container|sokf_place_at_origin,"package","bobaggage", []),
("empty", 0, "0", "0", []),
("chest_a",sokf_type_container,"chest_gothic","bochest_g
|
othic", []),
("container_small_chest",sokf_type_container,"package","bobaggage", []),
("container_chest_b",sokf_type_container,"chest_b","bo_chest_b", []),
("container_chest_c",sokf_type_container,"chest_c","bo_chest_c", []),
("player_chest",sokf_type_container,"player_chest","bo_player_chest", []),
("locked_player_chest",0,"player_chest","bo_player_chest", []),
("light_sun",sokf_invisible,"light_sphere","0", [
(ti_on_init_scene_prop,
[
(neg|is_currently_night),
(store_trigger_param_1, ":prop_instance_no"),
(set_fixed_point_multiplier, 100),
(prop_instance_get_scale, pos5, ":prop_instance_no"),
(position_get_scale_x, ":scale", pos5),
(store_time_of_day,reg(12)),
(try_begin),
(is_between,reg(12),5,20),
(store_mul, ":red", 5 * 200, ":scale"),
(store_mul, ":green", 5 * 193, ":scale"),
(store_mul, ":blue", 5 * 180, ":scale"),
(else_try),
(store_mul, ":red", 5 * 90, ":scale"),
(store_mul, ":green", 5 * 115, ":scale"),
(store_mul, ":blue", 5 * 150, ":scale"),
(try_end),
(val_div, ":red", 100),
(val_div, ":green", 100),
(val_div, ":blue", 100),
(set_current_color,":red", ":green", ":blue"),
(set_position_delta,0,0,0),
(add_point_light_to_entity, 0, 0),
]),
]),
("light",s
| kjniemi/scylla | tools/scyllatop/userinput.py | Python | agpl-3.0 | 648 | 0 |
import urwid
import logging
class UserInput(object):
def __init__(self):
self._viewMap = None
self._mainLoop = None
def setMap(self, ** viewMap):
self._viewMap = viewMap
def setLoop(self, loop):
self._mainLoop = loop
def __call__(self, keypress):
logging.debug('keypress={}'.format(keypress))
if keypress in ('q', 'Q'):
            raise urwid.ExitMainLoop()
if type(keypress) is not str:
return
if keypress.upper() not in self._viewMap:
return
view = self._viewMap[keypress.upper()]
self._mainLoop.widget = view.widget()
| becxer/pytrain | pytrain/SVM/SVC.py | Python | mit | 1,018 | 0.007859 |
#
# SVC (SVM Multi classifier)
#
# @ author becxer
# @ e-mail [email protected]
#
import numpy as np
from pytrain.SVM import SVM
from pytrain.lib import convert
from pytrain.lib import ptmath
class SVC:
def __init__(self, mat_data, label_data):
self.x = np.mat(convert.list2npfloat(mat_data))
        self.ys = np.mat(np.sign(convert.list2npfloat(label_data) - 0.5))
self.outbit = self.ys.shape[1]
        self.svm4bit = []
for i in range(self.outbit):
self.svm4bit.append(SVM(self.x, self.ys[:,i]))
def fit(self, C, toler, epoch, kernel = 'Linear', kernel_params = {}):
for i in range(self.outbit):
self.svm4bit[i].fit(C, toler, epoch, kernel, kernel_params)
def predict(self, array_input):
array_input = np.mat(convert.list2npfloat(array_input))
output = []
for i in range(self.outbit):
output.append(self.svm4bit[i].predict(array_input))
return list(np.sign(np.array(output) + 1))
| lensacom/sparkit-learn | splearn/preprocessing/label.py | Python | apache-2.0 | 3,089 | 0 |
import numpy as np
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing.label import _check_numpy_unicode_bug
from sklearn.utils import column_or_1d
from ..base import SparkBroadcasterMixin, SparkTransformerMixin
class SparkLabelEncoder(LabelEncoder, SparkTransformerMixin,
SparkBroadcasterMixin):
"""Encode labels with value between 0 and n_classes-1.
Read more in the :ref:`User Guide <preprocessing_targets>`.
Attributes
----------
classes_ : array of shape (n_class,)
Holds the label for each class.
Examples
--------
`SparkLabelEncoder` can be used to normalize labels.
>>> from splearn.preprocessing import SparkLabelEncoder
>>> from splearn import BlockRDD
>>>
>>> data = ["paris", "paris", "tokyo", "amsterdam"]
>>> y = BlockRDD(sc.parallelize(data))
>>>
>>> le = SparkLabelEncoder()
>>> le.fit(y)
>>> le.classes_
array(['amsterdam', 'paris', 'tokyo'],
dtype='|S9')
>>>
>>> test = ["tokyo", "tokyo", "paris"]
>>> y_test = BlockRDD(sc.parallelize(test))
>>>
>>> le.transform(y_test).toarray()
array([2, 2, 1])
>>>
>>> test = [2, 2, 1]
>>> y_test = BlockRDD(sc.parallelize(test))
>>>
>>> le.inverse_transform(y_test).toarray()
array(['tokyo', 'tokyo', 'paris'],
dtype='|S9')
"""
__transient__ = ['classes_']
def fit(self, y):
"""Fit label encoder
Parameters
----------
y : ArrayRDD (n_samples,)
Target values.
Returns
-------
self : returns an instance of self.
"""
def mapper(y):
            y = column_or_1d(y, warn=True)
_check_numpy_unicode_bug(y)
return np.unique(y)
def reducer(a, b):
return np.unique(np.concatenate((a, b)))
self.classes_ = y.map(mapper).reduce(reducer)
return self
def fit_transform(self, y):
"""Fit label encoder and return encoded labels
Parameters
----------
        y : ArrayRDD [n_samples]
Target values.
Returns
-------
y : ArrayRDD [n_samples]
"""
return self.fit(y).transform(y)
def transform(self, y):
"""Transform labels to normalized encoding.
Parameters
----------
y : ArrayRDD [n_samples]
Target values.
Returns
-------
y : ArrayRDD [n_samples]
"""
mapper = super(SparkLabelEncoder, self).transform
mapper = self.broadcast(mapper, y.context)
return y.transform(mapper)
def inverse_transform(self, y):
"""Transform labels back to original encoding.
Parameters
----------
y : numpy array of shape [n_samples]
Target values.
Returns
-------
y : ArrayRDD [n_samples]
"""
mapper = super(SparkLabelEncoder, self).inverse_transform
mapper = self.broadcast(mapper, y.context)
return y.transform(mapper)
| huangtao-sh/grace | grace/alembic/versions/3d30c324ed4_grace.py | Python | gpl-2.0 | 531 | 0.011299 |
"""grace
Revision ID: 3d30c324ed4
Revises: 8c78a916f1
Create Date: 2015-09-07 08:51:46.375707
"""
# revision identifiers, used by Alembic.
revision = '3d30c324ed4'
down_revision = '8c78a916f1'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
pass
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
pass
    ### end Alembic commands ###
| aniketpuranik/pynet_test | day2/ex15_exception.py | Python | apache-2.0 | 445 | 0.024719 |
#!/usr/bin/env python
network_device = {
'ip_addr' : '81.1.1.3',
'username' : 'user1',
'passwd' : 'pass123',
'vendor' : 'cisco',
'model' : '3940',
}
for k,v in network_device.items():
print k,v
network_device['passwd']='newpass'
network_device['secret']='enable'
for k,v in network_device.items():
print k,v
try:
print network_device['device_type']
except KeyError:
print "Device type not found\n"
| cydenix/OpenGLCffi | OpenGLCffi/GLES2/EXT/EXT/occlusion_query_boolean.py | Python | mit | 616 | 0.01461 |
from OpenGLCffi.GLES2 import params
@params(api='gles2', prms=['n', 'ids'])
def glGenQueriesEXT(n, ids):
pass
@params(api='gles2', prms=['n', 'ids'])
def glDeleteQueriesEXT(n, ids):
pass
@params(api='gles2', prms=['id'])
def glIsQueryEXT(id):
pass
@params(api='gles2', prms=['target', 'id'])
def glBeginQueryEXT(target, id):
pass
@params(api='gles2', prms=['target'])
def glEndQueryEXT(target):
pass
@params(api='gles2', prms=['target', 'pname', 'params'])
def glGetQueryivEXT(target, pname):
pass
@params(api='gles2', prms=['id', 'pname', 'params'])
def glGetQueryObjectuivEXT(id, pname):
pass
| ARM-software/lisa | external/devlib/devlib/platform/gem5.py | Python | apache-2.0 | 12,624 | 0.00198 |
# Copyright 2016-2018 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import subprocess
import shutil
import time
import types
import shlex
from pipes import quote
from devlib.exception import TargetStableError
from devlib.host import PACKAGE_BIN_DIRECTORY
from devlib.platform import Platform
from devlib.utils.ssh import AndroidGem5Connection, LinuxGem5Connection
class Gem5SimulationPlatform(Platform):
def __init__(self, name,
host_output_dir,
gem5_bin,
gem5_args,
gem5_virtio,
core_names=None,
core_clusters=None,
big_core=None,
model=None,
modules=None,
gem5_telnet_port=None):
# First call the parent class
super(Gem5SimulationPlatform, self).__init__(name, core_names, core_clusters,
big_core, model, modules)
# Start setting up the gem5 parameters/directories
# The gem5 subprocess
self.gem5 = None
self.gem5_port = gem5_telnet_port or None
self.stats_directory = host_output_dir
self.gem5_out_dir = os.path.join(self.stats_directory, "gem5")
self.gem5_interact_dir = '/tmp' # Host directory
self.executable_dir = None # Device directory
self.working_dir = None # Device directory
self.stdout_file = None
self.stderr_file = None
self.stderr_filename = None
if self.gem5_port is None: # pylint: disable=simplifiable-if-statement
# Allows devlib to pick up already running simulations
self.start_gem5_simulation = True
else:
self.start_gem5_simulation = False
# Find the first one that does not exist. Ensures that we do not re-use
# the directory used by someone else.
i = 0
directory = os.path.join(self.gem5_interact_dir, "wa_{}".format(i))
while os.path.exists(directory):
i += 1
directory = os.path.join(self.gem5_interact_dir, "wa_{}".format(i))
self.gem5_interact_dir = directory
self.logger.debug("Using {} as the temporary directory."
.format(self.gem5_interact_dir))
# Parameters passed onto gem5
self.gem5args_binary = gem5_bin
self.gem5args_args = gem5_args
self.gem5args_virtio = gem5_virtio
self._check_gem5_command()
# Start the interaction with gem5
self._start_interaction_gem5()
def _check_gem5_command(self):
"""
Check if the command to start gem5 makes sense
"""
if self.gem5args_binary is None:
raise TargetStableError('Please specify a gem5 binary.')
if self.gem5args_args is None:
raise TargetStableError('Please specify the arguments passed on to gem5.')
self.gem5args_virtio = str(self.gem5args_virtio).format(self.gem5_interact_dir)
if self.gem5args_virtio is None:
raise TargetStableError('Please specify arguments needed for virtIO.')
def _start_interaction_gem5(self):
"""
Starts the interaction of devlib with gem5.
"""
# First create the input and output directories for gem5
if self.start_gem5_simulation:
# Create the directory to send data to/from gem5 system
self.logger.info("Creating temporary directory for interaction "
" with gem5 via virtIO: {}"
.format(self.gem5_interact_dir))
os.mkdir(self.gem5_interact_dir)
# Create the directory for gem5 output (stats files etc)
if not os.path.exists(self.stats_directory):
os.mkdir(self.stats_directory)
if os.path.exists(self.gem5_out_dir):
raise TargetStableError("The gem5 stats directory {} already "
"exists.".format(self.gem5_out_dir))
else:
os.mkdir(self.gem5_out_dir)
# We need to redirect the standard output and standard error for the
# gem5 process to a file so that we can debug when things go wrong.
f = os.path.join(self.gem5_out_dir, 'stdout')
self.stdout_file = open(f, 'w')
f = os.path.join(self.gem5_out_dir, 'stderr')
self.stderr_file = open(f, 'w')
# We need to keep this so we can check which port to use for the
# telnet connection.
self.stderr_filename = f
# Start gem5 simulation
self.logger.info("Starting the gem5 simulator")
command_line = "{} --outdir={} {} {}".format(self.gem5args_binary,
quote(self.gem5_out_dir),
self.gem5args_args,
self.gem5args_virtio)
self.logger.debug("gem5 command line: {}".format(command_line))
self.gem5 = subprocess.Popen(shlex.split(command_line),
stdout=self.stdout_file,
stderr=self.stderr_file)
else:
# The simulation should already be running
# Need to dig up the (1) gem5 simulation in question (2) its input
# and output directories (3) virtio setting
self._intercept_existing_gem5()
# As the gem5 simulation is running now or was already running
# we now need to find out which telnet port it uses
self._intercept_telnet_port()
def _intercept_existing_gem5(self):
"""
Intercept the information about a running gem5 simulation
e.g. pid, input directory etc
"""
        self.logger.error("This functionality is not yet implemented")
raise TargetStableError()
def _intercept_telnet_port(self):
"""
Intercept the telnet port of a running gem5 simulation
"""
if self.gem5 is None:
raise TargetStableError('The platform has no gem5 simulation! '
'Something went wrong')
while self.gem5_port is None:
            # Check that gem5 is running!
if self.gem5.poll():
message = "The gem5 process has crashed with error code {}!\n\tPlease see {} for details."
raise TargetStableError(message.format(self.gem5.poll(), self.stderr_file.name))
# Open the stderr file
with open(self.stderr_filename, 'r') as f:
for line in f:
# Look for two different strings, exact wording depends on
# version of gem5
m = re.search(r"Listening for system connection on port (?P<port>\d+)", line)
if not m:
m = re.search(r"Listening for connections on port (?P<port>\d+)", line)
if m:
port = int(m.group('port'))
if port >= 3456 and port < 5900:
self.gem5_port = port
break
# Check if the sockets are not disabled
m = re.search(r"Sockets disabled, not accepting terminal connections", line)
if m:
raise TargetStableError("The sockets have been disabled!"
"Pass --listener-mode=on to gem5")
else:
| fran-bravo/pylogic-module | test/test_case_operands.py | Python | mit | 666 | 0.004505 |
import pytest, sys, os
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/../")
from unittest import TestCase
from pylogic.case import Case
class TestBaseOperand(TestCase):
def test_eq_case(self):
case1 = Case("parent", "homer", "bart")
case2 = Case("parent", "homer", "bart")
assert case1 == case2
def test_not_eq_case1(self):
        case1 = Case("parent", "homer", "bart")
case2 = Case("parent", "homer", "lisa")
assert case1 != case2
def test_not_eq_case2(self):
case1 = Case("parent", "homer", "bart")
case2 = Case("brother", "homer", "lisa")
assert case1 != case2
| kevinschoon/prtgcli | test/test_cli.py | Python | apache-2.0 | 294 | 0 |
import unittest
from prtgcli.cli import main
class TestQuery(unittest.TestCase):
def setUp(self):
pass
def test_list_devices(self):
pass
def test_list_sensors(self):
pass
def test_status(self):
pass
def test_update(self):
pass
| sstebbins/pppcpro | pppcemr/migrations/0123_auto_20160426_1253.py | Python | agpl-3.0 | 663 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-04-26 16:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pppcemr', '0122_auto_20160425_1327'),
]
operations = [
migrations.AddField(
model_name='treatment',
name='height_cm',
field=models.FloatField(blank=True, help_text='cm', null=True),
),
migrations.AlterField(
model_name='treatment',
name='weight_kg',
field=models.FloatField(blank=True, help_text='kg', null=True),
),
]
| megatharun/basic-python-for-researcher | TempConv.py | Python | artistic-2.0 | 243 | 0.012346 |
# TempConv.py
# Celcius to Fahreinheit
def Fahreinheit(temp):
temp = float(temp)
temp = (temp*9/5)+32
return temp
# Fahreinheit to Celcius
def Celcius(temp):
temp = float(temp)
temp = (temp-32)*5/9
return temp
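A quick sanity check of the two converters above; the expected values follow directly from the formulas:

print(Fahreinheit(100))  # 212.0: boiling point of water
print(Celcius(32))       # 0.0: freezing point of water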
| zhouhan0126/SCREENTEST1 | tests/rtmplite/multitask.py | Python | gpl-2.0 | 41,396 | 0.002053 |
################################################################################
#
# Copyright (c) 2007 Christopher J. Stawarz
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
################################################################################
"""
Cooperative multitasking and asynchronous I/O using generators
multitask allows Python programs to use generators (a.k.a. coroutines)
to perform cooperative multitasking and asynchronous I/O.
Applications written using multitask consist of a set of cooperating
tasks that yield to a shared task manager whenever they perform a
(potentially) blocking operation, such as I/O on a socket or getting
data from a queue. The task manager temporarily suspends the task
(allowing other tasks to run in the meantime) and then restarts it
when the blocking operation is complete. Such an approach is suitable
for applications that would otherwise have to use select() and/or
multiple threads to achieve concurrency.
The functions and classes in the multitask module allow tasks to yield
for I/O operations on sockets and file descriptors, adding/removing
data to/from queues, or sleeping for a specified interval. When
yielding, a task can also specify a timeout. If the operation for
which the task yielded has not completed after the given number of
seconds, the task is restarted, and a Timeout exception is raised at
the point of yielding.
As a very simple example, here's how one could use multitask to allow
two unrelated tasks to run concurrently:
>>> def printer(message):
... while True:
... print message
... yield
...
>>> multitask.add(printer('hello'))
>>> multitask.add(printer('goodbye'))
>>> multitask.run()
hello
goodbye
hello
goodbye
hello
goodbye
[and so on ...]
For a more useful example, here's how one could implement a
multitasking server that can handle multiple concurrent client
connections:
def listener(sock):
while True:
conn, address = (yield multitask.accept(sock))
multitask.add(client_handler(conn))
def client_handler(sock):
while True:
request = (yield multitask.recv(sock, 1024))
if not request:
break
response = handle_request(request)
yield multitask.send(sock, response)
multitask.add(listener(sock))
multitask.run()
Tasks can also yield other tasks, which allows for composition of
tasks and reuse of existing multitasking code. A child task runs
until it either completes or raises an exception. To return output to
its parent, a child task raises StopIteration, passing the output
value(s) to the StopIteration constructor. An unhandled exception
raised within a child task is propagated to its parent. For example:
>>> def parent():
... print (yield return_none())
... print (yield return_one())
... print (yield return_many())
... try:
... yield raise_exception()
... except Exception, e:
... print 'caught exception: %s' % e
...
>>> def return_none():
... yield
... # do nothing
... # or return
... # or raise StopIteration
... # or raise StopIteration(None)
...
>>> def return_one():
... yield
... raise StopIteration(1)
...
>>> def return_many():
... yield
... raise StopIteration(2, 3) # or raise StopIteration((2, 3))
...
>>> def raise_exception():
... yield
... raise RuntimeError('foo')
...
>>> multitask.add(parent())
>>> multitask.run()
None
1
(2, 3)
caught exception: foo
"""
import collections
import errno
from functools import partial
import heapq
import os
import select
import sys
import time
import types
__author__ = 'Christopher Stawarz <[email protected]>'
__version__ = '0.2.0'
# __revision__ = int('$Revision$'.split()[1])
################################################################################
#
# Timeout exception type
#
################################################################################
class Timeout(Exception):
'Raised in a yielding task when an operation times out'
pass
################################################################################
#
# _ChildTask class
#
################################################################################
class _ChildTask(object):
def __init__(self, parent, task):
self.parent = parent
self.task = task
def send(self, value):
return self.task.send(value)
def throw(self, type, value=None, traceback=None):
return self.task.throw(type, value, traceback)
################################################################################
#
# YieldCondition class
#
################################################################################
class YieldCondition(object):
"""
Base class for objects that are yielded by a task to the task
manager and specify the condition(s) under which the task should
be restarted. Only subclasses of this class are useful to
application code.
"""
def __init__(self, timeout=None):
"""
If timeout is None, the task will be suspended indefinitely
until the condition is met. Otherwise, if the condition is
not met within timeout seconds, a Timeout exception will be
raised in the yielding task.
"""
self.task = None
self.handle_expiration = None
if timeout is None:
self.expiration = None
else:
self.expiration = time.time() + float(timeout)
def _expires(self):
return (self.expiration is not None)
################################################################################
#
# _SleepDelay class and related functions
#
################################################################################
class _SleepDelay(YieldCondition):
def __init__(self, seconds):
seconds = float(seconds)
if seconds <= 0.0:
raise ValueError("'seconds' must be greater than 0")
super(_SleepDelay, self).__init__(seconds)
def sleep(seconds):
"""
A task that yields the result of this function will be resumed
after the specified number of seconds have elapsed. For example:
while too_early():
yield sleep(5) # Sleep for five seconds
do_something() # Done sleeping; get back to work
"""
return _SleepDelay(seconds)
################################################################################
#
# FDReady class and related functions
#
################################################################################
class FDReady(YieldCondition):
"""
A task that yields an instance of this class will be suspended
until a specified file descriptor is ready for I/O.
"""
def __init__(self, fd, read=False, write=False, exc=False, timeout=None):
"""
Resume the yielding task when fd is ready for reading,
writing, and/or "exceptional" condition handling. fd can be
any object accepted by select.select() (meaning an in
| nickmckay/LiPD-utilities | Python/lipd/__init__.py | Python | gpl-2.0 | 38,558 | 0.002542 |
from lipd.lipd_io import lipd_read, lipd_write
from lipd.timeseries import extract, collapse, mode_ts, translate_expression, get_matches
from lipd.doi_main import doi_main
from lipd.csvs import get_csv_from_metadata
from lipd.excel import excel_main
from lipd.noaa import noaa_prompt, noaa_to_lpd, lpd_to_noaa, noaa_prompt_1
from lipd.dataframes import *
from lipd.directory import get_src_or_dst, list_files, collect_metadata_file
from lipd.loggers import create_logger, log_benchmark, create_benchmark
from lipd.misc import path_type, load_fn_matches_ext, rm_values_fields, get_dsn, rm_empty_fields, print_filename, rm_wds_url, rm_od_url
from lipd.tables import addModel, addTable
from lipd.validator_api import call_validator_api, display_results, get_validator_format
from lipd.alternates import FILE_TYPE_MAP
from lipd.regexes import re_url
from lipd.fetch_doi import update_dois
from lipd.download_lipd import download_from_url, get_download_path
from lipd.directory import _go_to_package
import re
from time import process_time as clock
import os
import json
import copy
from collections import OrderedDict
import subprocess
# READ
def run():
"""
Initialize and start objects. This is called automatically when importing the package.
:return none:
"""
# GLOBALS
global cwd, files, logger_start, logger_benchmark, settings, _timeseries_data
_timeseries_data = {}
# files = {".lpd": [ {"full_path", "filename_ext", "filename_no_ext", "dir"} ], ".xls": [...], ".txt": [...]}
settings = {"note_update": True, "note_validate": True, "verbose": True}
cwd = os.getcwd()
    # logger created in whatever directory lipd is called from
logger_start = create_logger("start")
files = {".txt": [], ".lpd": [], ".xls": []}
return
def readLipd(usr_path="", remote_file_save=False):
"""
Read LiPD file(s).
Enter a file path, directory path, or leave args blank to trigger gui.
:param str usr_path: Path to file / directory (optional)
:return dict _d: Metadata
"""
global cwd, settings, files
    _d = {}
    try:
        if settings["verbose"]:
__disclaimer(opt="update")
files[".lpd"] = []
__read(usr_path, ".lpd")
_d = __read_lipd_contents(usr_path, remote_file_save)
# Clear out the lipd files metadata. We're done loading, we dont need it anymore.
files[".lpd"] = []
except Exception as e:
pass
# Placeholder to catch errors so we can always chdir back to cwd
os.chdir(cwd)
return _d
def readExcel(usr_path=""):
"""
Read Excel file(s)
Enter a file path, directory path, or leave args blank to trigger gui.
:param str usr_path: Path to file / directory (optional)
:return str cwd: Current working directory
"""
global cwd, files
try:
files[".xls"] = []
__read(usr_path, ".xls")
except Exception as e:
pass
# Placeholder to catch errors so we can always chdir back to cwd
os.chdir(cwd)
return cwd
def readNoaa(usr_path=""):
"""
Read NOAA file(s)
Enter a file path, directory path, or leave args blank to trigger gui.
:param str usr_path: Path to file / directory (optional)
:return str cwd: Current working directory
"""
global cwd, files
try:
files[".txt"] = []
__read(usr_path, ".txt")
except Exception as e:
pass
# Placeholder to catch errors so we can always chdir back to cwd
os.chdir(cwd)
return cwd
def readAll(usr_path=""):
"""
Read all approved file types at once.
Enter a file path, directory path, or leave args blank to trigger gui.
:param str usr_path: Path to file / directory (optional)
:return str cwd: Current working directory
"""
print("readAll: This function no longer exists. Sorry! :(")
# global cwd, files
# start = clock()
# files = {".txt": [], ".lpd": [], ".xls": []}
# if not usr_path:
# usr_path, src_files = get_src_or_dst("read", "directory")
# __read_directory(usr_path, ".lpd")
# __read_directory(usr_path, ".xls")
# __read_directory(usr_path, ".xlsx")
# __read_directory(usr_path, ".txt")
# end = clock()
# logger_benchmark.info(log_benchmark("readAll", start, end))
# return cwd
def excel():
"""
Convert Excel files to LiPD files. LiPD data is returned directly from this function.
| Example
| 1: lipd.readExcel()
| 2: D = lipd.excel()
:return dict _d: Metadata
"""
global files, cwd, settings
_d = {}
# Turn off verbose. We don't want to clutter the console with extra reading/writing output statements
settings["verbose"] = False
try:
# Find excel files
print("Found " + str(len(files[".xls"])) + " Excel files")
logger_start.info("found excel files: {}".format(len(files[".xls"])))
# Loop for each excel file
for file in files[".xls"]:
# Convert excel file to LiPD
dsn = excel_main(file)
try:
# Read the new LiPD file back in, to get fixes, inferred calculations, updates, etc.
_d[dsn] = readLipd(os.path.join(file["dir"], dsn + ".lpd"))
# Write the modified LiPD file back out again.
writeLipd(_d[dsn], cwd)
except Exception as e:
logger_start.error("excel: Unable to read new LiPD file, {}".format(e))
print("Error: Unable to read new LiPD file: {}, {}".format(dsn, e))
except Exception as e:
pass
# Start printing stuff again.
settings["verbose"] = True
os.chdir(cwd)
return _d
def noaa(D="", path="", wds_url="", lpd_url="", version=""):
"""
Convert between NOAA and LiPD files
| Example: LiPD to NOAA converter
| 1: L = lipd.readLipd()
| 2: lipd.noaa(L, "/Users/someuser/Desktop", "https://www1.ncdc.noaa.gov/pub/data/paleo/pages2k/NAm2kHydro-2017/noaa-templates/data-version-1.0.0", "https://www1.ncdc.noaa.gov/pub/data/paleo/pages2k/NAm2kHydro-2017/data-version-1.0.0", "v1-1.0.0")
| Example: NOAA to LiPD converter
| 1: lipd.readNoaa()
| 2: lipd.noaa()
:param dict D: Metadata
:param str path: Path where output files will be written to
:param str wds_url: WDSPaleoUrl, where NOAA template file will be stored on NOAA's FTP server
:param str lpd_url: URL where LiPD file will be stored on NOAA's FTP server
:param str version: Version of the dataset
:return none:
"""
global files, cwd
try:
# When going from NOAA to LPD, use the global "files" variable.
# When going from LPD to NOAA, use the data from the LiPD Library.
# Choose the mode
_mode = noaa_prompt()
# LiPD mode: Convert LiPD files to NOAA files
if _mode == "1":
# _project, _version = noaa_prompt_1()
if not version or not lpd_url:
print("Missing parameters: Please try again and provide all parameters.")
return
if not D:
print("Error: LiPD data must be provided for LiPD -> NOAA conversions")
else:
try:
os.mkdir("noaa_files")
except FileExistsError:
pass
if "paleoData" in D:
_d = copy.deepcopy(D)
D = lpd_to_noaa(_d, wds_url, lpd_url, version, path)
else:
# For each LiPD file in the LiPD Library
for dsn, dat in D.items():
_d = copy.deepcopy(dat)
# Process this data through the converter
_d = lpd_to_noaa(_d, wds_url, lpd_url, version, path)
# Overwrite the data in the LiPD object with our new data.
D[dsn] = _d
# If no wds url is provided, then remove instances from jsonld metadata
if not wds_url:
D = rm_wds_url(D)
# Write out the new LiPD files, since they now contain the new NOAA URL data
if(p
|
jakubroztocil/httpie
|
docs/packaging/spack/package.py
|
Python
|
bsd-3-clause
| 1,904
| 0.003151
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Httpie(PythonPackage):
"""Modern, user-friendly command-line HTTP client for the API era."""
homepage = 'https://httpie.io/'
pypi = 'httpie/httpie-2.6.0.tar.gz'
maintainers = ['jakubroztocil']
version('2.6.0', sha256='ef929317b239bbf0a5bb7159b4c5d2edbfc55f8a0bcf9cd24ce597daec2afca5')
version('2.5.0', sha256='fe6a8bc50fb0635a84ebe1296a732e39357c3e1354541bf51a7057b4877e47f9')
# TODO: Remove both versions for HTTPie 2.7.0.
version('0.9.9', sha256='f1202e6fa60367e2265284a53f35bfa5917119592c2ab08277efc7fffd744fcb', deprecated=True)
version('0.9.8', sha256='515870b15231530f56fe2164190581748e8799b66ef0fe36ec9da3396f0df6e1', deprecated=True)
    depends_on('[email protected]:', when='@2.5:', type=('build', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
depends_on('py-charset-normalizer@2:', when='@2.6:', type=('build', 'run'))
depends_on('[email protected]:', when='@2.5:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
    depends_on('[email protected]:', when='@2.5:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:+socks', when='@2.5:', type=('build', 'run'))
depends_on('[email protected]:', when='@2.5:', type=('build', 'run'))
# TODO: Remove completely py-argparse for HTTPie 2.7.0.
# Concretization problem breaks this. Unconditional for now...
# https://github.com/spack/spack/issues/3628
# depends_on('[email protected]:', type=('build', 'run'),
# when='^python@:2.6,3.0:3.1')
depends_on('[email protected]:', type=('build', 'run'), when='^python@:2.6')
|
colinleefish/theotherbarn
|
vmware/serializers.py
|
Python
|
mit
| 786
| 0.003817
|
from vmware.models import VM, VMwareHost
from rest_framework import serializers
class VMSerializer(serializers.ModelSerializer):
class Meta:
model = VM
fields = ('name',
'moid',
'vcenter',
'host',
'instance_uuid',
'os_type',
'added_time',
'is_template',
'state')
class VMWareHostSerializer(serializers.ModelSerializer):
baremetal = serializers.HyperlinkedRelatedField(many=False,
view_name='baremetal-detail', read_only=True)
class Meta:
model = VMwareHost
fields = ('name',
'ip_address',
'vcenter',
'baremetal',
'state')
|
eliran-stratoscale/inaugurator
|
inaugurator/targetdevice.py
|
Python
|
apache-2.0
| 1,263
| 0.001584
|
import os
import stat
import time
from inaugurator import sh
class TargetDevice:
_found = None
@classmethod
def device(cls, candidates):
if cls._found is None:
cls._found = cls._find(candidates)
return cls._found
pass
@classmethod
def _find(cls, candidates):
RETRIES = 5
for retry in xrange(RETRIES):
for device in candidates:
if not os.path.exists(device):
continue
if not stat.S_ISBLK(os.stat(device).st_mode):
continue
try:
                    output = sh.run("dosfslabel", device + "1")  # first partition, e.g. /dev/sda1
if output.strip() == "STRATODOK":
raise Exception(
"DOK was found on SDA.
|
cannot continue: its likely the "
"the HD driver was not loaded correctly")
except:
pass
print "Found target device %s" % device
return device
print "didn't find target device, sleeping before retry %d" % retry
time.sleep(1)
os.system("/usr/sbin/busybox mdev -s")
raise Exception("Failed finding target device")
|
jerynmathew/AssetManager
|
AssetManager/core/baseobject/models.py
|
Python
|
mit
| 1,839
| 0.001631
|
from django.db import models
from jsonfield import JSONField
from collections import OrderedDict
class BaseObject(models.Model):
"""
The base model from which all apps inherit
"""
# Type represents the app that uses it. Assets, Persons, Orgs, etc
type = models.CharField(max_length=256)
# Related-to represents the the relation of this object with other objects (of any type)
related_to = models.ManyToManyField("self", blank=True)
created_on = models.DateTimeField(auto_now_add=True)
    updated_on = models.DateTimeField(auto_now=True)  # auto_now_add and auto_now are mutually exclusive in Django
# Store all attributes/properties of the object as dictionary
attributes = JSONField(load_kwargs={'object_pairs_hook': OrderedDict}, blank=True)
def __init__(self, *args, **kwargs):
super(BaseObject, self).__init__(*args, **kwargs)
if not self.pk and not self.type:
self.type = self.TYPE
class BasePropertyManager(models.Manager):
def create_attributes(self, baseobject, **attributes):
"""
Given a set of key-value attributes for a given object,
create the attribute-set in table
"""
property_set = []
        for attr, value in attributes.items():
property_set.append(BaseProperty(baseobject=baseobject, key=attr, value=value))
self.bulk_create(property_set)
class BaseProperty(models.Model):
"""
Key-Value attributes of objects are stored here.
"""
baseobject = models.ForeignKey(BaseObject)
key = models.CharField(max_length=256)
    value = models.CharField(max_length=256)
objects = BasePropertyManager()
def __unicode__(self):
"""Representation of field"""
return {self.baseobject.id: {self.key: self.value}}
class ProxyObject(BaseObject):
class Meta:
proxy = True
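# Usage sketch (illustrative; `obj` is assumed to be a saved BaseObject):
#
#     BaseProperty.objects.create_attributes(obj, color='red', size='L')
#
# stores each key/value pair as one BaseProperty row via bulk_create.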
|
orlenko/bccf
|
src/mezzanine/generic/defaults.py
|
Python
|
unlicense
| 4,223
| 0.000947
|
"""
Default settings for the ``mezzanine.generic`` app. Each of these can be
overridden in your project's settings module, just like regular
Django settings. The ``editable`` argument for each controls whether
the setting is editable via Django's admin.
Thought should be given to how a setting is actually used before
making it editable, as it may be inappropriate - for example settings
that are only read during startup shouldn't be editable, since changing
them would require an application reload.
"""
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import register_setting
generic_comments = getattr(settings, "COMMENTS_APP", "") == "mezzanine.generic"
if generic_comments:
register_setting(
name="COMMENTS_ACCOUNT_REQUIRED",
label=_("Accounts required for commenting"),
description=_("If ``True``, users must log in to comment."),
editable=True,
default=False,
)
register_setting(
name="COMMENTS_DISQUS_SHORTNAME",
label=_("Disqus shortname"),
description=_("Shortname for the http://disqus.com comments "
"service."),
editable=True,
default="",
)
register_setting(
name="COMMENTS_DISQUS_API_PUBLIC_KEY",
label=_("Disqus public key"),
description=_("Public key for http://disqus.com developer API"),
editable=True,
default="",
)
register_setting(
name="COMMENTS_DISQUS_API_SECRET_KEY",
label=_("Disqus secret key"),
description=_("Secret key for http://disqus.com developer API"),
editable=True,
default="",
)
register_setting(
name="COMMENTS_DEFAULT_APPROVED",
label=_("Auto-approve comments"),
description=_("If ``True``, built-in comments are approved by "
"default."),
editable=True,
default=True,
)
register_setting(
name="COMMENT_FILTER",
description=_("Dotted path to the function to call on a comment's "
"value before it is rendered to the template."),
editable=False,
default=None,
)
register_setting(
name="COMMENTS_NOTIFICATION_EMAILS",
label=_("Comment notification email addresses"),
description=_("A comma separated list of email addresses that "
"will receive an email notification each time a "
"new comment is posted on the site."),
editable=True,
default="",
)
register_setting(
name="COMMENTS_NUM_LATEST",
label=_("Admin comments"),
description=_("Number of latest comments shown in the admin "
"dashboard."),
editable=True,
default=5,
)
register_setting(
name="COMMENTS_UNAPPROVED_VISIBLE",
label=_("Show unapproved comments"),
description=_("If ``True``, comments that have ``is_public`` "
"unchecked will still be displayed, but replaced with a "
"``waiting to be approved`` message."),
editable=True,
default=True,
)
register_setting(
name="COMMENTS_REMOVED_VISIBLE",
label=_("Show removed comments"),
description=_("If ``True``, comments that have ``removed`` "
"checked will still be displayed, but replaced "
"with a ``removed`` message."),
editable=True,
default=True,
)
register_setting(
name="COMMENTS_USE_RATINGS",
description=_("If ``True``, comments can be rated."),
editable=False,
default=True,
)
register_setting(
name="RATINGS_ACCOUNT_REQUIRED",
label=_("Accounts required for rating"),
        description=_("If ``True``, users must log in to rate content "
"such as blog posts and comments."),
editable=True,
default=False,
)
register_setting(
name="RATINGS_RANGE",
description=_("A sequence of integers that are valid ratings."),
editable=False,
default=range(getattr(settings, "RATINGS_MIN", 1),
getattr(settings, "RATINGS_MAX", 5) + 1),
)
|
bealdav/OpenUpgrade
|
addons/sale/report/sale_report.py
|
Python
|
agpl-3.0
| 5,204
| 0.002882
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
from openerp.osv import fields, osv
class sale_report(osv.osv):
_name = "sale.report"
_description = "Sales Orders Statistics"
_auto = False
_rec_name = 'date'
_columns = {
'date': fields.datetime('Date Order', readonly=True),
'date_confirm': fields.date('Date Confirm', readonly=True),
        'product_id': fields.many2one('product.product', 'Product', readonly=True),
'product_uom': fields.many2one('product.uom', 'Unit of Measure', readonly=True),
'product_uom_qty': fields.float('# of Qty', readonly=True),
'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
'company_id': fields.many2one('res.company', 'Company', readonly=True),
'user_id': fields.many2one('res.users', 'Salesperson', readonly=True),
'price_total': fields.float('Total Price', readonly=True),
'delay': fields.float('Commitment Delay', digits=(16,2), readonly=True),
'categ_id': fields.many2one('product.category','Category of Product', readonly=True),
'nbr': fields.integer('# of Lines', readonly=True),
'state': fields.selection([
('draft', 'Quotation'),
('waiting_date', 'Waiting Schedule'),
('manual', 'Manual In Progress'),
('progress', 'In Progress'),
('invoice_except', 'Invoice Exception'),
('done', 'Done'),
('cancel', 'Cancelled')
], 'Order Status', readonly=True),
'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', readonly=True),
'analytic_account_id': fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
'section_id': fields.many2one('crm.case.section', 'Sales Team'),
}
_order = 'date desc'
def _select(self):
select_str = """
SELECT min(l.id) as id,
l.product_id as product_id,
t.uom_id as product_uom,
sum(l.product_uom_qty / u.factor * u2.factor) as product_uom_qty,
sum(l.product_uom_qty * l.price_unit * (100.0-l.discount) / 100.0) as price_total,
count(*) as nbr,
s.date_order as date,
s.date_confirm as date_confirm,
s.partner_id as partner_id,
s.user_id as user_id,
s.company_id as company_id,
extract(epoch from avg(date_trunc('day',s.date_confirm)-date_trunc('day',s.create_date)))/(24*60*60)::decimal(16,2) as delay,
s.state,
t.categ_id as categ_id,
s.pricelist_id as pricelist_id,
s.project_id as analytic_account_id,
s.section_id as section_id
"""
return select_str
def _from(self):
from_str = """
sale_order_line l
join sale_order s on (l.order_id=s.id)
left join product_product p on (l.product_id=p.id)
left join product_template t on (p.product_tmpl_id=t.id)
left join product_uom u on (u.id=l.product_uom)
left join product_uom u2 on (u2.id=t.uom_id)
"""
return from_str
def _group_by(self):
group_by_str = """
GROUP BY l.product_id,
l.order_id,
t.uom_id,
t.categ_id,
s.date_order,
s.date_confirm,
s.partner_id,
s.user_id,
s.company_id,
s.state,
s.pricelist_id,
s.project_id,
s.section_id
"""
return group_by_str
def init(self, cr):
# self._table = sale_report
tools.drop_view_if_exists(cr, self._table)
cr.execute("""CREATE or REPLACE VIEW %s as (
%s
FROM ( %s )
%s
)""" % (self._table, self._select(), self._from(), self._group_by()))
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
fbradyirl/home-assistant
|
script/lazytox.py
|
Python
|
apache-2.0
| 7,111
| 0.000422
|
#!/usr/bin/env python3
"""
Lazy 'tox' to quickly check if branch is up to PR standards.
This is NOT a tox replacement, only a quick check during development.
"""
import os
import asyncio
import sys
import re
import shlex
from collections import namedtuple
try:
from colorlog.escape_codes import escape_codes
except ImportError:
escape_codes = None
RE_ASCII = re.compile(r"\033\[[^m]*m")
Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"])
PASS = "green"
FAIL = "bold_red"
def printc(the_color, *args):
"""Color print helper."""
msg = " ".join(args)
if not escape_codes:
print(msg)
return
try:
print(escape_codes[the_color] + msg + escape_codes["reset"])
except KeyError:
print(msg)
raise ValueError("Invalid color {}".format(the_color))
def validate_requirements_ok():
"""Validate requirements, returns True of ok."""
from gen_requirements_all import main as req_main
return req_main(True) == 0
async def read_stream(stream, display):
"""Read from stream line by line until EOF, display, and capture lines."""
output = []
while True:
line = await stream.readline()
if not line:
break
output.append(line)
display(line.decode()) # assume it doesn't block
return b"".join(output)
async def async_exec(*args, display=False):
"""Execute, return code & log."""
argsp = []
for arg in args:
if os.path.isfile(arg):
argsp.append("\\\n {}".format(shlex.quote(arg)))
else:
argsp.append(shlex.quote(arg))
printc("cyan", *argsp)
try:
kwargs = {
"loop": LOOP,
"stdout": asyncio.subprocess.PIPE,
"stderr": asyncio.subprocess.STDOUT,
}
if display:
kwargs["stderr"] = asyncio.subprocess.PIPE
proc = await asyncio.create_subprocess_exec(*args, **kwargs)
except FileNotFoundError as err:
printc(
FAIL,
"Could not execute {}. Did you install test requirements?".format(args[0]),
)
raise err
if not display:
        # Reading stdout into log
stdout, _ = await proc.communicate()
else:
# read child's stdout/stderr concurrently (capture and display)
stdout, _ = await asyncio.gather(
read_stream(proc.stdout, sys.stdout.write),
read_stream(proc.stderr, sys.stderr.write),
)
exit_code = await proc.wait()
stdout = stdout.decode("utf-8")
return exit_code, stdout
async def git():
"""Exec git."""
if len(sys.argv) > 2 and sys.argv[1] == "--":
return sys.argv[2:]
_, log = await async_exec("git", "merge-base", "upstream/dev", "HEAD")
merge_base = log.splitlines()[0]
_, log = await async_exec("git", "diff", merge_base, "--name-only")
return log.splitlines()
async def pylint(files):
"""Exec pylint."""
_, log = await async_exec("pylint", "-f", "parseable", "--persistent=n", *files)
res = []
for line in log.splitlines():
line = line.split(":")
if len(line) < 3:
continue
_fn = line[0].replace("\\", "/")
res.append(Error(_fn, line[1], "", line[2].strip(), _fn.startswith("tests/")))
return res
async def flake8(files):
"""Exec flake8."""
_, log = await async_exec("flake8", "--doctests", *files)
res = []
for line in log.splitlines():
line = line.split(":")
if len(line) < 4:
continue
_fn = line[0].replace("\\", "/")
res.append(Error(_fn, line[1], line[2], line[3].strip(), False))
return res
async def lint(files):
"""Perform lint."""
files = [file for file in files if os.path.isfile(file)]
fres, pres = await asyncio.gather(flake8(files), pylint(files))
res = fres + pres
res.sort(key=lambda item: item.file)
if res:
print("Pylint & Flake8 errors:")
else:
printc(PASS, "Pylint and Flake8 passed")
lint_ok = True
for err in res:
err_msg = "{} {}:{} {}".format(err.file, err.line, err.col, err.msg)
# tests/* does not have to pass lint
if err.skip:
print(err_msg)
else:
printc(FAIL, err_msg)
lint_ok = False
return lint_ok
async def main():
"""Run the main loop."""
# Ensure we are in the homeassistant root
os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
files = await git()
if not files:
print(
"No changed files found. Please ensure you have added your "
"changes with git add & git commit"
)
return
pyfile = re.compile(r".+\.py$")
pyfiles = [file for file in files if pyfile.match(file)]
print("=============================")
printc("bold", "CHANGED FILES:\n", "\n ".join(pyfiles))
print("=============================")
skip_lint = len(sys.argv) > 1 and sys.argv[1] == "--skiplint"
if skip_lint:
printc(FAIL, "LINT DISABLED")
elif not await lint(pyfiles):
printc(FAIL, "Please fix your lint issues before continuing")
return
test_files = set()
gen_req = False
for fname in pyfiles:
if fname.startswith("homeassistant/components/"):
gen_req = True # requirements script for components
# Find test files...
if fname.startswith("tests/"):
if "/test_" in fname and os.path.isfile(fname):
# All test helpers should be excluded
test_files.add(fname)
else:
parts = fname.split("/")
parts[0] = "tests"
if parts[-1] == "__init__.py":
parts[-1] = "test_init.py"
elif parts[-1] == "__main__.py":
parts[-1] = "test_main.py"
else:
parts[-1] = "test_" + parts[-1]
fname = "/".join(parts)
if os.path.isfile(fname):
test_files.add(fname)
if gen_req:
print("=============================")
if validate_requirements_ok():
printc(PASS, "script/gen_requirements.py passed")
else:
printc(FAIL, "Please run script/gen_requirements.py")
return
print("=============================")
if not test_files:
print("No test files identified, ideally you should run tox")
return
code, _ = await async_exec(
"pytest", "-vv", "--force-sugar", "--", *test_files, display=True
)
print("=============================")
if code == 0:
printc(PASS, "Yay! This will most likely pass tox")
else:
printc(FAIL, "Tests not passing")
if skip_lint:
printc(FAIL, "LINT DISABLED")
if __name__ == "__main__":
LOOP = (
asyncio.ProactorEventLoop()
if sys.platform == "win32"
else asyncio.get_event_loop()
)
try:
LOOP.run_until_complete(main())
except (FileNotFoundError, KeyboardInterrupt):
pass
finally:
LOOP.close()
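# Usage sketch: lint and test only the files changed against upstream/dev,
# or pass explicit files after `--` (both behaviours come from git() above):
#
#     script/lazytox.py
#     script/lazytox.py --skiplint
#     script/lazytox.py -- homeassistant/core.py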
|
matthew-Ng/sol
|
exp_sol/sol_shuffle.py
|
Python
|
gpl-3.0
| 626
| 0.003195
|
#!/usr/bin/env python
"""shuffle a dataset"""
import random
import sys
def sol_shuffle(filename, out_filename):
try:
        file = open(filename, 'rb')
lines = file.readlines()
if len(lines) == 0:
print 'empty file'
file.close()
sys.exit()
if lines[-1][-1] != '\n':
lines[-1]+='\n'
random.shuffle(lines)
wfile = open(out_filename, 'wb')
wfile.writelines(lines)
wfile.close()
except IOError as e:
print "I/O error ({0}): {1}".format(e.errno, e.strerror)
sys.exit()
else:
file.close()
|
cmdunkers/DeeperMind
|
RuleLearner/RuleLearner.py
|
Python
|
bsd-3-clause
| 974
| 0.002053
|
import abc
class RuleLearner:
"""2D 2-person board game rule learner base class
TODO
"""
def __
|
init__(self, board_height, board_width):
"""Initialize the rule learner
Subclasses should call this constructor.
:type board_height: positive integer
:param board_height: the height (number of rows) of the board
:type board_width: positive integer
:param board_width: the width (number of columns) of the board
"""
self._board_height = board_height
self._board_width = board_width
    @abc.abstractmethod
def get_valid_moves(self, board):
"""Get the valid moves for the board.
:type board: Boards.Board
:param board: the board for which to determine the valid moves
        :returns: a 2D Numpy array with the same dimensions as the board; cells where moves
            are valid are set to 1, the rest set to 0
"""
pass
|
kdart/pycopia
|
core/pycopia/OS/Linux/proc/devices.py
|
Python
|
apache-2.0
| 1,668
| 0.002398
|
#!/usr/bin/python2.7
# -*- coding: utf-8 -*-
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The devices file.
"""
class Devices:
def __init__(self):
fo = open("/proc/devices")
self._charmap = {}
self._blockmap = {}
for line in fo.readlines():
if line.startswith("Character"):
curmap = self._charmap
continue
elif line.startswith("Block"):
curmap = self._blockmap
continue
elif len(line) > 4:
[num, fmt] = line.split()
num = int(num)
curmap[num] = fmt
def __str__(self):
s = ["Character
|
devices:"]
for num, fmt in self._charmap.items():
s.append("%3d %s" % (num, fmt))
s.append("\nBlock devices:")
for num, fmt in self._blockmap.items():
s.append("%3d %s" % (num, fmt))
return "\n".join(s)
def get_device(self, dtype, major, minor):
pass
def _test(argv):
d = Devices()
print d
if __name__ == "__main__":
import sys
_test(sys.argv)
|
osuripple/ripple
|
c.ppy.sh/matchReadyEvent.py
|
Python
|
mit
| 370
| 0.035135
|
import glob
def handle(userToken, _):
# Get usertoken data
userID = userToken.userID
# Make sure the match exists
matchID = userToken.matchID
if matchID not in glob.matches.matches:
return
match = glob.matches.matches[matchID]
# Get our slotID and change ready status
slotID = match.getUserSlotID(userID)
if slotID != None:
match.toggleSlotReady(slotID)
|
ActiveState/code
|
recipes/Python/528949_Copying_Generators/recipe-528949.py
|
Python
|
mit
| 3,934
| 0.008897
|
###
#
# W A R N I N G
#
# This recipe is obsolete!
#
# When you are looking for copying and pickling functionality for generators
# implemented in pure Python download the
#
# generator_tools
#
# package at the cheeseshop or at www.fiber-space.de
#
###
import new
import copy
import types
import sys
from opcode import*
def copy_generator(f_gen):
'''
Function used to copy a generator object.
@param f_gen: generator object.
@return: pair (g_gen, g) where g_gen is a new generator object and g a generator
function g producing g_gen. The function g is created from f_gen.gi_frame.
Usage: function copies a running generator.
def inc(start, step = 1):
i = start
while True:
yield i
i+= step
>>> inc_gen = inc(3)
>>> inc_gen.next()
3
>>> inc_gen.next()
4
>>> inc_gen_c, inc_c = copy_generator(inc_gen)
>>> inc_gen_c.next() == inc_gen.next()
True
>>> inc_gen_c.next()
6
Implementation strategy:
    Inspecting the frame of a running generator object f provides the following important
information about the state of the generator:
- the values of bound locals inside the generator object
- the last bytecode being executed
    This state information of f is restored in a new generator function g in the following way:
- the signature of g is defined by the locals of f ( co_varnames of f ). So we can pass the
locals to g inspected from the current frame of running f. Yet unbound locals are assigned
to None.
All locals will be deepcopied. If one of the locals is a generator object it will be copied
using copy_generator. If a local is not copyable it will be assigned directly. Shared state
is therefore possible.
- bytecode hack. A JUMP_ABSOLUTE bytecode instruction is prepended to the bytecode of f with
an offset pointing to the next unevaluated bytecode instruction of f.
Corner cases:
- an unstarted generator ( last instruction = -1 ) will be just cloned.
- if a generator has been already closed ( gi_frame = None ) a ValueError exception
is raised.
'''
if not f_gen.gi_frame:
raise ValueError("Can't copy closed generator")
f_code = f_gen.gi_frame.f_code
offset = f_gen.gi_frame.f_lasti
locals = f_gen.gi_frame.f_locals
if offset == -1: # clone the generator
argcount = f_code.co_argcount
else:
# bytecode hack - insert jump to current offset
# the offset depends on the version of the Python interpreter
if sys.version_info[:2] == (2,4):
offset +=4
elif sys.version_info[:2] == (2,5):
offset +=5
start_sequence = (opmap["JUMP_ABSOLUTE"],)+divmod(offset, 256)[::-1]
modified_code = "".join([chr(op) for op in start_sequence])+f_code.co_code
argcount = f_code.co_nlocals
varnames = list(f_code.co_varnames)
for i, name in enumerate(varnames):
loc = locals.get(name)
if isinstance(loc, types.GeneratorType):
varnames[i] = copy_generator(loc)[0]
else:
try:
varnames[i] = copy.deepcopy(loc)
except TypeError:
varnames[i] = loc
new_code = new.code(argcount,
f_code.co_nlocals,
f_code.co_stacksize,
f_code.co_flags,
modified_code,
f_code.co_consts,
f_code.co_names,
f_code.co_varnames,
f_code.co_filename,
f_code.co_name,
f_code.co_firstlineno,
f_code.co_lnotab)
g = new.function(new_code, globals(),)
g_gen = g(*varnames)
return g_gen, g
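if __name__ == '__main__':
    # Quick sanity sketch (Python 2.4/2.5 only, as the recipe requires):
    # copy a half-consumed generator and check both resume at the same state.
    def counter(start):
        i = start
        while True:
            yield i
            i += 1
    gen = counter(0)
    gen.next(); gen.next()              # original has yielded 0 and 1
    gen_copy, _ = copy_generator(gen)
    assert gen_copy.next() == gen.next() == 2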
|
weirdgiraffe/plugin.video.giraffe.seasonvar
|
resources/site-packages/kodi/__init__.py
|
Python
|
mit
| 3,703
| 0
|
# coding: utf-8
#
# Copyright © 2017 weirdgiraffe <[email protected]>
#
# Distributed under terms of the MIT license.
#
import sys
try: # real kodi
import xbmc
import xbmcaddon
import xbmcgui
import xbmcplugin
except ImportError: # mocked kodi
from mock_kodi import xbmc
from mock_kodi import xbmcaddon
from mock_kodi import xbmcgui
from mock_kodi import xbmcplugin
try: # python2
from urllib import urlencode
from urlparse import urlparse, parse_qs
except ImportError: # python3
from urllib.parse import urlparse, parse_qs, urlencode
class logger:
@staticmethod
def debug(s):
xbmc.log(s, xbmc.LOGDEBUG)
@staticmethod
def info(s):
xbmc.log(s, xbmc.LOGNOTICE)
@staticmethod
def error(s):
s += '\n\taddon arguments:\n\t{0}'.format('\n\t'.join(sys.argv[1:]))
xbmc.log(s, xbmc.LOGERROR)
def list_item(name, thumb):
li = xbmcgui.ListItem(name)
if thumb is not None:
li.setArt(thumb)
    # it is said that both of these methods are deprecated
# see: http://kodi.wiki/view/Jarvis_API_changes
# but only these methods actually works with Jarvis
li.setIconImage(thumb)
li.setThumbnailImage(thumb)
return li
class Plugin:
def __init__(self, *args):
self._addon = xbmcaddon.Addon()
self._url = args[0]
self._handler = int(args[1], base=10)
# addon url has format:
# plugin://plugin.hello.blah?arg1=xxx&arg2=xxx
# where args are urlencoded
o = urlparse(args[2])
self._args = dict()
for k, v in parse_qs(o.query).items():
if len(v) == 1:
self._args[k] = v[0]
else:
self._args[k] = v
@property
def icon(self):
return self._addon.getAddonInfo('icon')
@property
def args(self):
return self._args
def read_input(self, header):
        keyboard = xbmc.Keyboard('', 'Что искать?', False)  # prompt text: "What to search for?"
keyboard.doModal()
if keyboard.isConfirmed():
return keyboard.getText()
def play(self, url):
li = xbmcgui.ListItem(path=url)
xbmcplugin.setResolvedUrl(self._handler, True, li)
def add_screen_item(self, name, url, **kwargs):
thumb = kwargs.get('thumb')
li = list_item(name, thumb)
li.setProperty('IsPlayable', 'true')
ret = xbmcplugin.addDirectoryItem(self._handler, url, li, False)
if not ret:
logger.error('failed to add {0} playable item'.format(name))
def add_screen_directory(self, name, url, **kwargs):
thumb = kwargs.get('thumb')
li = list_item(name, thumb)
args = [self._handler, url, li, True]
items_count = kwargs.get('items_count')
if items_count:
args += [items_count]
ret = xbmcplugin.addDirectoryItem(*args)
if not ret:
logger.error('failed to add {0} directory item'.format(name))
def publish_screen(self, ok, refresh=False):
xbmcplugin.endOfDirectory(self._handler, ok, refresh)
def make_url(self, argv):
return '{0}?{1}'.format(self._url, urlencode(argv))
def settings_value(self, setting_id):
return self._addon.getSetting(setting_id)
def show_notification(self, title, message):
timeout = len(message) / 10 * 2000
title = title.replace('"', '\\"')
message = message.replace('"', '\\"')
xbmc.executebuiltin('Notification("{0}","{1}","{2}","{3}")'.format(
title.encode('ascii', 'ignore'),
message.encode('ascii', 'ignore'),
timeout,
self.icon))
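# Usage sketch (illustrative argv, matching how Kodi invokes a plugin):
#
#     p = Plugin('plugin://plugin.video.example', '1', '?mode=search')
#     p.args                                  # {'mode': 'search'}
#     p.make_url({'mode': 'play', 'id': '42'})
#     # -> 'plugin://plugin.video.example?mode=play&id=42' (query order may vary)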
|
ashang/calibre
|
src/calibre/web/feeds/recipes/model.py
|
Python
|
gpl-3.0
| 14,130
| 0.002052
|
#!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2009, Kovid Goyal <[email protected]>'
__docformat__ = 'restructuredtext en'
import copy, zipfile
from PyQt5.Qt import QAbstractItemModel, Qt, QColor, QFont, QIcon, \
QModelIndex, pyqtSignal, QPixmap
from calibre.utils.search_query_parser import SearchQueryParser
from calibre.utils.localization import get_language
from calibre.web.feeds.recipes.collection import \
get_builtin_recipe_collection, get_custom_recipe_collection, \
SchedulerConfig, download_builtin_recipe, update_custom_recipe, \
update_custom_recipes, add_custom_recipe, add_custom_recipes, \
remove_custom_recipe, get_custom_recipe, get_builtin_recipe
from calibre.utils.search_query_parser import ParseException
class NewsTreeItem(object):
def __init__(self, builtin, custom, scheduler_config, parent=None):
self.builtin, self.custom = builtin, custom
self.scheduler_config = scheduler_config
self.parent = parent
if self.parent is not None:
self.parent.append(self)
self.children = []
def row(self):
if self.parent is not None:
return self.parent.children.index(self)
return 0
def append(self, child):
child.parent = self
self.children.append(child)
def data(self, role):
return None
def flags(self):
return Qt.ItemIsEnabled|Qt.ItemIsSelectable
def sort(self):
self.children.sort()
for child in self.children:
child.sort()
def prune(self):
for child in list(self.children):
if len(child.children) == 0:
self.children.remove(child)
child.parent = None
class NewsCategory(NewsTreeItem):
def __init__(self, category, builtin, custom, scheduler_config, parent):
NewsTreeItem.__init__(self, builtin, custom, scheduler_config, parent)
self.category = category
self.cdata = get_language(self.category)
self.bold_font = QFont()
self.bold_font.setBold(True)
self.bold_font = (self.bold_font)
def data(self, role):
if role == Qt.DisplayRole:
return (self.cdata + ' [%d]'%len(self.children))
elif role == Qt.FontRole:
return self.bold_font
elif role == Qt.ForegroundRole and self.category == _('Scheduled'):
return (QColor(0, 255, 0))
return None
def flags(self):
return Qt.ItemIsEnabled
def __cmp__(self, other):
def decorate(x):
if x == _('Scheduled'):
x = '0' + x
elif x == _('Custom'):
x = '1' + x
else:
x = '2' + x
return x
return cmp(decorate(self.cdata), decorate(getattr(other, 'cdata', '')))
class NewsItem(NewsTreeItem):
def __init__(self, urn, title, default_icon, custom_icon, favicons, zf,
builtin, custom, scheduler_config, parent):
NewsTreeItem.__init__(self, builtin, custom, scheduler_config, parent)
self.urn, self.title = urn, title
self.icon = self.default_icon = None
self.default_icon = default_icon
self.favicons, self.zf = favicons, zf
if 'custom:' in self.urn:
self.icon = custom_icon
def data(self, role):
if role == Qt.DisplayRole:
return (self.title)
if role == Qt.DecorationRole:
if self.icon is None:
icon = '%s.png'%self.urn[8:]
p = QPixmap()
if icon in self.favicons:
try:
with zipfile.ZipFile(self.zf, 'r') as zf:
p.loadFromData(zf.read(self.favicons[icon]))
except:
pass
if not p.isNull():
self.icon = (QIcon(p))
else:
self.icon = self.default_icon
return self.icon
return None
def __cmp__(self, other):
return cmp(self.title.lower(), getattr(other, 'title', '').lower())
class AdaptSQP(SearchQueryParser):
def __init__(self, *args, **kwargs):
pass
class RecipeModel(QAbstractItemModel, AdaptSQP):
LOCATIONS = ['all']
searched = pyqtSignal(object)
def __init__(self, *args):
QAbstractItemModel.__init__(self, *args)
SearchQueryParser.__init__(self, locations=['all'])
self.default_icon = (QIcon(I('news.png')))
self.custom_icon = (QIcon(I('user_profile.png')))
self.builtin_recipe_collection = get_builtin_recipe_collection()
self.scheduler_config = SchedulerConfig()
try:
with zipfile.ZipFile(P('builtin_recipes.zip',
allow_user_override=False), 'r') as zf:
self.favicons = dict([(x.filename, x) for x in zf.infolist() if
x.filename.endswith('.png')])
except:
self.favicons = {}
self.do_refresh()
def get_builtin_recipe(self, urn, download=True):
if download:
try:
return download_builtin_recipe(urn)
except:
import traceback
traceback.print_exc()
return get_builtin_recipe(urn)
def get_recipe(self, urn, download=True):
coll = self.custom_recipe_collection if urn.startswith('custom:') \
else self.builtin_recipe_collection
for recipe in coll:
if recipe.get('id', False) == urn:
if coll is self.builtin_recipe_collection:
return self.get_builtin_recipe(urn[8:], download=download)
return get_custom_recipe(int(urn[len('custom:'):]))
def update_custom_recipe(self, urn, title, script):
id_ = int(urn[len('custom:'):])
update_custom_recipe(id_, title, script)
self.custom_recipe_collection = get_custom_recipe_collection()
def update_custom_recipes(self, script_urn_map):
script_ids = []
for urn, title_script in script_urn_map.iteritems():
id_ = int(urn[len('custom:'):])
(title, script) = title_script
script_ids.append((id_, title, script))
update_custom_recipes(script_ids)
self.custom_recipe_collection = get_custom_recipe_collection()
def add_custom_recipe(self, title, script):
add_custom_recipe(title, script)
self.custom_recipe_collection = get_custom_recipe_collection()
def add_custom_recipes(self, scriptmap):
add_custom_recipes(scriptmap)
self.custom_recipe_collection = get_custom_recipe_collection()
def remove_custom_recipes(self, urns):
ids = [int(x[len('custom:'):]) for x in urns]
for id_ in ids:
remove_custom_recipe(id_)
self.custom_recipe_collection = get_custom_recipe_collection()
def do_refresh(self, restrict_to_urns=set([])):
self.custom_recipe_collection = get_custom_recipe_collection()
zf = P('builtin_recipes.zip', allow_user_override=False)
def factory(cls, parent, *args):
args = list(args)
if cls is NewsItem:
args.extend([self.default_icon, self.custom_icon,
self.favicons, zf])
            args += [self.builtin_recipe_collection,
self.custom_recipe_collection, self.scheduler_config,
parent]
return cls(*args)
def ok(urn):
if restrict_to_urns is None:
return False
return not restrict_to_urns or urn in restrict_to_urns
new_root = factory(NewsTreeItem, None)
scheduled = factory(NewsCategory, new_root, _('Scheduled'))
custom = factory(NewsCategory, new_root, _('Custom'))
lang_map = {}
self.all_urns = set([])
self.showing_count = 0
self.builtin_count = 0
for x in self.custom_recipe_collection:
urn = x.get('id')
self.all_urn
|
lercloud/shipping_api_usps
|
stock_packages.py
|
Python
|
gpl-3.0
| 3,338
| 0.004494
|
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 NovaPoint Group LLC (<http://www.novapointgroup.com>)
# Copyright (C) 2004-2010 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
from . import api
class stock_packages(osv.osv):
_inherit = "stock.packages"
def cancel_postage(self, cr, uid, ids, context=None):
for package in self.browse(cr, uid, ids, context=context):
if package.shipping_company_name.lower() != "usps":
continue
usps_config = api.v1.get_config(cr, uid, sale=package.pick_id.sale_id, context=context)
test = package.pick_id.logis_company.test_mode
if hasattr(package, "tracking_no") and package.tracking_no:
try:
response = api.v1.cancel_shipping(usps_config, package, shipper=None, test=test)
except Exception, e:
self.pool.get('stock.packages').write(cr, uid, package.id, {'ship_message': str(e)}, context=context)
return {
'type': 'ir.actions.client',
'tag': 'action_warn',
'name': _('Exception'),
'params': {'title': _('Exception'), 'text': str(e), 'sticky': True}
}
if hasattr(response, "error") or not response.refunds[0].refunded:
err = response.error if hasattr(response, "error") else response.refunds[0].message
self.pool.get('stock.packages').write(cr, uid, package.id, {'ship_message': err}, context=context)
return {
'type': 'ir.actions.client',
'tag': 'action_warn',
'name': _('Failure'),
'params': {
'title': _('Package #%s Cancellation Failed') % package.packge_no,
'text': err,
'sticky': True
}
}
else:
self.pool.get('stock.packages').write(cr, uid, package.id, {
'ship_message' : 'Shipment Cancelled', 'tracking_no': ''
}, context=context)
return super(stock_packages, self).cancel_postage(cr, uid, ids, context=context)
stock_packages()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
bmcculley/splinter
|
splinter/meta.py
|
Python
|
bsd-3-clause
| 954
| 0.001048
|
# -*- coding: utf-8 -*-
# Copyright 2012 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
class InheritedDocs(type):
def __new__(mcs, class_name, bases, dict):
items_to_patch = [
(k, v) for k, v in dict.items() if not k.startswith("__") and not v.__doc__
]
for name, obj in items_to_patch:
doc = None
for base in bases:
if hasattr(base, name):
doc = getattr(base, name).__doc__
if doc:
if isinstance(obj, property) and not obj.fset:
                            obj.fget.__doc__ = doc
                            dict[name] = property(fget=obj.fget)
                        else:
obj.__doc__ = doc
break
return type.__new__(mcs, class_name, bases, dict)
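# Usage sketch (Python 2 metaclass syntax, as used throughout splinter):
#
#     class Base(object):
#         def visit(self, url):
#             """Navigate to the given URL."""
#
#     class Driver(Base):
#         __metaclass__ = InheritedDocs
#         def visit(self, url):
#             pass  # no docstring: inherits "Navigate to the given URL."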
|
bhupennewalkar1337/erpnext
|
erpnext/hr/doctype/leave_control_panel/leave_control_panel.py
|
Python
|
gpl-3.0
| 1,939
| 0.025271
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cint, cstr, flt, nowdate, comma_and, date_diff
from frappe import msgprint, _
from frappe.model.document import Document
class LeaveControlPanel(Document):
def get_employees(self):
conditions, values = [], []
for field in ["employment_type", "branch", "designation", "department"]:
if self.get(field):
conditions.append("{0}=%s".format(field))
values.append(self.get(field))
condition_str = " and " + " and ".join(conditions) if len(conditions) else ""
e = frappe.db.sql("select name from tabEmployee where status='Active' {condition}"
.format(condition=condition_str), tuple(values))
return e
def validate_values(self):
for f in ["from_date", "to_date", "leave_type", "no_of_days"]:
if not self.get(f):
frappe.throw(_("{0} is required").format(self.meta.get_label(f)))
def to_date_validation(self):
if date_diff(self.to_date, self.from_date) <= 0:
return "Invalid period"
def allocate_leave(self):
self.validate_values()
leave_allocated_for = []
employees = self.get_employees()
if not employees:
frappe.throw(_("No emplo
|
yee found"))
for d in self.get
|
_employees():
try:
la = frappe.new_doc('Leave Allocation')
la.set("__islocal", 1)
la.employee = cstr(d[0])
la.employee_name = frappe.db.get_value('Employee',cstr(d[0]),'employee_name')
la.leave_type = self.leave_type
la.from_date = self.from_date
la.to_date = self.to_date
la.carry_forward = cint(self.carry_forward)
la.new_leaves_allocated = flt(self.no_of_days)
la.docstatus = 1
la.save()
leave_allocated_for.append(d[0])
except:
pass
if leave_allocated_for:
msgprint(_("Leaves Allocated Successfully for {0}").format(comma_and(leave_allocated_for)))
|
naoyat/latin
|
latin/latindic.py
|
Python
|
mit
| 1,985
| 0.010106
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import latin_noun
import latin_pronoun
import latin_adj
import latin_conj
import latin_prep
import latin_verb_reg
import latin_verb_irreg
import util
class LatinDic:
dic = {}
auto_macron_mode = False
def flatten(text):
    return text.replace(u'ā',u'a').replace(u'ē',u'e').replace(u'ī',u'i').replace(u'ō',u'o').replace(u'ū',u'u').replace(u'ȳ',u'y').lower()
def register(surface, info):
if not info.has_key('pos'): return
if LatinDic.auto_macron_mode:
surface = flatten(surface)
if LatinDic.dic.has_key(surface):
LatinDic.dic[surface].append(info)
else:
LatinDic.dic[surface] = [info]
def register_items(items):
for item in items:
        register(item['surface'], item)
def lookup(word):
return LatinDic.dic.get(word, None)
def dump():
for k, v in LatinDic.dic.items():
print util.render2(k, v)
def load_def(file, tags={}):
items = []
with open(file, 'r') as fp:
for line in fp:
if len(line) == 0: continue
if line[0] == '#': continue
fs = line.rstrip().split('\t')
if len(fs) < 3: continue
surface = fs[0].decode('utf-8')
pos = fs[1]
ja = fs[2]
items.append(util.aggregate_dicts({'surface':surface, 'pos':pos, 'ja':ja}, tags))
return items
def load(auto_macron_mode=False):
LatinDic.auto_macron_mode = auto_macron_mode
items = []
items += latin_noun.load()
items += latin_pronoun.load()
items += latin_adj.load()
items += latin_conj.load()
items += latin_prep.load()
items += latin_verb_reg.load()
items += latin_verb_irreg.load()
items += load_def('words/adv.def', {'pos':'adv'})
items += load_def('words/other.def')
register_items(items)
# return ld
if __name__ == '__main__':
# for k, v in dic.items():
# print util.render(k), util.render(v)
pass
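# Usage sketch:
#     load(auto_macron_mode=True)   # register all word lists with macrons stripped
#     lookup(flatten(u'amāre'))     # -> list of matching dictionary entries, or None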
|
elweezy/django-skeleton
|
app/blog/views.py
|
Python
|
gpl-3.0
| 4,187
| 0.002627
|
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.db import IntegrityError
from django.shortcuts import render, redirect
from django.contrib import messages
from django import forms as django_forms
from django.views.decorators.cache import cache_page
from django.utils.translation import ugettext_lazy as _
from core.utils.decorators import log
from . import forms
from . import logic
@log
@cache_page(60 * 3)
def index(request, template='user/blog/index.html', context={}):
blog_logic = logic.BlogLogic(request)
context['pages'] = blog_logic.pages()
context['posts'] = blog_logic.posts()
# context['beeps'] = blog_logic.beeps()
return render(request, template, context)
''' Pages '''
@log
def pages(request, template='user/blog/pages.html', context={}):
blog_logic = logic.BlogLogic(request)
context['pages'] = blog_logic.pages()
return render(request, template, context)
@log
@cache_page(60 * 3)
def page(request, page_slug, template='user/blog/page.html', context={}):
blog_logic = logic.BlogLogic(request)
context['page'] = blog_logic.page(page_slug)
return render(request, template, context)
''' Posts '''
@log
def posts(request, template='user/blog/posts.html', context={}):
blog_logic = logic.BlogLogic(request)
context['posts'] = blog_logic.posts()
return render(request, template, context)
@log
@cache_page(60 * 3)
def post(request, post_id, post_slug, template='user/blog/post.html', context={}):
blog_logic = logic.BlogLogic(request)
context['post'] = blog_logic.post(post_id, post_slug)
return render(request, template, context)
''' Others '''
@log
def contact(request, template="user/blog/contact.html", context={}):
contact_form = forms.ContactForm(request.POST or None)
if request.method == 'POST':
if contact_form.is_valid():
contact_form.save()
messages.add_message(request, messages.SUCCESS, _('Your message successfully submitted.'))
return redirect(reverse('blog_contact'))
else:
            messages.add_message(request, messages.ERROR, _('Please fix the errors below.'))
context['contact_form'] = contact_form
context['document_form'] = forms.DocumentForm()
return render(request, template, context)
@log
def document(request, template="user/blog/contact.html", context={}):
document_form = forms.DocumentForm(request.POST or None, request.FILES or None)
if request.method == 'POST':
if document_form.is_valid():
document_form.save()
messages.add_message(request, messages.SUCCESS, _('Your application successfully submitted.'))
return redirect(reverse('blog_contact'))
else:
            messages.add_message(request, messages.ERROR, _('Please fix the errors below.'))
context['contact_form'] = forms.ContactForm()
context['document_form'] = document_form
return render(request, template, context)
@log
def search(request, template='user/blog/search.html', context={}):
blog_logic = logic.BlogLogic(request)
term = blog_logic.get_param("term")
search_result = blog_logic.search(term)
context['term'] = term
context['pages'] = search_result.pages
context['posts'] = search_result.posts
return render(request, template, context)
@log
def subscribe(request):
blog_logic = logic.BlogLogic(request)
name = blog_logic.get_param("name")
email = blog_logic.get_param("email")
if not name or not email:
messages.add_message(request, messages.ERROR, _('Please enter your name and email.'))
else:
try:
django_forms.EmailField().clean(email)
blog_logic.new_subscription(name, email)
messages.add_message(request, messages.SUCCESS, _('You successfully subscribed.'))
except ValidationError:
messages.add_message(request, messages.ERROR, _('Please enter correct email.'))
except IntegrityError:
messages.add_message(request, messages.WARNING, _('You already have been subscribed.'))
return redirect(request.META.get('HTTP_REFERER'))
|
calidae/python-aeat_sii
|
src/pyAEATsii/callback_utils.py
|
Python
|
apache-2.0
| 918
| 0
|
__all__ = [
'fixed_value',
'coalesce',
]
try:
from itertools import ifilter as filter
except ImportError:
pass
class _FixedValue(object):
    def __init__(self, value):
self._value = value
def __call__(self, *args, **kwargs):
return self._value
def fixed_value(value):
return _FixedValue(value)
class _Coalesce(object):
def _filter(self, x):
return x is not None
    def __init__(self, callbacks, else_=None):
self._callbacks = callbacks
self._else = else_
def __call__(self, invoice):
results = (
callback(invoice)
for callback in self._callbacks
)
try:
return next(filter(
self._filter, results
))
except StopIteration:
return self._else
def coalesce(callbacks, else_=None):
return _Coalesce(callbacks, else_=else_)
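# Usage sketch (hypothetical callbacks): coalesce() returns the first
# callback result that is not None, falling back to `else_`.
#
#     get_ref = coalesce([lambda inv: inv.get('ref'),
#                         lambda inv: inv.get('number')], else_='N/A')
#     get_ref({'number': '42'})   # -> '42'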
|
schriftgestalt/Mekka-Scripts
|
Font Info/Set Preferred Names (Name IDs 16 and 17).py
|
Python
|
apache-2.0
| 1,956
| 0.027607
|
#MenuTitle: Set Preferred Names (Name IDs 16 and 17) for Width Variants
# -*- coding: utf-8 -*-
__doc__="""
Sets Preferred Names custom parameters (Name IDs 16 and 17) for all instances, so that width variants will appear in separate menus in Adobe apps.
"""
thisFont = Glyphs.font # frontmost font
widths = (
"Narrow", "Seminarrow", "Semi Narrow", "Extranarrow", "Extra Narrow", "Ultranarrow", "Ultra Narrow",
"Condensed", "Semicondensed", "Semi Condensed", "Extracondensed", "Extra Condensed", "Ultracondensed", "Ultra Condensed",
"Compressed", "Semicompressed", "Semi Compressed", "Extracompressed", "Extra Compressed", "Ultracompres
|
sed", "Ultra Compressed",
"Extended", "Semiextended", "Semi Extended", "Extraextended", "Extra Extended", "Ultraextended", "Ultra Extended",
"Expanded", "Semiexpanded", "Semi Expanded", "Extraexpanded", "Extra Expanded", "Ultraexpanded", "Ultra Expanded",
"Wide", "Semiwide", "Semi Wide", "Extrawide", "Extra Wide", "Ultrawide", "Ultr
|
a Wide",
)
for thisInstance in thisFont.instances:
print "Processing Instance:", thisInstance.name
familyName = thisFont.familyName
if thisInstance.customParameters["familyName"]:
familyName = thisInstance.customParameters["familyName"]
widthVariant = None
for width in widths:
if width in thisInstance.name:
widthVariant = width
elif " " in width:
width = width.replace(" ","")
if width in thisInstance.name:
widthVariant = width
if widthVariant:
preferredFamilyName = "%s %s" % ( thisFont.familyName.strip(), widthVariant.strip() )
preferredStyleName = thisInstance.name.replace(widthVariant,"").strip()
if not preferredStyleName:
preferredStyleName = "Regular"
thisInstance.customParameters["preferredFamilyName"] = preferredFamilyName
thisInstance.customParameters["preferredSubfamilyName"] = preferredStyleName
print " preferredFamilyName:", preferredFamilyName
print " preferredSubfamilyName:", preferredStyleName
|
smtpinc/sendapi-python
|
lib/smtpcom/config.py
|
Python
|
mit
| 1,148
| 0.003484
|
import os
import yaml
DEFAULT_DIR = '../etc/'
class BaseConfig(object):
__config = {}
__default_dir = None
@classmethod
def load(cls, filename, default_path=DEFAULT_DIR):
"""
Setup configuration
"""
path = "%s/%s.yaml" % (default_path, filename)
cls.__default_dir = default_path
if os.path.exists(path):
with open(path, 'rt') as filehandle:
cls.__config = dict(yaml.load(filehandle.read()).items() + \
cls.__config.items())
else:
raise OSError("Config doesn't exists: %s" % path)
@classmethod
def get_default_path(cls):
return cls.__default_dir
@classmethod
    def get(cls, key, value=None):
if key in cls.__config:
return cls.__config.get(key, value)
return cls.__config.get(key.upper(), value)
@classmethod
def get_url(cls, method):
url = cls.__config.get('urls', {}).get(method)
if not url:
raise ValueError("C
|
ould not find url for method: %s" % method)
return Config.get('api_host') + url
Config = BaseConfig()
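# Usage sketch (assumes a YAML file at <default_path>/api.yaml defining
# `api_host` and a `urls` mapping):
#
#     Config.load('api')
#     Config.get('api_host')
#     Config.get_url('send')      # api_host + urls['send']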
|
jajberni/pcse_web
|
main/pcse/crop/partitioning.py
|
Python
|
apache-2.0
| 12,949
| 0.005638
|
# -*- coding: utf-8 -*-
# Copyright (c) 2004-2014 Alterra, Wageningen-UR
# Allard de Wit ([email protected]), April 2014
from collections import namedtuple
from math import exp
from ..traitlets import Float, Int, Instance, AfgenTrait
from ..decorators import prepare_rates, prepare_states
from ..base_classes import ParamTemplate, StatesTemplate, SimulationObject,\
VariableKiosk
from .. import exceptions as exc
from warnings import warn
# Template for namedtuple containing partitioning factors
class PartioningFactors(namedtuple("partitioning_factors", "FR FL FS FO")):
pass
class DVS_Partitioning(SimulationObject):
"""Class for assimilate partioning based on development stage (`DVS`).
`DVS_partioning` calculates the partitioning of the assimilates to roots,
stems, leaves and storage organs using fixed partitioning tables as a
function of crop development stage. The available assimilates are first
split into below-ground and abovegrond using the values in FRTB. In a
second stage they are split into leaves (`FLTB`), stems (`FSTB`) and storage
organs (`FOTB`).
Since the partitioning fractions are derived from the state variable `DVS`
    they are regarded as state variables as well.
**Simulation parameters** (To be provided in cropdata dictionary):
======= ============================================= ======= ============
Name Description Type Unit
======= ============================================= ======= ============
FRTB Partitioning to roots as a function of TCr -
development stage.
FSTB Partitioning to stems as a function of TCr -
development stage.
FLTB Partitioning to leaves as a function of TCr -
development stage.
FOTB Partitioning to storage organs as a function TCr -
of development stage.
======= ============================================= ======= ============
**State variables**
======= ================================================= ==== ============
Name Description Pbl Unit
======= ================================================= ==== ============
FR Fraction partitioned to roots. Y -
FS Fraction partitioned to stems. Y -
FL Fraction partitioned to leaves. Y -
    FO       Fraction partitioned to storage organs           Y    -
======= ================================================= ==== ============
**Rate variables**
None
**Signals send or handled**
None
**External dependencies:**
======= =================================== ================= ============
Name Description Provided by Unit
======= =================================== ================= ============
DVS Crop development stage DVS_Phenology -
======= =================================== ================= ============
*Exceptions raised*
A PartitioningError is raised if the partitioning coefficients to leaves,
stems and storage organs on a given day do not add up to '1'.
"""
class Parameters(ParamTemplate):
FRTB = AfgenTrait()
FLTB = AfgenTrait()
FSTB = AfgenTrait()
FOTB = AfgenTrait()
class StateVariables(StatesTemplate):
FR = Float(-99.)
FL = Float(-99.)
FS = Float(-99.)
FO = Float(-99.)
PF = Instance(PartioningFactors)
def initialize(self, day, kiosk, parvalues):
"""
:param day: start date of the simulation
:param kiosk: variable kiosk of this PCSE instance
:param parvalues: `ParameterProvider` object providing parameters as
key/value pairs
"""
self.params = self.Parameters(parvalues)
self.kiosk = kiosk
# initial partitioning factors (pf)
DVS = self.kiosk["DVS"]
FR = self.params.FRTB(DVS)
FL = self.params.FLTB(DVS)
FS = self.params.FSTB(DVS)
FO = self.params.FOTB(DVS)
# Pack partitioning factors into tuple
PF = PartioningFactors(FR, FL, FS, FO)
# Initial states
self.states = self.StateVariables(kiosk, publish=["FR","FL","FS","FO"],
FR=FR, FL=FL, FS=FS, FO=FO, PF=PF)
self._check_partitioning()
def _check_partitioning(self):
"""Check for partitioning errors."""
FR = self.states.FR
FL = self.states.FL
FS = self.states.FS
FO = self.states.FO
checksum = FR+(FL+FS+FO)*(1.-FR) - 1.
if abs(checksum) >= 0.0001:
msg = ("Error in partitioning!\n")
msg += ("Checksum: %f, FR: %5.3f, FL: %5.3f, FS: %5.3f, FO: %5.3f\n" \
% (checksum, FR, FL, FS, FO))
self.logger.error(msg)
warn(msg)
# raise exc.PartitioningError(msg)
@prepare_states
def integrate(self, day, delt=1.0):
"""Update partitioning factors based on development stage (DVS)"""
params = self.params
DVS = self.kiosk["DVS"]
self.states.FR = params.FRTB(DVS)
self.states.FL = params.FLTB(DVS)
self.states.FS = params.FSTB(DVS)
self.states.FO = params.FOTB(DVS)
# Pack partitioning factors into tuple
self.states.PF = PartioningFactors(self.states.FR, self.states.FL,
self.states.FS, self.states.FO)
        self._check_partitioning()
def calc_rates(self, day, drv):
""" Return partitioning factors based on current DVS.
"""
        # rate calculation does nothing for partitioning as it is a derived
# state
return self.states.PF
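# Worked example (editor's addition): with FR = 0.5, FL = 0.4, FS = 0.4 and
# FO = 0.2, the checksum used by _check_partitioning() is
#   FR + (FL + FS + FO)*(1. - FR) - 1. = 0.5 + 1.0*0.5 - 1. = 0.0,
# so these fractions pass the check; any set whose above-ground fractions do
# not sum to one triggers the warning instead.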
class DVS_Partitioning_NPK(SimulationObject):
"""Class for assimilate partitioning based on development stage (`DVS`)
with influence of NPK stress.
`DVS_Partitioning_NPK` calculates the partitioning of the assimilates to roots,
stems, leaves and storage organs using fixed partitioning tables as a
    function of crop development stage. The only difference from the normal
partitioning class is the effect of nitrogen stress on partitioning to
leaves (parameter NPART). The available assimilates are first
split into below-ground and aboveground using the values in FRTB. In a
second stage they are split into leaves (`FLTB`), stems (`FSTB`) and storage
organs (`FOTB`).
Since the partitioning fractions are derived from the state variable `DVS`
    they are regarded as state variables as well.
**Simulation parameters** (To be provided in cropdata dictionary):
======= ============================================= ======= ============
Name Description Type Unit
======= ============================================= ======= ============
FRTB Partitioning to roots as a function of TCr -
development stage.
FSTB Partitioning to stems as a function of TCr -
development stage.
FLTB Partitioning to leaves as a function of TCr -
development stage.
    FOTB     Partitioning to storage organs as a function  TCr      -
of development stage.
NPART Coefficient for the effect of N stress on SCR -
leaf biomass allocation
======= ============================================= ======= ============
**State variables**
======= ================================================= ==== ============
Name Description Pbl Unit
======= ================================================= ==== ============
FR Fraction partitioned to roots. Y -
|
jordanemedlock/psychtruths
|
temboo/core/Library/Amazon/SNS/__init__.py
|
Python
|
apache-2.0
| 2,012
| 0.006461
|
from temboo.Library.Amazon.SNS.AddPermission import AddPermission, AddPermissionInputSet, AddPermissionResultSet, AddPermissionChoreographyExecution
from temboo.Library.Amazon.SNS.ConfirmSubscription import ConfirmSubscription, ConfirmSubscriptionInputSet, ConfirmSubscriptionResultSet, ConfirmSubscriptionChoreographyExecution
from temboo.Library.Amazon.SNS.CreateTopic import CreateTopic, CreateTopicInputSet, CreateTopicResultSet, CreateTopicChoreographyExecution
from temboo.Library.Amazon.SNS.DeleteTopic import DeleteTopic, DeleteTopicInputSet, DeleteTopicResultSet, DeleteTopicChoreographyExecution
from temboo.Library.Amazon.SNS.GetTopicAttributes import GetTopicAttributes, GetTopicAttributesInputSet, GetTopicAttributesResultSet, GetTopicAttributesChoreographyExecution
from temboo.Library.Amazon.SNS.ListSubscriptions import ListSubscriptions, ListSubscriptionsInputSet, ListSubscriptionsResultSet, ListSubscriptionsChoreographyExecution
from temboo.Library.Amazon.SNS.ListSubscriptionsByTopic import ListSubscriptionsByTopic, ListSubscriptionsByTopicInputSet, ListSubscriptionsByTopicResultSet, ListSubscriptionsByTopicChoreographyExecution
from temboo.Library.Amazon.SNS.ListTopics import ListTopics, ListTopicsInputSet, ListTopicsResultSet, ListTopicsChoreographyExecution
from temboo.Library.Amazon.SNS.Publish import Publish, PublishInputSet, PublishResultSet, PublishChoreographyExecution
from temboo.Library.Amazon.SNS.RemovePermission import RemovePermission, RemovePermissionInputSet, RemovePermissionResultSet, RemovePermissionChoreographyExecution
from temboo.Library.Amazon.SNS.SetTopicAttributes import SetTopicAttributes, SetTopicAttributesInputSet, SetTopicAttributesResultSet, SetTopicAttributesChoreographyExecution
from temboo.Library.Amazon.SNS.Subscribe import Subscribe, SubscribeInputSet, SubscribeResultSet, SubscribeChoreographyExecution
from temboo.Library.Amazon.SNS.Unsubscribe import Unsubscribe, UnsubscribeInputSet, UnsubscribeResultSet, UnsubscribeChoreographyExecution
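# Typical Temboo choreography usage, sketched from memory as comments only
# (editor's addition; the session arguments and input names are assumptions):
#
#   from temboo.core.session import TembooSession
#   session = TembooSession('ACCOUNT', 'APP_NAME', 'APP_KEY')
#   choreo = ListTopics(session)
#   inputs = choreo.new_input_set()
#   inputs.set_AWSAccessKeyId('...')
#   inputs.set_AWSSecretKeyId('...')
#   results = choreo.execute_with_results(inputs)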
|
anthropo-lab/XP
|
EPHEMER/EDHEC_Project/both_change_group_en/consumers.py
|
Python
|
gpl-3.0
| 13,913
| 0.004313
|
from channels import Group as channelsGroup
from channels.sessions import channel_session
import random
from .models import Group as OtreeGroup, Subsession as OtreeSubsession, Constants
import json
import channels
import logging
from otree import constants_internal
import django.test
from otree.common_internal import (get_admin_secret_code)
client = django.test.Client()
ADMIN_SECRET_CODE = get_admin_secret_code()
# For automatic inactive pushing
#??? from .models import LiveManagementThread, LivePusherThread
from threading import Event
import time
# End-For automatic inactive pushing
from .pages import PresenterView
#############################################
#############################################
# Connected to websocket.connect
def ws_winnerpage_connect(message):
print("*********CONNECTWINNERPAGE************")
channelsGroup("WINNERPAGE").add(message.reply_channel)
# Connected to websocket.receive
def ws_winnerpage_message(message):
print("*********RECEIVEWINNERPAGE************")
# Connected to websocket.disconnect
def ws_winnerpage_disconnect(message):
print("*********DISCONNECTWINNERPAGE************")
channelsGroup("WINNERPAGE").discard(message.reply_channel)
#############################################
#############################################
# Connected to websocket.connect
def ws_connect(message):
print("*********CONNECT************")
channelsGroup("adminreport").add(message.reply_channel)
# Connected to websocket.receive
def ws_message(message):
print("*********RECEIVE************")
# Decrypt the url: No info in the url in this app
# Decrypt the received message
jsonmessage = json.loads(message.content['text'])
subsession_pk = jsonmessage['subsession_pk']
mysubsession = OtreeSubsession.objects.get(pk=subsession_pk)
if 'order' in jsonmessage:
order = jsonmessage['order']
# Manage the synchronisation page between the 2 parts
if order == "No Jump 2 Next":
mysubsession.jump_2_next = False
mysubsession.save()
mysubsession.session.vars['running_part_2'] = "False"
mysubsession.session.save()
elif order == "Jump 2 Next":
mysubsession.jump_2_next = True
mysubsession.save()
mysubsession.session.vars['running_part_2'] = "True"
mysubsession.session.save()
elif order == "push_all_players_on_page":
page_name = jsonmessage['page_name']
round_nb = jsonmessage['round_nb']
for p in mysubsession.get_players():
if ((str(p.participant._current_page_name) == page_name)
& (p.participant._round_number == round_nb)):
# This player is one of those who needs to be advanced
try:
if p.participant._current_form_page_url:
resp = client.post(
p.participant._current_form_page_url,
data={
constants_internal.timeout_happened: True,
constants_internal.admin_secret_code: ADMIN_SECRET_CODE
},
follow=True
)
else:
resp = client.get(p.participant._start_url(), follow=True)
except:
logging.exception("Failed to advance participant.")
raise
assert resp.status_code < 400
p.participant.vars['participant_was_pushed'] = 'True'
p.participant.save()
channels.Group(
'auto-advance-{}'.format(p.participant.code)
).send(
{'text': json.dumps(
{'auto_advanced': True})}
)
elif order == "push_active_players_on_page":
group_pk = jsonmessage['group_pk']
mygroup = OtreeGroup.objects.get(pk=group_pk)
page_name = jsonmessage['page_name']
round_nb = jsonmessage['round_nb']
for p in mygroup.get_players():
if ((str(p.participant._current_page_name) == page_name)
& (p.participant._round_number == round_nb)
& (p.participant.vars['active_flag'] != 'inactive')):
# This player is one of those who needs to be advanced
try:
if p.participant._current_form_page_url:
resp = client.post(
p.participant._current_form_page_url,
data={
constants_internal.timeout_happened: True,
constants_internal.admin_secret_code: ADMIN_SECRET_CODE
},
follow=True
)
else:
resp = client.get(p.participant._start_url(), follow=True)
except:
logging.exception("Failed to advance participant.")
raise
assert resp.status_code < 400
p.participant.vars['participant_was_pushed'] = 'True'
p.participant.save()
channels.Group(
'auto-advance-{}'.format(p.participant.code)
).send(
{'text': json.dumps(
{'auto_advanced': True})}
)
elif order == "push_inactive_players_on_page":
group_pk = jsonmessage['group_pk']
mygroup = OtreeGroup.objects.get(pk=group_pk)
page_name = jsonmessage['page_name']
round_nb = jsonmessage['round_nb']
for p in mygroup.get_players():
if ((str(p.participant._current_page_name) == page_name)
& (p.participant._round_number == round_nb)
& (p.participant.vars['active_flag'] == 'inactive')):
# This player is one of those who needs to be advanced
try:
if p.participant._current_form_page_url:
resp = client.post(
p.participant._current_form_page_url,
data={
constants_internal.timeout_happened: True,
constants_internal.admin_secret_code: ADMIN_SECRET_CODE
},
follow=True
)
else:
resp = client.get(p.participant._start_url(), follow=True)
except:
logging.exception("Failed to advance participant.")
raise
assert resp.status_code < 400
p.participant.vars['participant_was_pushed'] = 'True'
p.participant.save()
channels.Group(
'auto-advance-{}'.format(p.participant.code)
).send(
{'text': json.dumps(
{'auto_advanced': True})}
)
elif order == "deactivate_all_group_on_page":
group_pk = jsonmessage['group_pk']
mygroup = OtreeGroup.objects.get(pk=group_pk)
page_name = jsonmessage['page_name']
round_nb = jsonmessage['round_nb']
for p in mygroup.get_players():
if ((str(p.participant._current_page_name) == page_name)
& (p.participant._round_number == round_nb)):
p.participant.vars['active_flag'] = 'inactive'
p.participant.save()
elif order == "reactivate_all
|
peterhinch/micropython-async
|
v2/i2c/i2c_esp.py
|
Python
|
mit
| 2,245
| 0.001336
|
# i2c_esp.py Test program for asi2c.py
# Tests Responder on ESP8266
# The MIT License (MIT)
#
# Copyright (c) 2018 Peter Hinch
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# pyb esp8266
# scl X9 - 0
# sda X10 - 2
# sync X11 - 5
# ack Y8 - 4
# gnd - gnd
import uasyncio as asyncio
from machine import Pin, I2C
import asi2c
import ujson
i2c = I2C(scl=Pin(0),sda=Pin(2)) # software I2C
syn = Pin(5)
ack = Pin(4)
chan = asi2c.Responder(i2c, syn, ack)
async def receiver():
sreader = asyncio.StreamReader(chan)
await chan.ready()
print('started')
for _ in range(5): # Test flow control
res = await sreader.readline()
print('Received', ujson.loads(res))
await asyncio.sleep(4)
while True:
res = await sreader.readline()
print('Received', ujson.loads(res))
async def sender():
swriter = asyncio.StreamWriter(chan, {})
txdata = [0, 0]
while True:
await swriter.awrite(''.join((ujson.dumps(txdata), '\n')))
txdata[1] += 1
await asyncio.sleep_ms(1500)
loop = asyncio.get_event_loop()
loop.create_task(receiver())
loop.create_task(sender())
try:
loop.run_forever()
finally:
chan.close() # for subsequent runs
|
steeve/plugin.video.pulsar
|
resources/site-packages/pulsar/logger.py
|
Python
|
bsd-3-clause
| 680
| 0.002941
|
# Borrowed and modified from xbmcswift
import logging
import xbmc
from pulsar.addon import ADDON_ID
class XBMCHandler(logging.StreamHandler):
xbmc_levels = {
'DEBUG': 0,
'INFO': 2,
'WARNING': 3,
'ERROR': 4,
        'CRITICAL': 5,
}
def emit(self, record):
xbmc_level = self.xbmc_levels.get(record.levelname)
xbmc.log(self.format(record), xbmc_level)
def _get_logger():
logger = logging.getLogger(ADDON_ID)
logger.setLevel(logging.DEBUG)
handler = XBMCHandler()
    handler.setFormatter(logging.Formatter('[%(name)s] %(message)s'))
logger.addHandler(handler)
return logger
log = _get_logger()
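# Usage sketch (editor's addition): messages sent through this logger end up
# in the Kodi log at the mapped level, e.g.
#   log.debug('loaded settings')  # -> xbmc.log('[<ADDON_ID>] loaded settings', 0)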
|
blueboxgroup/ansible
|
lib/ansible/executor/playbook_executor.py
|
Python
|
gpl-3.0
| 11,134
| 0.004581
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.compat.six import string_types
from ansible import constants as C
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.playbook import Playbook
from ansible.template import Templar
from ansible.utils.unicode import to_unicode
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class PlaybookExecutor:
'''
This is the primary class for executing playbooks, and thus the
basis for bin/ansible-playbook operation.
'''
def __init__(self, playbooks, inventory, variable_manager, loader, options, passwords):
self._playbooks = playbooks
self._inventory = inventory
self._variable_manager = variable_manager
self._loader = loader
self._options = options
self.passwords = passwords
self._unreachable_hosts = dict()
if options.listhosts or options.listtasks or options.listtags or options.syntax:
self._tqm = None
else:
self._tqm = TaskQueueManager(inventory=inventory, variable_manager=variable_manager, loader=loader, options=options, passwords=self.passwords)
def run(self):
'''
Run the given playbook, based on the settings in the play which
may limit the runs to serialized groups, etc.
'''
result = 0
entrylist = []
entry = {}
try:
for playbook_path in self._playbooks:
pb = Playbook.load(playbook_path, variable_manager=self._variable_manager, loader=self._loader)
self._inventory.set_playbook_basedir(os.path.dirname(playbook_path))
if self._tqm is None: # we are doing a listing
entry = {'playbook': playbook_path}
entry['plays'] = []
else:
# make sure the tqm has callbacks loaded
self._tqm.load_callbacks()
self._tqm.send_callback('v2_playbook_on_start', pb)
i = 1
plays = pb.get_plays()
display.vv(u'%d plays in %s' % (len(plays), to_unicode(playbook_path)))
for play in plays:
if play._included_path is not None:
self._loader.set_basedir(play._included_path)
else:
self._loader.set_basedir(pb._basedir)
# clear any filters which may have been applied to the inventory
self._inventory.remove_restriction()
if play.vars_prompt:
for var in play.vars_prompt:
vname = var['name']
prompt = var.get("prompt", vname)
default = var.get("default", None)
private = var.get("private", True)
confirm = var.get("confirm", False)
encrypt = var.get("encrypt", None)
salt_size = var.get("salt_size", None)
salt = var.get("salt", None)
if vname not in self._variable_manager.extra_vars:
if self._tqm:
self._tqm.send_callback('v2_playbook_on_vars_prompt', vname, private, prompt, encrypt, confirm, salt_size, salt, default)
play.vars[vname] = display.do_var_prompt(vname, private, prompt, encrypt, confirm, salt_size, salt, default)
else: # we are either in --list-<option> or syntax check
play.vars[vname] = default
# Create a temporary copy of the play here, so we can run post_validate
# on it without the templating changes affecting the original object.
all_vars = self._variable_manager.get_vars(loader=self._loader, play=play)
templar = Templar(loader=self._loader, variables=all_vars)
new_play = play.copy()
new_play.post_validate(templar)
if self._options.syntax:
continue
if self._tqm is None:
# we are just doing a listing
entry['plays'].append(new_play)
else:
self._tqm._unreachable_hosts.update(self._unreachable_hosts)
# we are actually running plays
for batch in self._get_serialized_batches(new_play):
if len(batch) == 0:
self._tqm.send_callback('v2_playbook_on_play_start', new_play)
self._tqm.send_callback('v2_playbook_on_no_hosts_matched')
break
# restrict the inventory to the hosts in the serialized batch
self._inventory.restrict_to_hosts(batch)
# and run it...
result = self._tqm.run(play=play)
# check the number of failures here, to see if they're above the maximum
                            # failure percentage allowed, or if any errors are fatal. If either of those
# conditions are met, we break out, otherwise we only break out if the entire
# batch failed
failed_hosts_count = len(self._tqm._failed_hosts) + len(self._tqm._unreachable_hosts)
if new_play.max_fail_percentage is not None and \
                                int((new_play.max_fail_percentage)/100.0 * len(batch)) > int((len(batch) - failed_hosts_count) / len(batch) * 100.0):
break
elif len(batch) == failed_hosts_count:
break
# clear the failed hosts dictionaires in the TQM for the next batch
self._unreachable_hosts.update(self._tqm._unreachable_hosts)
self._tqm.clear_failed_hosts()
# if the last result wasn't zero or 3 (some hosts were unreachable),
# break out of the serial batch loop
if result not in (0, 3):
break
i = i + 1 # per play
if entry:
entrylist.append(entry) # per playbook
# send the stats callback for this playbook
if self._tqm is not None:
if C.RETRY_FILES_ENABLED:
retries = set(self._tqm._failed_hosts.keys())
retries.update(self._tqm._unreachable_hosts.keys())
retries = sorted(retries)
if len(retries) > 0:
if C.RETRY_FILES_SAVE_PATH:
basedir = C.shell_expand(C.RETRY_FILES_SAVE_PATH)
else:
basedir
|
richard-fisher/repository
|
desktop/util/tint2/actions.py
|
Python
|
gpl-2.0
| 263
| 0.019011
|
#!/usr/bin/python
from pisi.actionsapi import shelltools, get, cmaketools, pisitools
def setup():
cmaketools.configure()
def build():
cmaketools.make()
def install():
cmaketools.install()
pisitools.dodoc ("AUTHORS", "ChangeLog", "COPYING")
|
Shopzilla-Ops/python-coding-challenge
|
cost-of-tile/mjones/tilecost/setup.py
|
Python
|
mit
| 998
| 0
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.txt')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
requires = [
'pyramid',
'pyramid_debugtoolbar',
'waitress',
]
setup(name='tilecost',
version='0.0',
description='tilecost',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"
|
Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='',
author_email='',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="tilecost",
entry_points="""\
[paste.app_factory]
main = tilecost:main
""",
)
|
allspatial/vertex-tools
|
controller/VertexDialog.py
|
Python
|
mit
| 550
| 0.001818
|
__author__ = 'mwagner'
from PyQt4.Qt import Qt
from PyQt4.QtGui import QDialog, QIcon
from ..view.Ui_VertexDialog import Ui_VertexDialog
from ..model.VertexToolsError import *
class VertexDialog(QDialog, Ui_VertexDialog):
def __init__(self, plugin, parent=None):
super(VertexDialog, self).__init__(parent)
self.setAttribute(Qt.WA_DeleteOnClose)
self.plugin = plugin
self.setupUi(self)
self.helpButton.setIcon(self.plugin.get_icon("help.gif"))
self.setWindowIcon(QIcon(":beninCad/info.png"))
|
kanghtta/zerorpc-python
|
tests/test_client.py
|
Python
|
mit
| 1,919
| 0.002606
|
# -*- coding: utf-8 -*-
# Open Source Initiative OSI - The MIT License (MIT):Licensing
#
# The MIT License (MIT)
# Copyright (c) 2012 DotCloud Inc ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import gevent
import zerorpc
from testutils import teardown, random_ipc_endpoint
def test_client_connect():
endpoint = random_ipc_endpoint()
class MySrv(zerorpc.Server):
        def lolita(self):
return 42
srv = MySrv()
srv.bind(endpoint)
gevent.spawn(srv.run)
client = zerorpc.Client()
client.connect(endpoint)
assert client.lolita() == 42
def test_client_quick_connect():
endpoint = random_ipc_endpoint()
class MySrv(zerorpc.Server):
def lolita(self):
return 42
srv = MySrv()
srv.bind(endpoint)
gevent.spawn(srv.run)
client = zerorpc.Client(endpoint)
assert client.lolita() == 42
|
dzhang55/riftwatch
|
static_images.py
|
Python
|
mit
| 1,397
| 0.027917
|
import json
import requests
import key
API_key = key.getAPIkey()
#load all champion pictures
def load_champion_pictures(champion_json):
print len(champion_json['data'])
version = champion_json['version']
print "version: " + version
for champion in champion_json['data']:
print champion
r = requests.get('http://ddragon.leagueoflegends.com/cdn/' + version + '/img/champion/' + champion + '.png')
if r.status_code == 200:
img = r.content
with open('static/images/champions/' + champion_json['data'][champion]['name'] + '.png', 'w') as f:
f.write(img)
print "img created"
else:
print "pictures: something went wrong"
#load champion json
#converts to python dict using json()
|
and json.dump() for error checking
def load_champion_json():
try:
r = requests.get('https://global.api.pvp.net/api/lol/static-data/na/v1.2/champion?&api_key='
+ API_key)
champion_json = r.json()
if 'status' in champion_json:
print champion_json['status']['message']
return
load_champion_pictures(champion_json)
# quick fix to change MonkeyKing to Wukong so that sort_keys sorts it properly
champion_json['data']['Wukong'] = champion_json['data']['MonkeyKing']
del champion_json['data']['MonkeyKing']
except ValueError as e:
print e.message
return
with open('static/json/champion.json', 'w') as f:
json.dump(champion_json, f, sort_keys=True)
load_champion_json()
|
ferdinandvwyk/gs2_analysis
|
structure_analysis.py
|
Python
|
gpl-2.0
| 4,637
| 0.009273
|
# Standard
import os
import sys
# Third Party
import numpy as np
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import seaborn as sns
import pyfilm as pf
from skimage.measure import label
from skimage import filters
plt.rcParams.update({'figure.autolayout': True})
mpl.rcParams['axes.unicode_minus'] = False
#local
from run import Run
import plot_style
plot_style.white()
pal = sns.color_palette('deep')
def structure_analysis(run, perc_thresh, create_film=False):
"""
Calculates the number of structures as a function of time for a given
percentile cut-off. Writes results and plots to an appropriate directory.
Parameters
----------
run : object
Run object calculated by the Run class.
perc_thresh : int
Percentile threshold at which to cut off fluctuations.
create_film : bool
Determines whether a film of the labelled structures is produced.
"""
run.read_ntot()
make_results_dir(run, perc_thresh)
labelled_image, nlabels = label_structures(run, perc_thresh)
no_structures = count_structures(run, labelled_image, nlabels)
plot_no_structures(run, no_structures, perc_thresh)
save_results(run, no_structures, perc_thresh)
if create_film:
make_film(run, no_structures, labelled_image, perc_thresh)
def make_results_dir(run, perc_thresh):
os.system('mkdir -p ' + run.run_dir + 'analysis/structures_' +
str(perc_thresh))
def label_structures(run, perc_thresh):
nlabels = np.empty(run.nt, dtype=int)
labelled_image = np.empty([run.nt, run.nx, run.ny], dtype=int)
for it in range(run.nt):
tmp = run.ntot_i[it,:,:].copy()
# Apply Gaussian filter
tmp = filters.gaussian(tmp, sigma=1)
thresh = np.percentile(tmp, perc_thresh,
interpolation='nearest')
tmp_max = np.max(tmp)
tmp_thresh = thresh/tmp_max
tmp /= tmp_max
tmp[tmp <= tmp_thresh] = 0
tmp[tmp > tmp_thresh] = 1
# Label the resulting structures
labelled_image[it,:,:], nlabels[it] = label(tmp, return_num=True,
background=0)
return(labelled_image, nlabels)
def count_structures(run, labelled_image, nlabels):
"""
Remove any structures which are too small and count structures.
"""
nblobs = np.empty(run.nt, dtype=int)
for it in range(run.nt):
hist = np.histogram(np.ravel(labelled_image[it]),
bins=range(1,nlabels[it]+1))[0]
smallest_struc = np.mean(hist)*0.1
hist = hist[hist > smallest_struc]
nblobs[it] = len(hist)
return(nblobs)
def plot_no_structures(run, no_structures, perc_thresh):
"""
Plot number of structures as a function of time.
"""
    plt.clf()
plt.plot(no_structures)
plt.xlabel('Time index')
plt.ylabel('Number of structures')
plt.ylim(0)
plt.savefig(run.run_dir + 'analysis/structures_' + str(perc_thresh) +
'/nblobs.pdf')
def save_results(run, no_structures, perc_thresh):
"""
Save the number of structures as a function of time in a file.
"""
np.savetxt(run.run_dir +
'analysis/structures_' + str(perc_thresh) +
'/nblobs.csv', np.transpose((range(run.nt), no_structures)),
delimiter=',', fmt='%d', header='t_index,nblobs')
def make_film(run, no_structures, labelled_image, perc_thresh):
titles = []
for it in range(run.nt):
titles.append('No. of structures = {}'.format(no_structures[it]))
plot_options = {'cmap':'gist_rainbow',
'levels':np.arange(-1,np.max(labelled_image))
}
options = {'file_name':'structures',
'film_dir':run.run_dir + 'analysis/structures_' +
str(perc_thresh) ,
'frame_dir':run.run_dir + 'analysis/structures_' +
str(perc_thresh) + '/film_frames',
'nprocs':None,
'aspect':'equal',
'xlabel':r'$x$ (m)',
'ylabel':r'$y$ (m)',
'cbar_ticks':np.arange(-1,np.max(labelled_image),2),
'cbar_label':r'Label',
'fps':10,
'bbox_inches':'tight',
'title':titles
}
pf.make_film_2d(run.r, run.z, labelled_image,
plot_options=plot_options, options=options)
if __name__ == '__main__':
run = Run(sys.argv[1])
structure_analysis(run, 75, create_film=False)
structure_analysis(run, 95, create_film=False)
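# Minimal threshold-and-label sketch (editor's addition; the synthetic field
# and names below are illustrative assumptions, not Run attributes):
def _demo_label_count(perc_thresh=95):
    rng = np.random.RandomState(0)
    field = rng.rand(64, 64)               # stand-in for one ntot_i time slice
    thresh = np.percentile(field, perc_thresh, interpolation='nearest')
    binary = (field > thresh).astype(int)  # keep only the strongest fluctuations
    labelled, nlabels = label(binary, return_num=True, background=0)
    return nlabels                         # number of connected structures found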
|
ciudadanointeligente/popit-django
|
runtests.py
|
Python
|
agpl-3.0
| 621
| 0.008052
|
#!/usr/bin/env python
# This file mainly exists to allow python setup.py test to work.
#
# You can test all the variations of tests by running:
#
# ./manage.py test && python runtests.py && ./setup.py test && echo OK
#
import os, sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
from django.core.management import call_command
def runtests():
# use the call_command approach so that we are as similar to running
# './manage.py test' as possible. Notably we need the South migrations to be
# run.
    call_command('test', verbosity=2)
sys.exit(0)
if __name__ == '__main__':
runtests()
|
pombredanne/invenio-old
|
modules/bibclassify/lib/bibclassify_webinterface.py
|
Python
|
gpl-2.0
| 14,432
| 0.004088
|
# This file is part of CDS Invenio.
# Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008 CERN.
#
# CDS Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# CDS Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibClassify's web interface.
This module is NOT standalone safe - this component is never expected
to run in a standalone mode, but always inside invenio."""
import os
from cgi import escape
from urllib import quote
import time
import bibupload
from invenio.messages import gettext_set_language
from invenio.bibdocfile import BibRecDocs
from invenio.webinterface_handler import WebInterfaceDirectory
from invenio.webpage import pageheaderonly, pagefooteronly
from invenio.search_engine import get_colID, \
guess_primary_collection_of_a_record, create_navtrail_links, \
perform_request_search, get_record, print_record
from invenio.websearchadminlib import get_detailed_page_tabs
from invenio.template import load
from invenio.webinterface_handler import wash_urlargd
from invenio.webuser import collect_user_info
from invenio import access_control_engine as acce
from invenio import dbquery
from invenio import bibtask
from invenio import bibrecord
import bibclassify_config as bconfig
import bibclassify_text_extractor
import bibclassify_engine
import bibclassify_ontology_reader as bor
log = bconfig.get_logger("bibclassify.webinterface")
template = load('bibclassify')
def main_page(req, recid, tabs, ln, template):
"""Generates the main page for the keyword tab - http://url/record/[recid]/keywords
@var req: request object
@var recid: int docid
@var tabs: list of tab links
@var ln: language id
@var template: template object
@return: nothing, writes using req object
"""
form = req.form
argd = wash_urlargd(form, {
'generate': (str, 'no'),
'sorting': (str, 'occurences'),
'type': (str, 'tagcloud'),
'numbering': (str, 'off'),
'showall': (str, 'off'),
})
for k,v in argd.items():
argd[k] = escape(v)
req.write(template.detailed_record_container_top(recid, tabs, ln))
# Get the keywords from MARC (if any)
success, keywords, marcrec = record_get_keywords(recid)
if success:
# check for the cached file and delete it (we don't need it anymore, data are in the DB)
tmp_file = bibclassify_engine.get_tmp_file(recid)
if os.path.exists(tmp_file):
try:
os.remove(tmp_file)
except Exception, msg:
log.error('Error removing the cached file: %s' % tmp_file)
log.error(msg)
else:
# Give user possibility to generate them ONLY if not available already
# we may have some keywords, but they are the old ones and we want to generate new
new_found, new_keywords, marcrec = generate_keywords(req, recid, argd)
if keywords and new_keywords:
for key in keywords.keys():
if key in new_keywords:
log.warning('The old "DESY" keyword will be overwritten by the newly extracted one: %s' % key)
keywords.update(new_keywords)
if keywords:
# Output the keywords or the generate button or some message why kw not available
write_keywords_body(keywords, req, recid, argd, marcrec=marcrec)
req.write(template.detailed_record_container_bottom(recid,
tabs, ln))
def write_keywords_body(keywords, req, recid, argd, marcrec=None):
"""Writes the bibclassify keyword output into req object"""
if not keywords:
req.write(template.tmpl_page_no_keywords(req=req, **argd))
return
# test if more than half of the entries have weight (0,0) - ie. not weighted
#if argd['type'] == 'tagcloud' and len(filter(lambda x: (0,0) in x[0], keywords.values())) > (len(keywords) * .5):
# argd['type'] = 'list'
if argd['type'] == 'list':
# Display keywords as a list.
req.write(template.tmpl_page_list(keywords, req=req, **argd))
elif argd['type'] == 'tagcloud':
# Display keywords as a tag cloud.
req.write(template.tmpl_page_tagcloud(keywords=keywords, req=req, **argd))
elif argd['type'] == 'xml':
if marcrec:
marcxml = filter_marcrec(marcrec)
else:
marcxml = bibclassify_engine.build_marc(recid, keywords, {})
req.write(template.tmpl_page_xml_output(keywords,
marcxml,
req=req, **argd))
else:
_ = gettext_set_language(argd['ln'])
req.write(template.tmpl_page(top=_('Unknown type: %s') % argd['type'], **argd))
def record_get_keywords(record, main_field=bconfig.CFG_MAIN_FIELD,
others=bconfig.CFG_OTHER_FIELDS):
"""Returns a dictionary of keywordToken objects from the marc
record. Weight is set to (0,0) if no weight can be found.
This will load keywords from the field 653 and 695__a (which are the
old 'DESY' keywords)
@var record: int or marc record, if int - marc record is loaded
from the database. If you pass record instance, keywords are
extracted from it
@return: tuple (found, keywords, marcxml)
found - int indicating how many main_field keywords were found
the other fields are not counted
keywords - standard dictionary of keywordToken objects
marcrec - marc record object loaded with data
"""
keywords = {}
if isinstance(main_field, basestring):
main_field = [main_field]
if isinstance(others, basestring):
others = [others]
if isinstance(record, int):
rec = get_record(record)
else:
rec = record
found = 0
for m_field in main_field:
tag, ind1, ind2 = bibclassify_engine._parse_marc_code(m_field)
for field in rec.get(tag, []):
keyword = ''
weight = 0
type = ''
for subfield in field[0]:
if subfield[0] == 'a':
keyword = subfield[1]
elif subfield[0] == 'n':
weight = int(subfield[1])
elif subfield[0] == '9':
type = subfield[1]
if keyword:
found += 1
keywords[bor.KeywordToken(keyword, type=type)] = [[(0,0) for x in range(weight)]]
if others:
for field_no in others:
tag, ind1, ind2 = bibclassify_engine._parse_marc_code(field_no)
type = 'f%s' % field_no
for field in rec.get(tag, []):
keyword = ''
for subfield in field[0]:
if subfield[0] == 'a':
keyword = subfield[1]
keywords[bor.KeywordToken(keyword, type=type)] = [[(0,0)]]
break
    return found, keywords, rec
def generate_keywords(req, recid, argd):
"""Extracts keywords from the fulltexts (if found) for the
given recid. It first checks whether the keywords are not already
stored in the temp file (maybe from the previous run).
@var req: req object
@var recid: record id
@var argd: arguments passed from web
@keyword store_keywords: boolean, whether to save records in the file
@return: standard dictionary of kw objects or {}
"""
ln = argd['ln']
_ = gettext_set_language(ln)
keywords = {}
# check the files were not already generated
abs_path = bibclassify_engine.get_tmp_file(recid)
if os.path.e
|
gmr/mredis
|
tests/general.py
|
Python
|
bsd-3-clause
| 795
| 0.003774
|
#!/usr/bin/env python
import mredis
import time
ports = [6379, 6380]
servers = []
for port in ports:
servers.append({'host': 'localhost', 'port': port, 'db': 0})
mr = mredis.MRedis(servers)
# Destructive test of the database
#print mr.flushall()
#print mr.flushdb()
print mr.ping()
# Build a set of keys for operations
keys = set()
for x in xrange(0, 100):
key = 'key:%.8f' % time.time()
keys.add(key)
for key in keys:
mr.set(key, time.time())
fetched = mr.keys('key:*')
results = []
for server in fetched:
for key in fetched[server]:
results.append('%s->%s' % (key, mr.get(key)))
print '%i keys fetched' % len(results)
for key in keys:
mr.delete(key)
print mr.bgrewriteaof()
print mr.dbsize()
print mr.lastsave()
#print mr.info()
print mr.randomkey()
|
ercas/scripts
|
weather.py
|
Python
|
apache-2.0
| 3,411
| 0.017014
|
#!/usr/bin/python3
import argparse, random, textwrap
from datetime import datetime
from urllib import request
from xml.etree import ElementTree
labels = {
"clouds": "%",
"humidity": "%",
"precipitation": "%",
"temp": "°F",
"wind-direction": "°",
"wind-speed": " mph",
}
parser = argparse.ArgumentParser(description = "display weather using data from weather.gov")
parser.add_argument("latitude",
help = "latitude of location"
|
,
type = float)
parser.add_argument("longitude",
help = "longitude of location",
type = float)
args = parser.parse_args()
def print_weather(latitude, longitude):
# weather.gov provides two xml files: digitalDWML and dwml.
# digitalDWML includes detailed, 24-hour forecast data for the next 7 days.
    # dwml includes simple data for the current day as well as text and icons.
# in this script, digitalDWML is referred to as "detailed" and dwml is
# referred to as "simple".
weather_detailed_xml = request.urlopen("http://forecast.weather.gov/MapClick.php?lat="
+ str(latitude) + "&lon=" + str(longitude)
+ "&FcstType=digitalDWML").read()
weather_simple_xml = request.urlopen("http://forecast.weather.gov/MapClick.php?lat="
+ str(latitude) + "&lon=" + str(longitude)
+ "&FcstType=dwml").read()
# these variables and functions refer to digitalDWML
root = ElementTree.fromstring(weather_detailed_xml)
parameters = root.find("data").find("parameters")
def temperature(type):
for node in parameters.iter("temperature"):
if node.get("type") == type:
return node
wrapped_description = "\n".join(
textwrap.wrap(
ElementTree.fromstring(weather_simple_xml).\
find("data").find("parameters").find("weather").\
find("weather-conditions").attrib["weather-summary"],
width = 30,
break_long_words = False))
print("Weather Forecast for "
+ root.find("data").find("location").find("city").text
+ ":\n"
+ wrapped_description
+ "\n"
)
print("Updated: "
# %z is defective so the timezone is cropped from the date string
+ datetime.strptime(
root.find("data").find("time-layout").find("start-valid-time").text[:-6],
"%Y-%m-%dT%H:%M:%S").strftime("%d %B %Y @ %I:%M %p")
)
print("Temperature: "
+ temperature("hourly")[0].text
+ labels["temp"]
)
print("Cloud Cover: "
+ parameters.find("cloud-amount")[0].text
+ labels["clouds"]
)
print("Sustained Wind: "
+ parameters.find("wind-speed")[0].text
+ labels["wind-speed"]
+ " @ "
+ parameters.find("direction")[0].text
+ labels["wind-direction"]
)
print("Humidity: "
+ parameters.find("humidity")[0].text
+ labels["humidity"]
)
print("Precipitation: "
+ parameters.find("probability-of-precipitation")[0].text
+ labels["precipitation"]
)
try:
print_weather(args.latitude, args.longitude)
except Exception as error:
if type(error) == ElementTree.ParseError:
print("error: invalid coordinates given or weather.gov's xml format has changed.")
else:
print("error: " + error)
|
pmeier82/spike_gnode
|
base/storage.py
|
Python
|
bsd-3-clause
| 2,381
| 0.00126
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import errno
import hashlib
import os
from django.conf import settings
from django.core.files import File
from django.core.files.storage import FileSystemStorage
from django.utils.encoding import force_unicode
__all__ = ["HashedFileSystemStorage"]
__author__ = "pmeier82"
class ContentExists(Exception):
pass
class HashedFileSystemStorage(FileSystemStorage):
"""`FileSystemStorage` subcla
|
ss that manages file names by content hashes"""
def get_available_name(self, name):
raise ContentExists()
def _get_content_name(self, name, content, chunk_size=None):
dir_name = os.path.split(name)[0]
file_name = self._generate_hash(content=content, chunk_size=chunk_size)
return os.path.join(dir_name, file_name)
def _generate_hash(self, content, chunk_size=None):
if chunk_size is None:
chunk_size = getattr(content, "DEFAULT_CHUNK_SIZE", File.DEFAULT_CHUNK_SIZE)
hash_gen = hashlib.sha1()
cursor = content.tell()
content.seek(0)
try:
while True:
data = content.read(chunk_size)
if not data:
break
hash_gen.update(data)
return hash_gen.hexdigest()
finally:
content.seek(cursor)
def save(self, name, content):
if getattr(settings, "DEBUG", None) is True:
print "{}::save({})".format(self.__class__.__name__, name)
if name is None:
name = content.name
name = self._get_content_name(name, content)
name = self._save(name, content)
return force_unicode(name.replace('\\', '/'))
def _save(self, name, content):
new_name = self._get_content_name(name=name, content=content)
try:
return super(HashedFileSystemStorage, self)._save(new_name, content)
except ContentExists:
pass
except OSError, e:
if e.errno == errno.EEXIST:
pass
else:
raise
return new_name
def delete(self, name):
if getattr(settings, "DEBUG", None) is True:
print "{}::delete({})".format(self.__class__.__name__, name)
return super(HashedFileSystemStorage, self).delete(name)
if __name__ == "__main__":
pass
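# Quick illustration (editor's addition) of the content-addressed naming idea:
# identical bytes hash to the same name, so saving the same content twice is
# caught via ContentExists / EEXIST rather than stored under a second name.
def _demo_content_name(data=b"hello world"):
    return hashlib.sha1(data).hexdigest()  # -> '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed'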
|
kevinconway/rpmvenv
|
rpmvenv/extensions/blocks/__init__.py
|
Python
|
mit
| 199
| 0
|
"""Extensions w
|
hich provide a block segments."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
|
nalabelle/druid-django
|
frontend/views.py
|
Python
|
mit
| 1,868
| 0.002677
|
from django.shortcuts import render, get_object_or_404
from django.views import generic
from django.http import HttpResponse, HttpResponseRedirect
from django.template import loader
from rest_framework import reverse
from druidapi.query.models import QueryModel
from models import Result
from forms import SearchForm
import requests
import json
class IndexView(generic.View):
"""
The view for the main page, where the search form is
"""
def get(self, request):
form = SearchForm
return render(request, 'index.html', {'form': form})
def post(self, request):
form = SearchForm(request.POST)
if form.is_valid():
            # Little bit of cheating, ideally the html would handle this
            # but, I felt like building the webapp in django...
# alternatively, I could just reach over and build this.
start = form.cleaned_data['start'].isoformat()
end = form.cleaned_data['end'].isoformat()
# POST the query and return the pk, so we can look it up later
r = requests.post('http://localhost:9000/api/query/', data={'start_date': start, 'end_date': end})
result = Result.objects.create(key=r.json()["pk"])
result.save()
# To the results!
return HttpResponseRedirect("/{0}/".format(r.json()["pk"]))
else:
return render(request, 'index.html', {'form': form})
class ResultsView(generic.View):
"""
When the search is executed, it needs to display the results...
"""
def get(self, request, pk):
result = Result.objects.get(key=pk)
# GET the results for the key we're given
r = requests.get("http://localhost:9000/api/query/{0}/execute/".format(pk))
result.data = r.json()
return render(request, 'results.html', {'result': result})
|
catapult-project/catapult
|
third_party/ijson/tests.py
|
Python
|
bsd-3-clause
| 8,608
| 0.002679
|
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
import unittest
from io import BytesIO, StringIO
from decimal import Decimal
import threading
from importlib import import_module
from ijson import common
from ijson.backends.python import basic_parse, Lexer
from ijson.compat import IS_PY2
JSON = b'''
{
"docs": [
{
"null": null,
"boolean": false,
"true": true,
"integer": 0,
"double": 0.5,
"exponent": 1.0e+2,
"long": 10000000000,
"string": "\\u0441\\u0442\\u0440\\u043e\\u043a\\u0430 - \xd1\x82\xd0\xb5\xd1\x81\xd1\x82"
},
{
"meta": [[1], {}]
},
{
"meta": {"key": "value"}
},
{
"meta": null
}
]
}
'''
JSON_EVENTS = [
('start_map', None),
('map_key', 'docs'),
('start_array', None),
('start_map', None),
('map_key', 'null'),
('null', None),
('map_key', 'boolean'),
('boolean', False),
('map_key', 'true'),
('boolean', True),
('map_key', 'integer'),
('number', 0),
('map_key', 'double'),
('number', Decimal('0.5')),
('map_key', 'exponent'),
('number', 100),
('map_key', 'long'),
('number', 10000000000),
('map_key', 'string'),
('string', 'строка - тест'),
('end_map', None),
('start_map', None),
('map_key', 'meta'),
('start_array', None),
('start_array', None),
('number', 1),
('end_array', None),
('start_map', None),
('end_map', None),
('end_array', None),
('end_map', None),
('start_map', None),
('map_key', 'meta'),
('start_map', None),
('map_key', 'key'),
('string', 'value'),
('end_map', None),
('end_map', None),
('start_map', None),
('map_key', 'meta'),
('null', None),
('end_map', None),
('end_array', None),
('end_map', None),
]
SCALAR_JSON = b'0'
INVALID_JSONS = [
b'["key", "value",]', # trailing comma
b'["key" "value"]', # no comma
b'{"key": "value",}', # trailing comma
b'{"key": "value" "key"}', # no comma
b'{"key" "value"}', # no colon
b'invalid', # unknown lexeme
b'[1, 2] dangling junk' # dangling junk
]
YAJL1_PASSING_INVALID = INVALID_JSONS[6]
INCOMPLETE_JSONS = [
b'',
b'"test',
b'[',
b'[1',
b'[1,',
b'{',
b'{"key"',
b'{"key":',
b'{"key": "value"',
b'{"key": "value",',
]
STRINGS_JSON = br'''
{
"str1": "",
"str2": "\"",
"str3": "\\",
"str4": "\\\\",
"special\t": "\b\f\n\r\t"
}
'''
NUMBERS_JSON = b'[1, 1.0, 1E2]'
SURROGATE_PAIRS_JSON = b'"\uD83D\uDCA9"'
class Parse(object):
'''
Base class for parsing tests that is used to create test cases for each
    available backend.
'''
def test_basic_parse(self):
events = list(self.backend.basic_parse(BytesIO(JSON)))
self.assertEqual(events, JSON_EVENTS)
def test_basic_parse_threaded(self):
thread = threading.Thread(target=self.test_basic_parse)
thread.start()
        thread.join()
def test_scalar(self):
events = list(self.backend.basic_parse(BytesIO(SCALAR_JSON)))
self.assertEqual(events, [('number', 0)])
def test_strings(self):
events = list(self.backend.basic_parse(BytesIO(STRINGS_JSON)))
strings = [value for event, value in events if event == 'string']
self.assertEqual(strings, ['', '"', '\\', '\\\\', '\b\f\n\r\t'])
self.assertTrue(('map_key', 'special\t') in events)
    def test_surrogate_pairs(self):
event = next(self.backend.basic_parse(BytesIO(SURROGATE_PAIRS_JSON)))
parsed_string = event[1]
self.assertEqual(parsed_string, '💩')
def test_numbers(self):
events = list(self.backend.basic_parse(BytesIO(NUMBERS_JSON)))
types = [type(value) for event, value in events if event == 'number']
self.assertEqual(types, [int, Decimal, Decimal])
def test_invalid(self):
for json in INVALID_JSONS:
# Yajl1 doesn't complain about additional data after the end
# of a parsed object. Skipping this test.
if self.__class__.__name__ == 'YajlParse' and json == YAJL1_PASSING_INVALID:
continue
with self.assertRaises(common.JSONError) as cm:
list(self.backend.basic_parse(BytesIO(json)))
def test_incomplete(self):
for json in INCOMPLETE_JSONS:
with self.assertRaises(common.IncompleteJSONError):
list(self.backend.basic_parse(BytesIO(json)))
def test_utf8_split(self):
buf_size = JSON.index(b'\xd1') + 1
try:
events = list(self.backend.basic_parse(BytesIO(JSON), buf_size=buf_size))
except UnicodeDecodeError:
self.fail('UnicodeDecodeError raised')
def test_lazy(self):
# shouldn't fail since iterator is not exhausted
self.backend.basic_parse(BytesIO(INVALID_JSONS[0]))
self.assertTrue(True)
def test_boundary_lexeme(self):
buf_size = JSON.index(b'false') + 1
events = list(self.backend.basic_parse(BytesIO(JSON), buf_size=buf_size))
self.assertEqual(events, JSON_EVENTS)
def test_boundary_whitespace(self):
buf_size = JSON.index(b' ') + 1
events = list(self.backend.basic_parse(BytesIO(JSON), buf_size=buf_size))
self.assertEqual(events, JSON_EVENTS)
def test_api(self):
self.assertTrue(list(self.backend.items(BytesIO(JSON), '')))
self.assertTrue(list(self.backend.parse(BytesIO(JSON))))
# Generating real TestCase classes for each importable backend
for name in ['python', 'yajl', 'yajl2', 'yajl2_cffi']:
try:
classname = '%sParse' % ''.join(p.capitalize() for p in name.split('_'))
if IS_PY2:
classname = classname.encode('ascii')
locals()[classname] = type(
classname,
(unittest.TestCase, Parse),
{'backend': import_module('ijson.backends.%s' % name)},
)
except ImportError:
pass
class Common(unittest.TestCase):
'''
Backend independent tests. They all use basic_parse imported explicitly from
the python backend to generate parsing events.
'''
def test_object_builder(self):
builder = common.ObjectBuilder()
for event, value in basic_parse(BytesIO(JSON)):
builder.event(event, value)
self.assertEqual(builder.value, {
'docs': [
{
'string': 'строка - тест',
'null': None,
'boolean': False,
'true': True,
'integer': 0,
'double': Decimal('0.5'),
'exponent': 100,
'long': 10000000000,
},
{
'meta': [[1], {}],
},
{
'meta': {'key': 'value'},
},
{
'meta': None,
},
],
})
def test_scalar_builder(self):
builder = common.ObjectBuilder()
for event, value in basic_parse(BytesIO(SCALAR_JSON)):
builder.event(event, value)
self.assertEqual(builder.value, 0)
def test_parse(self):
events = common.parse(basic_parse(BytesIO(JSON)))
events = [value
for prefix, event, value in events
if prefix == 'docs.item.meta.item.item'
]
self.assertEqual(events, [1])
def test_items(self):
events = basic_parse(BytesIO(JSON))
meta = list(common.items(common.parse(events), 'docs.item.meta'))
self.as
|
shoaibali/kodi.background.rotator
|
randombackground.py
|
Python
|
gpl-3.0
| 713
| 0.026648
|
import os, random
rfilename=random.choice(os.listdir("/storage/pictures"))
rextension=os.path.splitext(rfilename)[1]
picturespath='/storage/pictures/'
#TODO Probably don't need a for loop; can possibly do random*
#TODO What if the directory is empty?
for filename in os.listdir(picturespath):
if filename.startswith("random"):
extension=os.path.splitext(filename)[1]
newname=picturespath + str(random.random()).rsplit('.',1)[1] + extension
# rename the existing random wallpaper to something random
filename=picturespath+filename
os.rename(filename, newname)
# now rename the newly randomly founded file to be random
rfilename=picturespath+rfilename
os.rename(rfilename, picturespath+'random'+rextension)
|
RyanNoelk/OpenEats
|
api/v1/recipe/migrations/0011_auto_20171114_1543.py
|
Python
|
mit
| 466
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-11-14 21:43
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('recipe', '0010_auto_20171114_1443'),
]
operations = [
migrations.RemoveField(
model_name='direction',
name='recipe',
),
migrations.DeleteModel(
name='Direction',
),
]
|
rahlk/CSC579__Computer_Performance_Modeling
|
simulation/proj1/tasks/task5.py
|
Python
|
mit
| 2,063
| 0.010664
|
from __future__ import division
from __future__ import print_function
import os
import sys
import functools
# Update path
root = os.path.join(os.getcwd().split('proj1')[0], 'proj1')
if root not in sys.path:
sys.path.append(root)
import numpy as np
import pandas as pd
import multiprocessing
from pdb import set_trace
from Simulator import simulate
from Utils.PlotsUtils import line, line2
from Utils.RandomUtil import Random
from Utils.MisclUtils import TimeUtil
rand = Random()
timer = TimeUtil()
# Set seed
rand.set_seed(seed_val=12458)
def customer_loss_rate(customers):
served = np.sum([customer.serviced for customer in customers])
total = len(customers)
return served / total
def plot_runtime(x=None, y=None):
    line(x, y, x_label=r"$\rho$", y_label=r"Run Times", the_title=r"$\mathrm{Run\ Times\ in\ }\mu\mathrm{s\ vs.\ }\rho$")
def plot_runtime_vs_avg(x, y, y_1):
line2(x, y, x, y_1, label_1="Actual Runtimes", label_2="Expected value of $\rho$", x_label=r"$\rho$", y_label=r"Run Times", the_title=r"$\mathrm{Run\ Times\ in\ }\mu\mathrm{s\ vs.\ }\rho$")
def task_5():
rho_list = np.arange(0.05, 1, 0.1)
C = 1e5
elapsed = []
for rho in rho_list:
start_time = timer.current_time()
serviced = simulate(l = rho, server_lim = 40, max_serviced=C, L=1, verbose=False)
end_time = timer.current_time()
elapsed.append(end_time-start_time)
data = pd.DataFrame([[a,b] for a, b in zip(rho_list, elapsed)], columns=["Rho", "Seconds"])
data.to_csv(os.path.abspath(os.path.join(root,"tasks/task5.csv")))
def task5_plot():
data = pd.read_csv(os.path.abspath("tasks/task5.csv"))
plot_runtime(data["Rho"], data["Seconds"])
set_trace()
def compare_plot():
rho_list = np.arange(0.05, 1, 0.1)
average_rho = [np.mean([rand.exponential(lam=p) for _ in xrange(10000)]) for p in rho_list]
data = pd.read_csv(os.path.abspath("tasks/task5.csv"))
plot_runtime(data["Rho"], average_rho)
if __name__ == "__main__":
task_5()
task5_plot()
compare_plot()
|
schleichdi2/OPENNFR-6.1-CORE
|
opennfr-openembedded-core/meta/lib/oeqa/core/decorator/data.py
|
Python
|
gpl-2.0
| 2,959
| 0.004055
|
# Copyright (C) 2016 Intel Corporation
# Released under the MIT license (see COPYING.MIT)
from oeqa.core.exception import OEQAMissingVariable
from . import OETestDecorator, registerDecorator
def has_feature(td, feature):
"""
    Checks for feature in DISTRO_FEATURES or IMAGE_FEATURES.
"""
if (feature in td.get('DISTRO_FEATURES', '') or
feature in td.get('IMAGE_FEATURES', '')):
return True
return False
@registerDecorator
class skipIfDataVar(OETestDecorator):
"""
Skip test based on value of a data store's variable.
It will get the info of var from the data store and will
    check it against value; if they are equal it will skip the test
with msg as the reason.
"""
attrs = ('var', 'value', 'msg')
def setUpDecorator(self):
msg = ('Checking if %r value is %r to skip test' %
(self.var, self.value))
self.logger.debug(msg)
if self.case.td.get(self.var) == self.value:
self.case.skipTest(self.msg)
@registerDecorator
class skipIfNotDataVar(OETestDecorator):
"""
Skip test based on value of a data store's variable.
It will get the info of var from the data store and will
    check it against value; if they are not equal it will skip the
test with msg as the reason.
"""
attrs = ('var', 'value', 'msg')
def setUpDecorator(self):
msg = ('Checking if %r value is not %r to skip test' %
(self.var, self.value))
self.logger.debug(msg)
        if self.case.td.get(self.var) != self.value:
self.case.skipTest(self.msg)
@registerDecorator
class skipIfNotInDataVar(OETestDecorator):
"""
Skip test if value is not in data store's variable.
"""
attrs = ('var', 'value', 'msg')
def setUpDecorator(self):
msg = ('Checking if %r value is in %r to run '
'the test' % (self.var, self.value))
self.logger.debug(msg)
        if self.value not in self.case.td.get(self.var):
self.case.skipTest(self.msg)
@registerDecorator
class OETestDataDepends(OETestDecorator):
attrs = ('td_depends',)
def setUpDecorator(self):
for v in self.td_depends:
try:
value = self.case.td[v]
except KeyError:
raise OEQAMissingVariable("Test case need %s variable but"\
" isn't into td" % v)
@registerDecorator
class skipIfNotFeature(OETestDecorator):
"""
Skip test based on DISTRO_FEATURES.
value must be in distro features or it will skip the test
with msg as the reason.
"""
attrs = ('value', 'msg')
def setUpDecorator(self):
msg = ('Checking if %s is in DISTRO_FEATURES '
'or IMAGE_FEATURES' % (self.value))
self.logger.debug(msg)
if not has_feature(self.case.td, self.value):
self.case.skipTest(self.msg)
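# A minimal usage sketch (hypothetical test method; the positional arguments
# follow the attrs tuple declared on each decorator class):
#
#   @skipIfNotFeature('systemd', 'Test requires systemd in DISTRO_FEATURES')
#   def test_systemd_unit(self):
#       ...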
|
vlegoff/tsunami
|
src/secondaires/navigation/equipage/signaux/__init__.py
|
Python
|
bsd-3-clause
| 2,386
| 0
|
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
#   may be used to endorse or promote products derived from this software
#   without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant les différents signaux.
Un signal est une classe toute simple, semblable à une exception en
ce qu'elle permet de transmettre des messages et met en pause l'exécution
pendant le temps que le message passe. Cependant, après réception
du signal, l'exécution peut se poursuivre.
"""
from secondaires.navigation.equipage.signaux.base import Signal
from secondaires.navigation.equipage.signaux.attendre import SignalAttendre
from secondaires.navigation.equipage.signaux.abandonne import SignalAbandonne
from secondaires.navigation.equipage.signaux.inutile import SignalInutile
from secondaires.navigation.equipage.signaux.relais import SignalRelais
from secondaires.navigation.equipage.signaux.repete import SignalRepete
from secondaires.navigation.equipage.signaux.termine import SignalTermine
|
ruyang/ironic
|
ironic/tests/unit/drivers/modules/network/test_neutron.py
|
Python
|
apache-2.0
| 19,670
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
from neutronclient.common import exceptions as neutron_exceptions
from oslo_config import cfg
from oslo_utils import uuidutils
from ironic.common import exception
from ironic.common import neutron as neutron_common
from ironic.conductor import task_manager
from ironic.drivers.modules.network import neutron
from ironic.tests.unit.conductor import mgr_utils
from ironic.tests.unit.db import base as db_base
from ironic.tests.unit.objects import utils
CONF = cfg.CONF
CLIENT_ID1 = '20:00:55:04:01:fe:80:00:00:00:00:00:00:00:02:c9:02:00:23:13:92'
CLIENT_ID2 = '20:00:55:04:01:fe:80:00:00:00:00:00:00:00:02:c9:02:00:23:13:93'
VIFMIXINPATH = 'ironic.drivers.modules.network.common.VIFPortIDMixin'
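# The neutron network interface delegates generic VIF handling to
# VIFPortIDMixin; the first few tests below simply assert that delegation.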
class NeutronInterfaceTestCase(db_base.DbTestCase):
def setUp(self):
super(NeutronInterfaceTestCase, self).setUp()
self.config(enabled_drivers=['fake'])
mgr_utils.mock_the_extension_manager()
self.interface = neutron.NeutronNetwork()
self.node = utils.create_test_node(self.context,
network_interface='neutron')
self.port = utils.create_test_port(
self.context, node_id=self.node.id,
address='52:54:00:cf:2d:32',
extra={'vif_port_id': uuidutils.generate_uuid()})
self.neutron_port = {'id': '132f871f-eaec-4fed-9475-0d54465e0f00',
'mac_address': '52:54:00:cf:2d:32'}
@mock.patch('%s.vif_list' % VIFMIXINPATH)
def test_vif_list(self, mock_vif_list):
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.vif_list(task)
mock_vif_list.assert_called_once_with(task)
@mock.patch('%s.vif_attach' % VIFMIXINPATH)
def test_vif_attach(self, mock_vif_attach):
vif = mock.MagicMock()
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.vif_attach(task, vif)
mock_vif_attach.assert_called_once_with(task, vif)
@mock.patch('%s.vif_detach' % VIFMIXINPATH)
def test_vif_detach(self, mock_vif_detach):
vif_id = "vif"
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.vif_detach(task, vif_id)
mock_vif_detach.assert_called_once_with(task, vif_id)
@mock.patch('%s.port_changed' % VIFMIXINPATH)
def test_vif_port_changed(self, mock_p_changed):
port = mock.MagicMock()
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.port_changed(task, port)
mock_p_changed.assert_called_once_with(task, port)
def test_init_incorrect_provisioning_net(self):
self.config(provisioning_network=None, group='neutron')
self.assertRaises(exception.DriverLoadError, neutron.NeutronNetwork)
self.config(provisioning_network=uuidutils.generate_uuid(),
group='neutron')
        self.config(cleaning_network=None, group='neutron')
self.assertRaises(exception.DriverLoadError, neutron.NeutronNetwork)
@mock.patch.object(neutron_common, 'validate_network', autospec=True)
def test_validate(self, validate_mock):
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.validate(task)
self.assertEqual([mock.call(CONF.neutron.cleaning_network,
'cleaning network'),
mock.call(CONF.neutron.provisioning_network,
'provisioning network')],
validate_mock.call_args_list)
@mock.patch.object(neutron_common, 'validate_network',
side_effect=lambda n, t: n)
@mock.patch.object(neutron_common, 'rollback_ports')
@mock.patch.object(neutron_common, 'add_ports_to_network')
def test_add_provisioning_network(self, add_ports_mock, rollback_mock,
validate_mock):
self.port.internal_info = {'provisioning_vif_port_id': 'vif-port-id'}
self.port.save()
add_ports_mock.return_value = {self.port.uuid: self.neutron_port['id']}
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.add_provisioning_network(task)
rollback_mock.assert_called_once_with(
task, CONF.neutron.provisioning_network)
add_ports_mock.assert_called_once_with(
task, CONF.neutron.provisioning_network,
security_groups=[])
validate_mock.assert_called_once_with(
CONF.neutron.provisioning_network,
'provisioning network')
self.port.refresh()
self.assertEqual(self.neutron_port['id'],
self.port.internal_info['provisioning_vif_port_id'])
@mock.patch.object(neutron_common, 'validate_network',
lambda n, t: n)
@mock.patch.object(neutron_common, 'rollback_ports')
@mock.patch.object(neutron_common, 'add_ports_to_network')
def test_add_provisioning_network_with_sg(self, add_ports_mock,
rollback_mock):
sg_ids = []
for i in range(2):
sg_ids.append(uuidutils.generate_uuid())
self.config(provisioning_network_security_groups=sg_ids,
group='neutron')
add_ports_mock.return_value = {self.port.uuid: self.neutron_port['id']}
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.add_provisioning_network(task)
rollback_mock.assert_called_once_with(
task, CONF.neutron.provisioning_network)
add_ports_mock.assert_called_once_with(
task, CONF.neutron.provisioning_network,
security_groups=(
CONF.neutron.provisioning_network_security_groups))
self.port.refresh()
self.assertEqual(self.neutron_port['id'],
self.port.internal_info['provisioning_vif_port_id'])
@mock.patch.object(neutron_common, 'validate_network',
side_effect=lambda n, t: n)
@mock.patch.object(neutron_common, 'remove_ports_from_network')
def test_remove_provisioning_network(self, remove_ports_mock,
validate_mock):
self.port.internal_info = {'provisioning_vif_port_id': 'vif-port-id'}
self.port.save()
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.remove_provisioning_network(task)
remove_ports_mock.assert_called_once_with(
task, CONF.neutron.provisioning_network)
validate_mock.assert_called_once_with(
CONF.neutron.provisioning_network,
'provisioning network')
self.port.refresh()
self.assertNotIn('provisioning_vif_port_id', self.port.internal_info)
@mock.patch.object(neutron_common, 'validate_network',
side_effect=lambda n, t: n)
@mock.patch.object(neutron_common, 'rollback_ports')
@mock.patch.object(neutron_common, 'add_ports_to_network')
def test_add_cleaning_network(self, add_ports_mock, rollback_mock,
validate_mock):
add_ports_mock.return_value = {self.port.uuid: self.neutron_port['id']}
with task_manager.acquire(self.context, self.node.id) as task:
res = self.interface.add_cleaning_network(task)
            rollback_mock.assert_called_once_with(
                task, CONF.neutron.cleaning_network)
|
Athrun29/horizon
|
openstack_dashboard/dashboards/project/firewalls/tables.py
|
Python
|
apache-2.0
| 15,142
| 0
|
# Copyright 2013, Big Switch Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django.template import defaultfilters as filters
from django.utils.translation import pgettext_lazy
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon import exceptions
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard import policy
LOG = logging.getLogger(__name__)
class AddRuleLink(tables.LinkAction):
name = "addrule"
verbose_name = _("Add Rule")
url = "horizon:project:firewalls:addrule"
classes = ("ajax-modal",)
icon = "plus"
policy_rules = (("network", "create_firewall_rule"),)
class AddPolicyLink(tables.LinkAction):
name = "addpolicy"
verbose_name = _("Add Policy")
url = "horizon:project:firewalls:addpolicy"
classes = ("ajax-modal", "btn-addpolicy",)
icon = "plus"
policy_rules = (("network", "create_firewall_policy"),)
class AddFirewallLink(tables.LinkAction):
name = "addfirewall"
verbose_name = _("Create Firewall")
url = "horizon:project:firewalls:addfirewall"
classes = ("ajax-modal",)
icon = "plus"
policy_rules = (("network", "create_firewall"),)
class DeleteRuleLink(policy.PolicyTargetMixin, tables.DeleteAction):
name = "deleterule"
policy_rules = (("network", "delete_firewall_rule"),)
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Delete Rule",
u"Delete Rules",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Scheduled deletion of Rule",
u"Scheduled deletion of Rules",
count
)
def allowed(self, request, datum=None):
if datum and datum.policy:
return False
return True
def delete(self, request, obj_id):
try:
api.fwaas.rule_delete(request, obj_id)
except Exception as e:
exceptions.handle(request, _('Unable to delete rule. %s') % e)
class DeletePolicyLink(policy.PolicyTargetMixin, tables.DeleteAction):
name = "deletepolicy"
policy_rules = (("network", "delete_firewall_policy"),)
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Delete Policy",
u"Delete Policies",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Scheduled deletion of Policy",
u"Scheduled deletion of Policies",
count
)
def delete(self, request, obj_id):
try:
api.fwaas.policy_delete(request, obj_id)
except Exception as e:
exceptions.handle(request, _('Unable to delete policy. %s') % e)
class DeleteFirewallLink(policy.PolicyTargetMixin,
tables.DeleteAction):
name = "deletefirewall"
policy_rules = (("network", "delete_firewall"),)
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Delete Firewall",
u"Delete Firewalls",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Scheduled deletion of Firewall",
u"Scheduled deletion of Firewalls",
count
)
def delete(self, request, obj_id):
try:
api.fwaas.firewall_delete(request, obj_id)
except Exception as e:
exceptions.handle(request, _('Unable to delete firewall. %s') % e)
class UpdateRuleLink(policy.PolicyTargetMixin, tables.LinkAction):
name = "updaterule"
verbose_name = _("Edit Rule")
classes = ("ajax-modal", "btn-update",)
policy_rules = (("network", "update_firewall_rule"),)
def get_link_url(self, rule):
base_url = reverse("horizon:project:firewalls:updaterule",
kwargs={'rule_id': rule.id})
return base_url
class UpdatePolicyLink(policy.PolicyTargetMixin, tables.LinkAction):
name = "updatepolicy"
verbose_name = _("Edit Policy")
classes = ("ajax-modal", "btn-update",)
policy_rules = (("network", "update_firewall_policy"),)
def get_link_url(self, policy):
base_url = reverse("horizon:project:firewalls:updatepolicy",
kwargs={'policy_id': policy.id})
return base_url
class UpdateFirewallLink(policy.PolicyTargetMixin, tables.LinkAction):
name = "updatefirewall"
verbose_name = _("Edit Firewall")
classes = ("ajax-modal", "btn-update",)
policy_rules = (("network", "update_firewall"),)
def get_link_url(self, firewall):
base_url = reverse("horizon:project:firewalls:updatefirewall",
kwargs={'firewall_id': firewall.id})
return base_url
def allowed(self, request, firewall):
if firewall.status in ("PENDING_CREATE",
"PENDING_UPDATE",
"PENDING_DELETE"):
return False
return True
class InsertRuleToPolicyLink(policy.PolicyTargetMixin,
tables.LinkAction):
name = "insertrule"
verbose_name = _("Insert Rule")
classes = ("ajax-modal", "btn-update",)
policy_rules = (("network", "get_firewall_policy"),
("network", "insert_rule"),)
def get_link_url(self, policy):
base_url = reverse("horizon:project:firewalls:insertrule",
kwargs={'policy_id': policy.id})
return base_url
class RemoveRuleFromPolicyLink(policy.PolicyTargetMixin,
tables.LinkAction):
name = "removerule"
verbose_name = _("Remove Rule")
classes = ("ajax-modal", "btn-danger",)
policy_rules = (("network", "get_firewall_policy"),
("network", "remove_rule"),)
def get_link_url(self, policy):
base_url = reverse("horizon:project:firewalls:removerule",
kwargs={'policy_id': policy.id})
return base_url
def allowed(self, request, policy):
if len(policy.rules) > 0:
return True
return False
class AddRouterToFirewallLink(policy.PolicyTargetMixin,
tables.LinkAction):
name = "addrouter"
verbose_name = _("Add Router")
classes = ("ajax-modal", "btn-update",)
policy_rules = (("network", "get_firewall"),
("network", "add_router"),)
def get_link_url(self, firewall):
base_url = reverse("horizon:project:firewalls:addrouter",
kwargs={'firewall_id': firewall.id})
return base_url
def allowed(self, request, firewall):
if not api.neutron.is_extension_supported(request,
'fwaasrouterinsertion'):
return False
tenant_id = firewall['tenant_id']
available_routers = api.fwaas.firewall_unassociated_routers_list(
request, tenant_id)
return bool(available_routers)
class RemoveRouterFromFirewallLink(policy.PolicyTargetMixin,
tables.LinkAction):
name = "removerouter"
verbose_name = _("Remove Router")
classes = ("ajax-modal", "btn-update",)
policy_rules = (("network", "get_firewall"),
("network", "remove_router"),)
def get_link_url(self, firewall):
base_url = reverse("horizon:project:firewalls:removerouter",
|
e-koch/canfar_scripts
|
img_pipe/casanfar_image.py
|
Python
|
mit
| 7,517
| 0.000133
|
import os
import numpy as np
from scipy.optimize import curve_fit
def gauss(x, A, mu, sigma):
return A * np.exp(-(x - mu)**2 / (2. * sigma**2))
scriptmode = True
SDM_name = 'test' # The prefix to use for all output files
# SDM_name = '13A-213.sb20685305.eb20706999.56398.113012800924'
# Set up some useful variables (these will be altered later on)
msfile = SDM_name + '.ms'
hisplitms = SDM_name + '.hi.ms'
splitms = SDM_name + '.hi.src.split.ms'
contsubms = SDM_name + '.hi.src.split.ms.contsub'
rawcleanms = SDM_name + '.hi.src.split.ms.contsub.rawcleanimg'
cleanms = SDM_name + '.hi.src.split.ms.contsub.cleanimg'
pathname = os.environ.get('CASAPATH').split()[0]
pipepath = '/home/dcolombo/pipe_scripts/'
# pipepath = '/home/dario/pipe_scripts/'
source = 'SextansA'
# VOS stuff
vos_dir = '../vos/'
vos_proc = './'
vos_link = '../vos_link/'
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%%&%&%&%&%&%&%%&%
# Find the 21cm spw and check if the obs
# is single pointing or mosaic
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%%&%&%&%&%&%&%%&%
print "Find HI spw..."
# But first find the spw corresponding to it
tb.open(vos_dir + msfile + '/SPECTRAL_WINDOW')
freqs = tb.getcol('REF_FREQUENCY')
nchans = tb.getcol('NUM_CHAN')
tb.close()
spws = range(0, len(freqs))
# Select the 21cm
sel = np.where((freqs > 1.40 * 10**9) & (freqs < 1.43 * 10**9))
hispw = str(spws[sel[0][0]])
freq = freqs[sel[0][0]]
nchan = nchans[sel[0][0]]
print "Selected spw ", hispw, "with frequency ", freq, "and ", nchan, " channels"
print "Starting split the HI line"
# Mosaic or single pointing?
tb.open(vos_dir + msfile + '/FIELD')
names = tb.getcol('NAME')
tb.close()
moscount = 0
for name in names:
chsrc = name.find(source)
if chsrc != -1:
moscount = moscount + 1
if moscount > 1:
imagermode = "mosaic"
else:
imagermode = "csclean"
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
# Split the corrected source data from the rest
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
print "Starting source split..."
os.system('rm -rf ' + vos_proc + splitms)
default('split')
vis = vos_dir + hisplitms
outputvis = vos_proc + splitms
field = source
spw = ''
datacolumn = 'corrected'
keepflags = False
split()
print "Created splitted-source .ms " + splitms
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
# UV continuum subtraction
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
# 1) Estimate the mean amplitude per channel
# (visstat is run channel by channel to get the
# real and imaginary means)
print "Estimating channels with signal..."
real_amps = []
imag_amps = []
default('visstat')
vis = vos_proc + splitms
field = '0'
datacolumn = 'data'
selectdata = True
useflags = False
for nc in range(nchan):
spw = '0:' + str(nc)
axis = 'real'
pdata = visstat()
real_amps.append(pdata['DATA']['mean'])
axis = 'imag'
pdata = visstat()
imag_amps.append(pdata['DATA']['mean'])
real_amps = np.asarray(real_amps)
imag_amps = np.asarray(imag_amps)
amps = np.sqrt(real_amps**2 + imag_amps**2)
chans = np.arange(nchan) + 1
# Guessing parameters for fitting
A = max(amps)
mu = chans[amps.tolist().index(A)]
hm = chans[amps > A / 2]
sigma = float(hm[-1] - hm[0]) / 2.35
opar, _ = curve_fit(gauss, chans, amps, p0=[A, mu, sigma])
# Step out to 3.5 sigma around the line so the signal
# channels are excluded from the continuum fit
chan1 = int(mu - 3.5 * opar[2])
chan2 = int(mu + 3.5 * opar[2])
fitspws = str(chan1) + '~' + str(chan2)
print "Signal within channels " + fitspws
print "Starting contsub..."
# Run the routine
os.system('rm -rf ' + vos_proc + contsubms)
default('uvcontsub')
vis = vos_proc + splitms
fitspw = '0:' + fitspws
excludechans = True
solint = 0.0
fitorder = 0
fitmode = 'subtract'
splitdata = True
uvcontsub()
print "Created continum subtracted image" + contsubms
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
# CLEANing
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
print "Starting CLEANing..."
os.system('rm -rf ' + vos_proc + rawcleanms + '*')
# First generate a 0-iterations
# image to estimate the noise level
# (threshold)
# Get max baseline and dish size
bline_max = au.getBaselineExtrema(vos_proc + splitms)[0]
tb.open(vos_proc + splitms + '/ANTENNA')
dishs = tb.getcol('DISH_DIAMETER')
dish_min = min(dishs)
tb.close()
# Find the beam
hi_lambda = 299792458.0 / (freq)
min_lambda = 299792458.0 / (min(freqs))
syn_beam = (hi_lambda / bline_max) * 180 / np.pi * 3600
prim_beam = (min_lambda / dish_min) * 180 / np.pi * 3600
# Setting CLEANing parameters
sel_cell = str(round(syn_beam / 5)) + 'arcsec'
sel_imsize = int(round(prim_beam / (syn_beam / 5)))
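# (Five cells per synthesized beam is a common oversampling rule of thumb;
# the image is sized to cover roughly one primary beam.)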
# Increase sel_imsize by a couple of beams
# to be safe
dx = int(round(syn_beam / prim_beam * sel_imsize))
sel_imsize = sel_imsize + 1 * dx
# The image size should factor into powers of
# 2, 3 and 5 to work well with clean, so:
sel_imsize = sel_imsize - 1
pnum = 1 * sel_imsize
while pnum != 1:
sel_imsize = sel_imsize + 1
pnum = 1 * sel_imsize
while pnum % 2 == 0:
pnum = pnum / 2
    while pnum % 3 == 0:
pnum = pnum / 3
while pnum % 5 == 0:
pnum = pnum / 5
print "Image size:", sel_imsize
print "Cell
|
size:", sel_cell
# First generate a 0-iterations
# image to estimate the noise level
# (threshold)
default('clean')
vis = vos_proc + contsubms
imagename = vos_proc + rawcleanms
cell = [sel_cell, sel_cell]
imsize = [sel_imsize, sel_imsize]
imagermode = imagermode
mode = "channel"
nchan = 4
start = chan1 - 5
width = 1
field = '0'
spw = '0'
interactive = False
pbcor = False
minpb = 0.25
restfreq = '1.420405752GHz'
niter = 0
clean()
print "Estimating sigma..."
default('imstat')
imagename = vos_proc + rawcleanms + '.image'
chans = '0~3'
rawclean_stat = imstat()
rms = rawclean_stat['sigma'][0] * 1000
rms = round(rms)
rms = str(int(rms)) + 'mJy'
print "Sigma=", rms, ". Now the real CLEANing..."
# Now run the real cleaning
os.system('rm -rf ' + cleanms + '*')
default('clean')
vis = vos_proc + contsubms
imagename = vos_proc + cleanms
cell = [sel_cell, sel_cell]
imsize = [sel_imsize, sel_imsize]
imagermode = imagermode
mode = "channel"
start = chan1
nchan = chan2 - chan1
width = 1
field = ''
spw = ''
interactive = False
restfreq = '1.420405752GHz'
outframe = 'LSRK'
niter = 10000
threshold = rms
usescratch = True
clean()
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
# Moment maps 0,1,2
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
default("immoments")
imagename = vos_proc + cleanms + '.image'
moments = [0, 1, 2]
outfile = vos_proc + cleanms
immoments()
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
# Convert everything to fits file
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
print "Exporting the image fits..."
default('exportfits')
imagename = vos_proc + cleanms + '.image'
fitsimage = vos_proc + source + '_21cm.fits'
velocity = True
optical = False
overwrite = True
dropstokes = True
exportfits()
print "Exporting moment maps..."
default('exportfits')
# Moment 0
imagename = vos_proc + cleanms + '.integrated'
fitsimage = vos_proc + source + '_21cm_mom0.fits'
velocity = True
optical = False
overwrite = True
dropstokes = True
exportfits()
default('exportfits')
# Moment 1
imagename = vos_proc + cleanms + '.weighted_coord'
fitsimage = vos_proc + source + '_21cm_mom1.fits'
velocity = True
optical = False
overwrite = True
dropstokes = True
exportfits()
default('exportfits')
# Moment 2
imagename = vos_proc + cleanms + '.weighted_dispersion_coord'
fitsimage = vos_proc + source + '_21cm_mom2.fits'
velocity = True
optical = False
overwrite = True
dropstokes = True
exportfits()
|
incuna/authentic
|
authentic2/saml/migrations/0010_auto__add_field_spoptionsidppolicy_enabled.py
|
Python
|
agpl-3.0
| 22,083
| 0.007472
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'SPOptionsIdPPolicy.enabled'
db.add_column('saml_spoptionsidppolicy', 'enabled', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False)
def backwards(self, orm):
# Deleting field 'SPOptionsIdPPolicy.enabled'
db.delete_column('saml_spoptionsidppolicy', 'enabled')
models = {
'attribute_aggregator.attributesource': {
'Meta': {'object_name': 'AttributeSource'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'namespace': ('django.db.models.fields.CharField', [], {'default': "('Default', 'Default')", 'max_length': '100'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'idp.attributeitem': {
'Meta': {'object_name': 'AttributeItem'},
'attribute_name': ('django.db.models.fields.CharField', [], {'default': "('OpenLDAProotDSE', 'OpenLDAProotDSE')", 'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'output_name_format': ('django.db.models.fields.CharField', [], {'default': "('urn:oasis:names:tc:SAML:2.0:attrname-format:basic', 'SAMLv2 BASIC')", 'max_length': '100'}),
'output_namespace': ('django.db.models.fields.CharField', [], {'default': "('Default', 'Default')", 'max_length': '100'}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['attribute_aggregator.AttributeSource']", 'null': 'True', 'blank': 'True'})
},
'idp.attributelist': {
'Meta': {'object_name': 'AttributeList'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'attributes of the list'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['idp.AttributeItem']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'idp.attributepolicy': {
'Meta': {'object_name': 'AttributePolicy'},
'attribute_filter_for_sso_from_push_sources': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filter attributes of push sources with list'", 'null': 'True', 'to': "orm['idp.AttributeList']"}),
'attribute_list_for_sso_from_pull_sources': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes from pull sources'", 'null': 'True', 'to': "orm['idp.AttributeList']"}),
'filter_source_of_filtered_attributes': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'forward_attributes_from_push_sources': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'map_attributes_from_push_sources': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'map_attributes_of_filtered_attributes': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'output_name_format': ('django.db.models.fields.CharField', [], {'default': "('urn:oasis:names:tc:SAML:2.0:attrname-format:basic', 'SAMLv2 BASIC')", 'max_length': '100'}),
'output_namespace': ('django.db.models.fields.CharField', [], {'default': "('Default', 'Default')", 'max_length': '100'}),
'send_error_and_no_attrs_if_missing_required_attrs': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'source_filter_for_sso_from_push_sources': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'filter attributes of push sources with sources'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['attribute_aggregator.AttributeSource']"})
},
'saml.authorizationattributemap': {
'Meta': {'object_name': 'AuthorizationAttributeMap'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'})
},
'saml.authorizationattributemapping': {
'Meta': {'object_name': 'AuthorizationAttributeMapping'},
'attribute_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'attribute_value': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'attribute_value_format': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
|
tjcsl/ion
|
intranet/apps/dataimport/apps.py
|
Python
|
gpl-2.0
| 109
| 0
|
from django.apps import AppConfig
class DataimportConfig(AppConfig):
    name = "intranet.apps.dataimport"
|
eustislab/horton
|
data/examples/hamiltonian/even_tempered_li.py
|
Python
|
gpl-3.0
| 725
| 0
|
#!/usr/bin/env python
import numpy as np
from horton import *
# specify the even tempered basis set
alpha_low = 5e-3
alpha_high = 5e2
nbasis = 30
lnratio = (np.log(alpha_high) - np.log(alpha_low))/(nbasis-1)
# build a list of "contra
|
ctions". These aren't real contractions as every
# contraction only contains one basis function.
bcs = []
for ibasis in xrange(nbasis):
    alpha = alpha_low * np.exp(lnratio * ibasis)
# arguments of GOBasisContraction:
# shell_type, list of exponents, list of contraction coefficients
bcs.append(GOBasisContraction(0, np.array([alpha]), np.array([1.0])))
# Finish setting up the basis set:
ba = GOBasisAtom(bcs)
obasis = get_gobasis(np.array([[0.0, 0.0, 0.0]]), np.array([3]), default=ba)
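# Sanity note (a sketch, not part of the original script): the exponents form
# a geometric series, alpha_i = alpha_low * (alpha_high/alpha_low)**(i/(nbasis-1)),
# so the first exponent equals alpha_low and the last equals alpha_high.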
|
engineer0x47/SCONS
|
engine/SCons/Variables/PathVariable.py
|
Python
|
mit
| 5,616
| 0.00089
|
"""SCons.Variables.PathVariable
This file defines an option type for SCons implementing path settings.
To be used whenever a user-specified path override should be allowed.
Arguments to PathVariable are:
option-name = name of this option on the command line (e.g. "prefix")
option-help = help string for option
option-dflt = default value for this option
validator = [optional] validator for option value. Predefined
validators are:
PathAccept -- accepts any path setting; no validation
PathIsDir -- path must be an existing directory
PathIsDirCreate -- path must be a dir; will create
PathIsFile -- path must be a file
PathExists -- path must exist (any type) [default]
The validator is a function that is called and which
should return True or False to indicate if the path
is valid. The arguments to the validator function
are: (key, val, env). The key is the name of the
option, the val is the path specified for the option,
                 and the env is the env to which the Options have been
added.
  Usage example (command line, then SConstruct):
      prefix=/usr/local
      opts = Variables()
opts.Add(PathVariable('qtdir',
'where the root of Qt is installed',
qtdir, PathIsDir))
opts.Add(PathVariable('qt_includes',
'where the Qt includes are installed',
'$qtdir/includes', PathIsDirCreate))
opts.Add(PathVariable('qt_libraries',
'where the Qt library is installed',
'$qtdir/lib'))
"""
#
# Copyright (c) 2001 - 2014 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Variables/PathVariable.py 2014/08/24 12:12:31 garyo"
__all__ = ['PathVariable',]
import os
import os.path
import SCons.Errors
import SCons.Util
class _PathVariableClass(object):
def PathAccept(self, key, val, env):
"""Accepts any path, no checking done."""
pass
def PathIsDir(self, key, val, env):
"""Validator to check if Path is a directory."""
if not os.path.isdir(val):
if os.path.isfile(val):
m = 'Directory path for option %s is a file: %s'
else:
m = 'Directory path for option %s does not exist: %s'
raise SCons.Errors.UserError(m % (key, val))
def PathIsDirCreate(self, key, val, env):
"""Validator to check if Path is a directory,
creating it if it does not exist."""
if os.path.isfile(val):
m = 'Path for option %s is a file, not a directory: %s'
raise SCons.Errors.UserError(m % (key, val))
if not os.path.isdir(val):
os.makedirs(val)
def PathIsFile(self, key, val, env):
"""validator to check if Path is a file"""
if not os.path.isfile(val):
            if os.path.isdir(val):
                m = 'File path for option %s is a directory: %s'
            else:
                m = 'File path for option %s does not exist: %s'
raise SCons.Errors.UserError(m % (key, val))
def PathExists(self, key, val, env):
"""validator to check if Path exists"""
if not os.path.exists(val):
m = 'Path for option %s does not exist: %s'
raise SCons.Errors.UserError(m % (key, val))
def __call__(self, key, help, default, validator=None):
        # NB: searchfunc is currently undocumented and unsupported
"""
The input parameters describe a 'path list' option, thus they
are returned with the correct converter and validator appended. The
result is usable for input to opts.Add() .
The 'default' option specifies the default path to use if the
user does not specify an override with this option.
validator is a validator, see this file for examples
"""
if validator is None:
validator = self.PathExists
if SCons.Util.is_List(key) or SCons.Util.is_Tuple(key):
return (key, '%s ( /path/to/%s )' % (help, key[0]), default,
validator, None)
else:
return (key, '%s ( /path/to/%s )' % (help, key), default,
validator, None)
PathVariable = _PathVariableClass()
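# A minimal usage sketch (hypothetical SConstruct snippet; the option name
# and default path are illustrative):
#
#   opts = Variables()
#   opts.Add(PathVariable('prefix', 'installation prefix',
#                         '/usr/local', PathVariable.PathIsDir))
#   env = Environment(variables=opts)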
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
bitcraft/pyglet
|
contrib/experimental/input/xinput.py
|
Python
|
bsd-3-clause
| 9,260
| 0
|
# ----------------------------------------------------------------------------
# Copyright (c) 2008 Andrew D. Straw and Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
# Based on pygxinput originally by Andrew D. Straw
# http://code.astraw.com/projects/motmot/wiki/pygxinput
import ctypes
import pyglet
from pyglet.window.xlib import xlib
import lib_xinput as xi
class XInputDevice:
def __init__(self, display, device_info):
self._x_display = display._display
self._device_id = device_info.id
self.name = device_info.name
self._open_device = None
# TODO: retrieve inputclassinfo from device_info and expose / save
# for valuator axes etc.
def open(self):
if self._open_device:
return
self._open_device = xi.XOpenDevice(self._x_display, self._device_id)
if not self._open_device:
raise Exception('Cannot open device')
def close(self):
if not self._open_device:
return
xi.XCloseDevice(self._x_display, self._open_device)
def attach(self, window):
assert window._x_display == self._x_display
return XInputDeviceInstance(self, window)
class XInputDeviceInstance(pyglet.event.EventDispatcher):
def __init__(self, device, window):
"""Create an opened instance of a device on the given window.
:Parameters:
`device` : XInputDevice
Device to open
`window` : Window
Window to open device on
"""
assert device._x_display == window._x_display
assert device._open_device
self.device = device
self.window = window
self._events = list()
try:
dispatcher = window.__xinput_window_event_dispatcher
except AttributeError:
dispatcher = window.__xinput_window_event_dispatcher = \
XInputWindowEventDispatcher()
dispatcher.add_instance(self)
device = device._open_device.contents
if not device.num_classes:
return
# Bind matching extended window events to bound instance methods
# on this object.
#
# This is inspired by test.c of xinput package by Frederic
# Lepied available at x.org.
#
# In C, this stuff is normally handled by the macro DeviceKeyPress and
# friends. Since we don't have access to those macros here, we do it
# this way.
for i in range(device.num_classes):
class_info = device.classes[i]
if class_info.input_class == xi.KeyClass:
self._add(class_info, xi._deviceKeyPress,
dispatcher._event_xinput_key_press)
self._add(class_info, xi._deviceKeyRelease,
dispatcher._event_xinput_key_release)
elif class_info.input_class == xi.ButtonClass:
self._add(class_info, xi._deviceButtonPress,
dispatcher._event_xinput_button_press)
self._add(class_info, xi._deviceButtonRelease,
dispatcher._event_xinput_button_release)
elif class_info.input_class == xi.ValuatorClass:
self._add(class_info, xi._deviceMotionNotify,
dispatcher._event_xinput_motion)
elif class_info.input_class == xi.ProximityClass:
self._add(class_info, xi._proximityIn,
dispatcher._event_xinput_proximity_in)
self._add(class_info, xi._proximityOut,
dispatcher._event_xinput_proximity_out)
elif class_info.input_class == xi.FeedbackClass:
pass
elif class_info.input_class == xi.FocusClass:
pass
elif class_info.input_class == xi.OtherClass:
pass
array = (xi.XEventClass * len(self._events))(*self._events)
xi.XSelectExtensionEvent(window._x_display,
window._window,
array,
len(array))
def _add(self, class_info, event, handler):
_type = class_info.event_type_base + event
_class = self.device._device_id << 8 | _type
self._events.append(_class)
self.window._event_handlers[_type] = handler
XInputDeviceInstance.register_event_type('on_button_press')
XInputDeviceInstance.register_event_type('on_button_release')
XInputDeviceInstance.register_event_type('on_motion')
XInputDeviceInstance.register_event_type('on_proximity_in')
XInputDeviceInstance.register_event_type('on_proximity_out')
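# A minimal usage sketch (hypothetical window/device objects; assumes an X11
# display and a device that reports motion axes):
#
#   device.open()
#   instance = device.attach(window)
#
#   @instance.event
#   def on_motion(axis_data, x, y):
#       print axis_data, x, y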
class XInputWindowEventDispatcher:
def __init__(self):
self._instances = dict()
def add_instance(self, instance):
self._instances[instance.device._device_id] = instance
def remove_instance(self, instance):
del self._instances[instance.device._device_id]
def dispatch_instance_event(self, e, *args):
try:
instance = self._instances[e.deviceid]
except KeyError:
return
        instance.dispatch_event(*args)
@pyglet.window.xlib.XlibEventHandler(0)
def _event_xinput_key_press(self, ev):
raise NotImplementedError('TODO')
@pyglet.window.xlib.XlibEventHandler(0)
def _event_xinput_key_release(self, ev):
raise NotImplementedError('TODO')
@pyglet.window.xlib.XlibEventHandler(0)
    def _event_xinput_button_press(self, ev):
e = ctypes.cast(ctypes.byref(ev),
ctypes.POINTER(xi.XDeviceButtonEvent)).contents
self.dispatch_instance_event(e, 'on_button_press', e.button)
@pyglet.window.xlib.XlibEventHandler(0)
def _event_xinput_button_release(self, ev):
e = ctypes.cast(ctypes.byref(ev),
ctypes.POINTER(xi.XDeviceButtonEvent)).contents
self.dispatch_instance_event(e, 'on_button_release', e.button)
@pyglet.window.xlib.XlibEventHandler(0)
def _event_xinput_motion(self, ev):
e = ctypes.cast(ctypes.byref(ev),
ctypes.POINTER(xi.XDeviceMotionEvent)).contents
axis_data = list()
for i in range(e.axes_count):
axis_data.append(e.axis_data[i])
self.dispatch_instance_event(e, 'on_motion', axis_data, e.x, e.y)
@pyglet.window.xlib.XlibEventHandler(0)
def _event_xinput_proximity_in(self, ev):
e = ctypes.cast(ctypes.byref(ev),
ctypes.POINTER(xi.XProximityNotifyEvent)).contents
        self.dispatch_instance_event(e, 'on_proximity_in')
|
clemsos/mitras
|
tests/examples/kmeans.py
|
Python
|
mit
| 2,846
| 0.001054
|
# Author: Peter Prettenhofer <[email protected]>
# Lars Buitinck <[email protected]>
# License: Simplified BSD
from sklearn.datasets import fetch_20newsgroups
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn import metrics
from sklearn.cluster import KMeans, MiniBatchKMeans
import logging
from optparse import OptionParser
import sys
from time import time
import numpy as np
# Display progress logs on stdout
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s')
# parse commandline arguments
op = OptionParser()
op.add_option("--no-minibatch",
action="store_false", dest="minibatch", default=True,
help="Use ordinary k-means algorithm.")
print __doc__
op.print_help()
(opts, args) = op.parse_args()
if len(args) > 0:
op.error("this script takes no arguments.")
sys.exit(1)
###############################################################################
# Load some categories from the training set
categories = [
'alt.atheism',
'talk.religion.misc',
'comp.graphics',
'sci.space',
]
# Uncomment the following to do the analysis on all the categories
#categories = None
print "Loading 20 newsgroups dataset for categories:"
print categories
dataset = fetch_20newsgroups(subset='all', categories=categories,
shuffle=True, random_state=42)
print "%d documents" % len(dataset.data)
print "%d categories" % len(dataset.target_names)
print
labels = dataset.target
true_k = np.unique(labels).shape[0]
print "Extracting features from the training dataset using a sparse vectorizer"
t0 = time()
vectorizer = TfidfVectorizer(max_df=0.5, max_features=10000,
stop_words='english')
X = vectorizer.fit_transform(dataset.data)
print "done in %fs" % (time() - t0)
print "n_samples: %d, n_features: %d" % X.shape
print
###############################################################################
# Do the actual clustering
if opts.minibatch:
km = MiniBatchKMeans(n_clusters=true_k, init='k-means++', n_init=1,
init_size=1000,
batch_size=1000, verbose=1)
else:
km = KMeans(n_clusters=true_k, init='random', max_iter=100, n_init=1, verbose=1)
print "Clustering sparse data with %s" % km
t0 = time()
km.fit(X)
print "done in %0.3fs" % (time() - t0)
print
print "Homogeneity: %0.3f" % metrics.homogeneity_score(labels, km.labels_)
print "Completeness: %0.3f" % metrics.completeness_score(labels, km.labels_)
print "V-measure: %0.3f" % metrics.v_measure_score(labels, km.labels_)
print "Adjusted Rand-Index: %.3f" % \
metrics.adjusted_rand_score(labels, km.labels_)
print "Silhouette Coefficient: %0.3f" % metrics.silhouette_score(
    X, km.labels_, sample_size=1000)
print
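# Note: homogeneity and completeness compare the cluster assignments against
# the true newsgroup labels; V-measure is their harmonic mean. The silhouette
# score uses only the data and the predicted cluster labels.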
|
apache/incubator-superset
|
tests/unit_tests/fixtures/datasets.py
|
Python
|
apache-2.0
| 6,514
| 0
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Any, Dict
from unittest.mock import Mock
def get_column_mock(params: Dict[str, Any]) -> Mock:
mock = Mock()
mock.id = params["id"]
mock.column_name = params["column_name"]
mock.verbose_name = params["verbose_name"]
mock.description = params["description"]
mock.expression = params["expression"]
mock.filterable = params["filterable"]
mock.groupby = params["groupby"]
mock.is_dttm = params["is_dttm"]
mock.type = params["type"]
return mock
def get_metric_mock(params: Dict[str, Any]) -> Mock:
mock = Mock()
mock.id = params["id"]
mock.metric_name = params["metric_name"]
mock.metric_name = params["verbose_name"]
mock.description = params["description"]
mock.expression = params["expression"]
mock.warning_text = params["warning_text"]
mock.d3format = params["d3format"]
return mock
def get_dataset_mock() -> Mock:
mock = Mock()
mock.id = None
mock.column_formats = {"ratio": ".2%"}
mock.database = {"id": 1}
mock.description = "Adding a DESCRip"
mock.default_endpoint = ""
mock.filter_select_enabled = True
mock.name = "birth_names"
mock.table_name = "birth_names"
mock.datasource_name = "birth_names"
mock.type = "table"
mock.schema = None
mock.offset = 66
mock.cache_timeout = 55
mock.sql = ""
mock.columns = [
get_column_mock(
{
"id": 504,
"column_name": "ds",
"verbose_name": "",
"description": None,
"expression": "",
"filterable": True,
"groupby": True,
"is_dttm": True,
"type": "DATETIME",
}
),
get_column_mock(
{
"id": 505,
"column_name": "gender",
"verbose_name": None,
"description": None,
"expression": "",
"filterable": True,
"groupby": True,
"is_dttm": False,
"type": "VARCHAR(16)",
}
),
get_column_mock(
{
"id": 506,
"column_name": "name",
"verbose_name": None,
"description": None,
"expression": None,
"filterable": True,
"groupby": True,
"is_dttm": None,
"type": "VARCHAR(255)",
}
),
get_column_mock(
{
"id": 508,
"column_name": "state",
"verbose_name": None,
"description": None,
"expression": None,
"filterable": True,
"groupby": True,
"is_dttm": None,
"type": "VARCHAR(10)",
}
),
get_column_mock(
{
"id": 509,
"column_name": "num_boys",
"verbose_name": None,
"description": None,
"expression": None,
"filterable": True,
"groupby": True,
"is_dttm": None,
"type": "BIGINT(20)",
}
),
get_column_mock(
{
|
"id": 510,
"column_name": "num_girls",
|
"verbose_name": None,
"description": None,
"expression": "",
"filterable": False,
"groupby": False,
"is_dttm": False,
"type": "BIGINT(20)",
}
),
get_column_mock(
{
"id": 532,
"column_name": "num",
"verbose_name": None,
"description": None,
"expression": None,
"filterable": True,
"groupby": True,
"is_dttm": None,
"type": "BIGINT(20)",
}
),
get_column_mock(
{
"id": 522,
"column_name": "num_california",
"verbose_name": None,
"description": None,
"expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END",
"filterable": False,
"groupby": False,
"is_dttm": False,
"type": "NUMBER",
}
),
]
    mock.metrics = [
get_metric_mock(
{
"id": 824,
"metric_name": "sum__num",
"verbose_name": "Babies",
"description": "",
"expression": "SUM(num)",
"warning_text": "",
"d3format": "",
}
),
get_metric_mock(
{
"id": 836,
"metric_name": "count",
"verbose_name": "",
"description": None,
"expression": "count(1)",
"warning_text": None,
"d3format": None,
}
),
get_metric_mock(
{
"id": 843,
"metric_name": "ratio",
"verbose_name": "Ratio Boys/Girls",
"description": "This represents the ratio of boys/girls",
"expression": "sum(num_boys) / sum(num_girls)",
"warning_text": "no warning",
"d3format": ".2%",
}
),
    ]
return mock
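# A minimal usage sketch (hypothetical test code):
#
#   dataset = get_dataset_mock()
#   assert dataset.columns[0].column_name == "ds"
#   assert dataset.metrics[0].metric_name == "sum__num"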
|
Stracksapp/stracks_api
|
stracks_api/tasks.py
|
Python
|
bsd-2-clause
| 152
| 0.013158
|
from celery.task import Task
import requests
class StracksFlushTask(Task):
def run(self, url, data):
        requests.post(url + "/", data=data)
|
creyesp/RF_Estimation
|
Clustering/clustering/spaceClustering.py
|
Python
|
gpl-2.0
| 4,653
| 0.041694
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# spaceClustering.py
#
# Copyright 2014 Carlos "casep" Sepulveda <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
# Performs basic clustering based on the size of the RF
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), '../..','LIB'))
import rfestimationLib as rfe
import argparse # argument parsing
import numpy as np # Numpy
import densityPeaks as dp
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
from sklearn import mixture
clustersColours = ['blue', 'red', 'green', 'orange', 'black','yellow', \
'#ff006f','#00e8ff','#fcfa00', '#ff0000', '#820c2c', \
'#ff006f', '#af00ff','#0200ff','#008dff','#00e8ff', \
'#0c820e','#28ea04','#ea8404','#c8628f','#6283ff', \
'#5b6756','#0c8248','k','#820cff','#932c11', \
'#002c11','#829ca7']
def main():
parser = argparse.ArgumentParser(prog='spaceClustering.py',
		description='Performs basic clustering based on the size of the RF',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--sourceFolder',
help='Source folder',
type=str, required=True)
parser.add_argument('--outputFolder',
help='Output folder',
type=str, required=True)
parser.add_argument('--percentage',
help='Percentage used to calculate the distance',
type=float, default='2', required=False)
parser.add_argument('--xSize',
help='X size of the stimuli',
type=int, default='31', required=False)
parser.add_argument('--ySize',
help='Y size of the stimuli',
type=int, default='31', required=False)
args = parser.parse_args()
#Source folder of the files with the timestamps
sourceFolder = rfe.fixPath(args.sourceFolder)
if not os.path.exists(sourceFolder):
print ''
		print 'Source folder does not exist ' + sourceFolder
print ''
sys.exit()
#Output folder for the graphics
outputFolder = rfe.fixPath(args.outputFolder)
if not os.path.exists(outputFolder):
try:
os.makedirs(outputFolder)
except:
print ''
print 'Unable to create folder ' + outputFolder
print ''
sys.exit()
units = []
dataCluster = np.zeros((1,7))
for unitFile in sorted(os.listdir(sourceFolder)):
if os.path.isdir(sourceFolder+unitFile):
unitName = unitFile.rsplit('_', 1)[0]
fitResult = rfe.loadFitMatrix(sourceFolder,unitFile)
dataCluster = np.vstack((dataCluster,[fitResult[0][2],\
fitResult[0][3],fitResult[0][1],fitResult[0][4],\
fitResult[0][5],fitResult[0][2]*fitResult[0][3]*3,\
(fitResult[0][2]+fitResult[0][3])/2]))
units.append(unitName)
# remove the first row of zeroes
dataCluster = dataCluster[1:,:]
percentage = args.percentage #exploratory, '...for large data sets, the results of the analysis are robust with respect to the choice of d_c'
# Area instead o Radius
#clustersNumber, labels = dp.predict(dataCluster[:,0:2], percentage)
clustersNumber, labels = dp.predict(dataCluster[:,5:7], percentage)
gmix = mixture.GMM(n_components=clustersNumber, covariance_type='spherical')
gmix.fit(dataCluster[:,5:7])
labels = gmix.predict(dataCluster[:,5:7])
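	# Note: densityPeaks is only used here to fix the number of clusters; the
	# GMM fit above re-assigns the labels, so dp's own labels are discarded.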
for clusterId in range(clustersNumber):
clusterFile = open(outputFolder+'cluster_'+str(clusterId)+'.csv', "w")
for unit in range(labels.size):
if labels[unit] == clusterId:
clusterFile.write(units[unit]+'\n')
		clusterFile.close()
xSize = args.xSize
ySize = args.ySize
# generate graphics of all ellipses
for clusterId in range(clustersNumber):
dataGrilla = np.zeros((1,7))
for unitId in range(dataCluster.shape[0]):
if labels[unitId] == clusterId:
datos=np.zeros((1,7))
				datos[0] = dataCluster[unitId,:]
dataGrilla = np.append(dataGrilla,datos, axis=0)
## remove the first row of zeroes
dataGrilla = dataGrilla[1:,:]
rfe.graficaGrilla(dataGrilla, outputFolder+'Grilla_'+str(clusterId)+'.png', 0, clustersColours[clusterId], xSize, ySize)
return 0
if __name__ == '__main__':
main()
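# A minimal sketch of the two-stage scheme above, written against the
# current sklearn API (GaussianMixture replaces the long-deprecated
# mixture.GMM); the cluster count is assumed to come from a
# density-peaks step such as dp.predict, exactly as in main().
def clusterBySize(features, nClusters):
    from sklearn.mixture import GaussianMixture
    gm = GaussianMixture(n_components=nClusters, covariance_type='spherical')
    gm.fit(features)
    return gm.predict(features)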
|
jimi-c/ansible
|
lib/ansible/modules/files/lineinfile.py
|
Python
|
gpl-3.0
| 18,737
| 0.001868
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Daniel Hokka Zakrisson <[email protected]>
# Copyright: (c) 2014, Ahti Kitsik <[email protected]>
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'core'}
DOCUMENTATION = """
---
module: lineinfile
author:
    - Daniel Hokka Zakrisson (@dhozac)
- Ahti Kitsik (@ahtik)
extends_documentation_fragment:
- files
- validate
short_description: Manage lines in text files
description:
  - This module ensures a particular line is in a file, or replaces an
existing line using a back-referenced regular expression.
- This is primarily useful when you want to change a single line in
a file only. See the M(replace) module if you want to change
multiple, similar lines or check M(blockinfile) if you want to insert/update/remove a block of lines in a file.
For other cases, see the M(copy) or M(template) modules.
version_added: "0.7"
options:
path:
description:
- The file to modify.
- Before 2.3 this option was only usable as I(dest), I(destfile) and I(name).
aliases: [ dest, destfile, name ]
required: true
regexp:
aliases: [ 'regex' ]
description:
- The regular expression to look for in every line of the file. For
C(state=present), the pattern to replace if found. Only the last line
found will be replaced. For C(state=absent), the pattern of the line(s)
to remove. Uses Python regular expressions.
See U(http://docs.python.org/2/library/re.html).
version_added: '1.7'
state:
description:
- Whether the line should be there or not.
choices: [ absent, present ]
default: present
line:
description:
- Required for C(state=present). The line to insert/replace into the
file. If C(backrefs) is set, may contain backreferences that will get
expanded with the C(regexp) capture groups if the regexp matches.
backrefs:
description:
- Used with C(state=present). If set, C(line) can contain backreferences
(both positional and named) that will get populated if the C(regexp)
matches. This flag changes the operation of the module slightly;
C(insertbefore) and C(insertafter) will be ignored, and if the C(regexp)
doesn't match anywhere in the file, the file will be left unchanged.
If the C(regexp) does match, the last matching line will be replaced by
the expanded line parameter.
type: bool
default: 'no'
version_added: "1.1"
insertafter:
description:
- Used with C(state=present). If specified, the line will be inserted
after the last match of specified regular expression.
        If the first match is required, use C(firstmatch=yes).
A special value is available; C(EOF) for inserting the line at the
end of the file.
If specified regular expression has no matches, EOF will be used instead.
If regular expressions are passed to both C(regexp) and C(insertafter), C(insertafter) is only honored if no match for C(regexp) is found.
May not be used with C(backrefs).
choices: [ EOF, '*regex*' ]
default: EOF
insertbefore:
description:
- Used with C(state=present). If specified, the line will be inserted
before the last match of specified regular expression.
        If the first match is required, use C(firstmatch=yes).
        A special value is available; C(BOF) for inserting the line at
the beginning of the file.
If specified regular expression has no matches, the line will be
inserted at the end of the file.
If regular expressions are passed to both C(regexp) and C(insertbefore), C(insertbefore) is only honored if no match for C(regexp) is found.
May not be used with C(backrefs).
choices: [ BOF, '*regex*' ]
version_added: "1.1"
create:
description:
- Used with C(state=present). If specified, the file will be created
if it does not already exist. By default it will fail if the file
is missing.
type: bool
default: 'no'
backup:
description:
- Create a backup file including the timestamp information so you can
get the original file back if you somehow clobbered it incorrectly.
type: bool
default: 'no'
firstmatch:
description:
      - Used with C(insertafter) or C(insertbefore). If set, C(insertafter) and C(insertbefore)
        will work with the first line that matches the given regular expression.
type: bool
default: 'no'
version_added: "2.5"
others:
description:
- All arguments accepted by the M(file) module also work here.
notes:
- As of Ansible 2.3, the I(dest) option has been changed to I(path) as default, but I(dest) still works as well.
"""
EXAMPLES = r"""
# Before 2.3, option 'dest', 'destfile' or 'name' was used instead of 'path'
- lineinfile:
path: /etc/selinux/config
regexp: '^SELINUX='
line: 'SELINUX=enforcing'
- lineinfile:
path: /etc/sudoers
state: absent
regexp: '^%wheel'
# Searches for a line that begins with 127.0.0.1 and replaces it with the value of the 'line' parameter
- lineinfile:
path: /etc/hosts
regexp: '^127\.0\.0\.1'
line: '127.0.0.1 localhost'
owner: root
group: root
mode: 0644
- lineinfile:
path: /etc/httpd/conf/httpd.conf
regexp: '^Listen '
insertafter: '^#Listen '
line: 'Listen 8080'
- lineinfile:
path: /etc/services
regexp: '^# port for http'
insertbefore: '^www.*80/tcp'
line: '# port for http by default'
# Add a line to a file if the file does not exist, without passing regexp
- lineinfile:
path: /tmp/testfile
line: '192.168.1.99 foo.lab.net foo'
create: yes
# Fully quoted because of the ': ' on the line. See the Gotchas in the YAML docs.
- lineinfile:
path: /etc/sudoers
state: present
regexp: '^%wheel\s'
line: '%wheel ALL=(ALL) NOPASSWD: ALL'
# Yaml requires escaping backslashes in double quotes but not in single quotes
- lineinfile:
path: /opt/jboss-as/bin/standalone.conf
    regexp: '^(.*)Xms(\d+)m(.*)$'
line: '\1Xms${xms}m\3'
backrefs: yes
# Validate the sudoers file before saving
- lineinfile:
path: /etc/sudoers
state: present
regexp: '^%ADMIN ALL='
line: '%ADMIN ALL=(ALL) NOPASSWD: ALL'
validate: '/usr/sbin/visudo -cf %s'
"""
import os
import re
import tempfile
# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import b
from ansible.module_utils._text import to_bytes, to_native
def write_changes(module, b_lines, dest):
tmpfd, tmpfile = tempfile.mkstemp()
with open(tmpfile, 'wb') as f:
f.writelines(b_lines)
validate = module.params.get('validate', None)
valid = not validate
if validate:
if "%s" not in validate:
module.fail_json(msg="validate must contain %%s: %s" % (validate))
(rc, out, err) = module.run_command(to_bytes(validate % tmpfile, errors='surrogate_or_strict'))
valid = rc == 0
if rc != 0:
module.fail_json(msg='failed to validate: '
'rc:%s error:%s' % (rc, err))
if valid:
module.atomic_move(tmpfile,
to_native(os.path.realpath(to_bytes(dest, errors='surrogate_or_strict')), errors='surrogate_or_strict'),
unsafe_writes=module.params['unsafe_writes'])
def check_file_attrs(module, changed, message, diff):
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False, diff=diff):
if changed:
message += " and "
changed = True
message += "ownership, perms or SE linux context changed"
return message, changed
def present(module, dest, regexp, line, insertafter, insertbe
|
hzlf/openbroadcast.org
|
website/tools/spurl/templatetags/spurl.py
|
Python
|
gpl-3.0
| 12,763
| 0.00047
|
import re
from django.conf import settings
from django.utils.html import escape
from django.utils.encoding import smart_str
from urlobject import URLObject
from urlobject.query_string import QueryString
from django.template import StringOrigin
from django.template.base import Lexer, Parser
from django.template.defaulttags import kwarg_re
from django.template import Template, Library, Node, TemplateSyntaxError
try:
from builtins import str
except ImportError:
str = unicode
register = Library()
TRUE_RE = re.compile(r"^(true|on)$", flags=re.IGNORECASE)
TEMPLATE_DEBUG = getattr(settings, "TEMPLATE_DEBUG", False)
def convert_to_boolean(string_or_boolean):
if isinstance(string_or_boolean, bool):
return string_or_boolean
if isinstance(string_or_boolean, str):
return bool(TRUE_RE.match(string_or_boolean))
class SpurlURLBuilder(object):
def __init__(self, args, context, tags, filters):
self.args = args
self.context = context
self.tags = tags
self.filters = filters
self.autoescape = self.context.autoescape
self.url = URLObject()
def build(self):
for argument, value in self.args:
self.handle_argument(argument, value)
try:
self.set_sensible_defaults()
            url = str(self.url)  # str is aliased to unicode on Python 2 above
if self.autoescape:
url = escape(url)
url = url.replace("%20", "+")
url = url.replace("%2C", ",")
url = url.replace("&", "&")
except Exception as e:
url = self.url
return url
def handle_argument(self, argument, value):
argument = smart_str(argument, "ascii")
handler_name = "handle_%s" % argument
handler = getattr(self, handler_name, None)
if handler is not None:
value = value.resolve(self.context)
handler(value)
def handle_base(self, value):
base = self.prepare_value(value)
self.url = URLObject(base)
def handle_secure(self, value):
is_secure = convert_to_boolean(value)
scheme = "https" if is_secure else "http"
self.url = self.url.with_scheme(scheme)
def handle_query(self, value):
query = self.prepare_value(value)
if isinstance(query, dict):
query = QueryString().set_params(**query)
self.url = self.url.with_query(QueryString(query))
def handle_query_from(self, value):
url = URLObject(value)
self.url = self.url.with_query(url.query)
def handle_add_query(self, value):
query_to_add = self.prepare_value(value)
if isinstance(query_to_add, str):
query_to_add = QueryString(query_to_add).dict
self.url = self.url.add_query_params(**query_to_add)
def handle_add_query_from(self, value):
url = URLObject(value)
self.url = self.url.add_query_params(**url.query.dict)
def handle_set_query(self, value):
query_to_set = self.prepare_value(value)
if isinstance(query_to_set, str):
query_to_set = QueryString(query_to_set).dict
self.url = self.url.set_query_params(**query_to_set)
def handle_active_query(self, value):
query_to_toggle = self.prepare_value(value)
if isinstance(query_to_toggle, str):
query_to_toggle = QueryString(query_to_toggle).dict
current_query = self.url.query.dict
for key, value in query_to_toggle.items():
if key in current_query and value in current_query[key]:
self.url = True
else:
self.url = False
def handle_set_query_from(self, value):
url = URLObject(value)
self.url = self.url.set_query_params(**url.query.dict)
def handle_remove_query_param(self, value):
self.url = self.url.del_query_param(value)
def handle_toggle_query(self, value):
query_to_toggle = self.prepare_value(value)
if isinstance(query_to_toggle, str):
query_to_toggle = QueryString(query_to_toggle).dict
current_query = self.url.query.dict
for key, value in query_to_toggle.items():
if isinstance(value, str):
value = value.split(",")
first, second = value
if key in current_query and first in current_query[key]:
self.url = self.url.set_query_param(key, second)
else:
self.url = self.url.set_query_param(key, first)
def handle_trigger_query(self, value):
query_to_trigger = self.prepare_value(value)
if isinstance(query_to_trigger, str):
query_to_trigger = QueryString(query_to_trigger).dict
current_query = self.url.query.dict
for key, value in query_to_trigger.items():
if isinstance(value, str):
value = value
if key in current_query and value in current_query[key]:
# unset
self.url = self.url.del_query_param(key)
else:
# set
self.url = self.url.set_query_param(key, value)
def handle_trigger_mquery(self, value):
query_to_trigger = self.prepare_value(value)
if isinstance(query_to_trigger, str):
query_to_trigger = QueryString(query_to_trigger).dict
current_query = self.url.query.dict
for key, value in query_to_trigger.items():
# exact match of query -> unset it
if key in current_query and query_to_trigger[key] == current_query[key]:
self.url = self.url.del_query_param(key)
return
# check if current query has multiple items
try:
ext = current_query[key]
ext = ext.split(",")
except Exception as e:
ext = None
if ext and len(ext) > 1:
if key in current_query and value in ext:
# we have a key-match, so remove it from the string
ext = [x for x in ext if x != value]
else:
# no key match, so add it to the string
ext.append(value)
ext.sort()
self.url = self.url.set_query_param(key, ",".join(ext))
elif ext and len(ext) == 1:
# param already here > append
ext.append(value)
ext.sort()
ext = list(set(ext))
self.url = self.url.set_query_param(key, ",".join(ext))
else:
if isinstance(value, str):
value = value
if key in current_query and value in current_query[key]:
# unset
pass
# self.url = self.url.del_query_param(key)
else:
# set
self.url = self.url.set_query_param(key, value)
def handle_active_mquery(self, value):
active = None
query_to_trigger = self.prepare_value(value)
if isinstance(query_to_trigger, str):
query_to_trigger = QueryString(query_to_trigger).dict
        current_query = self.url.query.dict
        for key, value in query_to_trigger.items():
# exact match of query -> unset it
if key in current_query and query_to_trigger[key] == current_query[key]:
active = True
# check if current query has multiple items
try:
ext = current_query[key]
ext = ext.split(",")
except Exception as e:
ext = None
            if ext and len(ext) > 1:
if key in current_query and value in ext:
active = True
self.url = active
def handle_scheme(self, value):
self.url = self.url.with_scheme(value)
def handle_scheme_from(self, value):
url = URLObject(value)
self.url = self.url.with_scheme(url.scheme)
def handle_host(self, value):
host = self.prepare_value(value)
self.url = self.url.wi
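def _toggle_query_sketch():
    # A minimal sketch of the toggle semantics in handle_toggle_query,
    # using the same urlobject API as above; the URL and parameter
    # values are illustrative.
    from urlobject import URLObject
    url = URLObject('http://example.com/?sort=asc')
    key, first, second = 'sort', 'asc', 'desc'
    current = url.query.dict
    if key in current and first in current[key]:
        url = url.set_query_param(key, second)
    else:
        url = url.set_query_param(key, first)
    return url  # http://example.com/?sort=desc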
|
drufat/sympy
|
sympy/functions/elementary/tests/test_integers.py
|
Python
|
bsd-3-clause
| 6,826
| 0.001025
|
from sympy import AccumBounds, Symbol, floor, nan, oo, E, symbols, ceiling, pi, \
Rational, Float, I, sin, exp, log, factorial, frac
from sympy.utilities.pytest import XFAIL
x = Symbol('x')
i = Symbol('i', imaginary=True)
y = Symbol('y', real=True)
k, n = symbols('k,n', integer=True)
def test_floor():
assert floor(nan) == nan
assert floor(oo) == oo
assert floor(-oo) == -oo
assert floor(0) == 0
assert floor(1) == 1
assert floor(-1) == -1
assert floor(E) == 2
assert floor(-E) == -3
assert floor(2*E) == 5
assert floor(-2*E) == -6
assert floor(pi) == 3
assert floor(-pi) == -4
assert floor(Rational(1, 2)) == 0
assert floor(-Rational(1, 2)) == -1
assert floor(Rational(7, 3)) == 2
assert floor(-Rational(7, 3)) == -3
assert floor(Float(17.0)) == 17
assert floor(-Float(17.0)) == -17
assert floor(Float(7.69)) == 7
assert floor(-Float(7.69)) == -8
assert floor(I) == I
assert floor(-I) == -I
e = floor(i)
assert e.func is floor and e.args[0] == i
assert floor(oo*I) == oo*I
assert floor(-oo*I) == -oo*I
assert floor(2*I) == 2*I
assert floor(-2*I) == -2*I
assert floor(I/2) == 0
assert floor(-I/2) == -I
assert floor(E + 17) == 19
assert floor(pi + 2) == 5
assert floor(E + pi) == floor(E + pi)
assert floor(I + pi) == floor(I + pi)
assert floor(floor(pi)) == 3
assert floor(floor(y)) == floor(y)
assert floor(floor(x)) == floor(floor(x))
assert floor(x) == floor(x)
assert floor(2*x) == floor(2*x)
assert floor(k*x) == floor(k*x)
assert floor(k) == k
assert floor(2*k) == 2*k
assert floor(k*n) == k*n
assert floor(k/2) == floor(k/2)
assert floor(x + y) == floor(x + y)
assert floor(x + 3) == floor(x + 3)
assert floor(x + k) == floor(x + k)
assert floor(y + 3) == floor(y) + 3
assert floor(y + k) == floor(y) + k
assert floor(3 + I*y + pi) == 6 + floor(y)*I
assert floor(k + n) == k + n
assert floor(x*I) == floor(x*I)
assert floor(k*I) == k*I
assert floor(Rational(23, 10) - E*I) == 2 - 3*I
assert floor(sin(1)) == 0
assert floor(sin(-1)) == -1
assert floor(exp(2)) == 7
assert floor(log(8)/log(2)) != 2
assert int(floor(log(8)/log(2)).evalf(chop=True)) == 3
assert floor(factorial(50)/exp(1)) == \
11188719610782480504630258070757734324011354208865721592720336800
assert (floor(y) <= y) == True
assert (floor(y) > y) == False
assert (floor(x) <= x).is_Relational # x could be non-real
assert (floor(x) > x).is_Relational
assert (floor(x) <= y).is_Relational # arg is not same as rhs
assert (floor(x) > y).is_Relational
def test_ceiling():
assert ceiling(nan) == nan
assert ceiling(oo) == oo
assert ceiling(-oo) == -oo
assert ceiling(0) == 0
assert ceiling(1) == 1
assert ceiling(-1) == -1
assert ceiling(E) == 3
assert ceiling(-E) == -2
assert ceiling(2*E) == 6
assert ceiling(-2*E) == -5
assert ceiling(pi) == 4
assert ceiling(-pi) == -3
assert ceiling(Rational(1, 2)) == 1
assert ceiling(-Rational(1, 2)) == 0
assert ceiling(Rational(7, 3)) == 3
assert ceiling(-Rational(7, 3)) == -2
assert ceiling(Float(17.0)) == 17
assert ceiling(-Float(17.0)) == -17
assert ceiling(Float(7.69)) == 8
assert ceiling(-Float(7.69)) == -7
assert ceiling(I) == I
assert ceiling(-I) == -I
e = ceiling(i)
assert e.func is ceiling and e.args[0] == i
assert ceiling(oo*I) == oo*I
assert ceiling(-oo*I) == -oo*I
assert ceiling(2*I) == 2*I
assert ceiling(-2*I) == -2*I
assert ceiling(I/2) == I
assert ceiling(-I/2) == 0
assert ceiling(E + 17) == 20
assert ceiling(pi + 2) == 6
assert ceiling(E + pi) == ceiling(E + pi)
assert ceiling(I + pi) == ceiling(I + pi)
assert ceiling(ceiling(pi)) == 4
assert ceiling(ceiling(y)) == ceiling(y)
assert ceiling(ceiling(x)) == ceiling(ceiling(x))
assert ceiling(x) == ceiling(x)
assert ceiling(2*x) == ceiling(2*x)
assert ceiling(k*x) == ceiling(k*x)
assert ceiling(k) == k
assert ceiling(2*k) == 2*k
assert ceiling(k*n) == k*n
assert ceiling(k/2) == ceiling(k/2)
assert ceiling(x + y) == ceiling(x + y)
assert ceiling(x + 3) == ceiling(x + 3)
assert ceiling(x + k) == ceiling(x + k)
assert ceiling(y + 3) == ceiling(y) + 3
assert ceiling(y + k) == ceiling(y) + k
assert ceiling(3 + pi + y*I) == 7 + ceiling(y)*I
assert ceiling(k + n) == k + n
assert ceiling(x*I) == ceiling(x*I)
assert ceiling(k*I) == k*I
assert ceiling(Rational(23, 10) - E*I) == 3 - 2*I
assert ceiling(sin(1)) == 1
assert ceiling(sin(-1)) == 0
assert ceiling(exp(2)) == 8
assert ceiling(-log(8)/log(2)) != -2
assert int(ceiling(-log(8)/log(2)).evalf(chop=True)) == -3
assert ceiling(factorial(50)/exp(1)) == \
11188719610782480504630258070757734324011354208865721592720336801
assert (ceiling(y) >= y) == True
    assert (ceiling(y) < y) == False
assert (ceiling(x) >= x).is_Relational # x could be non-real
assert (ceiling(x) < x).is_Relational
assert (ceiling(x) >= y).is_Relational # arg is not same as rhs
assert (ceiling(x) < y).is_Relational
def test_frac():
assert isinstance(frac(x), frac)
assert frac(oo) == AccumBounds(0, 1)
assert frac(-oo) == AccumBounds(0, 1)
assert frac(n) == 0
assert frac(nan) == nan
    assert frac(Rational(4, 3)) == Rational(1, 3)
assert frac(-Rational(4, 3)) == Rational(2, 3)
r = Symbol('r', real=True)
assert frac(I*r) == I*frac(r)
assert frac(1 + I*r) == I*frac(r)
assert frac(0.5 + I*r) == 0.5 + I*frac(r)
assert frac(n + I*r) == I*frac(r)
assert frac(n + I*k) == 0
assert frac(x + I*x) == frac(x + I*x)
assert frac(x + I*n) == frac(x)
assert frac(x).rewrite(floor) == x - floor(x)
def test_series():
x, y = symbols('x,y')
assert floor(x).nseries(x, y, 100) == floor(y)
assert ceiling(x).nseries(x, y, 100) == ceiling(y)
assert floor(x).nseries(x, pi, 100) == 3
assert ceiling(x).nseries(x, pi, 100) == 4
assert floor(x).nseries(x, 0, 100) == 0
assert ceiling(x).nseries(x, 0, 100) == 1
assert floor(-x).nseries(x, 0, 100) == -1
assert ceiling(-x).nseries(x, 0, 100) == 0
@XFAIL
def test_issue_4149():
assert floor(3 + pi*I + y*I) == 3 + floor(pi + y)*I
assert floor(3*I + pi*I + y*I) == floor(3 + pi + y)*I
assert floor(3 + E + pi*I + y*I) == 5 + floor(pi + y)*I
def test_issue_11207():
assert floor(floor(x)) == floor(x)
assert floor(ceiling(x)) == ceiling(x)
assert ceiling(floor(x)) == floor(x)
assert ceiling(ceiling(x)) == ceiling(x)
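def test_identity_spot_checks():
    # Quick numeric illustration of identities the assertions above rely
    # on: frac(x) == x - floor(x) and ceiling(x) == -floor(-x).
    v = Rational(7, 3)
    assert frac(v) == v - floor(v) == Rational(1, 3)
    assert ceiling(v) == -floor(-v) == 3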
|
angelblue05/Embytest.Kodi
|
resources/lib/kodimonitor.py
|
Python
|
gpl-2.0
| 8,856
| 0.0035
|
# -*- coding: utf-8 -*-
#################################################################################################
import json
import xbmc
import xbmcgui
import clientinfo
import downloadutils
import embydb_functions as embydb
import playbackutils as pbutils
import utils
#################################################################################################
class KodiMonitor(xbmc.Monitor):
def __init__(self):
self.clientInfo = clientinfo.ClientInfo()
self.addonName = self.clientInfo.getAddonName()
self.doUtils = downloadutils.DownloadUtils()
self.logMsg("Kodi monitor started.", 1)
def logMsg(self, msg, lvl=1):
self.className = self.__class__.__name__
utils.logMsg("%s %s" % (self.addonName, self.className), msg, lvl)
def onScanStarted(self, library):
self.logMsg("Kodi library scan %s running." % library, 2)
if library == "video":
utils.window('emby_kodiScan', value="true")
def onScanFinished(self, library):
self.logMsg("Kodi library scan %s finished." % library, 2)
if library == "video":
utils.window('emby_kodiScan', clear=True)
def onSettingsChanged(self):
# Monitor emby settings
# Review reset setting at a later time, need to be adjusted to account for initial setup
# changes.
'''currentPath = utils.settings('useDirectPaths')
if utils.window('emby_pluginpath') != currentPath:
# Plugin path value changed. Offer to reset
self.logMsg("Changed to playback mode detected", 1)
utils.window('emby_pluginpath', value=currentPath)
resp = xbmcgui.Dialog().yesno(
heading="Playback mode change detected",
line1=(
"Detected the playback mode has changed. The database "
"needs to be recreated for the change to be applied. "
"Proceed?"))
if resp:
utils.reset()'''
currentLog = utils.settings('logLevel')
if utils.window('emby_logLevel') != currentLog:
# The log level changed, set new prop
self.logMsg("New log level: %s" % currentLog, 1)
utils.window('emby_logLevel', value=currentLog)
def onNotification(self, sender, method, data):
doUtils = self.doUtils
if method not in ("Playlist.OnAdd"):
self.logMsg("Method: %s Data: %s" % (method, data), 1)
if data:
data = json.loads(data,'utf-8')
if method == "Player.OnPlay":
# Set up report progress for emby playback
item = data.get('item')
try:
kodiid = item['id']
type = item['type']
except (KeyError, TypeError):
self.logMsg("Item is invalid for playstate update.", 1)
else:
if ((utils.settings('useDirectPaths') == "1" and not type == "song") or
(type == "song" and utils.settings('enableMusic') == "true")):
# Set up properties for player
embyconn = utils.kodiSQL('emby')
embycursor = embyconn.cursor()
emby_db = embydb.Embydb_Functions(embycursor)
emby_dbitem = emby_db.getItem_byKodiId(kodiid, type)
try:
itemid = emby_dbitem[0]
except TypeError:
self.logMsg("No kodiid returned.", 1)
else:
url = "{server}/emby/Users/{UserId}/Items/%s?format=json" % itemid
result = doUtils.downloadUrl(url)
self.logMsg("Item: %s" % result, 2)
playurl = None
count = 0
while not playurl and count < 2:
try:
playurl = xbmc.Player().getPlayingFile()
except RuntimeError:
count += 1
xbmc.sleep(200)
else:
listItem = xbmcgui.ListItem()
playback = pbutils.PlaybackUtils(result)
if type == "song" and utils.settings('streamMusic') == "true":
utils.window('emby_%s.playmethod' % playurl,
value="DirectStream")
else:
utils.window('emby_%s.playmethod' % playurl,
value="DirectPlay")
# Set properties for player.py
playback.setProperties(playurl, listItem)
finally:
embycursor.close()
elif method == "VideoLibrary.OnUpdate":
# Manually marking as watched/unwatched
playcount = data.get('playcount')
item = data.get('item')
try:
kodiid = item['id']
type = item['type']
except (KeyError, TypeError):
self.logMsg("Item is invalid for playstate update.", 1)
else:
# Send notification to the server.
embyconn = utils.kodiSQL('emby')
embycursor = embyconn.cursor()
emby_db = embydb.Embydb_Functions(embycursor)
emby_dbitem = emby_db.getItem_byKodiId(kodiid, type)
try:
itemid = emby_dbitem[0]
except TypeError:
self.logMsg("Could not find itemid in emby database.", 1)
else:
# Stop from manually marking as watched unwatched, with actual playback.
if utils.window('emby_skipWatched%s' % itemid) == "true":
# property is set in player.py
utils.window('emby_skipWatched%s' % itemid, clear=True)
else:
# notify the server
url = "{server}/emby/Users/{UserId}/PlayedItems/%s?format=json" % itemid
if playcount != 0:
                                doUtils.downloadUrl(url, type="POST")
self.logMsg("Mark as watched for itemid: %s" % itemid, 1)
else:
doUtils.downloadUrl(url, type="DELETE")
self.logMsg("Mark as
|
unwatched for itemid: %s" % itemid, 1)
finally:
embycursor.close()
elif method == "VideoLibrary.OnRemove":
# Removed function, because with plugin paths + clean library, it will wipe
# entire library if user has permissions. Instead, use the emby context menu available
# in Isengard and higher version
pass
'''try:
kodiid = data['id']
type = data['type']
except (KeyError, TypeError):
self.logMsg("Item is invalid for emby deletion.", 1)
else:
# Send the delete action to the server.
embyconn = utils.kodiSQL('emby')
embycursor = embyconn.cursor()
emby_db = embydb.Embydb_Functions(embycursor)
emby_dbitem = emby_db.getItem_byKodiId(kodiid, type)
try:
itemid = emby_dbitem[0]
except TypeError:
self.logMsg("Could not find itemid in emby database.", 1)
else:
if utils.settings('skipContextMenu') != "true":
resp = xbmcgui.Dialog().yesno(
heading="Confirm delete",
line1="Delete file on Emby Server?")
|
Mattie432/deluge-rbb
|
browsebutton/core.py
|
Python
|
gpl-2.0
| 5,012
| 0.003591
|
#
# core.py
#
# Copyright (C) 2014 dredkin <[email protected]>
#
# Basic plugin template created by:
# Copyright (C) 2008 Martijn Voncken <[email protected]>
# Copyright (C) 2007-2009 Andrew Resch <[email protected]>
# Copyright (C) 2009 Damien Churchill <[email protected]>
#
# Deluge is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# deluge is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with deluge. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
#
from deluge.log import LOG as log
from deluge.plugins.pluginbase import CorePluginBase
import deluge.component as component
import deluge.configmanager
from deluge.core.rpcserver import export
import os
import locale
import pkg_resources
import gettext
def windows():
return os.name == "nt"
if windows():
import win32api
DEFAULT_PREFS = {
#Default to empty to have no specified root dir.
"RootDirPath":"",
"DisableTraversal":"false"
}
UTF8 = 'UTF-8'
CURRENT_LOCALE = locale.getdefaultlocale()[1]
if CURRENT_LOCALE is None:
CURRENT_LOCALE = UTF8
class Core(CorePluginBase):
def enable(self):
self.config = deluge.configmanager.ConfigManager("browsebutton.conf", DEFAULT_PREFS)
def disable(self):
#self.config.save()
pass
def update(self):
pass
def drives_list(self):
if windows():
drives = win32api.GetLogicalDriveStrings()
return drives.split('\000')[:-1]
else:
return "/"
def subfolders_list(self, absolutepath):
subfolders = []
        try:
            entries = os.listdir(absolutepath)
        except OSError:
            entries = []
        for f in entries:
if os.path.isdir(os.path.join(absolutepath,f)):
f2 = f.decode(CURRENT_LOCALE).encode(UTF8)
subfolders.append(f2)
return subfolders
def is_root_folder(self, folder):
return os.path.dirname(folder) == folder
@export
    def save_config(self):
"""Saves the config"""
self.config.save()
log.debug("RBB: config saved")
@export
def set_config(self, config):
"""Sets the config dictionary"""
log.debug("RBB: set_config")
for key in config.keys():
self.config[key] = config[key]
log.debug("RBB: added history "+str(key)+"->"+str(config[key]))
self.save_config()
@export
def get_config(self):
"""Returns the config dictionary"""
log.debug("RBB: config assigned")
return self.config.config
@export
def serverlog(self, line):
log.debug(line)
@export
def get_folder_list(self, folder, subfolder):
"""Returns the list of subfolders for specified folder on server"""
error = ""
if folder == "":
folder = os.path.expanduser("~")
else:
folder = folder.encode(CURRENT_LOCALE)
log.debug("RBB:native folder"+folder)
log.debug("RBB:orig subfolder"+subfolder)
subfolder = subfolder.encode(CURRENT_LOCALE)
newfolder = os.path.join(folder,subfolder)
absolutepath = os.path.normpath(newfolder)
if not os.path.isdir(absolutepath):
log.info("RBB:NOT A FOLDER!:"+absolutepath+" (normalized from "+newfolder+")")
error = "Cannot List Contents of "+absolutepath
absolutepath = os.path.expanduser("~")
if windows():
isroot = self.is_root_folder(folder) and (subfolder == "..")
else:
isroot = self.is_root_folder(absolutepath)
if windows() and isroot:
subfolders = self.drives_list()
absolutepath = ""
else:
subfolders = self.subfolders_list(absolutepath)
return [absolutepath.decode(CURRENT_LOCALE).encode(UTF8), isroot, subfolders, error]
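# A standalone sketch of the directory walk get_folder_list performs,
# stripped of the Deluge/RPC plumbing and locale handling; the helper
# name is illustrative.
def _walk_subfolders(folder, subfolder):
    path = os.path.normpath(os.path.join(folder or os.path.expanduser("~"), subfolder))
    if not os.path.isdir(path):
        path = os.path.expanduser("~")
    subs = sorted(f for f in os.listdir(path)
                  if os.path.isdir(os.path.join(path, f)))
    return path, subs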
|
osuripple/pep.py
|
events/changeMatchSettingsEvent.py
|
Python
|
agpl-3.0
| 2,905
| 0.027893
|
import random
from common import generalUtils
from common.log import logUtils as log
from constants import clientPackets
from constants import matchModModes
from constants import matchTeamTypes
from constants import matchTeams
from constants import slotStatuses
from objects import glob
def handle(userToken, packetData):
# Read new settings
packetData = clientPackets.changeMatchSettings(packetData)
# Get match ID
matchID = userToken.matchID
# Make sure the match exists
if matchID not in glob.matches.matches:
return
# Host check
with glob.matches.matches[matchID] as match:
if userToken.userID != match.hostUserID:
return
# Some dank memes easter egg
		memeTitles = [
"RWC 2020",
"Fokabot is a duck",
"Dank memes",
"1337ms Ping",
"Iscriviti a Xenotoze",
"...e i marò?",
"Superman dies",
"The brace is on fire",
"print_foot()",
|
"#FREEZEBARKEZ",
"Ripple devs are actually cats",
"Thank Mr Shaural",
"NEVER GIVE UP",
"T I E D W I T H U N I T E D",
"HIGHEST HDHR LOBBY OF ALL TIME",
"This is gasoline and I set myself on fire",
"Everyone is cheating apparently",
"Kurwa mac",
"TATOE",
"This is not your drama landfill.",
"I like cheese",
"NYO IS NOT A CAT HE IS A DO(N)G",
"Datingu startuato"
]
# Set match name
match.matchName = packetData["matchName"] if packetData["matchName"] != "meme" else random.choice(memeTitles)
# Update match settings
match.inProgress = packetData["inProgress"]
if packetData["matchPassword"] != "":
match.matchPassword = generalUtils.stringMd5(packetData["matchPassword"])
else:
match.matchPassword = ""
match.beatmapName = packetData["beatmapName"]
match.beatmapID = packetData["beatmapID"]
match.hostUserID = packetData["hostUserID"]
match.gameMode = packetData["gameMode"]
oldBeatmapMD5 = match.beatmapMD5
oldMods = match.mods
oldMatchTeamType = match.matchTeamType
match.mods = packetData["mods"]
match.beatmapMD5 = packetData["beatmapMD5"]
match.matchScoringType = packetData["scoringType"]
match.matchTeamType = packetData["teamType"]
match.matchModMode = packetData["freeMods"]
# Reset ready if needed
if oldMods != match.mods or oldBeatmapMD5 != match.beatmapMD5:
match.resetReady()
# Reset mods if needed
if match.matchModMode == matchModModes.NORMAL:
# Reset slot mods if not freeMods
match.resetMods()
else:
# Reset match mods if freemod
match.mods = 0
# Initialize teams if team type changed
if match.matchTeamType != oldMatchTeamType:
match.initializeTeams()
# Force no freemods if tag coop
if match.matchTeamType == matchTeamTypes.TAG_COOP or match.matchTeamType == matchTeamTypes.TAG_TEAM_VS:
match.matchModMode = matchModModes.NORMAL
# Send updated settings
match.sendUpdates()
# Console output
log.info("MPROOM{}: Updated room settings".format(match.matchID))
|
jackuess/pirateplay.se
|
lib/pirateplay/lib/services/ur.py
|
Python
|
gpl-3.0
| 1,161
| 0.046512
|
from ..rerequest import TemplateRequest
init_req = TemplateRequest(
re = r'(http://)?(www\.)?(?P<domain>ur(play)?)\.se/(?P<req_url>.+)',
encode_vars = lambda v: { 'req_url': 'http://%(domain)s.se/%(req_url)s' % v } )
hls = { 'title': 'UR-play', 'url': 'http://urplay.se/', 'feed_url': 'http://urplay.se/rss',
'items': [init_req,
TemplateRequest(
re = r'file_html5":\s?"(?P<final_url>[^"]+)".*?"subtitles":\s?"(?P<subtitles>[^",]*)',
encode_vars = lambda v: { 'final_url': ('http://130.242.59.75/%(final_url)s/playlist.m3u8' % v).replace('\\', ''),
'suffix-hint': 'mp4',
				'subtitles': v.get('subtitles', '').replace('\\', '') % v } )] }
rtmp = { 'items': [init_req,
TemplateRequest(
re = r'file_flash":\s?"(?P<final_url>[^"]+\.(?P<ext>mp[34]))".*?"subtitles":\s?"(?P<subtitles>[^",]*)',
encode_vars = lambda v: { 'final_url': ('rtmp://130.242.59.75/ondemand playpath=%(ext)s:/%(final_url)s app=ondemand' % v).replace('\\', ''),
'suffix-hint': 'flv',
'rtmpdump-realtime': True,
'subtitles': v.get('subtitles', '').replace('\\', '') % v } )] }
services = [hls, rtmp]
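# How the init_req pattern above resolves, in plain re terms: the named
# groups feed encode_vars, which rebuilds a canonical request URL.
# Illustrative only; the input URL is an example.
def _demo_init_req():
    import re
    pattern = re.compile(r'(http://)?(www\.)?(?P<domain>ur(play)?)\.se/(?P<req_url>.+)')
    v = pattern.match('http://urplay.se/Produkter/12345').groupdict()
    result = {'req_url': 'http://%(domain)s.se/%(req_url)s' % v}
    return result  # {'req_url': 'http://urplay.se/Produkter/12345'}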
|
OSSystems/lava-server
|
dashboard_app/tests/models/attachment.py
|
Python
|
agpl-3.0
| 3,410
| 0
|
# Copyright (C) 2010 Linaro Limited
#
# Author: Zygmunt Krynicki <[email protected]>
#
# This file is part of Launch Control.
#
# Launch Control is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Launch Control is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Launch Control. If not, see <http://www.gnu.org/licenses/>.
"""
Tests for the Attachment model
"""
from django.contrib.contenttypes import generic
from django.core.files.base import ContentFile
from django.db import models
from django.test import TestCase
from dashboard_app.models import Attachment
class ModelWithAttachments(models.Model):
"""
Test model that uses attachments
"""
attachments = generic.GenericRelation(Attachment)
class Meta:
# This requires a bit of explanation. Traditionally we could add new
# models inside test modules and they would be picked up by django and
# synchronized (created in the test database) as a part of the test
# provisioning process.
        # Since we started using south, synchronization is no longer occurring
# for the 'dashboard_app' application. This caused some test failures
        # such as any tests that depended on the existence of this model.
# As a workaround we artificially "stick" this model into the only
# application that we can count on to exist _and_ not use south as well
# -- that is south itself.
# This way the test model gets synchronized when south is synchronized
# and all the test code below works as expected.
app_label = "south"
class AttachmentTestCase(TestCase):
_CONTENT = "text"
_FILENAME = "filename"
def setUp(self):
self.obj = ModelWithAttachments.objects.create()
def test_attachment_can_be_added_to_models(self):
attachment = self.obj.attachments.create(
content_filename=self._FILENAME, content=None)
self.assertEqual(attachment.content_object, self.obj)
def test_attachment_can_be_accessed_via_model(self):
self.obj.attachments.create(
content_filename=self._FILENAME, content=None)
self.assertEqual(self.obj.attachments.count(), 1)
retrieved_attachment = self.obj.attachments.all()[0]
self.assertEqual(retrieved_attachment.content_object, self.obj)
def test_attachment_stores_data(self):
attachment = self.obj.attachments.create(
content_filename=self._FILENAME, content=None)
attachment.content.save(
self._FILENAME,
ContentFile(self._CONTENT))
self.assertEqual(attachment.content_filename, self._FILENAME)
attachment.content.open()
try:
self.assertEqual(attachment.content.read(), self._CONTENT)
finally:
attachment.content.close()
attachment.content.delete(save=False)
def test_unicode(self):
obj = Attachment(content_filename="test.json")
self.assertEqual(unicode(obj), "test.json")
|
mmb90/dftintegrate
|
tests/fourier/extractvaspdata/test_extractvaspdata.py
|
Python
|
mit
| 1,597
| 0
|
#!/usr/bin/env python3
import unittest
from tests import testfunctions
from dftintegrate.fourier import vaspdata
class TestExtractingVASPDataToDatFiles(unittest.TestCase,
testfunctions.TestFunctions):
def setUp(self):
print('Testing extracting VASP data to .dat files...')
self.cases = [str(x) for x in range(1, 3)]
self.root = './tests/fourier/extractvaspdata/'
def test_runtestcases(self):
for case in self.cases:
print(' Testing case '+case+'...')
vaspdata.VASPData(self.root+'tocheck/test'+case)
kpts_eigenvals_ans = self.readfile(case, 'answer',
'kpts_eigenvals')
kpts_eigenvals_tocheck = self.readfile(case, 'tocheck',
'kpts_eigenvals')
self.assertEqual(kpts_eigenvals_ans, kpts_eigenvals_tocheck,
msg='kpts_eigenvals case '+case)
symops_trans_ans = self.readfile(case, 'answer',
'symops_trans')
            symops_trans_tocheck = self.readfile(case, 'tocheck',
                                                 'symops_trans')
self.assertEqual(symops_trans_ans, symops_trans_tocheck,
msg='symops_trans case '+case)
kmax_ans = self.readfile(case, 'answer', 'kmax')
kmax_tocheck = self.readfile(case, 'tocheck', 'kmax')
            self.assertEqual(kmax_ans, kmax_tocheck, msg='kmax case '+case)
|
2Cubed/ProjectEuler
|
euler/__init__.py
|
Python
|
mit
| 584
| 0
|
"""Solve the Project Euler problems using functional Python.
https://projecteuler.net/archives
"""
from importlib import import_module
from os import listdir
from os.path import abspath, dirname
from re import match
SOLVED = set(
int(m.group(1))
for f in listdir(abspath(dirname(__file__)))
for m in (match(r"^p(\d{3})\.py$", f),) if m
)
def compute(problem: int):
"""Compute the answer to problem `problem`."""
assert problem in SOLVED, "Problem currently unsolved."
module = import_module("eu
|
ler.p{:03d}".format(problem))
return module.compute()
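# Example of the per-problem module contract compute() relies on
# (illustrative; real solutions live in files named euler/pNNN.py,
# each exposing a module-level compute() that returns the answer):
def _example_compute():
    """Sum of all multiples of 3 or 5 below 1000 (Project Euler #1)."""
    return sum(n for n in range(1000) if n % 3 == 0 or n % 5 == 0)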
|
kinetifex/maya-impress
|
examples/options_example.py
|
Python
|
bsd-3-clause
| 2,324
| 0.027539
|
import random
import pymel.core as pm
from impress import models, register
def randomTransform( translate=False, translateAmount=1.0, translateAxis=(False,False,False),
rotate=False, rotateAmount=1.0, rotateAxis=(False,False,False),
scale=False, scaleAmount=1.0, scaleAxis=(False,False,False) ):
"""
Transforms selected objects with random values.
"""
objects = pm.ls( selection=True, type='transform')
assert len(objects), 'randomTransform requires at least 1 selected transform object.'
for object in objects:
if translate:
offset = map(lambda axis: random.uniform( -translateAmount, translateAmount )*float(axis), translateAxis)
object.setTranslation( offset, relative=True )
        if rotate:
            offset = map(lambda axis: random.uniform( -rotateAmount, rotateAmount )*float(axis), rotateAxis)
object.setRotation( offset, relative=True )
        if scale:
            offset = map(lambda axis: 1 + ( random.uniform( -scaleAmount, scaleAmount )*float(axis) ), scaleAxis)
object.setScale( offset )
    print '# Results: %i objects randomized. #' % len(objects)
class RandomTransformOptions( models.OptionModel ):
translate = models.CheckBox( default=1, ann='about the checkbox' )
translateAmount = models.FloatSlider( default=1, precision=3, requires=(translate, 1) )
translateAxis = models.CheckBox( labels=['X', 'Y', 'Z'], default=[1, 1, 1], requires=(translate, 1) )
sep1 = models.Separator( style='in', height=14 )
rotate = models.CheckBox( default=1, ann='about the checkbox' )
rotateAmount = models.FloatSlider( default=1, precision=3, requires=(rotate, 1) )
rotateAxis = models.CheckBox( labels=['X', 'Y', 'Z'], default=[1, 1, 1], requires=(rotate, 1) )
sep2 = models.Separator( style='in', height=14 )
scale = models.CheckBox( default=1, ann='about the checkbox' )
scaleAmount = models.FloatSlider( default=1, precision=3, requires=(scale, 1) )
scaleAxis = models.CheckBox( labels=['X', 'Y', 'Z'], default=[1, 1, 1], requires=(scale, 1) )
class Meta:
button_label = 'Randomize'
performRandomTransform = register.PerformCommand( randomTransform, RandomTransformOptions )
performRandomTransform(1)
|
viraptor/cryptography
|
cryptography/hazmat/primitives/asymmetric/dsa.py
|
Python
|
apache-2.0
| 3,615
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import six
from cryptography import utils
def generate_parameters(key_size, backend):
return backend.generate_dsa_parameters(key_size)
def generate_private_key(key_size, backend):
return backend.generate_dsa_private_key_and_parameters(key_size)
def _check_dsa_parameters(parameters):
if utils.bit_length(parameters.p) not in [1024, 2048, 3072]:
raise ValueError("p must be exactly 1024, 2048, or 3072 bits long")
if utils.bit_length(parameters.q) not in [160, 256]:
raise ValueError("q must be exactly 160 or 256 bits long")
if not (1 < parameters.g < parameters.p):
raise ValueError("g, p don't satisfy 1 < g < p.")
def _check_dsa_private_numbers(numbers):
parameters = numbers.public_numbers.parameter_numbers
_check_dsa_parameters(parameters)
if numbers.x <= 0 or numbers.x >= parameters.q:
raise ValueError("x must be > 0 and < q.")
if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p):
raise ValueError("y must be equal to (g ** x % p).")
class DSAParameterNumbers(object):
def __init__(self, p, q, g):
if (
not isinstance(p, six.integer_types) or
not isinstance(q, six.integer_types) or
not isinstance(g, six.integer_types)
):
raise TypeError(
"DSAParameterNumbers p, q, and g arguments must be integers."
)
self._p = p
self._q = q
self._g = g
p = utils.read_only_property("_p")
q = utils.read_only_property("_q")
g = utils.read_only_property("_g")
def parameters(self, backend):
return backend.load_dsa_parameter_numbers(self)
class DSAPublicNumbers(object):
def __init__(self, y, parameter_numbers):
if not isinstance(y, six.integer_types):
raise TypeError("DSAPublicNumbers y argument must be an integer.")
if not isinstance(parameter_numbers, DSAParameterNumbers):
raise TypeError(
"parameter_numbers must be a DSAParameterNumbers instance."
)
self._y = y
self._parameter_numbers = parameter_numbers
y = utils.read_only_property("_y")
parameter_numbers = utils.read_only_property("_parameter_numbers")
def public_key(self, backend):
return backend.load_dsa_public_numbers(self)
class DSAPrivateNumbers(object):
def __init__(self, x, public_numbers):
if not isinstance(x, six.integer_types):
raise TypeError("DSAPrivateNumbers x argument must be an integer.")
if not isinstance(public_numbers, DSAPublicNumbers):
raise TypeError(
"public_numbers must be a DSAPublicNumbers instance."
)
self._public_numbers = public_numbers
self._x = x
x = utils.read_only_property("_x")
public_numbers = utils.read_only_property("_public_numbers")
def private_key(self, backend):
return backend.load_dsa_private_numbers(self)
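def _toy_dsa_numbers():
    # Toy-sized illustration of the invariant _check_dsa_private_numbers
    # enforces, y == pow(g, x, p); these values are far too small for
    # real use and would fail the bit-length checks above.
    p, q, g = 23, 11, 4   # q divides p - 1; g has order q modulo p
    x = 3                 # private value, 0 < x < q
    y = pow(g, x, p)      # public value: 4**3 % 23 == 18
    return DSAPrivateNumbers(x, DSAPublicNumbers(y, DSAParameterNumbers(p, q, g)))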
|
neishm/EC-CAS-diags
|
eccas_diags/diagnostics/movie_zonal.py
|
Python
|
lgpl-3.0
| 2,094
| 0.009551
|
###############################################################################
# Copyright 2016 - Climate Research Division
# Environment and Climate Change Canada
#
# This file is part of the "EC-CAS diags" package.
#
# "EC-CAS diags" is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# "EC-CAS diags" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with "EC-CAS diags". If not, see <http://www.gnu.org/licenses/>.
###############################################################################
from .zonalmean import ZonalMean as Zonal
from .vinterp import VInterp
from . import TimeVaryingDiagnostic
class ZonalMean(Zonal,VInterp,TimeVaryingDiagnostic):
"""
Zonal mean (or standard deviation) of a field, animated in time.
"""
def __str__ (self):
return 'zonal'+self.typestat+'_'+self.zaxis
def do (self, inputs):
from .movie import ZonalMovie
prefix = '_'.join(inp.name for inp in inputs) + '_zonal'+self.typestat+'_'+self.fieldname+'_on_'+self.zaxis+self.suffix+self.end_suffix
title = 'Zonal %s %s (in %s)'%(self.typestat,self.fieldname,self.units)
aspect_ratio = 1.0
shape = (1,len(inputs))
subtitles = [inp.title for inp in inputs]
fields = [inp.datasets[0].vars[0] for inp in inputs]
cmaps = [inp.cmap for inp in inputs]
cap_extremes = [getattr(inp,'cap_extremes',False) for inp in inputs]
movie = ZonalMovie(fields, title=title, subtitles=subtitles, shape=shape, aspect_ratio=aspect_ratio, cmaps=cmaps, cap_extremes=cap_extremes)
movie.save (outdir=self.outdir, prefix=prefix)
from . import table
table['zonal-movie'] = ZonalMean
|