| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
cchristelis/inasafe
|
safe/impact_functions/bases/layer_types/classified_vector_exposure.py
|
Python
|
gpl-3.0
| 1,719
| 0
|
# coding=utf-8
from safe.common.exceptions import NoAttributeInLayerError
from safe.impact_functions.bases.utilities import check_attribute_exist
__author__ = 'Rizky Maulana Nugraha "lucernae" <[email protected]>'
__date__ = '08/05/15'
class ClassifiedVectorExposureMixin(object):
def __init__(self):
self._exposure_class_attribute = None
self._exposure_unique_values = None
@property
def exposure_class_attribute(self):
return self._exposure_class_attribute
@exposure_class_attribute.setter
def exposure_class_attribute(self, value):
# self.exposure is from base IF.
exposure_layer = self.exposure.qgis_vector_layer()
if (exposure_layer and
check_attribute_exist(exposure_layer, value)):
self._exposure_class_attribute = value
else:
message = ('The attribute "%s" does not exist in the exposure '
'layer.') % value
raise NoAttributeInLayerError(message)
# finding unique values in layer
if exposure_layer:
attr_index = exposure_layer.dataProvider().\
fieldNameIndex(value)
unique_list = list()
for feature in exposure_layer.getFeatures():
feature_value = feature.attributes()[attr_index]
if feature_value not in unique_list:
unique_list.append(feature_value)
self.exposure_unique_values = unique_list
@property
def exposure_unique_values(self):
return self._exposure_unique_values
@exposure_unique_values.setter
def exposure_unique_values(self, value):
self._exposure_unique_values = value
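# Hedged usage sketch (added for illustration; 'building_type' is a
# hypothetical attribute name). A class mixing this in, with self.exposure
# set by the base impact function, could do:
#     self.exposure_class_attribute = 'building_type'
#     print(self.exposure_unique_values)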
|
kd7iwp/cube-bookstore
|
cube/urls.py
|
Python
|
gpl-3.0
| 2,981
| 0.006038
|
# Copyright (C) 2010 Trinity Western University
from cube.books.models import Book
from cube.twupass.settings import TWUPASS_LOGOUT_URL
from django.contrib.auth.models import User
from django.contrib import admin
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template, redirect_to
admin.autodiscover()
urlpatterns = patterns('',
url(r'^twupass-logout/$', redirect_to, {'url': TWUPASS_LOGOUT_URL},
name="twupass-logout"),
url(r'^help/$', direct_to_template, {'template' : 'help.html'},
name="help"),
(r'^admin/doc/', include('django.contrib.admindocs.urls')),
(r'^admin/(.*)', admin.site.root),
)
urlpatterns += patterns('cube.twupass.views',
(r'^$', 'login_cube'),
(r'^logout/$', 'logout_cube')
)
urlpatterns += patterns('cube.books.views.books',
url(r'^books/$', 'book_list', name="list"),
url(r'^books/update/book/$', 'update_book', name="update_book"),
url(r'^books/update/book/edit/$', 'update_book_edit',
name="update_book_edit"),
url(r'^books/update/remove_holds_by_user/$', 'remove_holds_by_user',
name="remove_holds_by_user"),
url(r'^add_book/$', 'add_book', name="add_book"),
url(r'^add_new_book/$', 'add_new_book', name="add_new_book"),
url(r'^attach_book/$', 'attach_book', name="attach_book"),
url(r'^my_books/$', 'my_books', name="my_books"),
)
urlpatterns += patterns('cube.books.views.reports',
url(r'^reports/$', 'menu', name="reports_menu"),
url(r'^reports/per_status/$', 'per_status', name='per_status'),
url(r'^reports/books_sold_within_date/$', 'books_sold_within_date',
name='books_sold_within_date'),
url(r'^reports/user/(\d+)/$', 'user', name='user'),
url(r'^reports/book/(\d+)/$', 'book', name='book'),
url(r'^reports/metabook/(\d+)/$', 'metabook', name='metabook'),
url(r'^reports/holds_by_user/$', 'holds_by_user', name='holds_by_user'),
)
urlpatterns += patterns('cube.books.views.metabooks',
url(r'^metabooks/$','metabook_list', name="list_metabooks"),
url(r'^metabooks/update/$', 'update', name="update_metabooks"),
)
urlpatterns += patterns('cube.books.views.staff',
url(r'^staff/$','staff_list', name="staff"),
url(r'^staff_edit/$','staff_edit', name="staff_edit"),
url(r'^update_staff/$','update_staff', name="update_staff"),
)
urlpatterns += patterns('cube.books.views.admin',
url(r'^books/admin/dumpdata/$', 'dumpdata', name='dumpdata'),
url(r'^books/admin/bad_unholds/$', 'bad_unholds', name='bad_unholds'),
)
urlpatterns += patterns('cube.users.views',
url(r'^profile/$', 'profile', name='profile'),
url(r'^profile/edit/$', 'edit_profile', name='edit_profile')
)
urlpatterns += patterns('cube.appsettings.views',
url(r'^appsettings/$', 'setting_list', name='appsettings'),
url(r'^appsettings/(\d+)/$', 'edit_setting', name='edit_setting'),
url(r'^appsettings/save/$', 'save_setting', name="save_setting"),
)
|
jcchuks/MiscCodes
|
CheckPathSum.py
|
Python
|
mit
| 2,184
| 0.012821
|
'''
https://leetcode.com/problems/path-sum/#/description
Given a binary tree and a sum, determine if the tree has a root-to-leaf path such that adding up all the values along the path equals the given sum.
For example:
Given the below binary tree and sum = 22,
5
/ \
4 8
/ / \
11 13 4
/ \ \
7 2 1
return true, as there exists a root-to-leaf path 5->4->11->2 whose sum is 22.
'''
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def hasPathSum(self, root, sum):
"""
:type root: TreeNode
:type sum: int
:rtype: bool
"""
answer = False
total = 0
if root is None:
return answer
return self.sumAndCheck(root,total,sum,answer)
def sumAndCheck(self,node,total,sum,answer):
'''
@thought process:
- Use a depth first search,
- set a base condition, if answer is true, just return and exit happily.
- else, at each valid node, check if the node is a leaf and if the val plus total
equals the sum, if true, you have found your result, just return.
- if not, check the left node, then the right, and return answer.
- Your base condition takes care of unnecessary traversal.
- If you reach the leaf without finding the sum, return False.
'''
if answer:
return True
elif node:
if self.isLeaf(node) and (node.val + total) == sum:
return True
else:
answer = self.sumAndCheck(node.left,node.val + total,sum, answer)
answer = self.sumAndCheck(node.right,node.val + total,sum,answer)
return answer
else:
return False
def isLeaf(self,node):
'''
@params takes a valid node
@return bool
'''
return not node.left and not node.right
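# Hedged usage sketch (added for illustration): builds the docstring's example
# path with a minimal TreeNode stand-in matching the commented definition above.
if __name__ == '__main__':
    class TreeNode(object):
        def __init__(self, x):
            self.val = x
            self.left = None
            self.right = None
    root = TreeNode(5)
    root.left, root.right = TreeNode(4), TreeNode(8)
    root.left.left = TreeNode(11)
    root.left.left.left, root.left.left.right = TreeNode(7), TreeNode(2)
    print(Solution().hasPathSum(root, 22))  # True: path 5->4->11->2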
|
j3camero/galaxyatlas
|
data-release-2/render-lmc-frames.py
|
Python
|
mit
| 5,728
| 0.00419
|
import csv
import math
import numpy as np
from PIL import Image
width = 854
height = 480
fov_multiplier = 1.73 # For 60 degrees, set to 1.73. For 90 degrees, set to 1.
minwh2 = 0.5 * min(width, height)
class Star:
def __init__(self, ra, dec, parallax, g_flux, bp_flux, rp_flux):
self.ra = ra
self.dec = dec
self.parallax = parallax
self.g_flux = g_flux
self.bp_flux = bp_flux
self.rp_flux = rp_flux
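# Assuming Gaia-style parallaxes in milliarcseconds, 1000 / parallax
# gives the distance in parsecs.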
distance_parsecs = 1000 / parallax
distance_ly = distance_parsecs * 3.26156
ra_rad = ra * math.pi / 180
dec_rad = (dec + 90) * math.pi / 180
self.x = distance_ly * math.sin(dec_rad) * math.cos(ra_rad)
self.y = distance_ly * math.sin(dec_rad) * math.sin(ra_rad)
self.z = distance_ly * math.cos(dec_rad)
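# Undo inverse-square dimming: flux times distance squared is
# proportional to the star's intrinsic luminosity.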
self.absolute_luminosity = g_flux * distance_ly**2
def ParseFloat(s):
try:
return float(s)
except:
return 0
stars = []
with open('lmc-stars.csv', 'rb') as input_file:
reader = csv.DictReader(input_file)
for row in reader:
stars.append(Star(
ParseFloat(row['ra']),
ParseFloat(row['dec']),
ParseFloat(row['parallax']),
ParseFloat(row['phot_g_mean_flux']),
ParseFloat(row['phot_bp_mean_flux']),
ParseFloat(row['phot_rp_mean_flux'])
))
def ProjectPointOntoVector(p, v):
return np.dot(p, v) / np.dot(v, v)
def IntegrateFromPointOfView(position, direction, up):
g_flux = np.zeros((width, height))
red_flux = np.zeros((width, height))
blue_flux = np.zeros((width, height))
right = -np.cross(direction, up)
for s in stars:
transformed = [s.x - position[0], s.y - position[1], s.z - position[2]]
x = np.dot(transformed, right)
y = np.dot(transformed, up)
z = np.dot(transformed, direction)
if z < 1:
continue
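# Pinhole projection: divide camera-space x and y by the depth z, scale
# by the field-of-view factor, and map onto screen pixel coordinates.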
sx = int(width / 2 + fov_multiplier * minwh2 * x / z)
sy = int(height / 2 - fov_multiplier * minwh2 * y / z)
if sx < 0 or sx >= width or sy < 0 or sy >= height:
continue
d2 = x**2 + y**2 + z**2
apparent_luminosity = s.absolute_luminosity / d2
g_flux[sx,sy] += apparent_luminosity
redness = 0.5
if s.rp_flux + s.bp_flux > 0:
redness = s.rp_flux / (s.rp_flux + s.bp_flux)
red_flux[sx,sy] += apparent_luminosity * redness
blue_flux[sx,sy] += apparent_luminosity * (1 - redness)
return g_flux, red_flux, blue_flux
# Mix the two colors in the proportion specified by the ratio.
def MixColors(color1, color2, ratio):
r = ratio * color2[0] + (1 - ratio) * color1[0]
g = ratio * color2[1] + (1 - ratio) * color1[1]
b = ratio * color2[2] + (1 - ratio) * color1[2]
return r, g, b
# Converts a color's components to integer values.
def IntColor(c):
return (int(c[0]), int(c[1]), int(c[2]))
# What fraction of the way between lo and hi is the value? If outside the
# range of (lo,hi), it's capped to 0 and 1 respectively.
def CappedRange(lo, hi, value):
if value < lo:
return float(0)
elif value > hi:
return float(1)
else:
return float(value - lo) / (hi - lo)
# redness is a number between 0 and 1. It's the ratio of red to blue light.
def RednessRatioToColor(redness):
red = (255, 0, 0)
blue = (0, 0, 255)
return MixColors(red, blue, CappedRange(0.3, 0.9, redness))
# g_normalized: a number between 0 and 1 representing the percentile
# brightness of a pixel.
# red_flux: how much total red flux in a pixel. No need to normalize.
# blue_flux: how much total blue flux in a pixel. No need to normalize.
def FluxToColor(g_normalized, red_flux, blue_flux):
redness = 0.6
if red_flux + blue_flux > 0:
redness = red_flux / (red_flux + blue_flux)
base_color = RednessRatioToColor(redness)
black = (0, 0, 0)
white = (255, 255, 255)
if g_normalized < 0.5:
return MixColors(black, base_color, CappedRange(0, 0.5, g_normalized))
else:
return MixColors(base_color, white, CappedRange(0.5, 1, g_normalized))
# Normalizes a raw flux value into the range [0,1].
def FluxPercentile(flux, sorted_sample):
lo = 0
hi = len(sorted_sample)
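# Binary search for the rank of `flux` in the sorted sample; the rank
# divided by the sample size is the percentile.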
while hi - lo > 1:
mid = int((lo + hi) / 2)
if flux >= sorted_sample[mid]:
lo = mid
else:
hi = mid
return 1.0 * lo / len(sorted_sample)
frame_number = 1
def RenderImageFromFlux(g_flux, red_flux, blue_flux):
global frame_number
sorted_flux = []
for i in range(width):
for j in range(height):
flux = g_flux[i,j]
if flux > 0.000000001:
sorted_flux.append(flux)
sorted_flux.sort()
image = Image.new('RGB', (width, height))
for i in range(width):
for j in range(height):
p = FluxPercentile(g_flux[i,j], sorted_flux)
color = FluxToColor(p, red_flux[i,j], blue_flux[i,j])
image.putpixel((i, j), IntColor(color))
image.save('frames/lmc%05d.png' % frame_number)
frame_number += 1
def RenderFrameFromPointOfView(position, direction, up):
g_flux, red_flux, blue_flux = IntegrateFromPointOfView(position, direction, up)
RenderImageFromFlux(g_flux, red_flux, blue_flux)
num_frames = 10 * 30
up = np.array([0, 1, 0])
lmc = np.array([8950, 59000, 152880])
orbit_radius = 100 * 1000
for i in range(num_frames):
print 'Frame', (i + 1), 'of', num_frames
angle = 2 * math.pi * i / num_frames
direction = np.array([math.sin(angle), 0, -math.cos(angle)])
position = lmc - orbit_radius * direction
RenderFrameFromPointOfView(position, direction, up)
|
aluminiumgeek/organic
|
utils.py
|
Python
|
lgpl-3.0
| 212
| 0
|
# -*- coding: utf-8 -*-
# Some utils
import hashlib
import uuid
def get_hash(data):
"""Returns hashed string"""
return hashlib.sha256(data).hexdigest()
def get_token():
return str(uuid.uuid4())
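# Hedged usage sketch (added for illustration). hashlib.sha256 expects bytes,
# so on Python 3 a str must be encoded first:
if __name__ == '__main__':
    print(get_hash('organic'.encode('utf-8')))  # 64-character hex digest
    print(get_token())                          # random UUID4 string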
|
voutilad/courtlistener
|
cl/corpus_importer/dup_helpers.py
|
Python
|
agpl-3.0
| 14,568
| 0.003089
|
import string
from django.utils.text import slugify
from django.utils.timezone import now
from lxml import html
from lxml.html import tostring
from lxml.html.clean import Cleaner
from cl.lib.string_utils import anonymize, trunc
from cl.search.models import OpinionCluster
from juriscraper.lib.string_utils import clean_string, harmonize, titlecase
import re
import subprocess
BROWSER = 'firefox'
def merge_cases_simple(new, target_id):
"""Add `new` to the database, merging with target_id
Merging is done by picking the best fields from each item.
"""
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# !! THIS CODE IS OUT OF DATE AND UNMAINTAINED. FEEL FREE TO FIX IT, BUT !!
# !! DO NOT TRUST IT IN ITS CURRENT STATE. !!
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
target = OpinionCluster.objects.get(pk=target_id)
print "Merging %s with" % new.case_name
print " %s" % target.case_name
cached_source = target.source # Original value is needed below.
if target.source == 'C':
target.source = 'LC'
elif target.source == 'R':
target.source = 'LR'
elif target.source == 'CR':
target.source = 'LCR'
# Add the URL if it's not a court one, replacing public.resource.org's
# info in some cases.
if cached_source == 'R':
target.download_url = new.download_url
# Recreate the slug from the new case name (this changes the URL, but the
# old will continue working)
target.slug = slugify(trunc(new.case_name, 75))
# Take the case name from the new item; they tend to be pretty good
target.case_name = new.case_name
# Add the docket number if the old doesn't exist, but keep the old if one
# does.
if not target.docket.docket_number:
target.docket.docket_number = new.docket.docket_number
# Get the citations from the new item (ditch the old).
target.federal_cite_one = new.federal_cite_one
target.federal_cite_two = new.federal_cite_two
target.federal_cite_three = new.federal_cite_three
target.state_cite_one = new.state_cite_one
target.state_cite_two = new.state_cite_two
target.state_cite_three = new.state_cite_three
target.state_cite_regional = new.state_cite_regional
target.specialty_cite_one = new.specialty_cite_one
target.scotus_early_cite = new.scotus_early_cite
target.lexis_cite = new.lexis_cite
target.westlaw_cite = new.westlaw_cite
target.neutral_cite = new.neutral_cite
# Add judge information if lacking. New is dirty, but better than none.
if not target.judges:
target.judges = new.judges
# Add the text.
target.html_lawbox, blocked = anonymize(new.html)
if blocked:
target.blocked = True
target.date_blocked = now()
target.extracted_by_ocr = False  # No longer true for any LB case.
# save_doc_and_cite(target, index=False)
def merge_cases_complex(case, target_ids):
"""Merge data from PRO with multiple cases that seem to be a
|
match.
The process here is a conservative one. We take *only* the information
from PRO that is not already in CL in any form, and add only that.
"""
# THIS CODE ONLY UPDATED IN THE MOST CURSORY FASHION. DO NOT TRUST IT.
for target_id in target_ids:
simulate = False
oc = OpinionCluster.objects.get(pk=target_id)
print "Merging %s with" % case.case_name
print " %s" % oc.case_name
oc.source = 'CR'
oc.west_cite = case.west_cite
if not simulate:
oc.save()
def find_same_docket_numbers(doc, candidates):
"""Identify the candidates that have the same docket numbers as doc after
each has been cleaned.
"""
new_docket_number = re.sub('(\D|0)', '', doc.docket.docket_number)
same_docket_numbers = []
for candidate in candidates:
old_docket_number = re.sub('(\D|0)', '', candidate.get('docketNumber', ''))
if all([len(new_docket_number) > 3, len(old_docket_number) > 3]):
if old_docket_number in new_docket_number:
same_docket_numbers.append(candidate)
return same_docket_numbers
def case_name_in_candidate(case_name_new, case_name_candidate):
"""When there is one candidate match, this compares their case names to see
if one is contained in the other, in the right order.
Returns True if so, else False.
"""
regex = re.compile('[%s]' % re.escape(string.punctuation))
case_name_new_words = regex.sub('', case_name_new.lower()).split()
case_name_candidate_words = regex.sub('', case_name_candidate.lower()).split()
index = 0
for word in case_name_new_words:
if len(word) <= 2:
continue
try:
index = case_name_candidate_words[index:].index(word)
except ValueError:
# The items were out of order or the item wasn't in the candidate.
return False
return True
def filter_by_stats(candidates, stats):
"""Looks at the candidates and their stats, and filters out obviously
different candidates.
"""
filtered_candidates = []
filtered_stats = {
'candidate_count': 0,
'case_name_similarities': [],
'length_diffs': [],
'gestalt_diffs': [],
'cos_sims': [],
}
for i in range(0, len(candidates)):
# Commented out because the casenames in public.resource.org can be so
# long this varies too much.
# if stats['case_name_similarities'][i] < 0.125:
# # The case name is wildly different
# continue
if stats['length_diffs'][i] > 400:
# The documents have wildly different lengths
continue
# Commented out because the headnotes sometimes included in Resource.org made this calculation vary too much.
#elif stats['gestalt_diffs'][i] < 0.4:
# # The contents are wildly different
# continue
elif stats['cos_sims'][i] < 0.90:
# Very different cosine similarities
continue
else:
# It's a reasonably close match.
filtered_candidates.append(candidates[i])
filtered_stats['case_name_similarities'].append(stats['case_name_similarities'][i])
filtered_stats['length_diffs'].append(stats['length_diffs'][i])
filtered_stats['gestalt_diffs'].append(stats['gestalt_diffs'][i])
filtered_stats['cos_sims'].append(stats['cos_sims'][i])
filtered_stats['candidate_count'] = len(filtered_candidates)
return filtered_candidates, filtered_stats
class Case(object):
def _get_case_name_and_status(self):
case_name = self.url_element.get('title').lower()
ca1regex = re.compile('(unpublished disposition )?notice: first circuit local rule 36.2\(b\)6 states unpublished opinions may be cited only in related cases.?')
ca2regex = re.compile('(unpublished disposition )?notice: second circuit local rule 0.23 states unreported opinions shall not be cited or otherwise used in unrelated cases.?')
ca2regex2 = re.compile('(unpublished disposition )?notice: this summary order may not be cited as precedential authority, but may be called to the attention of the court in a subsequent stage of this case, in a related case, or in any case for purposes of collateral estoppel or res judicata. see second circuit rule 0.23.?')
ca3regex = re.compile('(unpublished disposition )?notice: third circuit rule 21\(i\) states citations to federal decisions which have not been formally reported should identify the court, docket number and date.?')
ca4regex = re.compile('(unpublished disposition )?notice: fourth circuit (local rule 36\(c\)|i.o.p. 36.6) states that citation of unpublished dispositions is disfavored except for establishing res judicata, estoppel, or the law of the case and requires service of copies of cited unpublished dispositions of the fourth circuit.?')
ca5regex = re.compile('(unpublished disposition )?notice: fifth circuit local rule 47.5.3 states that un
|
april/http-observatory
|
httpobs/scanner/retriever/retriever.py
|
Python
|
mpl-2.0
| 7,939
| 0.004157
|
from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded
from urllib.parse import urlparse
from httpobs.conf import (RETRIEVER_CONNECT_TIMEOUT,
RETRIEVER_CORS_ORIGIN,
RETRIEVER_READ_TIMEOUT,
RETRIEVER_USER_AGENT)
from httpobs.scanner.utils import parse_http_equiv_headers
import logging
import requests
# Disable the requests InsecureRequestWarning -- we will track certificate errors manually when
# verification is disabled. Also disable requests errors at levels lower than CRITICAL, see:
# https://github.com/celery/celery/issues/3633 for crashy details
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
logging.getLogger('requests').setLevel(logging.CRITICAL)
# MIME types for HTML requests
HTML_TYPES = ('text/html', 'application/xhtml+xml')
# Maximum timeout for requests for all GET requests for anything but the TLS Observatory
# The default ConnectionTimeout is something like 75 seconds, which means that things like
# tiles can take ~600s to timeout, since they have 8 DNS entries. Setting it to lower
# should hopefully keep requests from taking forever
TIMEOUT = (RETRIEVER_CONNECT_TIMEOUT, RETRIEVER_READ_TIMEOUT)
# Create a session, returning the session and the HTTP response in a dictionary
# Don't create the sessions if it can't connect and retrieve the root of the website
# TODO: Allow people to scan a subdirectory instead of using '/' as the default path?
def __create_session(url: str, **kwargs) -> dict:
s = requests.Session()
# Allow certificate verification to be disabled on the initial request, which means that sites won't get
# penalized on things like HSTS, even for self-signed certificates
s.verify = kwargs['verify']
# Add the headers to the session
if kwargs['headers']:
s.headers.update(kwargs['headers'])
# Set all the cookies and force them to be sent only over HTTPS; this might change in the future
if kwargs['cookies']:
s.cookies.update(kwargs['cookies'])
for cookie in s.cookies:
cookie.secure = True
# Override the User-Agent; some sites (like twitter) don't send the CSP header unless you have a modern
# user agent
s.headers.update({
'User-Agent': RETRIEVER_USER_AGENT,
})
try:
r = s.get(url, timeout=TIMEOUT)
# No tls errors
r.verified = True
# Let celery exceptions percolate upward
except (SoftTimeLimitExceeded, TimeLimitExceeded):
raise
# We can try again if there's an SSL error, making sure to note it in the session
except requests.exceptions.SSLError:
try:
r = s.get(url, timeout=TIMEOUT, verify=False)
r.verified = False
except (KeyboardInterrupt, SystemExit):
raise
except:
r = None
s = None
except (KeyboardInterrupt, SystemExit):
raise
except:
r = None
s = None
# Store the domain name and scheme in the session
if r is not None and s is not None:
s.url = urlparse(r.url)
return {'session': s, 'response': r}
def __get(session, relative_path='/', headers=None, cookies=None):
if not headers:
headers = {}
if not cookies:
cookies = {}
try:
# TODO: limit the maximum size of the response, to keep malicious site operators from killing us
# TODO: Perhaps we can naively do it for now by simply setting a timeout?
# TODO: catch TLS errors instead of just setting it to None?
return session.get(session.url.scheme + '://' + session.url.netloc + relative_path,
headers=headers,
cookies=cookies,
timeout=TIMEOUT)
# Let celery exceptions percolate upward
except (SoftTimeLimitExceeded, TimeLimitExceeded):
raise
except (KeyboardInterrupt, SystemExit):
raise
except:
return None
def __get_page_text(response: requests.Response, force: bool = False) -> str:
if response is None:
return None
elif response.status_code == 200 or force: # Some pages we want to get the page text even with non-200s
# A quick and dirty check to make sure that somebody's 404 page didn't actually return 200 with html
ext = (response.history[0].url if response.history else response.url).split('.')[-1]
if response.headers.get('Content-Type', '') in HTML_TYPES and ext in ('json', 'txt', 'xml'):
return None
return response.text
else:
return None
def retrieve_all(hostname, **kwargs):
kwargs['cookies'] = kwargs.get('cookies', {}) # HTTP cookies to send, instead of from the database
kwargs['headers'] = kwargs.get('headers', {})  # HTTP headers to send, instead of from the database
# This way of doing it keeps the urls tidy even if makes the code ugly
kwargs['http_port'] = ':' + str(kwargs.get('http_port', '')) if 'http_port' in kwargs else ''
kwargs['https_port'] = ':' + str(kwargs.get('https_port', '')) if 'https_port' in kwargs else ''
kwargs['path'] = kwargs.get('path', '/')
kwargs['verify'] = kwargs.get('verify', True)
retrievals = {
'hostname': hostname,
'resources': {
},
'responses': {
'auto': None, # whichever of 'http' or 'https' actually works, with 'https' as higher priority
'cors': None, # CORS preflight test
'http': None,
'https': None,
},
'session': None,
}
# The list of resources to get
resources = (
'/clientaccesspolicy.xml',
'/contribute.json',
'/crossdomain.xml',
'/robots.txt'
)
# Create some reusable sessions, one for HTTP and one for HTTPS
http_session = __create_session('http://' + hostname + kwargs['http_port'] + kwargs['path'], **kwargs)
https_session = __create_session('https://' + hostname + kwargs['https_port'] + kwargs['path'], **kwargs)
# If neither one works, then the site just can't be loaded
if http_session['session'] is None and https_session['session'] is None:
return retrievals
else:
# Store the HTTP only and HTTPS only responses (some things can only be retrieved over one or the other)
retrievals['responses']['http'] = http_session['response']
retrievals['responses']['https'] = https_session['response']
if https_session['session'] is not None:
retrievals['responses']['auto'] = https_session['response']
retrievals['session'] = https_session['session']
else:
retrievals['responses']['auto'] = http_session['response']
retrievals['session'] = http_session['session']
# Store the contents of the "base" page
retrievals['resources']['__path__'] = __get_page_text(retrievals['responses']['auto'], force=True)
# Do a CORS preflight request
retrievals['responses']['cors'] = __get(retrievals['session'],
kwargs['path'],
headers={'Origin': RETRIEVER_CORS_ORIGIN})
# Store all the files we retrieve
for resource in resources:
resp = __get(retrievals['session'], resource)
retrievals['resources'][resource] = __get_page_text(resp)
# Parse out the HTTP meta-equiv headers
if (retrievals['responses']['auto'].headers.get('Content-Type', '').split(';')[0] in HTML_TYPES and
retrievals['resources']['__path__']):
retrievals['responses']['auto'].http_equiv = parse_http_equiv_headers(retrievals['resources']['__path__'])
else:
retrievals['responses']['auto'].http_equiv = {}
return retrievals
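# Hedged usage sketch (added for illustration; performs live HTTP requests,
# and the host name is hypothetical):
if __name__ == '__main__':
    result = retrieve_all('example.com')
    print(result['responses']['auto'])
    print(sorted(result['resources'].keys()))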
|
azam-a/malaysiaflights
|
malaysiaflights/tests/test_aa.py
|
Python
|
mit
| 3,889
| 0
|
import unittest
import datetime
import httpretty as HP
import json
from urllib.parse import parse_qsl
from malaysiaflights.aa import AirAsia as AA
class AARequestTests(unittest.TestCase):
def url_helper(self, from_, to, date):
host = 'https://argon.airasia.com'
path = '/api/7.0/search'
body = {'origin': from_,
'destination': to,
'depart': date,
'passenger-count': '1',
'infant-count': '0',
'currency': 'MYR'}
return host, path, body
@HP.activate
def test_search_calls_api_using_correct_path_and_body(self):
host, path, body = self.url_helper('KUL', 'TGG', '18-06-2015')
HP.register_uri(HP.POST, host+path, status=200)
d = datetime.datetime(2015, 6, 18)
AA.search('KUL', 'TGG', d)
mocked_request = HP.last_request()
actual_body = dict(parse_qsl(mocked_request.body.decode()))
self.assertEqual(path, mocked_request.path)
self.assertEqual(body, actual_body)
class ResponseExtractionTests(unittest.TestCase):
def fixture_loader(self, path):
prefix = 'malaysiaflights/fixtures/'
with open(prefix + path, 'r') as file_:
return json.loads(file_.read())
def setUp(self):
self.single = self.fixture_loader('aa-single.json')
self.zero = self.fixture_loader('aa-no-flights.json')
def test_get_number_of_results_for_valid_response(self):
json = self.single
actual = AA.get_number_of_results(json)
self.assertEqual(4, actual)
def test_get_number_of_results_for_no_flights_on_date(self):
json = self.zero
actual = AA.get_number_of_results(json)
self.assertEqual(0, actual)
def test_get_flight_details_using_index_0_should_return_results(self):
json = self.single
expected = {
'flight_number': 'AK6225',
'departure_airport': 'TGG',
'arrival_airport': 'KUL',
'departure_time': 'Sat, 20 Jun 2015 08:20:00 +0800',
'arrival_time': 'Sat, 20 Jun 2015 09:15:00 +0800',
'total_fare': 133.99,
'fare_currency': 'MYR'}
actual = AA.get_direct_flight_details(json, 0)
self.assertEqual(expected, actual)
def test_get_flight_details_using_index_1_should_return_results(self):
json = self.single
expected = {
'flight_number': 'AK6229',
'departure_airport': 'TGG',
'arrival_airport': 'KUL',
'departure_time': 'Sat, 20 Jun 2015 13:10:00 +0800',
'arrival_time': 'Sat, 20 Jun 2015 14:05:00 +0800',
'total_fare': 133.99,
'fare_currency': 'MYR'}
actual = AA.get_direct_flight_details(json, 1)
self.assertEqual(expected, actual)
@unittest.skip('no-data-yet')
def test_is_connecting_flights_should_return_true_for_connecting(self):
json = ''
actual = AA.is_connecting_flights(json, 0)
self.assertTrue(actual)
def test_is_connecting_flights_should_return_false_for_direct(self):
json = self.single
actual = AA.is_connecting_flights(json, 2)
self.assertFalse(actual)
class TimeConversionTest(unittest.TestCase):
def test_convert_to_api_format_returns_correct_output(self):
date_object = datetime.datetime(2015, 9, 25)
expected = '25-09-2015'
actual = AA.to_api(date_object)
self.assertEqual(expected, actual)
def test_convert_extracted_time_to_datetime_returns_correct_object(self):
offset = datetime.timedelta(hours=8)
expected = datetime.datetime(2015, 6, 20, 13, 10,
tzinfo=datetime.timezone(offset))
actual = AA.to_datetime('Sat, 20 Jun 2015 13:10:00 +0800')
self.assertEqual(expected, actual)
|
plaid/plaid-python
|
plaid/model/pay_period_details.py
|
Python
|
mit
| 9,216
| 0.000434
|
"""
The Plaid API
The Plaid REST API. Please see https://plaid.com/docs/api for more details. # noqa: E501
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from plaid.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from plaid.model.distribution_breakdown import DistributionBreakdown
globals()['DistributionBreakdown'] = DistributionBreakdown
class PayPeriodDetails(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('pay_frequency',): {
'None': None,
'PAY_FREQUENCY_UNKNOWN': "PAY_FREQUENCY_UNKNOWN",
'PAY_FREQUENCY_WEEKLY': "PAY_FREQUENCY_WEEKLY",
'PAY_FREQUENCY_BIWEEKLY': "PAY_FREQUENCY_BIWEEKLY",
'PAY_FREQUENCY_SEMIMONTHLY': "PAY_FREQUENCY_SEMIMONTHLY",
'PAY_FREQUENCY_MONTHLY': "PAY_FREQUENCY_MONTHLY",
'NULL': "null",
},
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'check_amount': (float, none_type,), # noqa: E501
'distribution_breakdown': ([DistributionBreakdown],), # noqa: E501
'end_date': (date, none_type,), # noqa: E501
'gross_earnings': (float, none_type,), # noqa: E501
'pay_date': (date, none_type,), # noqa: E501
'pay_frequency': (str, none_type,), # noqa: E501
'pay_day': (date, none_type,), # noqa: E501
'start_date': (date, none_type,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'check_amount': 'check_amount', # noqa: E501
'distribution_breakdown': 'distribution_breakdown', # noqa: E501
'end_date': 'end_date', # noqa: E501
'gross_earnings': 'gross_earnings', # noqa: E501
'pay_date': 'pay_date', # noqa: E501
'pay_frequency': 'pay_frequency', # noqa: E501
'pay_day': 'pay_day', # noqa: E501
'start_date': 'start_date', # noqa: E501
}
_composed_schemas = {}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""PayPeriodDetails - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
check_amount (float, none_type): The amount of the paycheck. [optional] # noqa: E501
distribution_breakdown ([DistributionBreakdown]): [optional] # noqa: E501
end_date (date, none_type): The pay period end date, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format: \"yyyy-mm-dd\". [optional] # noqa: E501
gross_earnings (float, none_type): Total earnings before tax/deductions. [optional] # noqa: E501
pay_date (date, none_type): The date on which the paystub was issued, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format (\"yyyy-mm-dd\"). [optional] # noqa: E501
pay_frequency (str, none_type): The frequency at which an individual is paid. [optional] # noqa: E501
pay_day (date, none_type): The date on which the paystub was issued, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format (\"yyyy-mm-dd\"). [optional] # noqa: E501
start_date (date, none_type): The pay period start date, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format: \"yyyy-mm-dd\". [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiT
|
ShapeNet/RenderForCNN
|
view_estimation/run_evaluation.py
|
Python
|
mit
| 932
| 0.006438
|
import os
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)
sys.path.append(os.path.dirname(BASE_DIR))
from global_variables import *
from evaluation_helper import *
cls_names = g_shape_names
img_name_file_list = [os.path.join(g_real_images_voc12val_det_bbox_folder, name+'.txt') for name in cls_names]
det_bbox_mat_file_list = [os.path.join(g_detection_results_folder, x.rstrip()) for x in open(g_rcnn_detection_bbox_mat_filelist)]
result_folder = os.path.join(BASE_DIR, 'avp_test_results')
test_avp_nv(cls_names, img_name_file_list, det_bbox_mat_file_list, result_folder)
img_name_file_list = [os.path.join(g_real_images_voc12val_easy_gt_bbox_folder, name+'.txt') for name in cls_names]
view_label_folder = g_real_images_voc12val_easy_gt_bbox_folder
result_folder = os.path.join(BASE_DIR, 'vp_test_results')
test_vp_acc(cls_names, img_name_file_list, result_folder, view_label_folder)
|
facebookexperimental/eden
|
eden/hg-server/edenscm/mercurial/hgweb/__init__.py
|
Python
|
gpl-2.0
| 3,073
| 0
|
# Portions Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# hgweb/__init__.py - web interface to a mercurial repository
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <[email protected]>
# Copyright 2005 Matt Mackall <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
import os
from .. import error, pycompat, util
from ..i18n import _
from . import hgweb_mod, hgwebdir_mod, server
def hgweb(config, name=None, baseui=None):
"""create an hgweb wsgi object
config can be one of:
- repo object (single repo view)
- path to repo (single repo view)
- path to config file (multi-repo view)
- dict of virtual:real pairs (multi-repo view)
- list of virtual:real tuples (multi-repo view)
"""
if (
(isinstance(config, str) and not os.path.isdir(config))
or isinstance(config, dict)
or isinstance(config, list)
):
# create a multi-dir interface
return hgwebdir_mod.hgwebdir(config, baseui=baseui)
return hgweb_mod.hgweb(config, name=name, baseui=baseui)
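# Hedged usage sketch (added for illustration; paths are hypothetical):
#     application = hgweb('/srv/repos/myrepo')             # single-repo view
#     application = hgweb({'virt/repo': '/srv/repos/a'})   # multi-repo view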
def hgwebdir(config, baseui=None):
return hgwebdir_mod.hgwebdir(config, baseui=baseui)
class httpservice(object):
def __init__(self, ui, app, opts):
self.ui = ui
self.app = app
self.opts = opts
def init(self):
util.setsignalhandler()
self.httpd = server.create_server(self.ui, self.app)
portfile = self.opts.get("port_file")
if portfile:
util.writefile(portfile, "%s" % self.httpd.port)
if (self.opts["port"] or portfile) and not self.ui.verbose:
return
if self.httpd.prefix:
prefix = self.httpd.prefix.strip("/") + "/"
else:
prefix = ""
port = r":%d" % self.httpd.port
if port == r":80":
port = r""
bindaddr = self.httpd.addr
if bindaddr == r"0.0.0.0":
bindaddr = r"*"
elif r":" in bindaddr: # IPv6
bindaddr = r"[%s]" % bindaddr
fqaddr = self.httpd.fqaddr
if r":" in fqaddr:
fqaddr = r"[%s]" % fqaddr
if self.opts["port"] or portfile:
write = self.ui.status
else:
write = self.ui.write
write(
_("listening at http://%s%s/%s (bound to %s:%d)\n")
% (fqaddr, port, prefix, bindaddr, self.httpd.port)
)
self.ui.flush() # avoid buffering of status message
def run(self):
self.httpd.serve_forever()
def createapp(baseui, repo, webconf):
if webconf:
return hgwebdir_mod.hgwebdir(webconf, baseui=baseui)
else:
if not repo:
raise error.RepoError(
_("there is no Mercurial repository" " here (.hg not found)")
)
return hgweb_mod.hgweb(repo, baseui=baseui)
|
mhnatiuk/phd_sociology_of_religion
|
scrapper/build/cffi/testing/test_ffi_backend.py
|
Python
|
gpl-2.0
| 9,406
| 0.001701
|
import py, sys, platform
import pytest
from testing import backend_tests, test_function, test_ownlib
from cffi import FFI
import _cffi_backend
class TestFFI(backend_tests.BackendTests,
test_function.TestFunction,
test_ownlib.TestOwnLib):
TypeRepr = "<ctype '%s'>"
@staticmethod
def Backend():
return _cffi_backend
def test_not_supported_bitfield_in_result(self):
ffi = FFI(backend=self.Backend())
ffi.cdef("struct foo_s { int a,b,c,d,e; int x:1; };")
e = py.test.raises(NotImplementedError, ffi.callback,
"struct foo_s foo(void)", lambda: 42)
assert str(e.value) == ("<struct foo_s(*)(void)>: "
"cannot pass as argument or return value a struct with bit fields")
def test_inspecttype(self):
ffi = FFI(backend=self.Backend())
assert ffi.typeof("long").kind == "primitive"
assert ffi.typeof("long(*)(long, long**, ...)").cname == (
"long(*)(long, long * *, ...)")
assert ffi.typeof("long(*)(long, long**, ...)").ellipsis is True
def test_new_handle(self):
ffi = FFI(backend=self.Backend())
o = [2, 3, 4]
p = ffi.new_handle(o)
assert ffi.typeof(p) == ffi.typeof("void *")
assert ffi.from_handle(p) is o
assert ffi.from_handle(ffi.cast("char *", p)) is o
py.test.raises(RuntimeError, ffi.from_handle, ffi.NULL)
class TestBitfield:
def check(self, source, expected_ofs_y, expected_align, expected_size):
# NOTE: 'expected_*' is the numbers expected from GCC.
# The numbers expected from MSVC are not explicitly written
# in this file, and will just be taken from the compiler.
ffi = FFI()
ffi.cdef("struct s1 { %s };" % source)
ctype = ffi.typeof("struct s1")
# verify the information with gcc
ffi1 = FFI()
ffi1.cdef("""
static const int Gofs_y, Galign, Gsize;
struct s1 *try_with_value(int fieldnum, long long value);
""")
fnames = [name for name, cfield in ctype.fields
if name and cfield.bitsize > 0]
setters = ['case %d: s.%s = value; break;' % iname
for iname in enumerate(fnames)]
lib = ffi1.verify("""
struct s1 { %s };
struct sa { char a; struct s1 b; };
#define Gofs_y offsetof(struct s1, y)
#define Galign offsetof(struct sa, b)
#define Gsize sizeof(struct s1)
struct s1 *try_with_value(int fieldnum, long long value)
{
static struct s1 s;
memset(&s, 0, sizeof(s));
switch (fieldnum) { %s }
return &s;
}
""" % (source, ' '.join(setters)))
if sys.platform == 'win32':
expected_ofs_y = lib.Gofs_y
expected_align = lib.Galign
expected_size = lib.Gsize
else:
assert (lib.Gofs_y, lib.Galign, lib.Gsize) == (
expected_ofs_y, expected_align, expected_size)
# the real test follows
assert ffi.offsetof("struct s1", "y") == expected_ofs_y
assert ffi.alignof("struct s1") == expected_align
assert ffi.sizeof("struct s1") == expected_size
# compare the actual storage of the two
for name, cfield in ctype.fields:
if cfield.bitsize < 0 or not name:
continue
if int(ffi.cast(cfield.type, -1)) == -1: # signed
min_value = -(1 << (cfield.bitsize-1))
max_value = (1 << (cfield.bitsize-1)) - 1
else:
min_value = 0
max_value = (1 << cfield.bitsize) - 1
for t in [1, 2, 4, 8, 16, 128, 2813, 89728, 981729,
-1,-2,-4,-8,-16,-128,-2813,-89728,-981729]:
if min_value <= t <= max_value:
self._fieldcheck(ffi, lib, fnames, name, t)
def _fieldcheck(self, ffi, lib, fnames, name, value):
s = ffi.new("struct s1 *")
setattr(s, name, value)
assert getattr(s, name) == value
raw1 = ffi.buffer(s)[:]
t = lib.try_with_value(fnames.index(name), value)
raw2 = ffi.buffer(t, len(raw1))[:]
assert raw1 == raw2
def test_bitfield_basic(self):
self.check("int a; int b:9; int c:20; int y;", 8, 4, 12)
self.check("int a; short b:9; short c:7; int y;", 8, 4, 12)
self.check("int a; short b:9; short c:9; int y;", 8, 4, 12)
def test_bitfield_reuse_if_enough_space(self):
self.check("int a:2; char y;", 1, 4, 4)
self.check("int a:1; char b ; int c:1; char y;", 3, 4, 4)
self.check("int a:1; char b:8; int c:1; char y;", 3, 4, 4)
self.check("char a; int b:9; char y;", 3, 4, 4)
self.check("char a; short b:9; char y;", 4, 2, 6)
self.check("int a:2; char b:6; char y;", 1, 4, 4)
self.check("int a:2; char b:7; char y;", 2, 4, 4)
self.check("int a:2; short b:15; char c:2; char y;", 5, 4, 8)
self.check("int a:2; char b:1; char c:1; char y;", 1, 4, 4)
@pytest.mark.skipif("platform.machine().startswith('arm')")
def test_bitfield_anonymous_no_align(self):
L = FFI().alignof("long long")
self.check("char y; int :1;", 0, 1, 2)
self.check("char x; int z:1; char y;", 2, 4, 4)
self.check("char x; int :1; char y;", 2, 1, 3)
self.check("char x; long long z:48; char y;", 7, L, 8)
self.check("char x; long long :48; char y;", 7, 1, 8)
self.check("char x; long long z:56; char y;", 8, L, 8 + L)
self.check("char x; long long :56; char y;", 8, 1, 9)
self.check("char x; long
|
long z:57; char y;", L + 8, L, L + 8 + L)
self.check("char x; long long :57; char y;"
|
, L + 8, 1, L + 9)
@pytest.mark.skipif("not platform.machine().startswith('arm')")
def test_bitfield_anonymous_align_arm(self):
L = FFI().alignof("long long")
self.check("char y; int :1;", 0, 4, 4)
self.check("char x; int z:1; char y;", 2, 4, 4)
self.check("char x; int :1; char y;", 2, 4, 4)
self.check("char x; long long z:48; char y;", 7, L, 8)
self.check("char x; long long :48; char y;", 7, 8, 8)
self.check("char x; long long z:56; char y;", 8, L, 8 + L)
self.check("char x; long long :56; char y;", 8, L, 8 + L)
self.check("char x; long long z:57; char y;", L + 8, L, L + 8 + L)
self.check("char x; long long :57; char y;", L + 8, L, L + 8 + L)
@pytest.mark.skipif("platform.machine().startswith('arm')")
def test_bitfield_zero(self):
L = FFI().alignof("long long")
self.check("char y; int :0;", 0, 1, 4)
self.check("char x; int :0; char y;", 4, 1, 5)
self.check("char x; int :0; int :0; char y;", 4, 1, 5)
self.check("char x; long long :0; char y;", L, 1, L + 1)
self.check("short x, y; int :0; int :0;", 2, 2, 4)
self.check("char x; int :0; short b:1; char y;", 5, 2, 6)
self.check("int a:1; int :0; int b:1; char y;", 5, 4, 8)
@pytest.mark.skipif("not platform.machine().startswith('arm')")
def test_bitfield_zero_arm(self):
L = FFI().alignof("long long")
self.check("char y; int :0;", 0, 4, 4)
self.check("char x; int :0; char y;", 4, 4, 8)
self.check("char x; int :0; int :0; char y;", 4, 4, 8)
self.check("char x; long long :0; char y;", L, 8, L + 8)
self.check("short x, y; int :0; int :0;", 2, 4, 4)
self.check("char x; int :0; short b:1; char y;", 5, 4, 8)
self.check("int a:1; int :0; int b:1; char y;", 5, 4, 8)
def test_error_cases(self):
ffi = FFI()
py.test.raises(TypeError,
'ffi.cdef("struct s1 { float x:1; };"); ffi.new("struct s1 *")')
py.test.raises(TypeError,
'ffi.cdef("struct s2 { char x:0; };"); ffi.new("struct s2 *")')
py.test.raises(TypeError,
'ffi.cdef("struct s3 { char x:9; };"); ffi.new("struct s3 *")')
def test_struct_with_typedef(self):
ffi
|
Lanceolata/code-problems
|
python/leetcode/Question_168_Excel_Sheet_Column_Title.py
|
Python
|
mit
| 255
| 0.003922
|
#!/usr/bin/python
# coding: utf-8
class Solution(object):
def convertToTitle(self, n):
"""
:type n: int
:rtype: str
"""
return "" if n == 0 else self.convertToTitle((n - 1) / 26) + chr((n - 1) % 26 + ord('A'))
|
snakeleon/YouCompleteMe-x86
|
third_party/ycmd/third_party/JediHTTP/vendor/jedi/jedi/evaluate/compiled/__init__.py
|
Python
|
gpl-3.0
| 17,309
| 0.000924
|
"""
Imitate the parser representation.
"""
import inspect
import re
import sys
import os
from functools import partial
from jedi._compatibility import builtins as _builtins, unicode
from jedi import debug
from jedi.cache import underscore_memoization, memoize_method
from jedi.parser.tree import Param, Base, Operator, zero_position_modifier
from jedi.evaluate.helpers import FakeName
from . import fake
_sep = os.path.sep
if os.path.altsep is not None:
_sep += os.path.altsep
_path_re = re.compile('(?:\.[^{0}]+|[{0}]__init__\.py)$'.format(re.escape(_sep)))
del _sep
class CheckAttribute(object):
"""Raises an AttributeError if the attribute X isn't available."""
def __init__(self, func):
self.func = func
# Remove the py in front of e.g. py__call__.
self.check_name = func.__name__[2:]
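# Example: for py__call__, check_name becomes '__call__', so __get__
# probes instance.obj.__call__ and raises AttributeError if it is missing.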
def __get__(self, instance, owner):
# This might raise an AttributeError. That's wanted.
getattr(instance.obj, self.check_name)
return partial(self.func, instance)
class CompiledObject(Base):
# comply with the parser
start_pos = 0, 0
path = None # modules have this attribute - set it to None.
used_names = {} # To be consistent with modules.
def __init__(self, evaluator, obj, parent=None):
self._evaluator = evaluator
self.obj = obj
self.parent = parent
@CheckAttribute
def py__call__(self, params):
if inspect.isclass(self.obj):
from jedi.evaluate.representation import Instance
return set([Instance(self._evaluator, self, params)])
else:
return set(self._execute_function(params))
@CheckAttribute
def py__class__(self):
return create(self._evaluator, self.obj.__class__)
@CheckAttribute
def py__mro__(self):
return tuple(create(self._evaluator, cls) for cls in self.obj.__mro__)
@CheckAttribute
def py__bases__(self):
return tuple(create(self._evaluator, cls) for cls in self.obj.__bases__)
def py__bool__(self):
return bool(self.obj)
def py__file__(self):
return self.obj.__file__
def is_class(self):
return inspect.isclass(self.obj)
@property
def doc(self):
return inspect.getdoc(self.obj) or ''
@property
def params(self):
params_str, ret = self._parse_function_doc()
tokens = params_str.split(',')
if inspect.ismethoddescriptor(self.obj):
tokens.insert(0, 'self')
params = []
for p in tokens:
parts = [FakeName(part) for part in p.strip().split('=')]
if len(parts) > 1:
parts.insert(1, Operator(zero_position_modifier, '=', (0, 0)))
params.append(Param(parts, self))
return params
def __repr__(self):
return '<%s: %s>' % (type(self).__name__, repr(self.obj))
@underscore_memoization
def _parse_function_doc(self):
if self.doc is None:
return '', ''
return _parse_function_doc(self.doc)
def api_type(self):
obj = self.obj
if inspect.isclass(obj):
return 'class'
elif inspect.ismodule(obj):
return 'module'
elif inspect.isbuiltin(obj) or inspect.ismethod(obj) \
or inspect.ismethoddescriptor(obj) or inspect.isfunction(obj):
return 'function'
# Everything else...
return 'instance'
@property
def type(self):
"""Imitate the tree.Node.type values."""
cls = self._get_class()
if inspect.isclass(cls):
return 'classdef'
elif inspect.ismodule(cls):
return 'file_input'
elif inspect.isbuiltin(cls) or inspect.ismethod(cls) or \
inspect.ismethoddescriptor(cls):
return 'funcdef'
@underscore_memoization
def _cls(self):
"""
We used to limit the lookups for instantiated objects like list(), but
this is not the case anymore. Python itself
"""
# Ensures that a CompiledObject is returned that is not an instance (like list)
return self
def _get_class(self):
if not fake.is_class_instance(self.obj) or \
inspect.ismethoddescriptor(self.obj): # slots
return self.obj
try:
return self.obj.__class__
except AttributeError:
# happens with numpy.core.umath._UFUNC_API (you get it
# automatically by doing `import numpy`.
return type
@property
def names_dict(self):
# For compatibility with `representation.Class`.
return self.names_dicts(False)[0]
def names_dicts(self, search_global, is_instance=False):
return self._names_dict_ensure_one_dict(is_instance)
@memoize_method
def _names_dict_ensure_one_dict(self, is_instance):
"""
search_global shouldn't change the fact that there's one dict, this way
there's only one `object`.
"""
return [LazyNamesDict(self._evaluator, self, is_instance)]
def get_subscope_by_name(self, name):
if name in dir(self.obj):
return CompiledName(self._evaluator, self, name).parent
else:
raise KeyError("CompiledObject doesn't have an attribute '%s'." % name)
@CheckAttribute
def py__getitem__(self, index):
if type(self.obj) not in (str, list, tuple, unicode, bytes, bytearray, dict):
# Get rid of side effects, we won't call custom `__getitem__`s.
return set()
return set([create(self._evaluator, self.obj[index])])
@CheckAttribute
def py__iter__(self):
if type(self.obj) not in (str, list, tuple, unicode, bytes, bytearray, dict):
# Get rid of side effects, we won't call custom `__getitem__`s.
return
for part in self.obj:
yield set([create(self._evaluator, part)])
@property
def name(self):
try:
name = self._get_class().__name__
except AttributeError:
name = repr(self.obj)
return FakeName(name, self)
def _execute_function(self, params):
if self.type != 'funcdef':
return
for name in self._parse_function_doc()[1].split():
try:
bltn_obj = getattr(_builtins, name)
except AttributeError:
continue
else:
if bltn_obj is None:
# We want to evaluate everything except None.
# TODO do we?
continue
bltn_obj = create(self._evaluator, bltn_obj)
for result in self._evaluator.execute(bltn_obj, params):
yield result
@property
@underscore_memoization
def subscopes(self):
"""
Returns only the faked scopes - the other ones are not important for
internal analysis.
"""
module = self.get_parent_until()
faked_subscopes = []
for name in dir(self.obj):
try:
faked_subscopes.append(
fake.get_faked(module.obj, self.obj, parent=self, name=name)
)
except fake.FakeDoesNotExist:
pass
return faked_subscopes
def is_scope(self):
return True
def get_self_attributes(self):
return [] # Instance compatibility
def get_imports(self):
return [] # Builtins don't have imports
class CompiledName(FakeName):
def __init__(self, evaluator, compiled_obj, name):
super(CompiledName, self).__init__(name)
self._evaluator = evaluator
self._compiled_obj = compiled_obj
self.name = name
def __repr__(self):
try:
name = self._compiled_obj.name # __name__ is not defined all the time
except AttributeError:
name = None
return '<%s: (%s).%s>' % (type(self).__name__, name, self.name)
def is_definition(self):
return True
@property
@underscore_memoization
def parent(self):
module = self._compiled_obj.get_p
|
duncan-r/SHIP
|
tests/test_rowdatacollection.py
|
Python
|
mit
| 10,219
| 0.002838
|
from __future__ import unicode_literals
import unittest
from ship.datastructures import rowdatacollection as rdc
from ship.datastructures import dataobject as do
from ship.fmp.datunits import ROW_DATA_TYPES as rdt
class RowDataCollectionTests(unittest.TestCase):
def setUp(self):
# Create some object to use and add a couple of rows
# create chainage in position 1
self.obj1 = do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3)
self.obj1.data_collection.append(0.00)
self.obj1.data_collection.append(3.65)
# Create elevation in position 2
self.obj2 = do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3)
self.obj2.data_collection.append(32.345)
self.obj2.data_collection.append(33.45)
# Create roughness in position 3
self.obj3 = do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=None, no_of_dps=3)
self.obj3.data_collection.append(0.035)
self.obj3.data_collection.append(0.035)
self.testcol = rdc.RowDataCollection()
self.testcol._collection.append(self.obj1)
self.testcol._collection.append(self.obj2)
self.testcol._collection.append(self.obj3)
def test_initCollection(self):
'''
'''
# Create a dummy collection
obj1 = do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3)
obj2 = do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3)
obj3 = do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3)
localcol = rdc.RowDataCollection()
localcol._collection.append(obj1)
localcol._collection.append(obj2)
localcol._collection.append(obj3)
# Initialise a real collection
col = rdc.RowDataCollection()
col.addToCollection(do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3))
# Check that they're the same
col_eq, msg = self.checkCollectionEqual(localcol, col)
self.assertTrue(col_eq, 'rdc.RowDataCollection initialisation fail - ' + msg)
def test_bulkInitCollection(self):
objs = [
do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3),
do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3),
do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3),
]
col = rdc.RowDataCollection.bulkInitCollection(objs)
localcol = rdc.RowDataCollection()
localcol._collection.append(objs[0])
localcol._collection.append(objs[1])
localcol._collection.append(objs[2])
# Check they're the same
col_eq, msg = self.checkCollectionEqual(localcol, col)
self.assertTrue(col_eq, 'rdc.RowDataCollection initialisation fail - ' + msg)
def checkCollectionEqual(self, c1, c2):
'''Check the two given collections to make sure that they contain the same data.
@param c1: First rdc.RowDataCollection object
@param c2: Second rdc.RowDataCollection object
@return: True if they're equal False and reason if not.
'''
if not len(c1._collection) == len(c2._collection):
return False, 'Collections are different lengths'
for i in range(0, len(c1._collection)):
if not c1._collection[i].data_type == c2._collection[i].data_type:
return False, 'Collections have different data_types'
if not c1._collection[i].format_str == c2._collection[i].format_str:
return False, 'Collections have different format_str'
if not c1._collection[i].default == c2._collection[i].default:
return False, 'Collections have different default'
for j in range(0, len(c1._collection[i].data_collection)):
if not c1._collection[i].data_collection[j] == c1._collection[i].data_collection[j]:
return False, 'Collections have different data'
return True, ''
    def test_indexOfDataObject(self):
        """Should return the correct index of a particular ADataObject in the collection."""
        index1 = self.testcol.indexOfDataObject(rdt.CHAINAGE)
        index2 = self.testcol.indexOfDataObject(rdt.ELEVATION)
        index3 = self.testcol.indexOfDataObject(rdt.ROUGHNESS)
        self.assertEqual(index1, 0)
        self.assertEqual(index2, 1)
        self.assertEqual(index3, 2)
def test_iterateRows(self):
"""Test generator for complete row as a list"""
testrows = [
[0.00, 32.345, 0.035],
[3.65, 33.45, 0.035],
]
i = 0
for row in self.testcol.iterateRows():
            self.assertListEqual(row, testrows[i])
i += 1
    def test_iterateRowsWithKey(self):
"""Test generator for a single DataObject"""
testrows = [
32.345,
33.45,
]
i = 0
for row in self.testcol.iterateRows(rdt.ELEVATION):
self.assertEqual(row, testrows[i])
i += 1
def test_rowAsDict(self):
"""Shoud return a row as a dict of single values."""
test_dict = {rdt.CHAINAGE: 0.00, rdt.ELEVATION: 32.345, rdt.ROUGHNESS: 0.035}
row = self.testcol.rowAsDict(0)
self.assertDictEqual(row, test_dict)
def test_rowAsList(self):
test_list = [0.00, 32.345, 0.035]
row = self.testcol.rowAsList(0)
self.assertListEqual(row, test_list)
def test_dataObject(self):
"""Should return the correct ADataObject."""
test_vals = [0.00, 3.65]
obj = self.testcol.dataObject(rdt.CHAINAGE)
self.assertEqual(obj.data_type, rdt.CHAINAGE)
for i, o in enumerate(obj):
self.assertEqual(o, test_vals[i])
def test_dataObjectAsList(self):
"""Should return the contents of a DataObject as a list."""
test_list = [0.00, 3.65]
obj_list = self.testcol.dataObjectAsList(rdt.CHAINAGE)
self.assertListEqual(obj_list, test_list)
def test_toList(self):
test_list = [
[0.00, 3.65],
[32.345, 33.45],
[0.035, 0.035]
]
row_list = self.testcol.toList()
self.assertListEqual(row_list, test_list)
def test_toDict(self):
test_dict = {
rdt.CHAINAGE: [0.00, 3.65],
rdt.ELEVATION: [32.345, 33.45],
rdt.ROUGHNESS: [0.035, 0.035],
}
row_dict = self.testcol.toDict()
self.assertDictEqual(row_dict, test_dict)
def test_addValue(self):
        # Initialise a real collection
col = rdc.RowDataCollection()
col.addToCollection(do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3))
col._addValue(rdt.CHAINAGE, 2.5)
self.assertEqual(col._collection[0][0], 2.5)
def test_setValue(self):
        # Initialise a real collection
col = rdc.RowDataCollection()
col.addToCollection(do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3))
col._collection[0].addValue(2.5)
self.assertEqual(col._collection[0][0], 2.5)
col._setValue(rdt.CHAINAGE, 3.5, 0)
self.assertEqual(col._collection[0][0], 3.5)
def test_getPrintableRow(self):
test_row = ' 0.000 32.345 0.035'
row = self.testcol.getPrintableRow(0)
self.assertEqual(row, test_row)
def test_updat
|
Nomadblue/django-nomad-activity-feed
|
setup.py
|
Python
|
bsd-3-clause
| 1,187
| 0.000843
|
# coding: utf-8
import os
from setuptools import setup, find_packages
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-nomad-activity-feed',
version='0.1.1',
packages=find_packages(),
    description='A simple Django app to attach an activity feed to any Django model.',
long_description=README,
url='https://github.com/Nomadblue/django-activity-feed',
author='José Sazo',
author_email='[email protected]',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
drnlm/sqlobject
|
sqlobject/tests/test_boundattributes.py
|
Python
|
lgpl-2.1
| 1,672
| 0
|
import pytest
from sqlobject import boundattributes
from sqlobject import declarative
pytestmark = pytest.mark.skipif(
True,
reason='The module "boundattributes" and its tests were not finished yet')
class SOTestMe(object):
pass
class AttrReplace(boundattributes.BoundAttribute):
__unpackargs__ = ('replace',)
replace = None
@declarative.classinstancemethod
def make_object(self, cls, added_class, attr_name, **attrs):
if not self:
return cls.singleton().make_object(
added_class, attr_name, **attrs)
self.replace.added_class = added_class
self.replace.name = attr_name
assert attrs['replace'] is self.replace
del attrs['replace']
self.replace.attrs = attrs
return self.replace
class Holder:
def __init__(self, name):
self.holder_name = name
def __repr__(self):
return '<Holder %s>' % self.holder_name
def test_1():
v1 = Holder('v1')
v2 = Holder('v2')
v3 = Holder('v3')
class V2Class(AttrReplace):
arg1 = 'nothing'
arg2 = ['something']
class A1(SOTestMe):
a = AttrReplace(v1)
v = V2Class(v2)
class inline(AttrReplace):
replace = v3
arg3 = 'again'
arg4 = 'so there'
for n in ('a', 'v', 'inline'):
assert getattr(A1, n).name == n
assert getattr(A1, n).added_class is A1
assert A1.a is v1
assert A1.a.attrs == {}
assert A1.v is v2
assert A1.v.attrs == {'arg1': 'nothing', 'arg2': ['something']}
assert A1.inline is v3
assert A1.inline.attrs == {'arg3': 'again', 'arg4': 'so there'}
|
panmari/tensorflow
|
tensorflow/tensorboard/scripts/serialize_tensorboard.py
|
Python
|
apache-2.0
| 6,341
| 0.008831
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Consume and serialize all of the data from a running TensorBoard instance.
This program connects to a live TensorBoard backend at a given port, and saves
all of the data to local disk JSON in a predictable format.
This makes it easy to mock out the TensorBoard backend so that the frontend
may be tested in isolation.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
import os.path
import shutil
import threading
import urllib
import six
from six.moves import http_client
import tensorflow as tf
from tensorflow.python.summary import event_multiplexer
from tensorflow.tensorboard.backend import server
tf.flags.DEFINE_string('logdir', None, """the logdir to pass to the TensorBoard
backend; data will be read from this logdir for serialization.""")
tf.flags.DEFINE_string('target', None, """The directory where serialized data
will be written""")
tf.flags.DEFINE_boolean('overwrite', False, """Whether to remove and overwrite
TARGET if it already exists.""")
tf.flags.DEFINE_boolean(
'purge_orphaned_data', True, 'Whether to purge data that '
'may have been orphaned due to TensorBoard restarts. '
'Disabling purge_orphaned_data can be used to debug data '
'disappearance.')
FLAGS = tf.flags.FLAGS
BAD_CHARACTERS = "#%&{}\\/<>*? $!'\":@+`|="
def Url(route, params):
"""Takes route and query params, and produce encoded url for that asset."""
out = route
if params:
# sorting ensures a unique filename for each query
sorted_params = sorted(six.iteritems(params))
out += '?' + urllib.urlencode(sorted_params)
return out
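# For illustration (hypothetical values): Url('scalars', {'tag': 't', 'run': 'r1'})
# returns 'scalars?run=r1&tag=t' -- the sorted params give every query a stable,
# unique filename.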
def Clean(s):
"""Clean a string so it can be used as a filepath."""
for c in BAD_CHARACTERS:
s = s.replace(c, '_')
return s
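# For illustration: Clean('scalars?run=r1&tag=t') returns 'scalars_run_r1_tag_t'.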
class TensorBoardStaticSerializer(object):
"""Serialize all the routes from a Tenso
|
rBoard server to static json."""
def __init__(self, connection, target_path):
self.connection = connection
EnsureDirectoryExists(os.path.join(target_path, 'data'))
self.path = target_path
def GetAndSave(self, url):
"""GET the given url. Serialize the result at clean path version of url."""
    self.connection.request('GET', '/data/' + url)
response = self.connection.getresponse()
destination = self.path + '/data/' + Clean(url)
if response.status != 200:
raise IOError(url)
content = response.read()
with open(destination, 'w') as f:
f.write(content)
return content
def GetRouteAndSave(self, route, params=None):
"""GET given route and params. Serialize the result. Return as JSON."""
url = Url(route, params)
return json.loads(self.GetAndSave(url))
def Run(self):
"""Serialize everything from a TensorBoard backend."""
# get the runs object, which is an index for every tag.
runs = self.GetRouteAndSave('runs')
# collect sampled data.
self.GetRouteAndSave('scalars')
# now let's just download everything!
for run, tag_type_to_tags in six.iteritems(runs):
for tag_type, tags in six.iteritems(tag_type_to_tags):
try:
if tag_type == 'graph':
# in this case, tags is a bool which specifies if graph is present.
if tags:
self.GetRouteAndSave('graph', {run: run})
elif tag_type == 'images':
for t in tags:
images = self.GetRouteAndSave('images', {'run': run, 'tag': t})
for im in images:
url = 'individualImage?' + im['query']
# pull down the images themselves.
self.GetAndSave(url)
else:
for t in tags:
# Save this, whatever it is :)
self.GetRouteAndSave(tag_type, {'run': run, 'tag': t})
except IOError as e:
PrintAndLog('Retrieval failed for %s/%s/%s' % (tag_type, run, tags),
tf.logging.WARN)
PrintAndLog('Got Exception: %s' % e, tf.logging.WARN)
PrintAndLog('continuing...', tf.logging.WARN)
continue
def EnsureDirectoryExists(path):
if not os.path.exists(path):
os.makedirs(path)
def PrintAndLog(msg, lvl=tf.logging.INFO):
tf.logging.log(lvl, msg)
print(msg)
def main(unused_argv=None):
target = FLAGS.target
logdir = FLAGS.logdir
if not target or not logdir:
PrintAndLog('Both --target and --logdir are required.', tf.logging.ERROR)
return -1
if os.path.exists(target):
if FLAGS.overwrite:
if os.path.isdir(target):
shutil.rmtree(target)
else:
os.remove(target)
else:
PrintAndLog('Refusing to overwrite target %s without --overwrite' %
target, tf.logging.ERROR)
return -2
path_to_run = server.ParseEventFilesSpec(FLAGS.logdir)
PrintAndLog('About to load Multiplexer. This may take some time.')
multiplexer = event_multiplexer.EventMultiplexer(
size_guidance=server.TENSORBOARD_SIZE_GUIDANCE,
purge_orphaned_data=FLAGS.purge_orphaned_data)
server.ReloadMultiplexer(multiplexer, path_to_run)
PrintAndLog('Multiplexer load finished. Starting TensorBoard server.')
s = server.BuildServer(multiplexer, 'localhost', 0)
server_thread = threading.Thread(target=s.serve_forever)
server_thread.daemon = True
server_thread.start()
connection = http_client.HTTPConnection('localhost', s.server_address[1])
PrintAndLog('Server setup! Downloading data from the server.')
x = TensorBoardStaticSerializer(connection, target)
x.Run()
PrintAndLog('Done downloading data.')
connection.close()
s.shutdown()
s.server_close()
if __name__ == '__main__':
tf.app.run()
|
rbu/mediadrop
|
mediadrop/migrations/versions/004-280565a54124-add_custom_head_tags.py
|
Python
|
gpl-3.0
| 1,908
| 0.007862
|
# This file is a part of MediaDrop (http://www.mediadrop.net),
# Copyright 2009-2015 MediaDrop contributors
# For the exact contribution history, see the git revision log.
# The source code contained in this file is licensed under the GPLv3 or
# (at your option) any later version.
# See LICENSE.txt in the main project directory, for more information.
"""add custom head tags
add setting for custom tags (HTML) in <head> section
added: 2012-02-13 (v0.10dev)
previously migrate script v054
Revision ID: 280565a54124
Revises: 4d27ff5680e5
Create Date: 2013-05-14 22:38:02.552230
"""
# revision identifiers, used by Alembic.
revision = '280565a54124'
down_revision = '4d27ff5680e5'
from alembic.op import execute, inline_literal
from sqlalchemy import Integer, Unicode, UnicodeText
from sqlalchemy import Column, MetaData, Table
# -- table definition ---------------------------------------------------------
metadata = MetaData()
settings = Table('settings', metadata,
Column('id', Integer, autoincrement=True, primary_key=True),
Column('key', Unicode(255), nullable=False, unique=True),
Column('value', UnicodeText),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
# -- helpers ------------------------------------------------------------------
def insert_setting(key, value):
execute(
settings.insert().\
values({
'key': inline_literal(key),
'value': inline_literal(value),
})
)
def delete_setting(key):
execute(
settings.delete().\
where(settings.c.key==inline_literal(key))
)
# -----------------------------------------------------------------------------
SETTINGS = [
(u'appearance_custom_head_tags', u''),
]
def upgrade():
for key, value in SETTINGS:
insert_setting(key, value)
def downgrade():
for key, value in SETTINGS:
delete_setting(key)
|
snfactory/cubefit
|
cubefit/main.py
|
Python
|
mit
| 26,267
| 0.000533
|
"""Main entry points for scripts."""
from __future__ import print_function, division
from argparse import ArgumentParser
from collections import OrderedDict
from copy import copy
from datetime import datetime
import glob
import json
import logging
import math
import os
import scipy.stats
import numpy as np
from .version import __version__
from .psffuncs import gaussian_moffat_psf
from .psf import TabularPSF, GaussianMoffatPSF
from .io import read_datacube, write_results, read_results
from .fitting import (guess_sky, fit_galaxy_single, fit_galaxy_sky_multi,
fit_position_sky, fit_position_sky_sn_multi,
RegularizationPenalty)
from .utils import yxbounds
from .extern import ADR, Hyper_PSF3D_PL
__all__ = ["cubefit", "cubefit_subtract", "cubefit_plot"]
MODEL_SHAPE = (32, 32)
SPAXEL_SIZE = 0.43
MIN_NMAD = 2.5 # Minimum Number of Median Absolute Deviations above
# the minimum spaxel value in fit_position
LBFGSB_FACTOR = 1e10
REFWAVE = 5000. # reference wavelength in Angstroms for PSF params and ADR
POSITION_BOUND = 3.  # Bound on fitted positions relative to initial positions
def snfpsf(wave, psfparams, header, psftype):
"""Create a 3-d PSF based on SNFactory-specific parameterization of
Gaussian + Moffat PSF parameters and ADR."""
# Get Gaussian+Moffat parameters at each wavelength.
relwave = wave / REFWAVE - 1.0
ellipticity = abs(psfparams[0]) * np.ones_like(wave)
alpha = np.abs(psfparams[1] +
psfparams[2] * relwave +
psfparams[3] * relwave**2)
# correlated parameters (coefficients determined externally)
sigma = 0.545 + 0.215 * alpha # Gaussian parameter
beta = 1.685 + 0.345 * alpha # Moffat parameter
eta = 1.040 + 0.0 * alpha # gaussian ampl. / moffat ampl.
# Atmospheric differential refraction (ADR): Because of ADR,
# the center of the PSF will be different at each wavelength,
# by an amount that we can determine (pretty well) from the
# atmospheric conditions and the pointing and angle of the
# instrument. We calculate the offsets here as a function of
# observation and wavelength and input these to the model.
# Correction to parallactic angle and airmass for 2nd-order effects
# such as MLA rotation, mechanical flexures or finite-exposure
# corrections. These values have been trained on faint-std star
# exposures.
#
# `predict_adr_params` uses 'AIRMASS', 'PARANG' and 'CHANNEL' keys
# in input dictionary.
delta, theta = Hyper_PSF3D_PL.predict_adr_params(header)
# check for crazy values of pressure and temperature, and assign default
# values.
pressure = header.get('PRESSURE', 617.)
if not 550. < pressure < 650.:
pressure = 617.
temp = header.get('TEMP', 2.)
if not -20. < temp < 20.:
temp = 2.
adr = ADR(pressure, temp, lref=REFWAVE, delta=delta, theta=theta)
adr_refract = adr.refract(0, 0, wave, unit=SPAXEL_SIZE)
# adr_refract[0, :] corresponds to x, adr_refract[1, :] => y
xctr, yctr = adr_refract
if psftype == 'gaussian-moffat':
return GaussianMoffatPSF(sigma, alpha, beta, ellipticity, eta,
yctr, xctr, MODEL_SHAPE, subpix=3)
elif psftype == 'tabular':
A = gaussian_moffat_psf(sigma, alpha, beta, ellipticity, eta,
yctr, xctr, MODEL_SHAPE, subpix=3)
return TabularPSF(A)
else:
raise ValueError("unknown psf type: " + repr(psftype))
def setup_logging(loglevel, logfname=None):
# if loglevel isn't an integer, parse it as "debug", "info", etc:
if not isinstance(loglevel, int):
loglevel = getattr(logging, loglevel.upper(), None)
if not isinstance(loglevel, int):
print('Invalid log level: %s' % loglevel)
exit(1)
# remove logfile if it already exists
if logfname is not None and os.path.exists(logfname):
os.remove(logfname)
logging.basicConfig(filename=logfname, format="%(levelname)s %(message)s",
level=loglevel)
def cubefit(argv=None):
DESCRIPTION = "Fit SN + galaxy model to SNFactory data cubes."
parser = ArgumentParser(prog="cubefit", description=DESCRIPTION)
parser.add_argument("configfile",
help="configuration file name (JSON format)")
parser.add_argument("outfile", help="Output file name (FITS format)")
parser.add_argument("--dataprefix", default="",
help="path prepended to data file names; default is "
"empty string")
parser.add_argument("--logfile", help="Write log to this file "
"(default: print to stdout)", default=None)
parser.add_argument("--loglevel", default="info",
help="one of: debug, info, warning (default is info)")
parser.add_argument("--diagdir", default=None,
help="If given, write intermediate diagnostic results "
"to this directory")
parser.add_argument("--refitgal", default=False, action="store_true",
help="Add an iteration where galaxy model is fit "
"using all epochs and then data/SN positions are "
"refit")
parser.add_argument("--mu_wave", default=0.07, type=float,
help="Wavelength regularization parameter. "
"Default is 0.07.")
parser.add_argument("--mu_xy", default=0.001, type=float,
help="Spatial regularization parameter. "
"Default is 0.001.")
parser.add_argument("--psftype", default="gaussian-moffat",
help="Type of PSF: 'gaussian-moffat' or 'tabular'. "
"Currently, tabular means generate a tabular PSF from "
"gaussian-moffat parameters.")
args = parser.parse_args(argv)
setup_logging(args.loglevel, logfname=args.logfile)
# record start time
tstart = datetime.now()
logging.info("cubefit v%s started at %s", __version__,
tstart.strftime("%Y-%m-
|
%d %H:%M:%S"))
tsteps = OrderedDict() # finish time of each step.
    logging.info("parameters: mu_wave={:.3g} mu_xy={:.3g} refitgal={}"
.format(args.mu_wave, args.mu_xy, args.refitgal))
logging.info(" psftype={}".format(args.psftype))
logging.info("reading config file")
with open(args.configfile) as f:
cfg = json.load(f)
# basic checks on config contents.
assert (len(cfg["filenames"]) == len(cfg["xcenters"]) ==
len(cfg["ycenters"]) == len(cfg["psf_params"]))
# -------------------------------------------------------------------------
# Load data cubes from the list of FITS files.
nt = len(cfg["filenames"])
logging.info("reading %d data cubes", nt)
cubes = []
for fname in cfg["filenames"]:
logging.debug(" reading %s", fname)
cubes.append(read_datacube(os.path.join(args.dataprefix, fname)))
wave = cubes[0].wave
nw = len(wave)
# assign some local variables for convenience
refs = cfg["refs"]
master_ref = cfg["master_ref"]
if master_ref not in refs:
raise ValueError("master ref choice must be one of the final refs (" +
" ".join(refs.astype(str)) + ")")
nonmaster_refs = [i for i in refs if i != master_ref]
nonrefs = [i for i in range(nt) if i not in refs]
# Ensure that all cubes have the same wavelengths.
if not all(np.all(cubes[i].wave == wave) for i in range(1, nt)):
raise ValueError("all data must have same wavelengths")
# -------------------------------------------------------------------------
# PSF for each observation
logging.info("setting up PSF for all %d epochs", nt)
psfs = [snfpsf(wave, cfg["psf_params"][i], cubes[i].header, args.psftype)
for i in range(nt)]
# -------------------------------------------------------------------------
    # Initialize all model parameters to be fit.
|
talkincode/txportal
|
txportal/simulator/handlers/auth_handler.py
|
Python
|
mit
| 1,737
| 0.003454
|
#!/usr/bin/env python
# coding=utf-8
import struct
from twisted.internet import defer
from txportal.packet import cmcc, huawei
from txportal.simulator.handlers import base_handler
import functools
class AuthHandler(base_handler.BasicHandler):
def proc_cmccv1(self, req, rundata):
resp = cmcc.Portal.newMessage(
cmcc.ACK_AUTH,
req.userIp,
req.serialNo,
req.reqId,
secret=self.secret
)
resp.attrNum = 1
resp.attrs = [
(0x05, 'success'),
]
return resp
def proc_cmccv2(self, req, rundata):
resp = cmcc.Portal.newMessage(
cmcc.ACK_AUTH,
req.userIp,
req.serialNo,
req.reqId,
secret=self.secret
)
resp.attrNum = 1
resp.attrs = [
(0x05, 'success'),
]
return resp
def proc_huaweiv1(self, req, rundata):
resp = huawei.Portal.newMessage(
huawei.ACK_AUTH,
req.userIp,
req.serialNo,
req.reqId,
secret=self.secret
)
resp.attrNum = 1
resp.attrs = [
(0x05, 'success'),
]
return resp
@defer.inlineCallbacks
def proc_huaweiv2(self, req, rundata):
resp = huawei.PortalV2.newMessage(
huawei.ACK_AUTH,
req.userIp,
req.serialNo,
req.reqId,
self.secret,
auth=req.auth,
chap=(req.isChap==0x00)
)
resp.attrNum = 1
resp.attrs = [
(0x05, 'success'),
]
resp.auth_packet()
return resp
|
nugget/home-assistant
|
homeassistant/components/satel_integra/binary_sensor.py
|
Python
|
apache-2.0
| 3,430
| 0
|
"""Support for Satel Integra zone states- represented as binary sensors."""
import logging
from homeassistant.components.binary_sensor import BinarySensorDevice
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import (
CONF_OUTPUTS, CONF_ZONE_NAME, CONF_ZONE_TYPE, CONF_ZONES, DATA_SATEL,
SIGNAL_OUTPUTS_UPDATED, SIGNAL_ZONES_UPDATED)
DEPENDENCIES = ['satel_integra']
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None):
"""Set up the Satel Integra binary sensor devices."""
if not discovery_info:
return
configured_zones = discovery_info[CONF_ZONES]
devices = []
for zone_num, device_config_data in configured_zones.items():
zone_type = device_config_data[CONF_ZONE_TYPE]
zone_name = device_config_data[CONF_ZONE_NAME]
device = SatelIntegraBinarySensor(
zone_num, zone_name, zone_type, SIGNAL_ZONES_UPDATED)
devices.append(device)
configured_outputs = discovery_info[CONF_OUTPUTS]
for zone_num, device_config_data in configured_outputs.items():
zone_type = device_config_data[CONF_ZONE_TYPE]
zone_name = device_config_data[CONF_ZONE_NAME]
device = SatelIntegraBinarySensor(
zone_num, zone_name, zone_type, SIGNAL_OUTPUTS_UPDATED)
devices.append(device)
async_add_entities(devices)
class SatelIntegraBinarySensor(BinarySensorDevice):
"""Representation of an Satel Integra binary sensor."""
def __init__(self, device_number, device_name, zone_type, react_to_signal):
"""Initialize the binary_sensor."""
self._device_number = device_number
self._name = device_name
self._zone_type = zone_type
self._state = 0
self._react_to_signal = react_to_signal
async def async_added_to_hass(self):
"""Register callbacks."""
if self._react_to_signal == SIGNAL_OUTPUTS_UPDATED:
if self._device_number in\
self.hass.data[DATA_SATEL].violated_outputs:
self._state = 1
else:
self._state = 0
else:
if self._device_number in\
self.hass.data[DATA_SATEL].violated_zones:
self._state = 1
else:
self._state = 0
async_dispatcher_connect(
self.hass, self._react_to_signal, self._devices_updated)
@property
def name(self):
"""Return the name of the entity."""
return self._name
@property
def icon(self):
"""Icon for device by its type."""
if self._zone_type == 'smoke':
return "mdi:fire"
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def is_on(self):
"""Return true if sensor is on."""
return self._state == 1
@property
    def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return self._zone_type
@callback
def _devices_updated(self, zones):
"""Update the zone's state, if needed."""
if self._device_number in zones \
and self._state != zones[self._device_number]:
self._state = zones[self._device_number]
self.async_schedule_update_ha_state()
|
jaloren/robotframework
|
src/robot/libraries/BuiltIn.py
|
Python
|
apache-2.0
| 146,969
| 0.001082
|
# Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import difflib
import re
import time
import token
from tokenize import generate_tokens, untokenize
from robot.api import logger
from robot.errors import (ContinueForLoop, DataError, ExecutionFailed,
ExecutionFailures, ExecutionPassed, ExitForLoop,
PassExecution, ReturnFromKeyword)
from robot.running import Keyword, RUN_KW_REGISTER
from robot.running.context import EXECUTION_CONTEXTS
from robot.running.usererrorhandler import UserErrorHandler
from robot.utils import (DotDict, escape, format_assign_message,
get_error_message, get_time, is_falsy, is_integer,
is_string, is_truthy, is_unicode, IRONPYTHON, JYTHON,
Matcher, normalize, NormalizedDict, parse_time, prepr,
RERAISED_EXCEPTIONS, plural_or_not as s, roundup,
secs_to_timestr, seq2str, split_from_equals, StringIO,
timestr_to_secs, type_name, unic)
from robot.utils.asserts import assert_equal, assert_not_equal
from robot.variables import (is_list_var, is_var, DictVariableTableValue,
VariableTableValue, VariableSplitter,
variable_not_found)
from robot.version import get_version
if JYTHON:
from java.lang import String, Number
# TODO: Clean-up registering run keyword variants in RF 3.1.
# https://github.com/robotframework/robotframework/issues/2190
def run_keyword_variant(resolve):
def decorator(method):
RUN_KW_REGISTER.register_run_keyword('BuiltIn', method.__name__,
resolve, deprecation_warning=False)
return method
return decorator
class _BuiltInBase(object):
    @property
def _context(self):
return self._get_context()
def _get_context(self, top=False):
ctx = EXECUTION_CONTEXTS.current if not top else EXECUTION_CONTEXTS.top
if ctx is None:
raise RobotNotRunningError('Cannot access execution context')
return ctx
@property
def _namespace(self):
return self._get_context().namespace
@property
def _variables(self):
return self._namespace.variables
def _matches(self, string, pattern):
# Must use this instead of fnmatch when string may contain newlines.
matcher = Matcher(pattern, caseless=False, spaceless=False)
return matcher.match(string)
def _is_true(self, condition):
if is_string(condition):
condition = self.evaluate(condition, modules='os,sys')
return bool(condition)
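        # For illustration (hypothetical input): _is_true('1 < 2') evaluates
        # the string as a Python expression and returns True.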
def _log_types(self, *args):
self._log_types_at_level('DEBUG', *args)
def _log_types_at_level(self, level, *args):
msg = ["Argument types are:"] + [self._get_type(a) for a in args]
self.log('\n'.join(msg), level)
def _get_type(self, arg):
# In IronPython type(u'x') is str. We want to report unicode anyway.
if is_unicode(arg):
return "<type 'unicode'>"
return str(type(arg))
class _Converter(_BuiltInBase):
def convert_to_integer(self, item, base=None):
"""Converts the given item to an integer number.
If the given item is a string, it is by default expected to be an
integer in base 10. There are two ways to convert from other bases:
- Give base explicitly to the keyword as ``base`` argument.
- Prefix the given string with the base so that ``0b`` means binary
(base 2), ``0o`` means octal (base 8), and ``0x`` means hex (base 16).
The prefix is considered only when ``base`` argument is not given and
may itself be prefixed with a plus or minus sign.
The syntax is case-insensitive and possible spaces are ignored.
Examples:
| ${result} = | Convert To Integer | 100 | | # Result is 100 |
| ${result} = | Convert To Integer | FF AA | 16 | # Result is 65450 |
| ${result} = | Convert To Integer | 100 | 8 | # Result is 64 |
| ${result} = | Convert To Integer | -100 | 2 | # Result is -4 |
| ${result} = | Convert To Integer | 0b100 | | # Result is 4 |
| ${result} = | Convert To Integer | -0x100 | | # Result is -256 |
See also `Convert To Number`, `Convert To Binary`, `Convert To Octal`,
`Convert To Hex`, and `Convert To Bytes`.
"""
self._log_types(item)
return self._convert_to_integer(item, base)
def _convert_to_integer(self, orig, base=None):
try:
item = self._handle_java_numbers(orig)
item, base = self._get_base(item, base)
if base:
return int(item, self._convert_to_integer(base))
return int(item)
except:
raise RuntimeError("'%s' cannot be converted to an integer: %s"
% (orig, get_error_message()))
def _handle_java_numbers(self, item):
if not JYTHON:
return item
if isinstance(item, String):
return unic(item)
if isinstance(item, Number):
return item.doubleValue()
return item
def _get_base(self, item, base):
if not is_string(item):
return item, base
item = normalize(item)
if item.startswith(('-', '+')):
sign = item[0]
item = item[1:]
else:
sign = ''
bases = {'0b': 2, '0o': 8, '0x': 16}
if base or not item.startswith(tuple(bases)):
return sign+item, base
return sign+item[2:], bases[item[:2]]
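        # For illustration (hypothetical input): _get_base('0b100', None)
        # strips the '0b' prefix and returns ('100', 2).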
def convert_to_binary(self, item, base=None, prefix=None, length=None):
"""Converts the given item to a binary string.
The ``item``, with an optional ``base``, is first converted to an
integer using `Convert To Integer` internally. After that it
is converted to a binary number (base 2) represented as a
string such as ``1011``.
The returned value can contain an optional ``prefix`` and can be
required to be of minimum ``length`` (excluding the prefix and a
possible minus sign). If the value is initially shorter than
the required length, it is padded with zeros.
Examples:
| ${result} = | Convert To Binary | 10 | | | # Result is 1010 |
| ${result} = | Convert To Binary | F | base=16 | prefix=0b | # Result is 0b1111 |
| ${result} = | Convert To Binary | -2 | prefix=B | length=4 | # Result is -B0010 |
See also `Convert To Integer`, `Convert To Octal` and `Convert To Hex`.
"""
return self._convert_to_bin_oct_hex(item, base, prefix, length, 'b')
def convert_to_octal(self, item, base=None, prefix=None, length=None):
"""Converts the given item to an octal string.
The ``item``, with an optional ``base``, is first converted to an
integer using `Convert To Integer` internally. After that it
is converted to an octal number (base 8) represented as a
string such as ``775``.
The returned value can contain an optional ``prefix`` and can be
required to be of minimum ``length`` (excluding the prefix and a
possible minus sign). If the value is initially shorter than
the required length, it is padded with zeros.
Examples:
| ${result} = | Convert To Octal | 10 | | | # Result is 12
|
AWhetter/pacman
|
test/pacman/tests/remove047.py
|
Python
|
gpl-2.0
| 521
| 0
|
self.description = "Remove a package required by other packages"
lp1 = pmpkg("pkg1")
self.addpkg2db("local", lp1)
lp2 = pmpkg("pkg2")
lp2.depends = ["pkg1"]
self.addpkg2db("local", lp2)
lp3 = pmpkg("pkg3")
lp3.depends = ["pkg1"]
self.addpkg2db("local", lp3)
lp4 = pmpkg("pkg4")
lp4.depends = ["pkg1"]
self.addpkg2db("local", lp4)
self.args = "-R pkg1 pkg2"
self.addrule("!PACMAN_RETCODE=0")
self.addrule("PKG_EXIST=pkg1")
self.addrule("PKG_EXIST=pkg2")
self.addrule("PKG_EXIST=pkg3")
self.addrule("PKG_EXIST=pkg4")
|
asedunov/intellij-community
|
python/testData/quickFixes/PyUpdatePropertySignatureQuickFixTest/getter.py
|
Python
|
apache-2.0
| 245
| 0.053061
|
class A(Aa):
@property
def <warning descr="Getter signature should be (self)">x<caret></warning>(self, r):
return ""
@x.setter
def <warning descr="Setter should not return a value">x</warning>(self, r):
return r
|
annarev/tensorflow
|
tensorflow/python/ops/list_ops.py
|
Python
|
apache-2.0
| 14,846
| 0.00714
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops to manipulate lists of tensors."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.core.framework import types_pb2
from tensorflow.python.framework import cpp_shape_inference_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_list_ops
from tensorflow.python.ops import handle_data_util
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_list_ops import *
# pylint: enable=wildcard-import
from tensorflow.python.util.lazy_loader import LazyLoader
# list_ops -> control_flow_ops -> tensor_array_ops -> list_ops
control_flow_ops = LazyLoader(
"control_flow_ops", globals(),
"tensorflow.python.ops.control_flow_ops")
ops.NotDifferentiable("TensorListConcatLists")
ops.NotDifferentiable("TensorListElementShape")
ops.NotDifferentiable("TensorListLength")
ops.NotDifferentiable("TensorListPushBackBatch")
def empty_tensor_list(element_shape,
element_dtype,
max_num_elements=None,
name=None):
if max_num_elements is None:
max_num_elements = -1
return gen_list_ops.empty_tensor_list(
element_shape=_build_element_shape(element_shape),
element_dtype=element_dtype,
max_num_elements=max_num_elements,
name=name)
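# A minimal usage sketch (shape and dtype are illustrative, not from the
# original source):
#
#   handle = empty_tensor_list(element_shape=[2], element_dtype=dtypes.float32)
#
# creates an empty, unbounded TensorList whose elements are float32 vectors.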
def _set_handle_data(list_handle, element_shape, element_dtype):
"""Sets type information on `list_handle` for consistency with graphs."""
# TODO(b/169968286): It would be better if we had a consistent story for
# creating handle data from eager operations (shared with VarHandleOp).
if isinstance(list_handle, ops.EagerTensor):
if tensor_util.is_tf_type(element_shape):
element_shape = tensor_shape.TensorShape(None)
elif not isinstance(element_shape, tensor_shape.TensorShape):
element_shape = tensor_shape.TensorShape(element_shape)
handle_data = cpp_shape_inference_pb2.CppShapeInferenceResult.HandleData()
handle_data.is_set = True
handle_data.shape_and_type.append(
cpp_shape_inference_pb2.CppShapeInferenceResult.HandleShapeAndType(
shape=element_shape.as_proto(),
dtype=element_dtype.as_datatype_enum,
specialized_type=types_pb2.ST_TENSOR_LIST))
list_handle._handle_data = handle_data # pylint: disable=protected-access
def tensor_list_reserve(element_shape, num_elements, element_dtype, name=None):
result = gen_list_ops.tensor_list_reserve(
element_shape=_build_element_shape(element_shape),
num_elements=num_elements,
element_dtype=element_dtype,
name=name)
# TODO(b/169968286): gen_ops needs to ensure the metadata is properly
# populated for eager operations.
_set_handle_data(result, element_shape, element_dtype)
return result
def tensor_list_from_tensor(tensor, element_shape, name=None):
tensor = ops.convert_to_tensor(tensor)
result = gen_list_ops.tensor_list_from_tensor(
tensor=tensor,
element_shape=_build_element_shape(element_shape),
name=name)
_set_handle_data(result, tensor.shape, tensor.dtype)
return result
def tensor_list_get_item(input_handle, index, element_dtype, element_shape=None,
name=None):
return gen_list_ops.tensor_list_get_item(
input_handle=input_handle,
index=index,
element_shape=_build_element_shape(element_shape),
element_dtype=element_dtype,
name=name)
def tensor_list_pop_back(input_handle, element_dtype, name=None):
return gen_list_ops.tensor_list_pop_back(
input_handle=input_handle,
element_shape=-1,
element_dtype=element_dtype,
name=name)
def tensor_list_gather(input_handle,
indices,
element_dtype,
element_shape=None,
name=None):
return gen_list_ops.tensor_list_gather(
input_handle=input_handle,
indices=indices,
element_shape=_build_element_shape(element_shape),
element_dtype=element_dtype,
name=name)
def tensor_list_scatter(tensor,
indices,
element_shape=None,
input_handle=None,
name=None):
"""Returns a TensorList created or updated by scattering `tensor`."""
tensor = ops.convert_to_tensor(tensor)
if input_handle is not None:
output_handle = gen_list_ops.tensor_list_scatter_into_existing_list(
input_handle=input_handle, tensor=tensor, indices=indices, name=name)
handle_data_util.copy_handle_data(input_handle, output_handle)
return output_handle
else:
output_handle = gen_list_ops.tensor_list_scatter_v2(
tensor=tensor,
indices=indices,
element_shape=_build_element_shape(element_shape),
num_elements=-1,
name=name)
_set_handle_data(output_handle, element_shape, tensor.dtype)
return output_handle
def tensor_list_stack(input_handle,
element_dtype,
num_elements=-1,
element_shape=None,
name=None):
return gen_list_ops.tensor_list_stack(
input_handle=input_handle,
element_shape=_build_element_shape(element_shape),
element_dtype=element_dtype,
num_elements=num_elements,
name=name)
def tensor_list_concat(input_handle, element_dtype, element_shape=None,
name=None):
# Ignore the lengths output of TensorListConcat. It is only used during
# gradient computation.
return gen_list_ops.tensor_list_concat_v2(
input_handle=input_handle,
element_dtype=element_dtype,
element_shape=_build_element_shape(element_shape),
leading_dims=ops.convert_to_tensor([], dtype=dtypes.int64),
name=name)[0]
def tensor_list_split(tensor, element_shape, lengths, name=None):
return gen_list_ops.tensor_list_split(
tensor=tensor,
element_shape=_build_element_shape(element_shape),
lengths=lengths,
name=name)
def tensor_list_set_item(input_handle,
index,
item,
resize_if_index_out_of_bounds=False,
name=None):
"""Sets `item` at `index` in input list."""
if resize_if_index_out_of_bounds:
input_list_size = gen_list_ops.tensor_list_length(input_handle)
# TODO(srbs): This could cause some slowdown. Consider fusing resize
# functionality in the SetItem op.
input_handle = control_flow_ops.cond(
index >= input_list_size,
lambda: gen_list_ops.tensor_list_resize( # pylint: disable=g-long-lambda
input_handle, index + 1),
lambda: input_handle)
output_handle = gen_list_ops.tensor_list_set_item(
input_handle=input_handle, index=index, item=item, name=name)
handle_data_util.copy_handle_data(input_handle, output_handle)
return output_handle
@ops.RegisterGradient("TensorListPushBack")
def _PushBackGrad(op, dresult):
return gen_list_ops.tensor_list_pop_back(
dresult,
element_shape=array_ops.shape(op.inputs[1]),
element_dtype=op.get_attr("element_dtype"))
@ops.RegisterGradient(
|
Alexander-M-Waldman/local_currency_site
|
lib/python2.7/site-packages/allauth/socialaccount/south_migrations/0013_auto__chg_field_socialaccount_uid__chg_field_socialapp_secret__chg_fie.py
|
Python
|
gpl-3.0
| 7,605
| 0.00789
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
        # Changing field 'SocialAccount.uid'
        db.alter_column(u'socialaccount_socialaccount', 'uid', self.gf('django.db.models.fields.CharField')(max_length=191))
# Changing field 'SocialApp.secret'
db.alter_column(u'socialaccount_socialapp', 'secret', self.gf('django.db.models.fields.CharField')(max_length=191))
# Changing field 'SocialApp.client_id'
db.alter_column(u'socialaccount_socialapp', 'client_id', self.gf('django.db.models.fields.CharField')(max_length=191))
# Changing field 'SocialApp.key'
db.alter_column(u'socialaccount_socialapp', 'key', self.gf('django.db.models.fields.CharField')(max_length=191))
def backwards(self, orm):
# Changing field 'SocialAccount.uid'
db.alter_column(u'socialaccount_socialaccount', 'uid', self.gf('django.db.models.fields.CharField')(max_length=255))
# Changing field 'SocialApp.secret'
db.alter_column(u'socialaccount_socialapp', 'secret', self.gf('django.db.models.fields.CharField')(max_length=100))
# Changing field 'SocialApp.client_id'
db.alter_column(u'socialaccount_socialapp', 'client_id', self.gf('django.db.models.fields.CharField')(max_length=100))
# Changing field 'SocialApp.key'
db.alter_column(u'socialaccount_socialapp', 'key', self.gf('django.db.models.fields.CharField')(max_length=100))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sites.site': {
'Meta': {'ordering': "(u'domain',)", 'object_name': 'Site', 'db_table': "u'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'socialaccount.socialaccount': {
'Meta': {'unique_together': "(('provider', 'uid'),)", 'object_name': 'SocialAccount'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'extra_data': ('allauth.socialaccount.fields.JSONField', [], {'default': "'{}'"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'uid': ('django.db.models.fields.CharField', [], {'max_length': '191'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'socialaccount.socialapp': {
'Meta': {'object_name': 'SocialApp'},
'client_id': ('django.db.models.fields.CharField', [], {'max_length': '191'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '191', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '191'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sites.Site']", 'symmetrical': 'False', 'blank': 'True'})
},
u'socialaccount.socialtoken': {
'Meta': {'unique_together': "(('app', 'account'),)", 'object_name': 'SocialToken'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['socialaccount.SocialAccount']"}),
'app': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['socialaccount.SocialApp']"}),
'expires_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.TextField', [], {}),
'token_secret': ('django.db.models.fields.TextField', [], {'blank': 'True'})
}
}
complete_apps = ['socialaccount']
|
alogg/dolfin
|
demo/undocumented/ale/python/demo_ale.py
|
Python
|
gpl-3.0
| 1,429
| 0.0007
|
"""This demo demonstrates how to move the vertex coordinates of a
boundary mesh and then update the interior vertex coordinates of the
original mesh by suitably interpolating the vertex coordinates (useful
for implementation of ALE methods)."""
# Copyright (C) 2008 Solveig Bruvoll and Anders Logg
#
# This file is part of DOLFIN.
#
# DOLFIN is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DOLFIN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DOLFIN. If not, see <http://www.gnu.org/licenses/>.
#
# First added: 2008-05-02
# Last changed: 2008-12-12
from dolfin import *
print "This demo is presently broken. See https://bugs.launchpad.net/dolfin/+bug/1047641"
exit()
# Create mesh
mesh = UnitSquareMesh(20, 20)
# Create boundary mesh
boundary = BoundaryMesh(mesh)
# Move vertices in boundary
for x in boundary.coordinates():
x[0] *= 3.0
x[1] += 0.1*sin(5.0*x[0])
# Move mesh
mesh.move(boundary)
# Plot mesh
plot(mesh, interactive=True)
|
bmi-forum/bmi-pyre
|
pythia-0.8/packages/journal/journal/components/Device.py
|
Python
|
gpl-2.0
| 1,235
| 0.006478
|
#!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Michael A.G. Aivazis
# California Institute of Technology
# (C) 1998-2005 All Rights Reserved
#
# <LicenseText>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
from pyre.components.Component import Component
class Device(Component):
class Inventory(Component.Inventory):
from RendererFacility import RendererFacility
renderer = RendererFacility()
renderer.meta['tip'] = 'the facility that controls how the messages are formatted'
    def createDevice(self):
raise NotImplementedError("class '%s' must override 'device'" % self.__class__.__name__)
def __init__(self, name):
Component.__init__(self, name, "journal-device")
self.device = None
return
def _init(self):
device = self.createDevice()
renderer = self.inventory.renderer.renderer
device.renderer = renderer
self.device = device
return
# version
__id__ = "$Id: Device.py,v 1.2 2005/03/10 06:16:37 aivazis Exp $"
# End of file
|
codefisher/djangopress
|
djangopress/accounts/models.py
|
Python
|
mit
| 5,282
| 0.006437
|
import random
import datetime
import time
import hashlib
from django.db import models
from django.conf import settings
from django.urls import reverse
from django.contrib.auth.models import User, Group
from django.db.models.signals import post_save
from djangopress.core.models import Property
from django.utils import timezone
from PIL import Image
DEFAULT_USER_GROUP = getattr(settings, 'DEFAULT_USER_GROUP', None)
def avatar_path(instance, filename):
return ("avatars/%s/%s-%s-%s" % (time.strftime("%y/%m"), instance.user.pk, instance.user.username.lower(), filename.lower()))
class UserProfile(models.Model):
EMAIL_SETTINGS = (
('HI', 'Hide Email'),
('SW', 'Show Email'),
('HB', 'Use Web Form')
)
title = models.CharField(max_length=100, default="New member")
homepage = models.CharField(max_length=100, blank=True, null=True)
#IM contact (jabber, icq, msn, aim, yahoo, gtalk, twitter, facebook)
location = models.CharField(max_length=50, blank=True, null=True)
avatar = models.ImageField(blank=True, null=True, upload_to=avatar_path)
signature = models.TextField(blank=True, null=True)
timezone = models.CharField(max_length=50, null=True, blank=True)
language = models.CharField(max_length=50, null=True, blank=True)
registration_ip = models.GenericIPAddressField(blank=True, null=True, )
last_ip_used = models.GenericIPAddressField(blank=True, null=True)
admin_note = models.TextField(blank=True, null=True)
activate_key = models.CharField(max_length=127, blank=True, editable=False)
activate_key_expirary = models.DateTimeField(blank=True, editable=False)
banned = models.BooleanField(default=False)
#remember_between_visits = models.BooleanField(default=True)
user = models.OneToOneField(User, related_name="profile", on_delete=models.CASCADE)
email_settings = models.CharField(choices=EMAIL_SETTINGS, default='HI', max_length=2)
gender = models.CharField(max_length=1, blank=True, null=True, default=None, choices=(('', 'Private'), ('M', 'Male'), ('F', 'Female')))
date_of_birth = models.DateTimeField(blank=True, null=True)
def get_ip(self):
if self.last_ip_used:
return self.last_ip_used
return self.registration_ip
def __getattr__(self, name):
if name.startswith("social_"):
try:
return self.user.social.filter(account=name[7:])[0]
            except Exception:
raise AttributeError(name)
return super(UserProfile, self).__getattr__(name)
def get_absolute_url(self):
return reverse('accounts-profile', kwargs={"username": self.user.username})
def __init__(self, *args, **kwargs):
super(UserProfile, self).__init__(*args, **kwargs)
self._banned = self.banned
self._avatar = self.avatar
def save(self, force_insert=False, force_update=False):
        if not self._banned and self.banned:
# if we banned them, they can't then login
self.user.is_active = False
self.user.save()
if self._avatar != self.avatar and self.avatar:
image = Image.open(self.avatar)
size = settings.ACCOUNTS_USER_LIMITS.get('avatar', {}).get('size', 50)
            image = image.resize((size, size), Image.ANTIALIAS)
image.save(self.avatar.path)
super(UserProfile, self).save(force_insert, force_update)
self._banned = self.banned
self._avatar = self.avatar
def set_activate_key(self):
salt = hashlib.sha1((str(random.random()) + str(random.random())).encode('utf-8')).hexdigest()[:5]
key = "".join(str(item) for item in (self.user.username,
self.user.email, datetime.datetime.now()))
hsh = hashlib.sha1((salt + key).encode('utf-8')).hexdigest()
self.activate_key = hsh
self.activate_key_expirary = datetime.datetime.fromtimestamp(time.time() + (7 * 24 * 60 * 60))
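        # The key is a salted SHA-1 over username/email/timestamp; it expires
        # seven days (7 * 24 * 60 * 60 seconds) after generation.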
def check_activate_key(self, hsh):
return (hsh == self.activate_key
and timezone.now() <= self.activate_key_expirary)
class UserSocial(models.Model):
    ACCOUNTS = (
('twitter', 'Twitter'),
('google_plus', 'Google Plus'),
('facebook', 'Facebook'),
('linkedin', 'Linked In'),
('pinterest', 'Pinterest'),
)
account = models.CharField(max_length=20, choices=ACCOUNTS)
    value = models.CharField(max_length=100)
user_profile = models.ForeignKey(User, related_name="social", on_delete=models.CASCADE)
class UserProperty(Property):
user_profile = models.ForeignKey(User, related_name="properties", on_delete=models.CASCADE)
def create_profile(sender, **kargs):
if kargs.get("created", False):
profile = UserProfile(user=kargs.get("instance"))
profile.set_activate_key()
profile.save()
post_save.connect(create_profile, User, dispatch_uid="djangopress.accounts.create_profile")
def add_to_group(sender, **kargs):
if DEFAULT_USER_GROUP and kargs.get("created", False):
user = kargs.get("instance")
user.groups.add(Group.objects.get(name=DEFAULT_USER_GROUP))
post_save.connect(add_to_group, User, dispatch_uid="djangopress.accounts.add_to_group")
|
r-rathi/ckt-apps
|
bin/report_net.py
|
Python
|
mit
| 2,251
| 0.01466
|
#!/usr/bin/env python
#-------------------------------------------------------------------------------
import os
import sys
bin_dir = os.path.dirname(os.path.abspath(__file__))
pkg_dir = os.path.abspath(os.path.join(bin_dir, ".."))
sys.path.append(pkg_dir)
#-------------------------------------------------------------------------------
import argparse
import collections
import cktapps
from cktapps import apps
from cktapps.formats import spice
#-------------------------------------------------------------------------------
def main(args=None):
parser = argparse.ArgumentParser(description="Report net capacitances "
"and fanout")
parser.add_argument('spice_files', metavar='file', nargs='+',
type=argparse.FileType('r'), help='spice netlist file(s)')
parser.add_argument('--lib', type=argparse.FileType('r'),
help='lib file(s) with model (e.g. nch, pch) defintions')
parser.add_argument('--cell', help='name of the cell to be analyzed '
'(top cell by default)')
arg_ns = parser.parse_args(args)
#---------------------------------------------------------------------------
ckt = cktapps.Ckt()
if arg_ns.lib:
ckt.read_spice(arg_ns.lib)
for spice_file in arg_ns.spice_files:
ckt.read_spice(spice_file)
ckt.link()
#topcellnames = [cell.name for cell in ckt.get_topcells()]
#print "Top cells: %s" % topcellnames
if arg_ns.cell:
cell = ckt.get_cell(arg_ns.cell)
else:
topcells = ckt.get_topcells()
if topcells:
cell = topcells[0]
else:
cell = ckt
#print cell
#print "-"*80
#apps.report_hierarchy(cell)
#ckt.write_spice(cell)
#print "-"*80
cell.ungroup(flatten=True)
#print cell
#ckt.write_spice(cell)
#print "-"*80
    lib = arg_ns.lib.name if arg_ns.lib else None  # --lib is optional
netlists = [f.name for f in arg_ns.spice_files]
apps.report_net(cell, lib, netlists)
#print "-"*80
#apps.report_hierarchy(cell)
return ckt
#-------------------------------------------------------------------------------
if __name__ == "__main__":
ckt = main()
|
DazWorrall/ansible
|
lib/ansible/modules/network/aci/aci_filter_entry.py
|
Python
|
gpl-3.0
| 10,104
| 0.003068
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_filter_entry
short_description: Manage filter entries on Cisco ACI fabrics (vz:Entry)
description:
- Manage filter entries for a filter on Cisco ACI fabrics.
- More information from the internal APIC class
I(vz:Entry) at U(https://developer.cisco.com/media/mim-ref/MO-vzEntry.html).
author:
- Swetha Chunduri (@schunduri)
- Dag Wieers (@dagwieers)
- Jacob McGill (@jmcgill298)
version_added: '2.4'
requirements:
- Tested with ACI Fabric 1.0(3f)+
notes:
- The C(tenant) and C(filter) used must exist before using this module in your playbook.
The M(aci_tenant) and M(aci_filter) modules can be used for this.
options:
arp_flag:
description:
- The arp flag to use when the ether_type is arp.
choices: [ arp_reply, arp_request, unspecified ]
description:
description:
- Description for the Filter Entry.
aliases: [ descr ]
dst_port:
description:
- Used to set both destination start and end ports to the same value when ip_protocol is tcp or udp.
choices: [ Valid TCP/UDP Port Ranges]
  dst_port_end:
    description:
    - Used to set the destination end port when ip_protocol is tcp or udp.
    choices: [ Valid TCP/UDP Port Ranges]
  dst_port_start:
    description:
    - Used to set the destination start port when ip_protocol is tcp or udp.
    choices: [ Valid TCP/UDP Port Ranges]
entry:
description:
    - The name of the Filter Entry.
aliases: [ entry_name, name ]
ether_type:
description:
- The Ethernet type.
choices: [ arp, fcoe, ip, mac_security, mpls_ucast, trill, unspecified ]
filter_name:
description:
    - The name of the Filter that the entry should belong to.
icmp_msg_type:
description:
- ICMPv4 message type; used when ip_protocol is icmp.
choices: [ dst_unreachable, echo, echo_reply, src_quench, time_exceeded, unspecified ]
icmp6_msg_type:
description:
- ICMPv6 message type; used when ip_protocol is icmpv6.
choices: [ dst_unreachable, echo_request, echo_reply, neighbor_advertisement, neighbor_solicitation, redirect, time_exceeded, unspecified ]
ip_protocol:
description:
- The IP Protocol type when ether_type is ip.
choices: [ eigrp, egp, icmp, icmpv6, igmp, igp, l2tp, ospfigp, pim, tcp, udp, unspecified ]
state:
description:
    - Use C(present) or C(absent) for adding or removing. Use C(query) for listing an object.
default: present
choices: [ absent, present, query ]
stateful:
description:
- Determines the statefulness of the filter entry.
tenant:
description:
- The name of the tenant.
aliases: [ tenant_name ]
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- aci_filter_entry:
    state: present
entry: "{{ entry }}"
tenant: "{{ tenant }}"
    ether_type: "{{ ether_type }}"
icmp_msg_type: "{{ icmp_msg_type }}"
filter_name: "{{ filter_name }}"
descr: "{{ descr }}"
host: "{{ inventory_hostname }}"
username: "{{ user }}"
password: "{{ pass }}"
protocol: "{{ protocol }}"
'''
RETURN = ''' # '''
from ansible.module_utils.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
VALID_ARP_FLAGS = ['arp_reply', 'arp_request', 'unspecified']
VALID_ETHER_TYPES = ['arp', 'fcoe', 'ip', 'mac_security', 'mpls_ucast', 'trill', 'unspecified']
VALID_ICMP_TYPES = ['dst_unreachable', 'echo', 'echo_reply', 'src_quench', 'time_exceeded',
'unspecified', 'echo-rep', 'dst-unreach']
VALID_ICMP6_TYPES = ['dst_unreachable', 'echo_request', 'echo_reply', 'neighbor_advertisement',
'neighbor_solicitation', 'redirect', 'time_exceeded', 'unspecified']
VALID_IP_PROTOCOLS = ['eigrp', 'egp', 'icmp', 'icmpv6', 'igmp', 'igp', 'l2tp', 'ospfigp', 'pim', 'tcp', 'udp', 'unspecified']
# mapping dicts are used to normalize the proposed data to what the APIC expects, which will keep diffs accurate
ARP_FLAG_MAPPING = dict(arp_reply='reply', arp_request='req', unspecified=None)
FILTER_PORT_MAPPING = {'443': 'https', '25': 'smtp', '80': 'http', '20': 'ftpData', '53': 'dns', '110': 'pop3', '554': 'rtsp'}
ICMP_MAPPING = {'dst_unreachable': 'dst-unreach', 'echo': 'echo', 'echo_reply': 'echo-rep', 'src_quench': 'src-quench',
                'time_exceeded': 'time-exceeded', 'unspecified': 'unspecified', 'echo-rep': 'echo-rep', 'dst-unreach': 'dst-unreach'}
ICMP6_MAPPING = dict(dst_unreachable='dst-unreach', echo_request='echo-req', echo_reply='echo-rep', neighbor_advertisement='nbr-advert',
neighbor_solicitation='nbr-solicit', redirect='redirect', time_exceeded='time-exceeded', unspecified='unspecified')
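# Illustration of the normalization step above (values taken only from the
# dicts defined in this file): proposed values are rewritten to the strings
# the APIC stores, so comparing proposed vs. existing config stays accurate.
#
#   ICMP_MAPPING['dst_unreachable']   # -> 'dst-unreach'
#   FILTER_PORT_MAPPING['443']        # -> 'https'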
def main():
argument_spec = aci_argument_spec
argument_spec.update(
arp_flag=dict(type='str', choices=VALID_ARP_FLAGS),
description=dict(type='str'),
dst_port=dict(type='str'),
dst_port_end=dict(type='str'),
dst_port_start=dict(type='str'),
entry=dict(type='str', aliases=['entry_name', 'name']),
ether_type=dict(choices=VALID_ETHER_TYPES, type='str'),
filter_name=dict(type='str'),
icmp_msg_type=dict(type='str', choices=VALID_ICMP_TYPES),
icmp6_msg_type=dict(type='str', choices=VALID_ICMP6_TYPES),
ip_protocol=dict(choices=VALID_IP_PROTOCOLS, type='str'),
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
stateful=dict(type='str', choices=['no', 'yes']),
tenant=dict(type="str", aliases=['tenant_name'])
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
arp_flag = module.params['arp_flag']
if arp_flag is not None:
arp_flag = ARP_FLAG_MAPPING[arp_flag]
description = module.params['description']
dst_port = module.params['dst_port']
if dst_port in FILTER_PORT_MAPPING.keys():
dst_port = FILTER_PORT_MAPPING[dst_port]
dst_end = module.params['dst_port_end']
if dst_end in FILTER_PORT_MAPPING.keys():
dst_end = FILTER_PORT_MAPPING[dst_end]
dst_start = module.params['dst_port_start']
if dst_start in FILTER_PORT_MAPPING.keys():
dst_start = FILTER_PORT_MAPPING[dst_start]
entry = module.params['entry']
ether_type = module.params['ether_type']
filter_name = module.params['filter_name']
icmp_msg_type = module.params['icmp_msg_type']
if icmp_msg_type is not None:
icmp_msg_type = ICMP_MAPPING[icmp_msg_type]
icmp6_msg_type = module.params['icmp6_msg_type']
if icmp6_msg_type is not None:
icmp6_msg_type = ICMP6_MAPPING[icmp6_msg_type]
ip_protocol = module.params['ip_protocol']
state = module.params['state']
stateful = module.params['stateful']
tenant = module.params['tenant']
aci = ACIModule(module)
# validate that dst_port is not passed with dst_start or dst_end
if dst_port is not None and (dst_end is not None or dst_start is not None):
module.fail_json(msg="Parameter 'dst_port' cannot be used with 'dst_end' and 'dst_start'")
elif dst_port is not None:
dst_end = dst_port
dst_start = dst_port
# validate that filter_name is not passed without tenant
if filter_name is not None and tenant is None:
module.fail_json(msg="Parameter 'filter_name' cannot be used without 'tenant'")
# TODO: Think through the logic here and see if there is a better way
if entry is not None:
        # build the request path; its depth depends on which of tenant and filter_name were supplied
if tenant is not None and filter_name is not None:
path = 'api/mo/uni/tn-%(tenant)s/flt-%(filter_name)s/e-%(entry)s.json' % module.params
elif tenant is not None and state == 'query':
            path = 'api/mo/uni/tn-%(tenant)s.json?rsp-subtree=full&rsp-subtree-class=vzEntry&rsp-subtree-filter=eq(vzEntry.name, \
                   "%(entry)s")' % module.params
|
Spoken-tutorial/spoken-website
|
events/migrations/0022_auto_20171023_1505.py
|
Python
|
gpl-3.0
| 2,412
| 0.002488
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('events', '0021_auto_20171023_1358'),
]
operations = [
migrations.AlterField(
model_name='inductioninterest',
name='age',
field=models.CharField(max_length=100, choices=[(b'', b'-----'), (b'20to25', b'20 to 25 years'), (b'26to30', b'26 to 30 years'), (b'31to35', b'31 to 35 years'), (b'35andabove', b'Above 35 years')]),
),
migrations.AlterField(
model_name='inductioninterest',
name='designation',
field=models.CharField(max_length=100, choices=[(b'', b'-----'), (b'Lecturer', b'Lecturer'), (b'AssistantProfessor', b'Assistant Professor'), (b'AssociateProfessor', b'Associate Professor'), (b'Professor', b'Professor'), (b'Other', b'Other')]),
),
migrations.AlterField(
model_name='inductioninterest',
name='experience_in_college',
field=models.CharField(max_length=100, choices=[(b'', b'-----'), (b'Lessthan1year', b'Less than 1 year'), (b'Morethan1yearbutlessthan2years', b'More than 1 year, but less than 2 years'), (b'Morethan2yearsbutlessthan5years', b'More than 2 years but less than 5 years'), (b'Morethan5years', b'More than 5 years')]),
),
migrations.AlterField(
model_name='inductioninterest',
name='gender',
field=models.CharField(max_length=50, choices=[(b'', b'-----'), (b'Male', b'Male'), (b'Female', b'Female')]),
),
        migrations.AlterField(
            model_name='inductioninterest',
            name='medium_of_studies',
            field=models.CharField(max_length=100, choices=[(b'', b'-----'), (b'English', b'English'), (b'Other', b'Other')]),
        ),
        migrations.AlterField(
model_name='inductioninterest',
name='phonemob',
field=models.CharField(max_length=100),
),
migrations.AlterField(
model_name='inductioninterest',
name='specialisation',
field=models.CharField(max_length=100, choices=[(b'', b'-----'), (b'Arts', b'Arts'), (b'Science', b'Science'), (b'Commerce', b'Commerce'), (b'EngineeringorComputerScience ', b'Engineering or Computer Science'), (b'Management', b'Management'), (b'Other', b'Other')]),
),
]
|
RocioDSI/Carta-Servicios-STIC
|
servicios/GeneraNagios.py
|
Python
|
agpl-3.0
| 2,787
| 0.024408
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015
#
# STIC - Universidad de La Laguna (ULL) <[email protected]>
#
# This file is part of Modelado de Servicios TIC.
#
# Modelado de Servicios TIC is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Modelado de Servicios TIC is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Modelado de Servicios TIC. If not, see
# <http://www.gnu.org/licenses/>.
#
import funcionesxml
import generacionpaginas
ServicioSonda = [] # Vector that will hold the services of a probe
# Create the Nagios configuration files
def GeneraNagios():
nagstr1 = ""
nagstr2 = ""
nagstr3 = ""
funcionesxml.inicializacion()
for i in funcionesxml.SondaArray:
        # CREATE NAGIOS FILES
ficheroservicio = open("./confgSonda/servicio/" + generacionpaginas.formatstring(i[3][0]) + ".cfg","w")
ficherohost = open("./confgSonda/host/" + generacionpaginas.formatstring(i[3][0]) + ".cfg","w")
ficherohost_group = open("./confgSonda/host_group/" + generacionpaginas.formatstring(i[3][0]) + ".cfg","w")
        # CREATE SERVICE FILES
nagstr1 += "## services/" + generacionpaginas.formatstring(i[3][0]) + ".cfg \n\n"
for j in funcionesxml.getGroupServices(funcionesxml.getGroupID(i[4])):
ServicioSonda.append(funcionesxml.getBusinessServiceName(j))
#print "Servicio: "+ str(funcionesxml.getBusinessServiceName(j)) + " PUERTO: " + str(funcionesxml.getPuerto(j))+ " PROTOCOLO: " + str(funcionesxml.getProtocolo(j))+ " URL: " +str(funcionesxml.getURL(j))
for k in ServicioSonda:
nagstr1 += "define service{\n use: "
nagstr1 += k + "\n" + " host_name: " + "---\n" + " contact_groups: " + "---\n"
nagstr1 += "}\n\n"
        # CREATE HOST_GROUP FILES
nagstr2 += "## host_group/" + generacionpaginas.formatstring(i[3][0]) + ".cfg \n\n"
nagstr2 += "define hostgroup{\n hostgroup_name: " + "---\n " + "alias: " + "---\n " + "members: " + "---\n"
nagstr2 += "}\n\n"
        # CREATE HOST FILES
nagstr3 += "## host/" + generacionpaginas.formatstring(i[3][0]) + ".cfg \n\n"
nagstr3 += " "
ficheroservicio.write(nagstr1)
ficherohost.write(nagstr3)
ficherohost_group.write(nagstr2)
        ficheroservicio.close()
        ficherohost.close()
        ficherohost_group.close()
GeneraNagios()
|
jwessel/meta-overc
|
meta-cube/recipes-support/overc-system-agent/files/overc-system-agent-1.2/run_server.py
|
Python
|
mit
| 11,160
| 0.007437
|
#!/usr/bin/python
import sys, getopt, os, urllib2
import Overc
from flask import Flask
from flask import jsonify
from flask import request
from flask_httpauth import HTTPBasicAuth
from passlib.context import CryptContext
app = Flask(__name__)
# Password hash generation with:
#python<<EOF
#from passlib.context import CryptContext
#pwd_context = CryptContext(schemes=["pbkdf2_sha256"])
#print pwd_context.encrypt("adm");
#EOF
# Default admin username and password hash which can be overridden
# if /opt/overc-system-agent/pwfile exists with the format
# UserName:PasswordHash
app.config['ADMIN_USERNAME'] = 'adm'
app.config['ADMIN_PW_HASH'] = '$pbkdf2-sha256$29000$i3EOIeT8P8dY6703BgBgbA$XyesHZZmu.O54HfiwIhSd00rMJpyCKhH0gsh1atxgqA'
app.config['PW_FILE'] = "/opt/overc-system-agent/pwfile"
pwd_context = CryptContext(schemes=["pbkdf2_sha256"])
# Flask Extention
auth = HTTPBasicAuth()
@auth.verify_password
def verify_password(username, password):
if username != app.config['ADMIN_USERNAME']:
return False
if pwd_context.verify(password, app.config['ADMIN_PW_HASH']):
return True
return False
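# Hedged sketch: the same CryptContext can be exercised directly (assumes
# passlib is installed; 'adm' matches the hash-generation recipe above).
#
#   pwd_context.verify('adm', app.config['ADMIN_PW_HASH'])    # -> True
#   pwd_context.verify('wrong', app.config['ADMIN_PW_HASH'])  # -> False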
def json_msg(s):
message = {}
message['result'] = s.replace("\n", ";");
resp = jsonify(message)
return resp
@app.route('/system/rollback')
@auth.login_required
def system_rollback():
usage = 'Usage: ' + request.url_root + 'system/rollback?template=[dom0]'
overc=Overc.Overc()
template = request.args.get('template')
if template != 'dom0':
usage += "\n The only supported template is 'dom0'"
return json_msg(usage)
print "System will rollback and reboot!"
overc._system_rollback(template)
@app.route('/system/upgrade')
@auth.login_required
def system_upgrade():
usage = 'Usage: ' + request.url_root + 'system/upgrade?template=[dom0]&reboot=[True|False]&force=[True|False]'
overc=Overc.Overc()
reboot_s = request.args.get('reboot')
force_s = request.args.get('force')
template = request.args.get('template')
reboot=False
force=False
skipscan=True
skip_del=False
if template != 'dom0':
usage += "\n The only supported template is 'dom0'"
return json_msg(usage)
if reboot_s == "True":
print "do reboot"
if force_s == "True":
print "force upgra
|
de"
force=True
overc._system_upgrade(template, reboot, force, skipscan, skip_del)
return json_msg(overc.message)
@app.route('/host/rollback')
@auth.login_required
def host_rollback():
overc=Overc.Overc()
overc.host_rollback()
return json_msg(overc.message)
@app.route('/host/upgrade')
@auth.login_required
def host_upgrade():
usage = 'Usage: ' + request.url_root + 'host/upgrade?reboot=[True|False]&force=[True|False]'
    overc=Overc.Overc()
reboot_s = request.args.get('reboot')
force_s = request.args.get('force')
reboot=False
force=False
if reboot_s == "True":
print "do reboot"
reboot = True
if force_s == "True":
print "do force to upgrade"
force=True
overc._host_upgrade(reboot, force)
return json_msg(overc.message)
@app.route('/host/update')
@auth.login_required
def host_update():
overc=Overc.Overc()
overc.host_update()
return json_msg(overc.message)
@app.route('/host/newer')
@auth.login_required
def host_newer():
overc=Overc.Overc()
overc.host_newer()
return json_msg(overc.message)
@app.route('/container/rollback')
@auth.login_required
def container_rollback():
usage = 'Usage: ' + request.url_root + 'container/rollback?name=<container name>&snapshot=<snapshot name>&template=<template name> [snapshot optional]'
overc=Overc.Overc()
container_name = request.args.get('name')
snapshot = request.args.get('snapshot')
template = request.args.get('template')
if container_name is None or template is None:
return json_msg(usage)
overc._container_rollback(container_name, snapshot, template)
return json_msg(overc.message)
@app.route('/container/update')
@auth.login_required
def container_update():
usage = 'Usage: ' + request.url_root + 'container/update?template=<template name>'
overc=Overc.Overc()
template = request.args.get('template')
if template is None:
return json_msg(usage)
overc._container_update(template)
return json_msg(overc.message)
@app.route('/container/list')
@auth.login_required
def container_list():
usage = 'Usage: ' + request.url_root + 'container/list?template=<template name>'
overc=Overc.Overc()
template = request.args.get('template')
if template is None:
return json_msg(usage)
overc._container_list(template)
return json_msg(overc.message)
@app.route('/container/snapshot')
@auth.login_required
def container_snapshot():
usage = 'Usage: ' + request.url_root + 'container/snapshot?name=<container name>&template=<template name>'
overc=Overc.Overc()
template = request.args.get('template')
container_name = request.args.get('name')
if template is None or container_name is None:
return json_msg(usage)
overc._container_snapshot(container_name, template)
return json_msg(overc.message)
@app.route('/container/list_snapshots')
@auth.login_required
def container_list_snapshots():
usage = 'Usage: ' + request.url_root + 'container/list_snapshots?name=<container name>&template=<template name>'
overc=Overc.Overc()
container_name = request.args.get('name')
template = request.args.get('template')
if container_name is None or template is None:
return json_msg(usage)
overc._container_snapshot_list(container_name, template)
return json_msg(overc.message)
@app.route('/container/send_image')
@auth.login_required
def container_send_image():
usage = 'Usage: ' + request.url_root + 'container/send_image?url=<image url>&template=<template name>'
overc=Overc.Overc()
url = request.args.get('url')
template = request.args.get('template')
if url is None or template is None:
return json_msg(usage)
template_list = os.listdir("/etc/overc/container")
if template not in template_list:
usage += "\n The template name is not valid"
return json_msg(usage)
req = urllib2.Request(url)
req.get_method = lambda: 'HEAD'
try:
status = urllib2.urlopen(req)
except Exception,e:
usage += "\n The image url is not valid"
return json_msg(usage)
re_code = status.getcode()
if ((re_code != None) and (re_code != 200)):
usage += "\n The image url is not valid, http status code is: %s" % re_code
return json_msg(usage)
overc._container_send_image(template, url)
return json_msg(overc.message)
@app.route('/container/activate')
@auth.login_required
def container_activate():
usage = 'Usage: ' + request.url_root + 'container/activate?name=<container name>&template=<template name>'
overc=Overc.Overc()
container_name = request.args.get('name')
template = request.args.get('template')
if container_name is None or template is None:
return json_msg(usage)
force = True
overc._container_activate(container_name, template, force)
return json_msg(overc.message)
@app.route('/container/start')
@auth.login_required
def container_start():
usage = 'Usage: ' + request.url_root + 'container/start?name=<container name>&template=<template name>'
overc=Overc.Overc()
container_name = request.args.get('name')
template = request.args.get('template')
if container_name is None or template is None:
return json_msg(usage)
overc._container_start(container_name, template)
return json_msg(overc.message)
@app.route('/container/stop')
@auth.login_required
def container_stop():
usage = 'Usage: ' + request.url_root + 'container/stop?name=<container name>&template=<template name>'
overc=Overc.Overc()
container_name = request.args.get('name')
template = request.args.get('template')
if container_name is None or template is None:
return json_msg(usage)
overc._container_stop(container_name, template)
    return json_msg(overc.message)
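# Hedged usage sketch (hostname and port are hypothetical; how app.run() is
# invoked lies outside this excerpt). Every route requires HTTP basic auth
# with the credentials checked by verify_password above:
#
#   curl -u adm:adm 'http://dom0:5000/host/update'
#   curl -u adm:adm 'http://dom0:5000/container/list?template=dom0'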
|
craigderington/django-code-library
|
snippets/admin.py
|
Python
|
gpl-3.0
| 978
| 0.005112
|
from django.contrib import admin
from . import models
from django_markdown.admin import MarkdownModelAdmin
from django_markdown.widgets import AdminMarkdownWidget
from django.db.models import TextField
# Register your models here.
class SnippetTagAdmin(admin.ModelAdmin):
list_display = ('slug',)
class SnippetAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['snippet_title', 'snippet_body', 'author', 'publish']}),
        ('Date Information', {'fields': ['modified_date'], 'classes': ['collapse']}),
('Tag Library', {'fields': ['snippet_tags']})
]
list_display = ('snippet_title', 'author', 'create_date', 'modified_date')
    search_fields = ['snippet_title']
formfield_overrides = {TextField: {'widget': AdminMarkdownWidget}}
list_filter = ['create_date', 'publish']
# register the classes with the Admin site
admin.site.register(models.Snippet, SnippetAdmin)
admin.site.register(models.SnippetTag, SnippetTagAdmin)
|
roberthodgen/thought-jot
|
src/api_v2.py
|
Python
|
mit
| 37,593
| 0.000718
|
"""
The MIT License (MIT)
Copyright (c) 2015 Robert Hodgen
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from ndb_users import users
import webapp2
from google.appengine.ext import ndb
import json
import logging
from datetime import datetime, timedelta
import model
import re
import utilities
import setup
from google.appengine.api import mail
class Projects(webapp2.RequestHandler):
def get(self, project_id=None):
""" Return a list of Projects this User has access to. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
if project_id:
project_key = utilities.key_for_urlsafe_id(project_id)
if not project_key:
self.abort(400)
project = project_key.get()
if not (project and isinstance(project, model.Project)):
self.abort(404)
if user.email not in project.users:
self.abort(401)
response_object = project.json_object()
else:
# Query for Projects this User owns, contributes to, or may observe
projects = model.Project.query(model.Project.users == user.email)
response_object = []
for project in projects:
response_object.append(project.json_object())
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
def post(self):
""" Create a new Project for this User. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
# Get JSON request body
if not self.request.body:
            self.abort(400)
request_object = json.loads(self.request.body)
name = request_object.get('name')
if not name:
self.abort(400)
new_project_key = model.Project.create_project(name)
new_project = new_project_key.get()
if len(request_object.keys()) > 1:
# Process optional items...
description = request_object.get('description')
if description:
new_project.description = description
new_project.put()
setup.default_project_labels(new_project)
response_object = new_project.json_object()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
def put(self, project_id):
""" Update a Project. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
# GET JSON request body
if not project_id or not self.request.body:
self.abort(400)
request_object = json.loads(self.request.body)
project_key = utilities.key_for_urlsafe_id(project_id)
if not project_key or len(request_object) < 1:
self.abort(400)
project = project_key.get()
if not (project and isinstance(project, model.Project)):
self.abort(404)
if (not project.is_owner(user.email) and not
project.has_contributor(user.email)):
self.abort(401)
# Process changes...
name = request_object.get('name')
if name:
project.name = name
description = request_object.get('description')
if description:
project.description = description
active = request_object.get('active')
if isinstance(active, bool):
project.active = active
project.put()
response_object = project.json_object()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
def delete(self, project_id):
""" Delete this user's Project. """
response_object = {}
user = users.get_current_user()
if not user:
# No user
self.abort(401)
return None
# Get JSON request body
if not project_id:
self.abort(400)
project_key = utilities.key_for_urlsafe_id(project_id)
if not project_key:
self.abort(400)
        project = project_key.get()
if not (project and isinstance(project, model.Project)):
self.abort(404)
if not project.is_owner(user.email):
self.abort(401)
ndb.delete_multi(ndb.Query(ancestor=project_key).iter(keys_only=True))
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
class Contributors(webapp2.RequestHandler):
def post(self, project_id, contributor_email):
""" Add Contributors to this Project. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
# Get JSON request body
if not project_id or not contributor_email:
self.abort(400)
project_key = utilities.key_for_urlsafe_id(project_id)
if not project_key:
self.abort(400)
project = project_key.get()
if not (project and isinstance(project, model.Project)):
self.abort(404)
# new_contributor = users.User.user_for_email(contributor_email)
# if not new_contributor:
# self.abort(404)
if not mail.is_email_valid(contributor_email):
self.abort(400)
if (not project.is_owner(user.email) and not
project.has_contributor(user.email)):
self.abort(401)
project.add_contributors([contributor_email])
utilities.send_project_contributor_email(contributor_email, user,
project)
response_object = project.json_object()
# Send response
self.response.content_type = 'application/json'
        self.response.out.write(json.dumps(response_object))
def delete(self, project_id, contributor_email):
""" Remove Contributors from this Project. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
# Get JSON request body
if not project_id or not contributor_email:
self.abort(400)
        project_key = utilities.key_for_urlsafe_id(project_id)
if not project_key:
self.abort(400)
project = project_key.get()
if not (project and isinstance(project, model.Project)):
self.abort(404)
if not project.is_owner(user.email):
self.abort(401)
project.remove_contributors([contributor_email])
response_object = project.json_object()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
class TimeRecords(webapp2.RequestHandler):
def get(self, project_id, time_record_id=None):
""" List the Time Records associated with a Project. """
response_object = {}
user = users.get_current_user()
        if not user:
            self.abort(401)
|
hardingnj/xpclr
|
setup.py
|
Python
|
mit
| 1,538
| 0.0013
|
from setuptools import setup
from ast import literal_eval
def get_version(source='xpclr/__init__.py'):
with open(source) as sf:
for line in sf:
            if line.startswith('__version__'):
return literal_eval(line.split('=')[-1].lstrip())
raise ValueError("__version__ not found")
VERSION = get_version()
DISTNAME = 'xpclr'
PACKAGE_NAME = 'xpclr'
DESCRIPTION = 'Code to compute xpclr as described in Chen 2010'
with open('README.md') as f:
LONG_DESCRIPTION = f.read()
MAINTAINER = 'Nicholas Harding'
MAINTAINER_EMAIL = '[email protected]'
URL = 'https://github.com/hardingnj/xpclr'
DOWNLOAD_URL = 'http://github.com/hardingnj/xpclr'
LICENSE = 'MIT'
# strictly speaking, allel requires numpy, scipy and numexpr, but numexpr
# won't install unless numpy is already installed, so leave this blank for now
# and require user to pre-install numpy, scipy and numexpr themselves
INSTALL_REQUIRES = []
CLASSIFIERS = []
def setup_package():
metadata = dict(
name=DISTNAME,
maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
license=LICENSE,
url=URL,
download_url=DOWNLOAD_URL,
version=VERSION,
package_dir={'': '.'},
packages=['xpclr'],
scripts=['bin/xpclr'],
classifiers=CLASSIFIERS,
install_requires=INSTALL_REQUIRES,
)
setup(**metadata)
if __name__ == '__main__':
setup_package()
|
bioothod/zbuilder
|
conf.d/deb_install_build_deps.py
|
Python
|
apache-2.0
| 3,249
| 0.004925
|
#!/usr/bin/python
import apt
import apt.progress
import apt_pkg
import logging
import re
import sys
logging.basicConfig(filename='/var/log/supervisor/rps.log',
                    format='%(asctime)s %(levelname)s: deb_install: %(message)s',
level=logging.INFO)
logging.getLogger().setLevel(logging.INFO)
class control_parser():
def __init__(self):
apt_pkg.init()
self.cache = apt.Cache()
self.cache.update()
self.cache.open()
def parse(self, path = 'debian/control'):
try:
tagfile = apt_pkg.TagFile(path)
for section in tagfile:
deps = section.get('Build-Depends', None)
if not deps:
continue
packages = deps.split(',')
for p in packages:
self.mark_install(p)
self.install()
except Exception as e:
print "E: %s" % e
def mark_install(self, pstr):
deps = apt_pkg.parse_depends(pstr)
have_version = False
for ord in deps:
if have_version:
break
print pstr, ord
for d in ord:
name = d[0]
version_num = d[1]
version_op = d[2]
p = self.cache[name]
if not p:
logging.error("Could not find package %s in cache", name)
continue
if len(version_num) > 0:
highest_v = None
highest_vnum = 0
for version in p.versions:
if apt_pkg.check_dep(version.version, version_op, version_num):
have_version = True
logging.info("package: %s, version: %s, priority: %s/%d",
name, version.version, version.priority, version.policy_priority)
if (version.policy_priority > highest_vnum):
highest_vnum = version.policy_priority
highest_v = version
if not have_version:
logging.error("Could not required version of the package %s, must be %s %s",
name, version_op, version_num)
# going for the next ORed version if any
continue
p.candidate = highest_v
logging.info("package %s, selected version: %s, priority: %s/%d",
name, p.candidate.version, p.candidate.priority, p.candidate.policy_priority)
logging.info("Going to install package %s", name)
p.mark_install(auto_fix=True, auto_inst=True)
have_version = True
# do not run for the subsequent ORed packages
break
if not have_version:
logging.fatal("Could not find suitable package %s", pstr)
def install(self):
self.cache.commit()
if __name__ == '__main__':
    if len(sys.argv) != 2:
        print "E: usage: %s /path/to/debian/control" % sys.argv[0]
        sys.exit(1)
cp = control_parser()
cp.parse(path = sys.argv[1])
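# Hedged usage sketch (the path is hypothetical); the script resolves the
# Build-Depends field of the given control file and installs the packages:
#
#   ./deb_install_build_deps.py /srv/pkg/debian/control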
|
|
devtronics/heck_site
|
polls/models.py
|
Python
|
agpl-3.0
| 953
| 0.002099
|
from django.db import models
from django.utils import timezone
import datetime
class Question(models.Model):
""" Question object model
"""
question_text = models.CharField(max_length=200)
pub_date = models.DateTimeField('date published')
def __unicode__(self): # __unicode__ on Python 2
return self.question_text
def was_published_recently(self):
return self.pub_date >= timezone.now() - datetime.timedelta(days=1)
was_published_recently.admin_order_field = 'pub_date'
was_published_recently.boolean = True
was_published_recently.short_description = 'Published recently?'
class Choice(models.Model):
""" Choice object model
"""
question = models.ForeignKey(Question)
choice_text = models.CharField(max_length=200)
votes = models.IntegerField(default=0)
    def __unicode__(self):  # __unicode__ on Python 2
        return self.choice_text
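# Hedged sketch (needs a configured Django project; the values are invented):
#
#   q = Question(question_text="What's new?", pub_date=timezone.now())
#   q.was_published_recently()  # True for anything within the last day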
|
glukolog/calc256
|
cgserial.py
|
Python
|
gpl-3.0
| 673
| 0.007429
|
#!/usr/bin/python
import simplejson as json
i = open('/proc/cpuinfo')
my_text = i.readlines()
i.close()
username = ""
for line in my_text:
line = line.strip()
ar = line.split(' ')
if ar[0].startswith('Serial'):
username = "a" + ar[1]
if not username:
exit(-1)
o = open('/home/pi/.cgminer/cgminer.conf', 'w')
pools = []
pools.append({"url": "stratum+tcp://ghash.io:3333",
"user": username, "pass": "12345"})
conf = {"pools": pools,
"api-listen" : "true",
"api-port" : "4028",
"api-allow" : "W:127.0.0.1"}
txt = json.dumps(conf, sort_keys=True, indent=4 * ' ')
o.write(txt)
o.write("\n");
o.close()
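# For reference, the generated cgminer.conf looks like this (the username is
# derived from /proc/cpuinfo, so the value below is only illustrative):
#
#   {
#       "api-allow": "W:127.0.0.1",
#       "api-listen": "true",
#       "api-port": "4028",
#       "pools": [{"pass": "12345", "url": "stratum+tcp://ghash.io:3333", "user": "a0000000012345678"}]
#   }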
|
dwillis/fumblerooski
|
urls.py
|
Python
|
bsd-3-clause
| 1,591
| 0.005657
|
from django.conf.urls.defaults import *
from django.contrib import admin
from fumblerooski.feeds import CoachesFeed
feeds = {
'coaches': CoachesFeed,
}
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/coach_totals/', "fumblerooski.college.views.admin_coach_totals"),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r"^admin/(.*)", admin.site.root),
url(r"^blog/", include("fumblerooski.blog.urls")),
url(r"^college/", include("fumblerooski.college.urls")),
url(r"^rankings/", include("fumblerooski.rankings.urls")),
url(r"^api/", include("fumblerooski.api.urls")),
url(r"^$", "fumblerooski.college.views.homepage"),
(r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
)
urlpatterns += patterns('fumblerooski.college.views',
url(r'^coaches/$', 'coach_index'),
url(r'^coaches/active/$', 'active_coaches'),
url(r'^coaches/feeds/recent_hires/$', 'recent_hires_feed'),
url(r'^coaches/detail/(?P<coach>\d+-[-a-z]+)/$', 'coach_detail', name="coach_detail"),
    url(r'^coaches/detail/(?P<coach>\d+-[-a-z]+)/vs/$', 'coach_vs', name="coach_vs"),
url(r'^coaches/detail/(?P<coach>\d+-[-a-z]+)/vs/(?P<coach2>\d+-[-a-z]+)/$', 'coach_compare', name="coach_compare"),
url(r'^coaches/assistants/$', 'assistant_index'),
url(r'^coaches/common/(?P<coach>\d+-[-a-z]+)/(?P<coach2>\d+-[-a-z]+)/$', 'coach_common'),
url(r'^coaches/departures/(?P<year>\d\d\d\d)/$', 'departures'),
url(r'^coaches/hires/(?P<year>\d\d\d\d)/$', 'coaching_hires'),
)
|
hilgroth/fiware-IoTAgent-Cplusplus
|
tests/e2e_tests/common/gw_configuration.py
|
Python
|
agpl-3.0
| 1,230
| 0.017073
|
# -*- coding: utf-8 -*-
'''
(c) Copyright 2013 Telefonica, I+D. Printed in Spain (Europe). All Rights
Reserved.
The copyright to the software program(s) is property of Telefonica I+D.
The program(s) may be used and or copied only with the express written
consent of Telefonica I+D or in accordance with the terms and conditions
stipulated in the agreement/contract under which the program(s) have
been supplied.
'''
HEADERS={'content-type': 'application/json'}
MQTT_BROKER_HOSTNAME='iotagent'
MQTT_BROKER_PORT='1883'
GW_HOSTNAME='iotagent'
GW_PORT='8002'
IOT_PORT='8080'
MANAGER_PORT='8081'
GW_SERVER_ROOT = 'http://{}:{}'.format(GW_HOSTNAME, GW_PORT)
IOT_SERVER_ROOT = 'http://{}:{}'.format(GW_HOSTNAME, IOT_PORT)
MANAGER_SERVER_ROOT = 'http://{}:{}'.format(GW_HOSTNAME, MANAGER_PORT)
CBROKER_URL='http://10.95.213.159:6500'
CBROKER_HEADER='Fiware-Service'
CBROKER_PATH_HEADER='Fiware-ServicePath'
SMPP_URL='http://sbc04:5371'
SMPP_FROM='682996050'
DEF_ENTITY_TYPE='thing'
DEF_TYPE='string'
PATH_UL20_COMMAND='/iot/ngsi/d/updateContext'
PATH_MQTT_COMMAND='/iot/ngsi/mqtt/updateContext'
PATH_UL20_SIMULATOR='/simulaClient/ul20Command'
TIMEOUT_COMMAND=10
MQTT_APIKEY='1234'
UL20_APIKEY='apikey3'
|
mbr/tinyrpc
|
tests/test_server.py
|
Python
|
mit
| 1,748
| 0.006293
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
from unittest.mock import Mock, call
from tinyrpc.server import RPCServer
from tinyrpc.transports import ServerTransport
from tinyrpc.protocols import RPCProtocol, RPCResponse
from tinyrpc.dispatch import RPCDispatcher
CONTEXT='sapperdeflap'
RECMSG='out of receive_message'
PARMSG='out of parse_request'
SERMSG='out of serialize'
@pytest.fixture
def transport():
transport = Mock(ServerTransport)
transport.receive_message = Mock(return_value=(CONTEXT, RECMSG))
return transport
@pytest.fixture
def protocol():
protocol = Mock(RPCProtocol)
protocol.parse_request = Mock(return_value=PARMSG)
return protocol
@pytest.fixture()
def response():
response = Mock(RPCResponse)
response.serialize = Mock(return_value=SERMSG)
return response
@pytest.fixture
def dispatcher(response):
dispatcher = Mock(RPCDispatcher)
dispatcher.dispatch = Mock(return_value=response)
return dispatcher
def test_handle_message(transport, protocol, dispatcher):
server = RPCServer(transport, protocol, dispatcher)
server.receive_one_message()
transport.receive_message.assert_called()
protocol.parse_request.assert_called_with(RECMSG)
dispatcher.dispatch.assert_called_with(PARMSG, None)
dispatcher.dispatch().serialize.assert_called()
    transport.send_reply.assert_called_with(CONTEXT, SERMSG)
def test_handle_message_callback(transport, protocol, dispatcher):
server = RPCServer(transport, protocol, dispatcher)
server.trace = Mock(return_value=None)
server.receive_one_message()
    assert server.trace.call_args_list == [call('-->', CONTEXT, RECMSG), call('<--', CONTEXT, SERMSG)]
server.trace.assert_called()
|
woutdenolf/wdncrunch
|
wdncrunch/tests/__init__.py
|
Python
|
mit
| 1,226
| 0.000816
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 European Synchrotron Radiation Facility, Grenoble, France
#
# Principal author: Wout De Nolf ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
|
kaushik94/sympy
|
examples/advanced/grover_example.py
|
Python
|
bsd-3-clause
| 2,081
| 0.001442
|
#!/usr/bin/env python
"""Grover's quantum search algorithm example."""
from sympy import pprint
from sympy.physics.quantum import qapply
from sympy.physics.quantum.qubit import IntQubit
from sympy.physics.quantum.grover import (OracleGate, superposition_basis,
WGate, grover_iteration)
def demo_vgate_app(v):
for i in range(2**v.nqubits):
print('qapply(v*IntQubit(%i, %r))' % (i, v.nqubits))
        pprint(qapply(v*IntQubit(i, nqubits=v.nqubits)))
def black_box(qubits):
return True if qubits == IntQubit(1, nqubits=qubits.nqubits) else False
def main():
print()
print('Demonstration of Grover\'s Algorithm')
print('The OracleGate or V Gate carries the unknown function f(x)')
print('> V|x> = ((-1)^f(x))|x> where f(x) = 1 when x = a (True in our case)')
print('> and 0 (False in our case) otherwise')
print()
nqubits = 2
print('nqubits = ', nqubits)
v = OracleGate(nqubits, black_box)
print('Oracle or v = OracleGate(%r, black_box)' % nqubits)
print()
psi = superposition_basis(nqubits)
print('psi:')
pprint(psi)
demo_vgate_app(v)
print('qapply(v*psi)')
pprint(qapply(v*psi))
print()
w = WGate(nqubits)
print('WGate or w = WGate(%r)' % nqubits)
print('On a 2 Qubit system like psi, 1 iteration is enough to yield |1>')
print('qapply(w*v*psi)')
pprint(qapply(w*v*psi))
print()
nqubits = 3
print('On a 3 Qubit system, it requires 2 iterations to achieve')
print('|1> with high enough probability')
psi = superposition_basis(nqubits)
print('psi:')
pprint(psi)
    v = OracleGate(nqubits, black_box)
print('Oracle or v = OracleGate(%r, black_box)' % nqubits)
print()
    print('iter1 = grover.grover_iteration(psi, v)')
iter1 = qapply(grover_iteration(psi, v))
pprint(iter1)
print()
print('iter2 = grover.grover_iteration(iter1, v)')
iter2 = qapply(grover_iteration(iter1, v))
pprint(iter2)
print()
if __name__ == "__main__":
main()
|
jaantollander/Convergence-of-Fourier-Series
|
src_legacy/fourier_series/basis_functions/legendre/fast_evaluation.py
|
Python
|
mit
| 1,726
| 0
|
# coding=utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import numba
from src_legacy.fourier_series.buffer.ringbuffer import Ringbuffer
@numba.vectorize(nopython=True)
def legendre_recursion(n, x, p1, p2):
if n == 0:
return 1
elif n == 1:
return x
else:
c0 = (2*n-1)/n
c1 = (n-1)/n
return c0 * x * p1 - c1 * p2
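# Sanity check of the recursion against a closed form (pure arithmetic, no
# assumptions beyond the function above): with p1 = P_1(x) = x and
# p2 = P_0(x) = 1, degree n = 2 gives
#   (3/2) * x * x - (1/2) * 1 = (3x^2 - 1) / 2 = P_2(x),
# e.g. legendre_recursion(2, 0.5, 0.5, 1.0) == -0.125.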
class FastLegendreEval:
"""
    Pure float64 class for recursive evaluation of Legendre polynomials.
"""
start_index = 0
def __init__(self, arg, max_degree):
if isinstance(arg, np.ndarray):
self.arg = arg
self.size = self.arg.size
        elif isinstance(arg, float):
self.arg = arg
self.size = 1
else:
raise ValueError()
self.max_degree = max_degree
# @profile
def generator(self, skip=0):
buffer = Ringbuffer(buffer_size=3,
array_size=self.size,
dtype=float,
start_index=self.start_index,
array_size_increment=None,
array_margin=0)
deg = self.start_index
while self.max_degree is None or deg <= self.max_degree - 1:
p1 = buffer[deg - 1, :]
p2 = buffer[deg - 2, :]
arr = legendre_recursion(deg, self.arg, p1, p2) # ~73%
buffer[:] = arr # ~27%
if skip == 0:
yield deg, buffer
else:
skip -= 1
deg += 1
|
samizdatco/corduroy
|
corduroy/config.py
|
Python
|
bsd-3-clause
| 1,593
| 0.009416
|
# encoding: utf-8
"""
corduroy.config
Internal state
"""
from __future__ import with_statement
import os, sys
from .atoms import odict, adict, Document
# LATER: add some sort of rcfile support...
# from inspect import getouterframes, currentframe
# _,filename,_,_,_,_ = getouterframes(currentframe())[-1]
# print "from", os.path.dirname(os.path.abspath(filename))
defaults = adict({
"host":"http://127.0.0.1",
"port":5984,
"uuid_cache":50,
"types":adict({
"doc":Document,
"dict":adict
}),
"http":adict({
"max_clients":10,
"max_redirects":6,
"timeout":60*60,
"io_loop":None
})
})
try:
import simplejson as _json
except ImportError:
import json as _json
class json(object):
@classmethod
def decode(cls, string, **opts):
"""Decode the given JSON string.
:param string: the JSON string to decode
:type string: basestring
:return: the corresponding Python data structure
:rtype: object
"""
        return _json.loads(string, object_hook=defaults.types.dict, **opts)
@classmethod
def encode(cls, obj, **opts):
"""Encode the given object as a JSON string.
:param obj: the Python data structure to encode
:type obj: object
:return: the corresponding JSON string
:rtype: basestring
"""
return _json.dumps(obj, allow_nan=False, ensure_ascii=False, encoding='utf-8', **opts)
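# Hedged round-trip sketch using only the helpers above (attribute access on
# the decoded result relies on defaults.types.dict being an attr-style dict):
#
#   json.encode({'ok': True})        # -> '{"ok": true}'
#   json.decode('{"ok": true}').ok   # -> True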
|
Havate/havate-openstack
|
proto-build/gui/horizon/Horizon_GUI/openstack_dashboard/dashboards/project/database_backups/tables.py
|
Python
|
apache-2.0
| 3,772
| 0
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Rackspace Hosting
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse # noqa
from django.template.defaultfilters import title # noqa
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import tables
from horizon.utils import filters
from openstack_dashboard import api
STATUS_CHOICES = (
("BUILDING", None),
("COMPLETED", True),
("DELETE_FAILED", False),
("FAILED", False),
("NEW", None),
("SAVING", None),
)
class LaunchLink(tables.LinkAction):
name = "create"
verbose_name = _("Create Backup")
url = "horizon:project:database_backups:create"
classes = ("btn-launch", "ajax-modal")
class RestoreLink(tables.LinkAction):
name = "restore"
verbose_name = _("Restore Backup")
url = "horizon:project:databases:launch"
classes = ("btn-launch", "ajax-modal")
def get_link_url(self, datam):
url = reverse(self.url)
return url + '?backup=%s' % datam.id
class DeleteBackup(tables.BatchAction):
name = "delete"
action_present = _("Delete")
action_past = _("Scheduled deletion of")
data_type_singular = _("Backup")
data_type_plural = _("Backups")
classes = ('btn-danger', 'btn-terminate')
def action(self, request, obj_id):
api.trove.backup_delete(request, obj_id)
class UpdateRow(tables.Row):
ajax = True
def get_data(self, request, backup_id):
backup = api.trove.backup_get(request, backup_id)
try:
backup.instance = api.trove.instance_get(request,
backup.instance_id)
except Exception:
pass
return backup
def db_link(obj):
    if not hasattr(obj, 'instance'):
return
if hasattr(obj.instance, 'name'):
return reverse(
'horizon:project:databases:detail',
kwargs={'instance_id': obj.instance_id
|
})
def db_name(obj):
    if hasattr(obj, 'instance') and hasattr(obj.instance, 'name'):
return obj.instance.name
return obj.instance_id
class BackupsTable(tables.DataTable):
name = tables.Column("name",
link=("horizon:project:database_backups:detail"),
verbose_name=_("Name"))
created = tables.Column("created", verbose_name=_("Created At"),
filters=[filters.parse_isotime])
location = tables.Column(lambda obj: _("Download"),
link=lambda obj: obj.locationRef,
verbose_name=_("Backup File"))
instance = tables.Column(db_name, link=db_link,
verbose_name=_("Database"))
status = tables.Column("status",
filters=(title, filters.replace_underscores),
verbose_name=_("Status"),
status=True,
status_choices=STATUS_CHOICES)
class Meta:
name = "backups"
verbose_name = _("Backups")
status_columns = ["status"]
row_class = UpdateRow
table_actions = (LaunchLink, DeleteBackup)
row_actions = (RestoreLink, DeleteBackup)
|
ellmetha/django-machina
|
machina/apps/forum_conversation/forum_attachments/models.py
|
Python
|
bsd-3-clause
| 346
| 0.00578
|
"""
|
Forum attachme
|
nts models
========================
This module defines models provided by the ``forum_attachments`` application.
"""
from machina.apps.forum_conversation.forum_attachments.abstract_models import AbstractAttachment
from machina.core.db.models import model_factory
Attachment = model_factory(AbstractAttachment)
|
mozilla/remo
|
remo/profiles/tests/test_forms.py
|
Python
|
bsd-3-clause
| 4,057
| 0.000739
|
from nose.tools import eq_, ok_
from remo.base.tests import RemoTestCase
from remo.base.utils import get_date
from remo.profiles.forms import ChangeUserForm, UserStatusForm
from remo.profiles.models import UserStatus
from remo.profiles.tests import UserFactory, UserStatusFactory
class ChangeUserFormTest(RemoTestCase):
def test_change_valid_login_email(self):
"""Test change login email with a valid one."""
mentor = UserFactory.create(groups=['Mentor'], userprofile__initial_council=True)
rep = UserFactory.create(groups=['Rep'], userprofile__mentor=mentor, last_name='Doe')
data = {'first_name': rep.first_name,
'last_name': rep.last_name,
'email': rep.email}
form = ChangeUserForm(data=data, instance=rep)
ok_(form.is_valid())
def test_change_invalid_login_email(self):
"""Test change login email with an invalid one."""
mentor = UserFactory.create(groups=['Mentor'], userprofile__initial_council=True)
rep = UserFactory.create(groups=['Rep'], userprofile__mentor=mentor)
data = {'first_name': rep.first_name,
'last_name': rep.last_name,
'email': mentor.email}
form = ChangeUserForm(data=data, instance=rep)
ok_(not form.is_valid())
class UserStatusFormTests(RemoTestCase):
def test_base(self):
mentor = UserFactory.create()
user = UserFactory.create(userprofile__mentor=mentor)
start_date = get_date()
expected_date = get_date(days=1)
data = {'start_date': start_date,
                'expected_date': expected_date}
        form = UserStatusForm(data, instance=UserStatus(user=user))
ok_(form.is_valid())
db_obj = form.save()
eq_(db_obj.expected_date, get_date(days=1))
eq_(db_obj.user.get_full_name(), user.get_full_name())
def test_invalid_expected_date(self):
mentor = UserFactory.create()
user = UserFactory.create(userprofile__mentor=mentor)
start_date = get_date()
expected_date = get_date(weeks=15)
data = {'start_date': start_date,
'expected_date': expected_date}
form = UserStatusForm(data, instance=UserStatus(user=user))
ok_(not form.is_valid())
ok_('expected_date' in form.errors)
def test_start_date_in_the_past(self):
mentor = UserFactory.create()
user = UserFactory.create(userprofile__mentor=mentor)
start_date = get_date(-1)
expected_date = get_date(days=2)
data = {'start_date': start_date,
'expected_date': expected_date}
form = UserStatusForm(data, instance=UserStatus(user=user))
ok_(not form.is_valid())
ok_('start_date' in form.errors)
def test_expected_date_before_start_date(self):
mentor = UserFactory.create()
user = UserFactory.create(userprofile__mentor=mentor)
start_date = get_date(4)
expected_date = get_date(days=2)
data = {'start_date': start_date,
'expected_date': expected_date}
form = UserStatusForm(data, instance=UserStatus(user=user))
ok_(not form.is_valid())
ok_('expected_date' in form.errors)
def remove_unavailability_status(self):
mentor = UserFactory.create()
user = UserFactory.create(userprofile__mentor=mentor)
start_date = get_date()
expected_date = get_date(days=1)
data = {'start_date': start_date,
'expected_date': expected_date}
user_status = UserStatusFactory.create(user=user,
expected_date=expected_date,
start_date=start_date)
form = UserStatusForm(data, instance=user_status)
ok_(form.is_valid())
ok_(not user_status.end_date)
db_obj = form.save()
eq_(db_obj.expected_date, get_date())
eq_(db_obj.user.get_full_name(), user.get_full_name())
ok_(db_obj.return_date)
|
pearu/f2py
|
fparser/api.py
|
Python
|
bsd-3-clause
| 7,543
| 0.006364
|
"""Public API for Fortran parser.
Module content
--------------
"""
from __future__ import absolute_import
#Author: Pearu Peterson <[email protected]>
#Created: Oct 2006
__autodoc__ = ['get_reader', 'parse', 'walk']
from . import Fortran2003
# import all Statement classes:
from .base_classes import EndStatement, classes
from .block_statements import *
# CHAR_BIT is used to convert object bit sizes to byte sizes
from .utils import CHAR_BIT
def get_reader(input, isfree=None, isstrict=None, include_dirs = None, source_only = None,
ignore_comments = True):
""" Returns Fortran reader instance.
Parameters
----------
input : str
Specify a string or filename containing Fortran code.
isfree, isstrict : {None, bool}
Specify input Fortran format. The values are determined from the
input. If that fails then isfree=True and isstrict=False is assumed.
include_dirs : {None, list}
Specify a list of include directories. The default list (when
include_dirs=None) contains the current working directory and
the directory of ``filename``.
source_only : {None, list}
Specify a list of Fortran file names that are searched when the
``USE`` statement is encountered.
Returns
-------
reader : `FortranReader`
Notes
-----
If ``input`` is a C filename then the functions searches for comment
lines starting with ``/*f2py`` and reads following lines as PYF file
content until a line ``*/`` is found.
See also
--------
parse
"""
import os
import re
    from .readfortran import FortranFileReader, FortranStringReader
if os.path.isfile(input):
name,ext = os.path.splitext(input)
if ext.lower() in ['.c']:
# get signatures from C file comments starting with `/*f2py` and ending with `*/`.
# TODO: improve parser to take line number offset making line numbers in
# parser messages correct.
f2py_c_comments = re.compile('/[*]\s*f2py\s.*[*]/',re.I | re.M)
            f = open(input, 'r')
c_input = ''
for s1 in f2py_c_comments.findall(f.read()):
c_input += s1[2:-2].lstrip()[4:] + '\n'
f.close()
if isfree is None: isfree = True
if isstrict is None: isstrict = True
return parse(c_input, isfree, isstrict, include_dirs)
reader = FortranFileReader(input, include_dirs = include_dirs, source_only = source_only)
elif isinstance(input, str):
reader = FortranStringReader(input, include_dirs = include_dirs, source_only = source_only)
else:
raise TypeError('Expected string or filename input but got %s' % (type(input)))
if isfree is None: isfree = reader.isfree
if isstrict is None: isstrict = reader.isstrict
reader.set_mode(isfree, isstrict)
return reader
def parse(input, isfree=None, isstrict=None, include_dirs = None, source_only = None,
ignore_comments = True, analyze=True):
""" Parse input and return Statement tree.
Parameters
----------
input : str
Specify a string or filename containing Fortran code.
isfree, isstrict : {None, bool}
Specify input Fortran format. The values are determined from the
input. If that fails then isfree=True and isstrict=False is assumed.
include_dirs : {None, list}
Specify a list of include directories. The default list (when
include_dirs=None) contains the current working directory and
the directory of ``filename``.
source_only : {None, list}
Specify a list of Fortran file names that are searched when the
``USE`` statement is encountered.
ignore_comments : bool
When True then discard all comment lines in the Fortran code.
analyze : bool
When True then apply run analyze method on the Fortran code tree.
Returns
-------
block : `fparser.api.BeginSource`
Examples
--------
>>> code = '''
... c comment
... subroutine foo(a)
... integer a
... print*, "a=",a
... end
... '''
>>> tree = parse(code,isfree=False)
>>> print tree
!BEGINSOURCE <cStringIO.StringI object at 0x1798030> mode=fix90
SUBROUTINE foo(a)
INTEGER a
PRINT *, "a=", a
END SUBROUTINE foo
>>> print `tree`
BeginSource
blocktype='beginsource'
name='<cStringIO.StringI object at 0x1798030> mode=fix90'
a=AttributeHolder:
external_subprogram=<dict with keys ['foo']>
content:
Subroutine
args=['a']
item=Line('subroutine foo(a)',(3, 3),'')
a=AttributeHolder:
variables=<dict with keys ['a']>
content:
Integer
selector=('', '')
entity_decls=['a']
item=Line('integer a',(4, 4),'')
Print
item=Line('print*, "a=",a',(5, 5),'')
EndSubroutine
blocktype='subroutine'
name='foo'
item=Line('end',(6, 6),'')
See also
--------
get_reader
"""
from .parsefortran import FortranParser
reader = get_reader(input, isfree, isstrict, include_dirs, source_only)
parser = FortranParser(reader, ignore_comments = ignore_comments)
parser.parse()
if analyze:
parser.analyze()
return parser.block
def walk(stmt, depth=-1, _initial_depth = None):
""" Generate Fortran statements by walking the stmt tree until given depth.
For each block statement in stmt, the walk functions yields a
tuple ``(statement, depth)`` where ``depth`` is the depth of tree
stucture for statement.
Parameters
----------
stmt : Statement
depth : int
If depth is positive then walk in the tree until given depth.
If depth is negative then walk the whole tree.
Returns
-------
generator
Examples
--------
::
from fparser import api
source_str = '''
subroutine foo
integer i, r
do i=1,100
r = r + i
end do
end
'''
tree = api.parse(source_str)
for stmt, depth in api.walk(tree):
print depth, stmt.item
that will print::
1 line #2'subroutine foo'
2 line #3'integer i, r'
2 line #4'do i=1,100'
3 line #5'r = r + i'
2 line #6'end do'
1 line #7'end'
"""
if _initial_depth is None:
if depth==0:
return
_initial_depth = depth
if not isinstance(stmt, classes.BeginSource):
yield stmt, _initial_depth - depth
if isinstance(stmt, classes.BeginStatement):
last_stmt = stmt.content[-1]
last_index = len(stmt.content)
if isinstance(last_stmt, classes.EndStatement):
last_index -= 1
else:
last_stmt = None
if depth != 0:
for substmt in stmt.content[:last_index]:
for statement, statement_depth in walk(substmt, depth-1, _initial_depth):
yield statement, statement_depth
if last_stmt is not None:
yield last_stmt, _initial_depth - depth
|
Pysellus/streaming-api-test
|
rx-tests/rx-stream-pacing.py
|
Python
|
mit
| 1,709
| 0.004096
|
#!/usr/bin/env python3
'''
Make a stream emit at the pace of a slower stream
Pros:
Introduce a delay between events in an otherwise rapid stream (like range)
Cons:
When the stream being delayed runs out of events to push, the zipped stream
will keep pushing events, defined with the lambda fn passed to the zip operation.
'''
from time import sleep
from rx import Observable
# Generate an interval sequence, firing once each second
interval = Observable.interval(1000)
# 5..10
numbers = Observable.from_(range(5, 11))
# Zip two streams together so it emits at the pace of the slowest stream
source = Observable.zip(
interval,
numbers,
# Because we only push the elements of the `numbers` stream,
# As soon as it runs out of events, it will keep sending empty
# events to the subscribers
lambda _, n: n
)
sub1 = source.subscribe(
lambda v : print("Value published to observer 1: {0}".format(v)),
lambda e : print("Error! {0}".format(e)),
lambda : print("Completed!")
)
sub2 = source.subscribe(
lambda v : print("Value published to observer 2: {0}".format(v)),
lambda e : print("Error! {0}".format(e)),
lambda : print("Completed!")
)
# As noted above, we have to dispose the subscriptions before the `numbers`
# stream runs out, or the program will get stuck listening to empty events
sleep(5)
sub1.dispose()
sub2.dispose()
# => Value published to observer 1: 5
# => Value published to observer 2: 5
# => Value published to observer 1: 6
# => Value published to observer 2: 6
# => Value published to observer 2: 7
# => Value published to observer 1: 7
# => Value published to observer 2: 8
# => Value published to observer 1: 8
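An alternative sketch (not in the original file; it assumes the RxPY 1.x API used above also offers the take operator): bounding the zipped stream so it completes on its own instead of relying on manual disposal.
bounded = Observable.zip(
    Observable.interval(1000),
    Observable.from_(range(5, 11)),
    lambda _, n: n
).take(6)  # exactly as many items as `numbers` can supply, then on_completed fires
bounded.subscribe(
    lambda v: print("Value: {0}".format(v)),
    lambda e: print("Error! {0}".format(e)),
    lambda: print("Completed!")
)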
|
ProfessionalIT/maxigenios-website
|
sdk/google_appengine/google/appengine/cron/GrocParser.py
|
Python
|
mit
| 29,691
| 0.00906
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from antlr3 import *
from antlr3.compat import set, frozenset
allOrdinals = set([1, 2, 3, 4, 5])
numOrdinals = len(allOrdinals)
HIDDEN = BaseRecognizer.HIDDEN
MONTH=27
THURSDAY=23
FOURTH_OR_FIFTH=16
THIRD=13
DECEMBER=39
FROM=41
EVERY=6
WEDNESDAY=22
QUARTER=40
SATURDAY=25
SYNCHRONIZED=9
JANUARY=28
SUNDAY=26
TUESDAY=21
SEPTEMBER=36
UNKNOWN_TOKEN=45
AUGUST=35
JULY=34
MAY=32
FRIDAY=24
DIGITS=8
FEBRUARY=29
TWO_DIGIT_HOUR_TIME=43
OF=4
WS=44
EOF=-1
APRIL=31
COMMA=10
JUNE=33
OCTOBER=37
TIME=5
FIFTH=15
NOVEMBER=38
FIRST=11
DIGIT=7
FOURTH=14
MONDAY=20
HOURS=17
MARCH=30
SECOND=12
MINUTES=18
TO=42
DAY=19
tokenNames = [
"<invalid>", "<EOR>", "<DOWN>", "<UP>",
"OF", "TIME", "EVERY", "DIGIT", "DIGITS", "SYNCHRONIZED", "COMMA", "FIRST",
"SECOND", "THIRD", "FOURTH", "FIFTH", "FOURTH_OR_FIFTH", "HOURS", "MINUTES",
"DAY", "MONDAY", "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY", "SATURDAY",
"SUNDAY", "MONTH", "JANUARY", "FEBRUARY", "MARCH", "APRIL", "MAY", "JUNE",
"JULY", "AUGUST", "SEPTEMBER", "OCTOBER", "NOVEMBER", "DECEMBER", "QUARTER",
"FROM", "TO", "TWO_DIGIT_HOUR_TIME", "WS", "UNKNOWN_TOKEN"
]
class GrocParser(Parser):
grammarFileName = "Groc.g"
antlr_version = version_str_to_tuple("3.1.1")
antlr_version_str = "3.1.1"
tokenNames = tokenNames
def __init__(self, input, state=None):
if state is None:
state = RecognizerSharedState()
Parser.__init__(self, input, state)
self.dfa4 = self.DFA4(
self, 4,
eot = self.DFA4_eot,
eof = self.DFA4_eof,
min = self.DFA4_min,
max = self.DFA4_max,
accept = self.DFA4_accept,
special = self.DFA4_special,
transition = self.DFA4_transition
)
self.ordinal_set = set()
self.weekday_set = set()
self.month_set = set()
self.monthday_set = set()
self.time_string = ''
self.interval_mins = 0
self.period_string = ''
self.synchronized = False
self.start_time_string = ''
self.end_time_string = ''
valuesDict = {
SUNDAY: 0,
FIRST: 1,
MONDAY: 1,
JANUARY: 1,
TUESDAY: 2,
SECOND: 2,
FEBRUARY: 2,
WEDNESDAY: 3,
THIRD: 3,
MARCH: 3,
THURSDAY: 4,
FOURTH: 4,
APRIL: 4,
FRIDAY: 5,
FIFTH: 5,
MAY: 5,
SATURDAY: 6,
JUNE: 6,
JULY: 7,
AUGUST: 8,
SEPTEMBER: 9,
OCTOBER: 10,
NOVEMBER: 11,
DECEMBER: 12,
}
def ValueOf(self, token_type):
return self.valuesDict.get(token_type, -1)
def timespec(self, ):
try:
try:
pass
alt1 = 2
LA1_0 = self.input.LA(1)
if (LA1_0 == EVERY) :
LA1_1 = self.input.LA(2)
if ((DIGIT <= LA1_1 <= DIGITS)) :
alt1 = 2
elif ((DAY <= LA1_1 <= SUNDAY)) :
alt1 = 1
else:
nvae = NoViableAltException("", 1, 1, self.input)
raise nvae
elif ((DIGIT <= LA1_0 <= DIGITS) or (FIRST <= LA1_0 <= FOURTH_OR_FIFTH)) :
alt1 = 1
else:
nvae = NoViableAltException("", 1, 0, self.input)
raise nvae
if alt1 == 1:
pass
self._state.following.append(self.FOLLOW_specifictime_in_timespec44)
self.specifictime()
self._state.following.pop()
elif alt1 == 2:
pass
self._state.following.append(self.FOLLOW_interval_in_timespec48)
self.interval()
self._state.following.pop()
self.match(self.input, EOF, self.FOLLOW_EOF_in_timespec52)
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def specifictime(self, ):
TIME1 = None
try:
try:
pass
pass
alt4 = 2
alt4 = self.dfa4.predict(self.input)
if alt4 == 1:
pass
pass
alt2 = 2
LA2_0 = self.input.LA(1)
if (LA2_0 == EVERY or (FIRST <= LA2_0 <= FOURTH_OR_FIFTH)) :
alt2 = 1
elif ((DIGIT <= LA2_0 <= DIGITS)) :
alt2 = 2
else:
nvae = NoViableAltException("", 2, 0, self.input)
raise nvae
if alt2 == 1:
pass
pass
self._state.following.append(self.FOLLOW_ordinals_in_specifictime72)
self.ordinals()
self._state.following.pop()
self._state.following.append(self.FOLLOW_weekdays_in_specifictime74)
self.weekdays()
self._state.following.pop()
elif alt2 == 2:
pass
self._state.following.append(self.FOLLOW_monthdays_in_specifictime77)
self.monthdays()
self._state.following.pop()
self.match(self.input, OF, self.FOLLOW_OF_in_specifictime80)
alt3 = 2
LA3_0 = self.input.LA(1)
if ((MONTH <= LA3_0 <= DECEMBER)) :
alt3 = 1
elif ((FIRST <= LA3_0 <= THIRD) or LA3_0 == QUARTER) :
alt3 = 2
else:
nvae = NoViableAltException("", 3, 0, self.input)
raise nvae
if alt3 == 1:
pass
self._state.following.append(self.FOLLOW_monthspec_in_specifictime83)
self.monthspec()
self._state.following.pop()
elif alt3 == 2:
pass
self._state.following.append(self.FOLLOW_quarterspec_in_specifictime85)
self.quarterspec()
self._state.following.pop()
elif alt4 == 2:
pass
pass
self._state.following.append(self.FOLLOW_ordinals_in_specifictime101)
self.ordinals()
self._state.following.pop()
self._state.following.append(self.FOLLOW_weekdays_in_specifictime103)
self.weekdays()
self._state.following.pop()
self.month_set = set(range(1,13))
TIME1=self.match(self.input, TIME, self.FOLLOW_TIME_in_specifictime117)
self.time_string = TIME1.text
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def interval(self, ):
|
eduNEXT/edunext-platform
|
import_shims/studio/contentstore/management/commands/tests/test_sync_courses.py
|
Python
|
agpl-3.0
| 491
| 0.010183
|
"""Deprecated import support. Auto-generated by import_shims/generate_shims.sh."""
# pylint: disable=redefined-builtin,wrong-import-position,wildcard-import,useless-suppression,line-too-long
from import_shims.warn import warn_deprecated_import
warn_deprecated_import('contentstore.management.commands.tests.test_sync_courses', 'cms.djangoapps.contentstore.management.commands.tests.test_sync_courses')
from cms.djangoapps.contentstore.management.commands.tests.test_sync_courses import *
|
vaquerizaslab/tadtool
|
tadtool/plot.py
|
Python
|
mit
| 28,848
| 0.002634
|
from __future__ import division, print_function
from abc import ABCMeta, abstractmethod
import matplotlib as mpl
mpl.use('TkAgg')
from matplotlib.ticker import MaxNLocator, Formatter, Locator
from matplotlib.widgets import Slider, Button
import matplotlib.patches as patches
import matplotlib.pyplot as plt
from matplotlib.colors import LinearSegmentedColormap
from tadtool.tad import GenomicRegion, sub_matrix_regions, sub_data_regions, \
data_array, insulation_index, sub_vector_regions, sub_regions, \
call_tads_insulation_index, directionality_index, call_tads_directionality_index, normalised_insulation_index
import math
import copy
import numpy as np
from bisect import bisect_left
from future.utils import string_types
try:
import Tkinter as tk
import tkFileDialog as filedialog
except ImportError:
import tkinter as tk
from tkinter import filedialog
class BasePlotter(object):
__metaclass__ = ABCMeta
def __init__(self, title):
self._ax = None
self.cax = None
self.title = title
@abstractmethod
def _plot(self, region=None, **kwargs):
raise NotImplementedError("Subclasses need to override _plot function")
@abstractmethod
def plot(self, region=None, **kwargs):
raise NotImplementedError("Subclasses need to override plot function")
@property
def fig(self):
return self._ax.figure
@property
def ax(self):
if not self._ax:
_, self._ax = plt.subplots()
return self._ax
@ax.setter
def ax(self, value):
self._ax = value
class GenomeCoordFormatter(Formatter):
"""
Process axis tick labels to give nice representations
of genomic coordinates
"""
def __init__(self, chromosome, display_scale=True):
"""
:param chromosome: :class:`~kaic.data.genomic.GenomicRegion` or string
:param display_scale: Boolean
Display distance scale at bottom right
"""
if isinstance(chromosome, GenomicRegion):
self.chromosome = chromosome.chromosome
else:
self.chromosome = chromosome
self.display_scale = display_scale
def _format_val(self, x, prec_offset=0):
if x == 0:
oom_loc = 0
else:
oom_loc = int(math.floor(math.log10(abs(x))))
view_range = self.axis.axes.get_xlim()
oom_range = int(math.floor(math.log10(abs(view_range[1] - view_range[0]))))
if oom_loc >= 3:
return "{:.{prec}f}kb".format(x/1000, prec=max(0, 3 + prec_offset - oom_range))
return "{:.0f}b".format(x)
def __call__(self, x, pos=None):
"""
Return label for tick at coordinate x. Relative position of
ticks can be specified with pos. First tick gets chromosome name.
"""
s = self._format_val(x, prec_offset=1)
if pos == 0 or x == 0:
return "{}:{}".format(self.chromosome, s)
return s
def get_offset(self):
"""
Return information about the distances between
tick bars and the size of the view window.
Is called by matplotlib and displayed in lower right corner
of plots.
"""
if not self.display_scale:
return ""
view_range = self.axis.axes.get_xlim()
view_dist = abs(view_range[1] - view_range[0])
tick_dist = self.locs[2] - self.locs[1]
minor_tick_dist = tick_dist/5
minor_tick_dist_str = self._format_val(minor_tick_dist, prec_offset=2)
tick_dist_str = self._format_val(tick_dist, prec_offset=1)
view_dist_str = self._format_val(view_dist)
return "{}|{}|{}".format(minor_tick_dist_str, tick_dist_str, view_dist_str)
class GenomeCoordLocator(MaxNLocator):
"""
Choose locations of genomic coordinate ticks on the plot axis.
Behaves like default Matplotlib locator, except that it always
places a tick at the start and the end of the window.
"""
def __call__(self):
vmin, vmax = self.axis.get_view_interval()
ticks = self.tick_values(vmin, vmax)
# Make sure that first and last tick are the start
# and the end of the genomic range plotted. If next
# ticks are too close, remove them.
ticks[0] = vmin
ticks[-1] = vmax
if ticks[1] - vmin < (vmax - vmin)/(self._nbins*3):
ticks = np.delete(ticks, 1)
if vmax - ticks[-2] < (vmax - vmin)/(self._nbins*3):
ticks = np.delete(ticks, -2)
return self.raise_if_exceeds(np.array(ticks))
class MinorGenomeCoordLocator(Locator):
"""
Choose locations of minor tick marks between major
tick labels. Modification of the Matplotlib AutoMinorLocator,
except that it uses the distance between 2nd and 3rd major
mark as reference, instead of 1st and 2nd.
"""
def __init__(self, n):
self.ndivs = n
def __call__(self):
majorlocs = self.axis.get_majorticklocs()
try:
majorstep = majorlocs[2] - majorlocs[1]
except IndexError:
# Need at least two major ticks to find minor tick locations
# TODO: Figure out a way to still be able to display minor
# ticks without two major ticks visible. For now, just display
# no ticks at all.
majorstep = 0
if self.ndivs is None:
if majorstep == 0:
# TODO: Need a better way to figure out ndivs
ndivs = 1
else:
x = int(np.round(10 ** (np.log10(majorstep) % 1)))
if x in [1, 5, 10]:
ndivs = 5
else:
ndivs = 4
else:
ndivs = self.ndivs
minorstep = majorstep / ndivs
vmin, vmax = self.axis.get_view_interval()
if vmin > vmax:
vmin, vmax = vmax, vmin
if len(majorlocs) > 0:
t0 = majorlocs[1]
tmin = ((vmin - t0) // minorstep + 1) * minorstep
tmax = ((vmax - t0) // minorstep + 1) * minorstep
locs = np.arange(tmin, tmax, minorstep) + t0
cond = np.abs((locs - t0) % majorstep) > minorstep / 10.0
locs = locs.compress(cond)
else:
locs = []
return self.raise_if_exceeds(np.array(locs))
class BasePlotter1D(BasePlotter):
__metaclass__ = ABCMeta
def __init__(self, title):
BasePlotter.__init__(self, title=title)
def plot(self, region=None, ax=None, **kwargs):
if isinstance(region, string_types):
region = GenomicRegion.from_string(region)
if ax:
self.ax = ax
# set genome tick formatter
self.ax.xaxis.set_major_formatter(GenomeCoordFormatter(region))
self.ax.xaxis.set_major_locator(GenomeCoordLocator(nbins=5))
self.ax.xaxis.set_minor_locator(MinorGenomeCoordLocator(n=5))
self.ax.set_title(self.title)
self._plot(region, **kwargs)
self.ax.set_xlim(region.start, region.end)
return self.fig, self.ax
def prepare_normalization(norm="lin", vmin=None, vmax=None):
if isinstance(norm, mpl.colors.Normalize):
norm.vmin = vmin
norm.vmax = vmax
return norm
if norm == "log":
return mpl.colors.LogNorm(vmin=vmin, vmax=vmax)
elif norm == "lin":
return mpl.colors.Normalize(vmin=vmin, vmax=vmax)
else:
raise ValueError("'{}' not a valid normalization method.".format(norm))
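A minimal usage sketch (not part of the original module; the value ranges are illustrative):
# logarithmic colour scaling for contact counts between 1 and 1000
log_norm = prepare_normalization(norm="log", vmin=1, vmax=1000)
# an existing Normalize instance passes through with updated bounds
lin_norm = prepare_normalization(mpl.colors.Normalize(), vmin=0, vmax=1)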
class BasePlotterHic(object):
__metaclass__ = ABCMeta
def __init__(self, hic_matrix, regions=None, colormap='RdBu', norm="log",
vmin=None, vmax=None, show_colorbar=True, blend_masked=False):
if regions is None:
regions = []
for i in range(hic_matrix.shape[0]):
regions.append(GenomicRegion(chromosome='', start=i, end=i))
self.regions = regions
self.hic_matrix = hic_matrix
self.colormap = copy.copy(mpl.cm.get_cmap(colormap))
if blend_masked:
self.colormap.set_bad(self.colormap(0))
self._vmin = vmin
self._vmax = vmax
|
saloni10/librehatti_new
|
src/authentication/models.py
|
Python
|
gpl-2.0
| 1,479
| 0.00879
|
from django.db import models
from django.contrib.auth.models import User
class OrganisationType(models.Model):
type_desc = models.CharField(max_length=200)
def __unicode__(self):
return self.type_desc
class Address(models.Model):
street_address = models.CharField(max_length=100)
city = models.CharField(max_length=100)
pin = models.CharField(max_length=10)
province = models.CharField(max_length=100)
nationality = models.CharField(max_length=100)
def __unicode__(self):
return self.street_address + ',' + self.city
class HattiUser(models.Model):
user = models.OneToOneField(User)
address = models.ForeignKey(Address)
telephone = models.CharField(max_length=500)
date_joined = models.DateTimeField(auto_now_add=True)
fax = models.CharField(max_length=100)
avatar = models.CharField(max_length=100, null=True, blank=True)
tagline = models.CharField(max_length=140)
class Meta:
abstract = True
class AdminOrganisations(HattiUser):
title = models.CharField(max_length=200)
organisation_type = models.ForeignKey(OrganisationType)
def __unicode__(self):
return self.title
class Customer(HattiUser):
title = models.CharField(max_length=200, blank=True, null=True)
is_org = models.BooleanField()
org_type = models.ForeignKey(OrganisationType)
company = models.CharField(max_length=200)
def __unicode__(self):
return unicode(self.user)
|
stscieisenhamer/ginga
|
ginga/examples/gtk/example2_gtk.py
|
Python
|
bsd-3-clause
| 8,631
| 0.00139
|
#! /usr/bin/env python
#
# example2_gtk.py -- Simple, configurable FITS viewer.
#
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
from __future__ import print_function
import sys, os
import logging, logging.handlers
from ginga import AstroImage
from ginga.gtkw import GtkHelp
from ginga.gtkw.ImageViewGtk import CanvasView
from ginga.canvas.CanvasObject import get_canvas_types
from ginga import colors
from ginga.misc import log
import gtk
STD_FORMAT = '%(asctime)s | %(levelname)1.1s | %(filename)s:%(lineno)d (%(funcName)s) | %(message)s'
class FitsViewer(object):
def __init__(self, logger):
self.logger = logger
self.drawcolors = colors.get_colors()
self.dc = get_canvas_types()
root = gtk.Window(gtk.WINDOW_TOPLEVEL)
root.set_title("Gtk2 CanvasView Example")
root.set_border_width(2)
root.connect("delete_event", lambda w, e: quit(w))
self.root = root
self.select = GtkHelp.FileSelection(root)
vbox = gtk.VBox(spacing=2)
fi = CanvasView(logger)
fi.enable_autocuts('on')
fi.set_autocut_params('zscale')
fi.enable_autozoom('on')
fi.set_zoom_algorithm('rate')
fi.set_zoomrate(1.4)
fi.show_pan_mark(True)
fi.set_callback('drag-drop', self.drop_file)
fi.set_callback('none-move', self.motion)
fi.set_bg(0.2, 0.2, 0.2)
fi.ui_setActive(True)
self.fitsimage = fi
bd = fi.get_bindings()
bd.enable_all(True)
# canvas that we will draw on
canvas = self.dc.DrawingCanvas()
canvas.enable_draw(True)
canvas.set_drawtype('rectangle', color='lightblue')
canvas.setSurface(fi)
self.canvas = canvas
# add canvas to view
private_canvas = fi.get_canvas()
private_canvas.register_for_cursor_drawing(fi)
private_canvas.add(canvas)
canvas.ui_setActive(True)
self.drawtypes = canvas.get_drawtypes()
self.drawtypes.sort()
# add a color bar
#fi.show_color_bar(True)
fi.show_focus_indicator(True)
# add little mode indicator that shows keyboard modal states
fi.show_mode_indicator(True, corner='ur')
w = fi.get_widget()
w.set_size_request(512, 512)
vbox.pack_start(w, fill=True, expand=True)
self.readout = gtk.Label("")
vbox.pack_start(self.readout, fill=True, expand=False)
hbox = gtk.HBox(spacing=5)
wdrawtype = GtkHelp.combo_box_new_text()
index = 0
for name in self.drawtypes:
wdrawtype.insert_text(index, name)
index += 1
index = self.drawtypes.index('rectangle')
wdrawtype.set_active(index)
wdrawtype.connect('changed', self.set_drawparams)
self.wdrawtype = wdrawtype
wdrawcolor = GtkHelp.combo_box_new_text()
index = 0
for name in self.drawcolors:
wdrawcolor.insert_text(index, name)
index += 1
index = self.drawcolors.index('lightblue')
wdrawcolor.set_active(index)
wdrawcolor.connect('changed', self.set_drawparams)
self.wdrawcolor = wdrawcolor
wfill = GtkHelp.CheckButton("Fill")
wfill.sconnect('toggled', self.set_drawparams)
self.wfill = wfill
walpha = GtkHelp.SpinButton()
adj = walpha.get_adjustment()
adj.configure(0.0, 0.0, 1.0, 0.1, 0.1, 0)
walpha.set_value(1.0)
walpha.set_digits(1)
walpha.sconnect('value-changed', self.set_drawparams)
self.walpha = walpha
wclear = gtk.Button("Clear Canvas")
wclear.connect('clicked', self.clear_canvas)
wopen = gtk.Button("Open File")
wopen.connect('clicked', self.open_file)
wquit = gtk.Button("Quit")
wquit.connect('clicked', quit)
for w in (wquit, wclear, walpha, gtk.Label("Alpha:"),
wfill, wdrawcolor, wdrawtype, wopen):
hbox.pack_end(w, fill=False, expand=False)
vbox.pack_start(hbox, fill=False, expand=False)
root.add(vbox)
def get_widget(self):
return self.root
def set_drawparams(self, w):
index = self.wdrawtype.get_active()
kind = self.drawtypes[index]
index = self.wdrawcolor.get_active()
fill = self.wfill.get_active()
alpha = self.walpha.get_value()
params = { 'color': self.drawcolors[index],
'alpha': alpha,
#'cap': 'ball',
}
if kind in ('circle', 'rectangle', 'polygon', 'triangle',
'righttriangle', 'ellipse', 'square', 'box'):
params['fill'] = fill
params['fillalpha'] = alpha
self.canvas.set_drawtype(kind, **params)
def clear_canvas(self, w):
self.canvas.delete_all_objects()
def load_file(self, filepath):
image = AstroImage.AstroImage(logger=self.logger)
image.load_file(filepath)
self.fitsimage.set_image(image)
self.root.set_title(filepath)
def open_file(self, w):
self.select.popup("Open FITS file", self.load_file)
def drop_file(self, fitsimage, paths):
fileName = paths[0]
self.load_file(fileName)
def motion(self, fitsimage, button, data_x, data_y):
# Get the value under the data coordinates
try:
#value = fitsimage.get_data(data_x, data_y)
# We report the value across the pixel, even though the coords
# change halfway across the pixel
value = fitsimage.get_data(int(data_x+0.5), int(data_y+0.5))
except Exception:
value = None
fits_x, fits_y = data_x + 1, data_y + 1
# Calculate WCS RA
try:
# NOTE: image function operates on DATA space coords
image = fitsimage.get_image()
if image is None:
# No image loaded
return
ra_txt, dec_txt = image.pixtoradec(fits_x, fits_y,
format='str', coords='fits')
except Exception as e:
self.logger.warning("Bad coordinate conversion: %s" % (
str(e)))
ra_txt = 'BAD WCS'
dec_txt = 'BAD WCS'
text = "RA: %s DEC: %s X: %.2f Y: %.2f Value: %s" % (
ra_txt, dec_txt, fits_x, fits_y, value)
self.readout.set_text(text)
def quit(self, w):
gtk.main_quit()
return True
def main(options, args):
logger = log.get_logger("example2", options=options)
# Check whether user wants to use OpenCv
if options.opencv:
from ginga import trcalc
try:
trcalc.use('opencv')
except Exception as e:
logger.warning("failed to set OpenCv preference: %s" % (str(e)))
# Check whether user wants to use OpenCL
elif options.opencl:
from ginga import trcalc
try:
trcalc.use('opencl')
except Exception as e:
logger.warning("failed to set OpenCL preference: %s" % (str(e)))
fv = FitsViewer(logger)
root = fv.get_widget()
root.show_all()
if len(args) > 0:
fv.load_file(args[0])
gtk.main()
if __name__ == "__main__":
# Parse command line options with nifty optparse module
from optparse import OptionParser
usage = "usage: %prog [options] cmd [args]"
optprs = OptionParser(usage=usage, version=('%%prog'))
optprs.add_option("--debug", dest="debug", default=False, action="store_true",
help="Enter the pdb debugger on main()")
optprs.add_option("--opencv", dest="opencv", default=False,
action="store_true",
help="Use OpenCv acceleration")
optprs.add_option("--opencl", dest="opencl", default=False,
action="store_true",
help="Use OpenCL acceleration")
optprs.add_option("--profile", dest="profile", action="store_true",
default=False,
|
le9i0nx/ansible
|
test/units/modules/network/mlnxos/mlnxos_module.py
|
Python
|
gpl-3.0
| 2,693
| 0.001485
|
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except:
pass
fixture_data[path] = data
return data
class TestMlnxosModule(ModuleTestCase):
def execute_module(self, failed=False, changed=False, commands=None, is_updates=False, sort=True, transport='cli'):
self.load_fixtures(commands, transport=transport)
if failed:
result = self.failed()
self.assertTrue(result['failed'], result)
else:
result = self.changed(changed)
self.assertEqual(result['changed'], changed, result)
if commands is not None:
if is_updates:
commands_res = result.get('updates')
else:
commands_res = result.get('commands')
if sort:
self.assertEqual(sorted(commands), sorted(commands_res), commands_res)
else:
self.assertEqual(commands, commands_res, commands_res)
return result
def failed(self):
with self.assertRaises(AnsibleFailJson) as exc:
self.module.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'], result)
return result
def changed(self, changed=False):
with self.assertRaises(AnsibleExitJson) as exc:
self.module.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], changed, result)
return result
def load_fixtures(self, commands=None, transport='cli'):
pass
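A hedged sketch (not part of the original file) of how a concrete test case plugs into this base class; the module under test is hypothetical and its argument setup is omitted:
class TestMyMlnxosModule(TestMlnxosModule):
    module = my_mlnxos_module  # hypothetical Ansible module under test

    def load_fixtures(self, commands=None, transport='cli'):
        # would normally stub device I/O with load_fixture(...) data
        pass

    def test_idempotent_run(self):
        # execute_module() runs self.module.main() and asserts on 'changed'
        self.execute_module(changed=False)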
|
pankshok/xoinvader
|
xoinvader/utils.py
|
Python
|
mit
| 3,433
| 0
|
"""Various useful tools."""
import copy
import datetime
import logging
# FIXME: temporary backward compatibility
from eaf.core import Vec3 as Point
LOG_FORMAT = (
"[%(asctime)s] %(levelname)-8s %(name)s[%(funcName)s]:%(lineno)s: "
"%(message)s"
)
"""Log message format string."""
TIME_FORMAT = "%H:%M:%S,%03d"
"""Log time format string."""
DATE_FORMAT = "%Y-%m-%d %a"
"""Initial log entry date format string."""
def setup_logger(name, debug=False, msgfmt=None, timefmt=None):
"""Setup logger with linked log file.
Do not use it for getting logger, call this once on init,
then use logging.getLogger(__name__) for getting actual logger.
:param str name: logger relative name
:param bool debug: debug mode
:param str msgfmt: message format
:param str timefmt: time format
:return: prepared logger instance
:rtype: `logging.Logger`
"""
logger = logging.getLogger(name)
logger.propagate = False
level = logging.DEBUG if debug else logging.INFO
logger.setLevel(level)
handler = logging.FileHandler("{0}.log".format(name))
handler.setLevel(level)
formatter = logging.Formatter(msgfmt or LOG_FORMAT, timefmt or TIME_FORMAT)
handler.setFormatter(formatter)
logger.addHandler(handler)
date = datetime.date.today().strftime(DATE_FORMAT)
logger.info("*** (%s) Initializing XOInvader ***", date)
return logger
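A short usage sketch (illustrative names) following the docstring's advice: configure once at startup, then fetch loggers the usual way.
setup_logger("xoinvader", debug=True)  # creates xoinvader.log with the formats above
log = logging.getLogger(__name__)  # e.g. "xoinvader.utils", a child of the configured logger
log.debug("debug messages now reach the file handler")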
def clamp(val, min_val, max_val):
"""Clamp value between boundaries."""
if max_val < min_val:
raise ValueError("max_val must be >= min_val")
return min(max(val, min_val), max_val)
class dotdict(dict): # pylint: disable=invalid-name
"""Container for dot elements access."""
def __init__(self, *args, **kwargs):
super(dotdict, self).__init__(*args, **kwargs)
self.__dict__ = self
self._wrap_nested()
def _wrap_nested(self):
"""Wrap nested dicts for deep dot access."""
for key, value in self.items():
if isinstance(value, dict):
self[key] = dotdict(value)
def fullcopy(self):
"""Return full copy of internal structure as dotdict.
:return :class:`xoinvader.utils.dotdict`: full copy
"""
return dotdict(copy.deepcopy(self))
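A quick illustration (hypothetical keys) of the dot access this container enables:
conf = dotdict({"video": {"width": 800, "height": 600}})
assert conf.video.width == 800  # nested dicts are wrapped on construction
snapshot = conf.fullcopy()  # deep copy, still a dotdict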
class InfiniteList(list):
"""Infinite list container."""
def __init__(self, *args, **kwargs):
super(InfiniteList, self).__init__(*args, **kwargs)
self._index = 0
def select(self, index: int) -> object:
"""Set index and return selected element."""
if not len(self):
raise IndexError("List is empty")
if not (0 <= index < len(self)):
raise IndexError("Index out of bounds.")
self._index = index
return self[self._index]
def current(self) -> object:
"""Return current element."""
return self[self._index]
def next(self) -> object:
"""Select next element and return it."""
try:
self._index = (self._index + 1) % len(self)
except ZeroDivisionError:
raise IndexError("List is empty.")
return self[self._index]
def prev(self) -> object:
"""Select previous element and return it."""
try:
self._index = (self._index - 1) % len(self)
except ZeroDivisionError:
raise IndexError("List is empty.")
return self[self._index]
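A small usage sketch (hypothetical items) showing the wrap-around behaviour of next/prev:
weapons = InfiniteList(["laser", "rocket", "plasma"])
assert weapons.current() == "laser"
assert weapons.next() == "rocket"
assert weapons.prev() == "laser"
assert weapons.prev() == "plasma"  # index -1 wraps around to the last element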
|
vsergeyev/os2online
|
desktop/desktop_items.py
|
Python
|
mit
| 3,022
| 0.009927
|
# -*- coding: utf-8 -*-
import json
from flask import jsonify
from flask import render_template, request, url_for, redirect
import time, random
#------------------------------------------------------------------------------
def get_desktop_items_data():
"""
Returns items for Desktop in JSON array:
title
"""
items = [
{'title': 'OS/2 System', 'icon': '/appmedia/imgs/system_folder.png', 'left': '0px', 'top': '40px', 'action': '/system_folder/'},
{'title': 'Information', 'icon': '/appmedia/imgs/help.png', 'left': '0px', 'top': '120px', 'action': '/appmedia/help/desktop.html'},
{'title': 'Virtual PC', 'icon': '/appmedia/imgs/system/minimized.png', 'left': '0px', 'top': '200px', 'action': '/'},
{'title': 'WebExplorer', 'icon': '/appmedia/imgs/web/explore.gif', 'left': '0px', 'top': '280px', 'action': '/webexplorer/'},
{'title': 'WIN-OS/2 Window', 'icon': '/appmedia/imgs/cmd/win_wnd.png', 'left': '0px', 'top': '360px', 'action': '/cmd/?cmd=win_wnd', 'app': 'yes'},
{'title': 'Solitaire', 'icon': '/appmedia/imgs/files/sol.jpg', 'left': '0px', 'top': '440px', 'action': 'http://www.webolog.com/online_games/solitaire/loaderwm.swf', 'app': 'yes'},
]
#return jsonify(items=items)
return json.dumps(items)
#------------------------------------------------------------------------------
def get_lanchpad_data():
return render_template("lanchpad.html")
#------------------------------------------------------------------------------
def get_window_data():
"Returns rendered window with iframe inside"
title = request.args.get("title", "")
src = request.args.get("src", "")
width = request.args.get("width", "634")
height = request.args.get("height", "450")
win_id = int(time.time())
template = "pm/base_window.html"
if src.find("win_") != -1:
template = "pm/win_window.html"
#title = "Program Manager"
content = {
"title": title,
"src": src,
"win_id": win_id,
"wnd_left": random.randint(120, 300),
"wnd_top": random.randint(20, 100),
"width": width,
"height": height,
}
return render_template(template, **content)
#------------------------------------------------------------------------------
def get_dialog_data():
"Returns rendered dialog"
dlg = request.args.get("dlg", "")
title = request.args.get("title", "")
win_id = int(time.time())
template = "dialogs/%s.html" % dlg
content = {
"title": title,
"dlg": dlg,
"win_id": win_id,
"wnd_left": 400,
"wnd_top": 300,
"width": 290,
"height": 150,
}
return render_template(template, **content)
#------------------------------------------------------------------------------
|
fthuin/artificial-intelligence
|
assignment3/Code/zipremise/SimpleHTTPServer.py
|
Python
|
mit
| 238
| 0
|
#!/usr/bin/env python3
from http.server import HTTPServer, CGIHTTPRequestHandler
port = 8000
httpd = HTTPServer(('', port), CGIHTTPRequestHandler)
print("Starting simple_httpd on port: " + str(httpd.server_port))
httpd.serve_forever()
|
mancoast/CPythonPyc_test
|
cpython/266_test_strftime.py
|
Python
|
gpl-3.0
| 6,966
| 0.004594
|
"""
Unittest for time.strftime
"""
import calendar
import sys
import os
import re
from test import test_support
import time
import unittest
# helper functions
def fixasctime(s):
if s[8] == ' ':
s = s[:8] + '0' + s[9:]
return s
def escapestr(text, ampm):
"""
Escape text to deal with possible locale values that have regex
syntax while allowing regex syntax used for comparison.
"""
new_text = re.escape(text)
new_text = new_text.replace(re.escape(ampm), ampm)
new_text = new_text.replace('\%', '%')
new_text = new_text.replace('\:', ':')
new_text = new_text.replace('\?', '?')
return new_text
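A small illustration (hypothetical values) of what escapestr produces: regex metacharacters in the text are escaped, while the ampm alternation plus ':' and '?' are restored so they still act as regex syntax.
pattern = escapestr('08:30:15 (AM|am)', '(AM|am)')
# re.match(pattern, '08:30:15 am') now succeeds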
class StrftimeTest(unittest.TestCase):
def __init__(self, *k, **kw):
unittest.TestCase.__init__(self, *k, **kw)
def _update_variables(self, now):
# we must update the local variables on every cycle
self.gmt = time.gmtime(now)
now = time.localtime(now)
if now[3] < 12: self.ampm='(AM|am)'
else: self.ampm='(PM|pm)'
self.jan1 = time.localtime(time.mktime((now[0], 1, 1, 0, 0, 0, 0, 1, 0)))
try:
if now[8]: self.tz = time.tzname[1]
else: self.tz = time.tzname[0]
except AttributeError:
self.tz = ''
if now[3] > 12: self.clock12 = now[3] - 12
elif now[3] > 0: self.clock12 = now[3]
else: self.clock12 = 12
self.now = now
def setUp(self):
try:
import java
java.util.Locale.setDefault(java.util.Locale.US)
except ImportError:
import locale
locale.setlocale(locale.LC_TIME, 'C')
def test_strftime(self):
now = time.time()
self._update_variables(now)
self.strftest1(now)
self.strftest2(now)
if test_support.verbose:
print "Strftime test, platform: %s, Python version: %s" % \
(sys.platform, sys.version.split()[0])
for j in range(-5, 5):
for i in range(25):
arg = now + (i+j*100)*23*3603
self._update_variables(arg)
self.strftest1(arg)
self.strftest2(arg)
def strftest1(self, now):
if test_support.verbose:
print "strftime test for", time.ctime(now)
now = self.now
# Make sure any characters that could be taken as regex syntax is
# escaped in escapestr()
expectations = (
('%a', calendar.day_abbr[now[6]], 'abbreviated weekday name'),
('%A', calendar.day_name[now[6]], 'full weekday name'),
('%b', calendar.month_abbr[now[1]], 'abbreviated month name'),
('%B', calendar.month_name[now[1]], 'full month name'),
# %c see below
('%d', '%02d' % now[2], 'day of month as number (00-31)'),
('%H', '%02d' % now[3], 'hour (00-23)'),
('%I', '%02d' % self.clock12, 'hour (01-12)'),
('%j', '%03d' % now[7], 'julian day (001-366)'),
('%m', '%02d' % now[1], 'month as number (01-12)'),
('%M', '%02d' % now[4], 'minute, (00-59)'),
('%p', self.ampm, 'AM or PM as appropriate'),
('%S', '%02d' % now[5], 'seconds of current time (00-60)'),
('%U', '%02d' % ((now[7] + self.jan1[6])//7),
'week number of the year (Sun 1st)'),
('%w', '0?%d' % ((1+now[6]) % 7), 'weekday as a number (Sun 1st)'),
('%W', '%02d' % ((now[7] + (self.jan1[6] - 1)%7)//7),
'week number of the year (Mon 1st)'),
# %x see below
('%X', '%02d:%02d:%02d' % (now[3], now[4], now[5]), '%H:%M:%S'),
('%y', '%02d' % (now[0]%100), 'year without century'),
('%Y', '%d' % now[0], 'year with century'),
# %Z see below
('%%', '%', 'single percent sign'),
)
for e in expectations:
# mustn't raise a value error
try:
result = time.strftime(e[0], now)
except ValueError, error:
self.fail("strftime '%s' format gave error: %s" % (e[0], error))
if re.match(escapestr(e[1], self.ampm), result):
continue
if not result or result[0] == '%':
self.fail("strftime does not support standard '%s' format (%s)"
% (e[0], e[2]))
else:
self.fail("Conflict for %s (%s): expected %s, but got %s"
% (e[0], e[2], e[1], result))
def strftest2(self, now):
nowsecs = str(long(now))[:-1]
now = self.now
nonstandard_expectations = (
# These are standard but don't have predictable output
('%c', fixasctime(time.asctime(now)), 'near-asctime() format'),
('%x', '%02d/%02d/%02d' % (now[1], now[2], (now[0]%100)),
'%m/%d/%y %H:%M:%S'),
('%Z', '%s' % self.tz, 'time zone name'),
# These are some platform specific extensions
('%D', '%02d/%02d/%02d' % (now[1], now[2], (now[0]%100)), 'mm/dd/yy'),
('%e', '%2d' % now[2], 'day of month as number, blank padded ( 0-31)'),
('%h', calendar.month_abbr[now[1]], 'abbreviated month name'),
('%k', '%2d' % now[3], 'hour, blank padded ( 0-23)'),
('%n', '\n', 'newline character'),
('%r', '%02d:%02d:%02d %s' % (self.clock12, now[4], now[5], self.ampm),
'%I:%M:%S %p'),
('%R', '%02d:%02d' % (now[3], now[4]), '%H:%M'),
('%s', nowsecs, 'seconds since the Epoch in UTC'),
('%t', '\t', 'tab character'),
('%T', '%02d:%02d:%02d' % (now[3], now[4], now[5]), '%H:%M:%S'),
('%3y', '%03d' % (now[0]%100),
'year without century rendered using fieldwidth'),
)
for e in nonstandard_expectations:
try:
result = time.strftime(e[0], now)
except ValueError, result:
msg = "Error for nonstandard '
|
%s' format (%s): %s" % \
(e[0], e[2], str(result))
if test_support.verbose:
print msg
continue
if re.match(escapestr(e[1], self.ampm), result):
if test_support.verbose:
print "Supports nonstandard '%s' format (%s)" % (e[0], e[2])
elif not result or result[0] == '%':
if test_support.verbose:
print "Does not appear to support '%s' format (%s)" % \
(e[0], e[2])
else:
if test_support.verbose:
print "Conflict for nonstandard '%s' format (%s):" % \
(e[0], e[2])
print " Expected %s, but got %s" % (e[1], result)
def test_main():
test_support.run_unittest(StrftimeTest)
if __name__ == '__main__':
test_main()
|
ckot/django-namespaced-session
|
setup.py
|
Python
|
mit
| 1,469
| 0.000681
|
# pylint: disable=I0011,C0301
from __future__ import absolute_import, unicode_literals
import os
from setuptools import find_packages, setup
from namespaced_session import __version__
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-namespaced-session',
version=__version__,
packages=find_packages(exclude=['tests']),
include_package_data=True,
test_suite="runtests.main",
license='MIT',
description='Django app which makes it easier to work with dictionaries in sessions',
long_description=README,
url='https://github.com/ckot/django-namespaced-session/',
author='Scott Silliman',
author_email='[email protected]',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Framework :: Django :: 1.7',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Intended Audience :: Developers',
'License :: OSI Approved',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7'
],
)
|
jamesfolberth/NGC_STEM_camp_AWS
|
notebooks/data8_notebooks/lab03/tests/q3_2.py
|
Python
|
bsd-3-clause
| 530
| 0.003774
|
test = {
'name': '',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
>>> type(imdb_by_year) == tables.Table
True
>>> imdb_by_year.column('Title').take(range(3))
array(['The Kid (1921)', 'The Gold Rush (1925)', 'The General (1926)'],
dtype='<U75')
""",
'hidden': False,
'locked': False
},
],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'
}
]
}
|
GabrielNicolasAvellaneda/riak-python-client
|
commands.py
|
Python
|
apache-2.0
| 15,906
| 0.000063
|
"""
distutils commands for riak-python-client
"""
from distutils import log
from distutils.core import Command
from distutils.errors import DistutilsOptionError
from subprocess import Popen, PIPE
from string import Template
import shutil
import re
import os.path
__all__ = ['create_bucket_types', 'setup_security', 'enable_security',
'disable_security', 'preconfigure', 'configure']
# Exception classes used by this module.
class CalledProcessError(Exception):
"""This exception is raised when a process run by check_call() or
check_output() returns a non-zero exit status.
The exit status will be stored in the returncode attribute;
check_output() will also store the output in the output attribute.
"""
def __init__(self, returncode, cmd, output=None):
self.returncode = returncode
self.cmd = cmd
self.output = output
def __str__(self):
return "Command '%s' returned non-zero exit status %d" % (self.cmd,
self.returncode)
def check_output(*popenargs, **kwargs):
"""Run command with arguments and return its output as a byte string.
If the exit code was non-zero it raises a CalledProcessError. The
CalledProcessError object will have the return code in the returncode
attribute and output in the output attribute.
The arguments are the same as for the Popen constructor. Example:
>>> check_output(["ls", "-l", "/dev/null"])
'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n'
The stdout argument is not allowed as it is used internally.
To capture standard error in the result, use stderr=STDOUT.
>>> import sys
>>> check_output(["/bin/sh", "-c",
... "ls -l non_existent_file ; exit 0"],
... stderr=sys.stdout)
'ls: non_existent_file: No such file or directory\n'
"""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
try:
import simplejson as json
except ImportError:
import json
class create_bucket_types(Command):
"""
Creates bucket-types appropriate for testing. By default this will create:
* `pytest-maps` with ``{"datatype":"map"}``
* `pytest-sets` with ``{"datatype":"set"}``
* `pytest-counters` with ``{"datatype":"counter"}``
* `pytest-consistent` with ``{"consistent":true}``
* `pytest-mr`
* `pytest` with ``{"allow_mult":false}``
"""
description = "create bucket-types used in integration tests"
user_options = [
('riak-admin=', None, 'path to the riak-admin script')
]
_props = {
'pytest-maps': {'datatype': 'map'},
'pytest-sets': {'datatype': 'set'},
'pytest-counters': {'datatype': 'counter'},
'pytest-consistent': {'consistent': True},
'pytest-mr': {},
'pytest': {'allow_mult': False}
}
def initialize_options(self):
self.riak_admin = None
def finalize_options(self):
if self.riak_admin is None:
raise DistutilsOptionError("riak-admin option not set")
def run(self):
if self._check_available():
for name in self._props:
self._create_and_activate_type(name, self._props[name])
def check_output(self, *args, **kwargs):
if self.dry_run:
log.info(' '.join(args))
return bytearray()
else:
return check_output(*args, **kwargs)
def _check_available(self):
try:
self.check_btype_command("list")
return True
except CalledProcessError:
log.error("Bucket types are not supported on this Riak node!")
return False
def _create_and_activate_type(self, name, props):
# Check status of bucket-type
exists = False
active = False
try:
status = self.check_btype_command('status', name)
except CalledProcessError as e:
status = e.output
exists = ('not an existing bucket type' not in status.decode('ascii'))
active = ('is active' in status.decode('ascii'))
if exists or active:
log.info("Updating {0} bucket-type with props {1}"
.format(repr(name), repr(props)))
self.check_btype_command("update", name,
json.dumps({'props': props},
separators=(',', ':')))
else:
log.info("Creating {0} bucket-type with props {1}"
.format(repr(name), repr(props)))
self.check_btype_command("create", name,
json.dumps({'props': props},
separators=(',', ':')))
if not active:
log.info('Activating {0} bucket-type'.format(repr(name)))
self.check_btype_command("activate", name)
def check_btype_command(self, *args):
cmd = self._btype_command(*args)
return self.check_output(cmd)
def run_btype_command(self, *args):
self.spawn(self._btype_command(*args))
def _btype_command(self, *args):
cmd = [self.riak_admin, "bucket-type"]
cmd.extend(args)
return cmd
class security_commands(object):
def check_security_command(self, *args):
cmd = self._security_command(*args)
return self.check_output(cmd)
def run_security_command(self, *args):
self.spawn(self._security_command(*args))
def _security_command(self, *args):
cmd = [self.riak_admin, "security"]
if isinstance(args, tuple):
for elem in args:
cmd.extend(elem)
else:
cmd.extend(args)
return cmd
def check_output(self, *args, **kwargs):
if self.dry_run:
log.info(' '.join(args))
return bytearray()
else:
return check_output(*args, **kwargs)
class setup_security(Command, security_commands):
"""
Sets up security for testing. By default this will create:
* User `testuser` with password `testpassword`
* User `certuser` with password `certpass`
* Two security sources
* Permissions on
* riak_kv.get
* riak_kv.put
* riak_kv.delete
* riak_kv.index
* riak_kv.list_keys
* riak_kv.list_buckets
* riak_kv.mapreduce
* riak_core.get_bucket
* riak_core.set_bucket
* riak_core.get_bucket_type
* riak_core.set_bucket_type
* search.admin
* search.query
"""
description = "create security settings used in integration tests"
user_options = [
('riak-admin=', None, 'path to the riak-admin script'),
('username=', None, 'test user account'),
('password=', None, 'password for test user account'),
('certuser=', None, 'certificate test user account'),
('certpass=', None, 'password for certificate test user account')
]
_commands = [
"add-user $USERNAME password=$PASSWORD",
"add-source $USERNAME 127.0.0.1/32 password",
"add-user $CERTUSER password=$CERTPASS",
"add-source $CERTUSER 127.0.0.1/32 certificate"
]
_grants = {
"riak_kv.get": ["any"],
"riak_kv.put": ["any"],
"riak_kv.delete": ["any"],
"riak_kv.index": ["any"],
"riak_kv.list_keys": ["any"],
"riak_kv.list_buckets": ["any"],
"riak_kv.mapreduce": ["any"],
"riak_core.get_bucket": ["any"],
"riak_core.set_bucket": ["any"],
"riak_core.get_bucket_type": ["any"],
"riak_core.set_bucket_type": ["any"],
"search.admi
|
laslabs/odoo-connector-carepoint
|
connector_carepoint/tests/test_carepoint_import_mapper.py
|
Python
|
agpl-3.0
| 1,081
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2015-2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo.addons.connector_carepoint.unit import mapper
from .common import SetUpCarepointBase
class TestCarepointImporterMapper(SetUpCarepointBase):
def setUp(self):
super(TestCarepointImporterMapper, self).setUp()
self.Importer = mapper.CarepointImportMapper
self.model = 'carepoint.carepoint.store'
self.mock_env = self.get_carepoint_helper(
self.model
)
self.importer = self.Importer(self.mock_env)
def test_backend_id(self):
""" It should map backend_id correctly """
res = self.importer.backend_id(True)
expect = {'backend_id': self.importer.backend_record.id}
self.assertDictEqual(expect, res)
def test_company_id(self):
""" It should map company_id correctly """
res = self.importer.company_id(True)
expect = {'company_id': self.importer.backend_record.company_id.id}
self.assertDictEqual(expect, res)
|
christianurich/VIBe2UrbanSim
|
3rdparty/opus/src/opus_gui/abstract_manager/models/xml_model.py
|
Python
|
gpl-2.0
| 21,520
| 0.002556
|
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from PyQt4.QtCore import Qt, QVariant, SIGNAL, QModelIndex, QAbstractItemModel
from PyQt4.QtCore import QString
from PyQt4.QtGui import QColor, QIcon, QStyle, QMessageBox
from PyQt4.Qt import qApp # For platform specific icons
from opus_gui.util.icon_library import IconLibrary
from opus_gui.abstract_manager.models.xml_item import XmlItem
from opus_gui.main.controllers.instance_handlers import update_mainwindow_savestate
# What node types we want checkboxes for
# _CHECKBOX_NODE_TYPES = ('selectable')
class XmlModel(QAbstractItemModel):
'''
A data model for a XML tree.
The model exposes a subset of the entire XML tree containing only
XML nodes that do not have the attribute "hidden" set to "True".
'''
def __init__(self, model_root_node, project = None, parent_widget = None):
'''
@param model_root_node (ElementTree.Element): Root node for this model
@param project (OpusProject): Loaded project file
@param parent_widget (QObject): Parent object for this ItemModel
'''
QAbstractItemModel.__init__(self, parent_widget)
# Root element
self._root_node = model_root_node
# Root for the subtree of visible items
self._root_item = XmlItem(self._root_node, None)
# Rebuild the (whole) tree of visible items
self.rebuild_tree()
# Optional reference to loaded project for inheritance handling.
self.project = project
# NOTE: when setting the dirty flag, make sure to use self.dirty rather than
# self.__dirty.
self.__dirty = False
# Column headers
self._headers = ['Name', 'Value']
# Index of the last inserted item
self.last_inserted_index = None
# use platform specific folder and file icons
self.folderIcon = QIcon()
self.fileIcon = QIcon()
std_icon = qApp.style().standardPixmap
self.fileIcon.addPixmap(std_icon(QStyle.SP_FileIcon))
self.folderIcon.addPixmap(std_icon(QStyle.SP_DirClosedIcon), QIcon.Normal, QIcon.Off)
def __is_dirty(self):
return self.__dirty
def __set_dirty(self, dirty):
self.__dirty = dirty
if self.project is not None:
self.project.dirty = True
dirty = property(__is_dirty, __set_dirty)
def columnCount(self, parent):
''' PyQt API Method -- See the PyQt documentation for a description '''
return len(self._headers)
def rebuild_tree(self):
''' Rebuilds the tree from the underlying XML structure '''
self._root_item.rebuild()
self.emit(SIGNAL('layoutChanged()'))
def rowCount(self, parent_index):
''' PyQt API Method -- See the PyQt documentation for a description '''
if not parent_index.isValid():
item = self._root_item
else:
item = parent_index.internalPointer()
return len(item.child_items)
def remove_node(self, node):
'''
Convenience method to remove a node without bothering with the internal model representation
@param node (Element): Node to remove.
'''
index = self.index_for_node(node)
row = index.row()
parent_index = self.parent(index)
self.removeRow(row, parent_index)
def removeRow(self, row, parent_index):
'''
Removes an object from the data model
@param row (int) row number to remove
@param parent_index (QModelIndex) index of parent element
'''
# Make sure we have a valid parent_index
if parent_index == QModelIndex():
parent_item = self._root_item
else:
parent_item = parent_index.internalPointer()
# Validate the row number
if row < 0 or row > len(parent_item.child_items):
return False
child_item = parent_item.child_item(row)
self.emit(SIGNAL("layoutAboutToBeChanged()"))
self.beginRemoveRows(parent_index, row, row)
# remove the child item from it's parent's list of children
child_item.parent_item.child_items.remove(child_item)
# handle inheritance if we are dealing with a project
reinserted_node = None
if self.project is None:
child_item.node.getparent().remove(child_item.node)
else:
reinserted_node = self.project.delete_node(child_item.node)
self.endRemoveRows()
self.emit(SIGNAL("layoutChanged()"))
if reinserted_node is not None:
self.insertRow(row, parent_index, reinserted_node, reinserting = True)
self.dirty = True
return True
def data(self, index, role):
''' PyQt API Method -- See the PyQt documentation for a description '''
if not index.isValid():
return QVariant()
node = index.internalPointer().node
# Foreground Coloring
if role == Qt.ForegroundRole:
if node.get('inherited'):
return QVariant(QColor(Qt.darkBlue))
return QVariant() # = default color
# Display
elif role == Qt.DisplayRole:
if index.column() == 0:
if node.get('type') == 'selectable':
return QVariant(node.get('return_value') or node.get('name') or node.tag)
return QVariant(node.get('name') or node.tag)
elif index.column() == 1:
if node.get('type') == "password":
return QVariant(QString("*********"))
# hide the text value for checkable nodes
elif node.tag == 'selectable' or node.get('type') == 'boolean':
return QVariant()
elif node.text:
return QVariant(node.text.strip())
return QVariant()
elif role == Qt.ToolTipRole:
if index.column() == 0 and self.project: # don't need to worry about inheritance when there is no project
if node.get('inherited'):
return QVariant('Inherited value from file: %s' % node.get('inherited'))
elif self.project.is_shadowing(node):
prototype_node = self.project.get_prototype_node(node)
return QVariant('Original value defined in file: %s' % prototype_node.get('inherited'))
else:
return QVariant('Value is defined in this file.')
# elif role == Qt.FontRole:
# if index.column() == 0:
# if node.tag == 'model':
# font = QFont()
# font.setPointSize(14)
# return QVariant(font)
# CK: Experimenting with making shadowing nodes bold to differentiate them from local nodes
# elif role == Qt.FontRole:
# f = QFont()
# if self.project is not None:
# f.setBold(self.project.is_shadowing(node))
# return QVariant(f)
# Icons
elif role == Qt.DecorationRole:
if index.column() == 0:
return QVariant(IconLibrary.icon_for_type(node.tag))
# Checkboxes
elif role == Qt.CheckStateRole and index.column() == 1:
if node.tag == 'selectable' or node.get('type') == 'boolean':
return QVariant(Qt.Checked if (node.text.strip() == 'True') else Qt.Unchecked)
# Unhandled index/role
return QVariant()
def index_for_item(self, item):
'''
Looks up a QModelIndex() for a given item.
@param item (XmlItem): item to find in the model
@return: The index (QModelIndex) for the given item.
'''
if item is self._root_item:
return QModelIndex()
parent_index = self.index_for_item(item.parent_item)
return self.index(item.row(), 0, parent_index)
def update_node(self, node):
'''
Refreshes the node by removing it and reinserting it.
'''
item = self.item_for_node(node)
if item is None:
|
elasticsales/quotequail
|
setup.py
|
Python
|
mit
| 1,101
| 0.000908
|
from setuptools import setup
setup(
name='quotequail',
version='0.2.3',
url='http://github.com/closeio/quotequail',
license='MIT',
author='Thomas Steinacher',
author_email='[email protected]',
maintainer='Thomas Steinacher',
maintainer_email='[email protected]',
description='A library that identifies quoted text in plain text and HTML email messages.',
long_description=__doc__,
packages=[
'quotequail',
],
test_suite='tests',
tests_require=['lxml'],
platforms='any',
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Communications :: Email',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
twistedretard/LaserSimulatedSecurityTurret
|
src/turret/camera.py
|
Python
|
mit
| 620
| 0.001613
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import time
class CameraClass(object):
'''
docstring for CameraClass
'''
def __init__(self):
super(CameraClass, self).__init__()
def visible_target(self):
'''
Returns true if target is visible
'''
return True
if __name__ == '__main__':
try:
from picamera import PiCamera
camera = PiCamera()
try:
camera.start_preview()
time.sleep(10)
camera.stop_preview()
finally:
camera.close()
except ImportError:
pass
|
natedileas/ImageRIT
|
Server/qt_main.py
|
Python
|
gpl-3.0
| 762
| 0.002625
|
import sys
import socket
from PyQt5.QtWidgets import QApplication
from qt_DisplayWindow import DisplayWindow
from Server import Server
def main(camID):
hostname = socket.gethostname()
ip_address = socket.gethostbyname_ex(hostname)[2][-1]
print(hostname, ip_address)
port = 12349
app = QApplication(sys.argv)
server = Server(ip_address, port)
# set up main display window
display = DisplayWindow(camID, server.get_state)
display.show()
# connect server -> display slots
server.selfie.connect(display.selfie)
server.email.connect(display.email)
server.status.connect(display.show_msg)
server.start()
ret = app.exec_()
server.join()
sys.exit(ret)
if __name__ == '__main__':
main(0)
|
nasfarley88/dicebeard
|
python/dicebeard/skb_roll/beardeddie.py
|
Python
|
unlicense
| 1,060
| 0
|
import os
from pathlib import Path
from PIL import Image
import pyconfig
import pydice
class ImageNotSupported(Exception):
pass
class BeardedDie:
def __init__(self, die):
self.die = die
# Time to strap our to_image to pydice's Die
if pyconfig.get('dicebeard.images_path'):
pydice.dice.Die.images_path = Path(
pyconfig.get('dicebeard.images_path'))
else:
pydice.dice.Die.images_path = Path(
os.path.dirname(__file__)) / 'images'
def __getattr__(self, attr):
return getattr(self.die, attr)
def to_image(self):
'''Emits a PIL.Image of the die if possible'''
die_image_path = (self.images_path /
'd{}'.format(self.faces.stop-1) /
'{}.png'.format(self.result))
try:
return Image.open(str(die_image_path))
except FileNotFoundError:
raise ImageNotSupported(
'{} is not currently supported.'.format(self.name))
|
pizzapanther/Church-Source
|
churchsource/people/migrations/0013_auto__add_field_group_auth.py
|
Python
|
gpl-3.0
| 6,849
| 0.008906
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Group.auth'
db.add_column('people_group', 'auth', self.gf('django.db.models.fields.BooleanField')(default=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'Group.auth'
db.delete_column('people_group', 'auth')
models = {
'people.address': {
'Meta': {'ordering': "('address1',)", 'object_name': 'Address'},
'address1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'address2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'atype': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'household': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Household']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notes': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '25'})
},
'people.group': {
'Meta': {'ordering': "('name',)", 'object_name': 'Group'},
'auth': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'desc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'gtype': ('django.db.models.fields.CharField', [], {'default': "'general'", 'max_length': '10'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'people.groupadmin': {
'Meta': {'ordering': "('group__name', 'person__lname', 'person__fname')", 'object_name': 'GroupAdmin'},
'can_send': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Person']"})
},
'people.household': {
'Meta': {'ordering': "('name',)", 'object_name': 'Household'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'anniversary': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'barcode': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'first_visit': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'image_temp': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.TempImage']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'})
},
'people.person': {
'Meta': {'ordering': "('lname', 'fname')", 'object_name': 'Person'},
'alerts': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'allergies': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'bdate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'ddate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'fname': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'gender': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['people.Group']", 'null': 'True', 'blank': 'True'}),
'household': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Household']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'image_temp': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.TempImage']", 'null': 'True', 'blank': 'True'}),
'lname': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'mname': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'})
},
'people.phone': {
'Meta': {'ordering': "('person__lname', 'person__fname', 'number')", 'object_name': 'Phone'},
'alerts': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.contrib.localflavor.us.models.PhoneNumberField', [], {'max_length': '20'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Person']"}),
'type1': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'}),
'type2': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'})
},
'people.tempimage': {
'Meta': {'ordering': "('-ts',)", 'object_name': 'TempImage'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
}
}
complete_apps = ['people']
|
anselmobd/fo2
|
src/base/queries/models.py
|
Python
|
mit
| 391
| 0
|
from pprint import pprint
from base.models import Colaborador
def get_create_colaborador_by_user(user):
try:
colab = Colaborador.objects.get(user__username=user.username)
except Colaborador.DoesNotExist:
colab = Colaborador(
user=user,
matricula=72000+user.id,
cpf=72000+user.id,
)
colab.save()
return colab
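# Hypothetical usage, assuming a saved django.contrib.auth User:
#   user = User.objects.get(username='jsmith')
#   colab = get_create_colaborador_by_user(user)
#   colab.matricula == 72000 + user.id  # holds when the row was just created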
|
LICEF/edx-platform
|
cms/djangoapps/contentstore/views/checklist.py
|
Python
|
agpl-3.0
| 6,004
| 0.002665
|
import json
import copy
from util.json_request import JsonResponse
from django.http import HttpResponseBadRequest
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_http_methods
from django_future.csrf import ensure_csrf_cookie
from edxmako.shortcuts import render_to_response
from django.http import HttpResponseNotFound
from django.core.exceptions import PermissionDenied
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import modulestore
from contentstore.utils import reverse_course_url
from .access import has_course_access
from xmodule.course_module import CourseDescriptor
from django.utils.translation import ugettext
__all__ = ['checklists_handler']
# pylint: disable=unused-argument
@require_http_methods(("GET", "POST", "PUT"))
@login_required
@ensure_csrf_cookie
def checklists_handler(request, course_key_string, checklist_index=None):
"""
The restful handler for checklists.
GET
html: return html page for all checklists
json: return json representing all checklists. checklist_index is not supported for GET at this time.
POST or PUT
json: updates the checked state for items within a particular checklist. checklist_index is required.
"""
course_key = CourseKey.from_string(course_key_string)
if not has_course_access(request.user, course_key):
raise PermissionDenied()
course_module = modulestore().get_course(course_key)
json_request = 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json')
if request.method == 'GET':
# If course was created before checklists were introduced, copy them over
# from the template.
if not course_module.checklists:
course_module.checklists = CourseDescriptor.checklists.default
modulestore().update_item(course_module, request.user.id)
expanded_checklists = expand_all_action_urls(course_module)
if json_request:
return JsonResponse(expanded_checklists)
else:
handler_url = reverse_course_url('checklists_handler', course_key)
return render_to_response('checklists.html',
{
'handler_url': handler_url,
# context_course is used by analytics
'context_course': course_module,
                                          'checklists': expanded_checklists
})
elif json_request:
# Can now assume POST or PUT because GET handled above.
if checklist_index is not None and 0 <= int(checklist_index) < len(course_module.checklists):
index = int(checklist_index)
persisted_checklist = course_module.checklists[index]
modified_checklist = json.loads(request.body)
# Only thing the user can modify is the "checked" state.
# We don't want to persist what comes back from the client because it will
# include the expanded action URLs (which are non-portable).
for item_index, item in enumerate(modified_checklist.get('items')):
persisted_checklist['items'][item_index]['is_checked'] = item['is_checked']
# seeming noop which triggers kvs to record that the metadata is
# not default
course_module.checklists = course_module.checklists
course_module.save()
modulestore().update_item(course_module, request.user.id)
expanded_checklist = expand_checklist_action_url(course_module, persisted_checklist)
return JsonResponse(localize_checklist_text(expanded_checklist))
else:
return HttpResponseBadRequest(
("Could not save checklist state because the checklist index "
"was out of range or unspecified."),
content_type="text/plain"
)
else:
return HttpResponseNotFound()
def expand_all_action_urls(course_module):
"""
Gets the checklists out of the course module and expands their action urls.
Returns a copy of the checklists with modified urls, without modifying the persisted version
of the checklists.
"""
expanded_checklists = []
for checklist in course_module.checklists:
expanded_checklists.append(localize_checklist_text(expand_checklist_action_url(course_module, checklist)))
return expanded_checklists
def expand_checklist_action_url(course_module, checklist):
"""
Expands the action URLs for a given checklist and returns the modified version.
The method does a copy of the input checklist and does not modify the input argument.
"""
expanded_checklist = copy.deepcopy(checklist)
urlconf_map = {
"ManageUsers": "course_team_handler",
"CourseOutline": "course_handler",
"SettingsDetails": "settings_handler",
"SettingsGrading": "grading_handler",
}
for item in expanded_checklist.get('items'):
action_url = item.get('action_url')
if action_url in urlconf_map:
item['action_url'] = reverse_course_url(urlconf_map[action_url], course_module.id)
return expanded_checklist
def localize_checklist_text(checklist):
"""
Localize texts for a given checklist and returns the modified version.
The method does an in-place operation so the input checklist is modified directly.
"""
# Localize checklist name
checklist['short_description'] = ugettext(checklist['short_description'])
# Localize checklist items
for item in checklist.get('items'):
item['short_description'] = ugettext(item['short_description'])
item['long_description'] = ugettext(item['long_description'])
item['action_text'] = ugettext(item['action_text']) if item['action_text'] != "" else u""
return checklist
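# Request sketch (paths illustrative, per the handler's docstring above):
#   GET  .../checklists/<course_key>     -> HTML page or JSON list of checklists
#   POST .../checklists/<course_key>/0   -> JSON body updating only the
#                                           "is_checked" flags of checklist 0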
|
chocjy/randomized-quantile-regression-solvers
|
hadoop/src/gen_id.py
|
Python
|
apache-2.0
| 97
| 0.010309
|
import numpy as np
import sys
R = np.eye(int(sys.argv[2]))
np.savetxt(sys.argv[1]+'/R.txt', R)
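# Invocation sketch: writes an N x N identity matrix to <outdir>/R.txt, e.g.
#   python gen_id.py /tmp 4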
|
TuSimple/simpledet
|
config/dcn/faster_dcnv2_r50v1bc4_c5_512roi_1x.py
|
Python
|
apache-2.0
| 7,639
| 0.004451
|
from symbol.builder import FasterRcnn as Detector
from models.dcn.builder import DCNResNetC4 as Backbone
from symbol.builder import Neck
from symbol.builder import RpnHead
from symbol.builder import RoiAlign as RoiExtractor
from symbol.builder import BboxC5V1Head as BboxHead
from mxnext.complicate import normalizer_factory
def get_config(is_train):
class General:
log_frequency = 10
name = __name__.rsplit("/")[-1].rsplit(".")[-1]
batch_image = 2 if is_train else 1
fp16 = False
class KvstoreParam:
kvstore = "local"
batch_image = General.batch_image
gpus = [0, 1, 2, 3, 4, 5, 6, 7]
fp16 = General.fp16
class NormalizeParam:
# normalizer = normalizer_factory(type="syncbn", ndev=len(KvstoreParam.gpus))
        normalizer = normalizer_factory(type="fixbn")
class BackboneParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
depth = 50
num_c3_block = 4
num_c4_block = 6
class NeckParam:
fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
class RpnParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
batch_image = General.batch_image
class anchor_generate:
scale = (2, 4, 8, 16, 32)
ratio = (0.5, 1.0, 2.0)
stride = 16
image_anchor = 256
class head:
conv_channel = 512
mean = (0, 0, 0, 0)
std = (1, 1, 1, 1)
class proposal:
pre_nms_top_n = 12000 if is_train else 6000
post_nms_top_n = 2000 if is_train else 300
nms_thr = 0.7
min_bbox_side = 0
class subsample_proposal:
proposal_wo_gt = False
image_roi = 512
fg_fraction = 0.25
fg_thr = 0.5
bg_thr_hi = 0.5
bg_thr_lo = 0.0
class bbox_target:
num_reg_class = 2
class_agnostic = True
weight = (1.0, 1.0, 1.0, 1.0)
mean = (0.0, 0.0, 0.0, 0.0)
std = (0.1, 0.1, 0.2, 0.2)
class BboxParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
num_class = 1 + 80
image_roi = 512
batch_image = General.batch_image
class regress_target:
class_agnostic = True
mean = (0.0, 0.0, 0.0, 0.0)
std = (0.1, 0.1, 0.2, 0.2)
class RoiParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
out_size = 7
stride = 16
class DatasetParam:
if is_train:
image_set = ("coco_train2017", )
else:
image_set = ("coco_val2017", )
backbone = Backbone(BackboneParam)
neck = Neck(NeckParam)
rpn_head = RpnHead(RpnParam)
roi_extractor = RoiExtractor(RoiParam)
bbox_head = BboxHead(BboxParam)
detector = Detector()
if is_train:
train_sym = detector.get_train_symbol(backbone, neck, rpn_head, roi_extractor, bbox_head)
rpn_test_sym = None
test_sym = None
else:
train_sym = None
rpn_test_sym = detector.get_rpn_test_symbol(backbone, neck, rpn_head)
test_sym = detector.get_test_symbol(backbone, neck, rpn_head, roi_extractor, bbox_head)
class ModelParam:
train_symbol = train_sym
test_symbol = test_sym
rpn_test_symbol = rpn_test_sym
from_scratch = False
random = True
memonger = False
memonger_until = "stage3_unit21_plus"
class pretrain:
prefix = "pretrain_model/resnet%s_v1b" % BackboneParam.depth
epoch = 0
fixed_param = ["conv0", "stage1", "gamma", "beta"]
class OptimizeParam:
class optimizer:
type = "sgd"
lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image
momentum = 0.9
wd = 0.0001
clip_gradient = 35
class schedule:
begin_epoch = 0
end_epoch = 6
lr_iter = [60000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image),
80000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image)]
class warmup:
type = "gradual"
lr = 0.0
iter = 1000
class TestParam:
min_det_score = 0.05
max_det_per_image = 100
process_roidb = lambda x: x
process_output = lambda x, y: x
class model:
prefix = "experiments/{}/checkpoint".format(General.name)
epoch = OptimizeParam.schedule.end_epoch
class nms:
type = "nms"
thr = 0.5
class coco:
annotation = "data/coco/annotations/instances_minival2014.json"
# data processing
class NormParam:
mean = tuple(i * 255 for i in (0.485, 0.456, 0.406)) # RGB order
std = tuple(i * 255 for i in (0.229, 0.224, 0.225))
class ResizeParam:
short = 800
long = 1200 if is_train else 2000
class PadParam:
short = 800
long = 1200
max_num_gt = 100
class AnchorTarget2DParam:
class generate:
short = 800 // 16
long = 1200 // 16
stride = 16
scales = (2, 4, 8, 16, 32)
aspects = (0.5, 1.0, 2.0)
class assign:
allowed_border = 0
pos_thr = 0.7
neg_thr = 0.3
min_pos_thr = 0.0
class sample:
image_anchor = 256
pos_fraction = 0.5
class RenameParam:
mapping = dict(image="data")
from core.detection_input import ReadRoiRecord, Resize2DImageBbox, \
ConvertImageFromHwcToChw, Flip2DImageBbox, Pad2DImageBbox, \
RenameRecord, AnchorTarget2D, Norm2DImage
if is_train:
transform = [
ReadRoiRecord(None),
Norm2DImage(NormParam),
Resize2DImageBbox(ResizeParam),
Flip2DImageBbox(),
Pad2DImageBbox(PadParam),
ConvertImageFromHwcToChw(),
AnchorTarget2D(AnchorTarget2DParam),
RenameRecord(RenameParam.mapping)
]
data_name = ["data", "im_info", "gt_bbox"]
label_name = ["rpn_cls_label", "rpn_reg_target", "rpn_reg_weight"]
else:
transform = [
ReadRoiRecord(None),
Norm2DImage(NormParam),
Resize2DImageBbox(ResizeParam),
ConvertImageFromHwcToChw(),
RenameRecord(RenameParam.mapping)
]
data_name = ["data", "im_info", "im_id", "rec_id"]
label_name = []
import core.detection_metric as metric
rpn_acc_metric = metric.AccWithIgnore(
"RpnAcc",
["rpn_cls_loss_output"],
["rpn_cls_label"]
)
rpn_l1_metric = metric.L1(
"RpnL1",
["rpn_reg_loss_output"],
["rpn_cls_label"]
)
# for bbox, the label is generated in network so it is an output
box_acc_metric = metric.AccWithIgnore(
"RcnnAcc",
["bbox_cls_loss_output", "bbox_label_blockgrad_output"],
[]
)
box_l1_metric = metric.L1(
"RcnnL1",
["bbox_reg_loss_output", "bbox_label_blockgrad_output"],
[]
)
metric_list = [rpn_acc_metric, rpn_l1_metric, box_acc_metric, box_l1_metric]
return General, KvstoreParam, RpnParam, RoiParam, BboxParam, DatasetParam, \
ModelParam, OptimizeParam, TestParam, \
transform, data_name, label_name, metric_list
|
fermat618/pida
|
pida/ui/views.py
|
Python
|
gpl-2.0
| 1,810
| 0.001105
|
# -*- coding: utf-8 -*-
"""
:copyright: 2005-2008 by The PIDA Project
:license: GPL 2 or later (see README/COPYING/LICENSE)
"""
import gtk
from pygtkhelpers.delegates import SlaveView
# locale
from pida.core.locale import Locale
locale = Locale('pida')
_ = locale.gettext
class PidaView(SlaveView):
# Set this to make your views memorable.
key = None
icon_name = gtk.STOCK_INFO
label_text = _('Pida View')
pane = None
def create_ui(self):
"""Create the user interface here"""
def create_tab_label_icon(self):
return gtk.image_new_from_stock(self.icon_name, gtk.ICON_SIZE_MENU)
def get_parent_window(self):
return self.toplevel.get_parent_window()
parent_window = property(get_parent_window)
def on_remove_attempt(self, pane):
return not self.can_be_closed()
def can_be_closed(self):
        return False
gladefile = None
def __init__(self, service, title=None, icon=None, *args, **kw):
if not self.builder_file:
self.builder_file = self.gladefile
self.svc = service
        self.label_text = title or self.label_text
self.icon_name = icon or self.icon_name
if self.key:
pass
#self.toplevel.set_name(self.key.replace(".", "_"))
super(PidaView, self).__init__()
def get_toplevel(self):
return self.widget
toplevel = property(get_toplevel)
def add_main_widget(self, widget, *args, **kw):
self.widget.pack_start(widget, *args, **kw)
class WindowConfig(object):
"""
WindowConfig objects are used to register
a window in the windows service so they
can get proper shortcuts
"""
key = None
label_text = ""
description = ""
default_shortcut = ""
action = None
|
DailyActie/Surrogate-Model
|
01-codes/tensorflow-master/tensorflow/python/client/notebook.py
|
Python
|
mit
| 4,766
| 0.002098
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Notebook front-end to TensorFlow.
When you run this binary, you'll see something like below, which indicates
the serving URL of the notebook:
The IPython Notebook is running at: http://127.0.0.1:8888/
Press "Shift+Enter" to execute a cell
Press "Enter" on a cell to go into edit mode.
Press "Escape" to go back into command mode and use arrow keys to navigate.
Press "a" in command mode to insert cell above or "b" to insert cell below.
Your root notebooks directory is FLAGS.notebook_dir
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import socket
import sys
# pylint: disable=g-import-not-at-top
# Official recommended way of turning on fast protocol buffers as of 10/21/14
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "cpp"
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION"] = "2"
from tensorflow.python.platform import app
from tensorflow.python.platform import flags
FLAGS = flags.FLAGS
flags.DEFINE_string(
"password", None,
"Password to require. If set, the server will allow public access."
" Only used if notebook config file does not exist.")
flags.DEFINE_string("notebook_dir", "experimental/brain/notebooks",
"root location where to store notebooks")
ORIG_ARGV = sys.argv
# Main notebook process calls itself with argv[1]="kernel" to start kernel
# subprocesses.
IS_KERNEL = len(sys.argv) > 1 and sys.argv[1] == "kernel"
def main(unused_argv):
sys.argv = ORIG_ARGV
if not IS_KERNEL:
# Drop all flags.
sys.argv = [sys.argv[0]]
# NOTE(sadovsky): For some reason, putting this import at the top level
# breaks inline plotting. It's probably a bug in the stone-age version of
# matplotlib.
        from IPython.html.notebookapp import NotebookApp  # pylint: disable=g-import-not-at-top
notebookapp = NotebookApp.instance()
notebookapp.open_browser = True
# password functionality adopted from quality/ranklab/main/tools/notebook.py
# add options to run with "password"
if FLAGS.password:
from IPython.lib import passwd # pylint: disable=g-import-not-at-top
notebookapp.ip = "0.0.0.0"
notebookapp.password = passwd(FLAGS.password)
else:
print("\nNo password specified; Notebook server will only be available"
" on the local machine.\n")
notebookapp.initialize(argv=["--notebook-dir", FLAGS.notebook_dir])
if notebookapp.ip == "0.0.0.0":
proto = "https" if notebookapp.certfile else "http"
url = "%s://%s:%d%s" % (proto, socket.gethostname(), notebookapp.port,
notebookapp.base_project_url)
print("\nNotebook server will be publicly available at: %s\n" % url)
notebookapp.start()
return
# Drop the --flagfile flag so that notebook doesn't complain about an
# "unrecognized alias" when parsing sys.argv.
sys.argv = ([sys.argv[0]] +
[z for z in sys.argv[1:] if not z.startswith("--flagfile")])
from IPython.kernel.zmq.kernelapp import IPKernelApp # pylint: disable=g-import-not-at-top
kernelapp = IPKernelApp.instance()
kernelapp.initialize()
# Enable inline plotting. Equivalent to running "%matplotlib inline".
ipshell = kernelapp.shell
ipshell.enable_matplotlib("inline")
kernelapp.start()
if __name__ == "__main__":
# When the user starts the main notebook process, we don't touch sys.argv.
# When the main process launches kernel subprocesses, it writes all flags
# to a tmpfile and sets --flagfile to that tmpfile, so for kernel
# subprocesses here we drop all flags *except* --flagfile, then call
# app.run(), and then (in main) restore all flags before starting the
# kernel app.
if IS_KERNEL:
# Drop everything except --flagfile.
sys.argv = ([sys.argv[0]] +
[x for x in sys.argv[1:] if x.startswith("--flagfile")])
app.run()
|
iulian787/spack
|
var/spack/repos/builtin/packages/r-evd/package.py
|
Python
|
lgpl-2.1
| 597
| 0.00335
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class REvd(RPackage):
"""evd: Functions for Extreme Value Distributions"""
homepage = "https://cloud.r-project.org/package=evd"
url = "https://cloud.r-project.org/src/contrib/evd_2.3-3.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/evd"
version('2.3-3', sha256='2fc5ef2e0c3a2a9392425ddd45914445497433d90fb80b8c363877baee4559b4')
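# With this recipe on a Spack repo path, the package builds via the usual CLI:
#   spack install r-evd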
|
demis001/scikit-bio
|
skbio/util/tests/test_decorator.py
|
Python
|
bsd-3-clause
| 9,694
| 0.000103
|
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
import unittest
import inspect
import warnings
from skbio.util._decorator import classproperty, overrides
from skbio.util._decorator import (stable, experimental, deprecated,
_state_decorator)
from skbio.util._exception import OverrideError
class TestOverrides(unittest.TestCase):
def test_raises_when_missing(self):
class A(object):
pass
with self.assertRaises(OverrideError):
class B(A):
@overrides(A)
def test(self):
pass
def test_doc_inherited(self):
class A(object):
def test(self):
"""Docstring"""
pass
class B(A):
@overrides(A)
def test(self):
pass
self.assertEqual(B.test.__doc__, "Docstring")
def test_doc_not_inherited(self):
class A(object):
def test(self):
"""Docstring"""
pass
class B(A):
@overrides(A)
def test(self):
"""Different"""
pass
self.assertEqual(B.test.__doc__, "Different")
class TestClassProperty(unittest.TestCase):
def test_getter_only(self):
class Foo(object):
_foo = 42
@classproperty
def foo(cls):
return cls._foo
# class-level getter
self.assertEqual(Foo.foo, 42)
# instance-level getter
f = Foo()
self.assertEqual(f.foo, 42)
with self.assertRaises(AttributeError):
f.foo = 4242
class TestStabilityState(unittest.TestCase):
# the indentation spacing gets weird, so I'm defining the
# input doc string explicitly and adding it after function
    # definition
_test_docstring = (" Add 42, or something else, to x.\n"
"\n"
" Parameters\n"
" ----------\n"
" x : int, x\n"
" y : int, optional\n")
class TestBase(TestStabilityState):
def test_get_indentation_level(self):
c = _state_decorator()
self.assertEqual(c._get_indentation_level([]), 0)
self.assertEqual(
c._get_indentation_level([], default_no_existing_docstring=3), 3)
self.assertEqual(c._get_indentation_level([""]), 4)
self.assertEqual(
c._get_indentation_level([""], default_existing_docstring=3), 3)
in_ = (["summary"])
self.assertEqual(c._get_indentation_level(in_), 4)
in_ = (["summary", "", "", " ", "", " ", ""])
self.assertEqual(c._get_indentation_level(in_), 4)
in_ = (["summary", " More indentation", " Less indentation"])
self.assertEqual(c._get_indentation_level(in_), 5)
def test_update_docstring(self):
c = _state_decorator()
in_ = None
exp = ("""State: Test!!""")
self.assertEqual(c._update_docstring(in_, "Test!!"), exp)
in_ = """"""
exp = ("""\n\n State: Test!!""")
self.assertEqual(c._update_docstring(in_, "Test!!"), exp)
in_ = ("""Short summary\n\n Parameters\n\n----------\n """
"""x : int\n""")
exp = ("""Short summary\n\n State: Test!!\n\n"""
""" Parameters\n\n----------\n x : int\n""")
self.assertEqual(c._update_docstring(in_, "Test!!"), exp)
in_ = ("""Short summary\n\n Parameters\n\n----------\n """
"""x : int\n""")
exp = ("""Short summary\n\n State: Test!!\n\n"""
""" Parameters\n\n----------\n x : int\n""")
self.assertEqual(c._update_docstring(in_, "Test!!"), exp)
in_ = ("""Short summary\n\n Parameters\n\n----------\n """
"""x : int\n""")
exp = ("""Short summary\n\n State: Test!!Test!!Test!!Test!!Test!!"""
"""Test!!Test!!Test!!Test!!Test!!Test!!Te\n st!!T"""
"""est!!Test!!Test!!Test!!Test!!Test!!Test!!Test!!\n\n"""
""" Parameters\n\n----------\n x : int\n""")
self.assertEqual(c._update_docstring(in_, "Test!!"*20), exp)
class TestStable(TestStabilityState):
def _get_f(self, as_of):
def f(x, y=42):
return x + y
f.__doc__ = self._test_docstring
f = stable(as_of=as_of)(f)
return f
def test_function_output(self):
f = self._get_f('0.1.0')
self.assertEqual(f(1), 43)
def test_function_docstring(self):
f = self._get_f('0.1.0')
e1 = (" Add 42, or something else, to x.\n\n"
" State: Stable as of 0.1.0.\n\n"
" Parameters")
self.assertTrue(f.__doc__.startswith(e1))
f = self._get_f('0.1.1')
e1 = (" Add 42, or something else, to x.\n\n"
" State: Stable as of 0.1.1.\n\n"
" Parameters")
self.assertTrue(f.__doc__.startswith(e1))
def test_function_signature(self):
f = self._get_f('0.1.0')
expected = inspect.ArgSpec(
args=['x', 'y'], varargs=None, keywords=None, defaults=(42,))
        self.assertEqual(inspect.getargspec(f), expected)
self.assertEqual(f.__name__, 'f')
def test_missing_kwarg(self):
self.assertRaises(ValueError, stable)
self.assertRaises(ValueError, stable, '0.1.0')
class TestExperimental(TestStabilityState):
def _get_f(self, as_of):
def f(x, y=42):
return x + y
f.__doc__ = self._test_docstring
f = experimental(as_of=as_of)(f)
return f
def test_function_output(self):
f = self._get_f('0.1.0')
self.assertEqual(f(1), 43)
def test_function_docstring(self):
f = self._get_f('0.1.0')
e1 = (" Add 42, or something else, to x.\n\n"
" State: Experimental as of 0.1.0.\n\n"
" Parameters")
self.assertTrue(f.__doc__.startswith(e1))
f = self._get_f('0.1.1')
e1 = (" Add 42, or something else, to x.\n\n"
" State: Experimental as of 0.1.1.\n\n"
" Parameters")
self.assertTrue(f.__doc__.startswith(e1))
def test_function_signature(self):
f = self._get_f('0.1.0')
expected = inspect.ArgSpec(
args=['x', 'y'], varargs=None, keywords=None, defaults=(42,))
self.assertEqual(inspect.getargspec(f), expected)
self.assertEqual(f.__name__, 'f')
def test_missing_kwarg(self):
self.assertRaises(ValueError, experimental)
self.assertRaises(ValueError, experimental, '0.1.0')
class TestDeprecated(TestStabilityState):
def _get_f(self, as_of, until, reason):
def f(x, y=42):
return x + y
f.__doc__ = self._test_docstring
f = deprecated(as_of=as_of, until=until, reason=reason)(f)
return f
def test_function_output(self):
f = self._get_f('0.1.0', until='0.1.4',
reason='You should now use skbio.g().')
self.assertEqual(f(1), 43)
def test_deprecation_warning(self):
f = self._get_f('0.1.0', until='0.1.4',
reason='You should now use skbio.g().')
# adapted from SO example here: http://stackoverflow.com/a/3892301
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
f(1)
self.assertTrue(issubclass(w[0].category, DeprecationWarning))
expected_str = "is deprecated as of scikit-bio version 0.1.0"
self.assertTrue(expected_str in str(w[0].message))
def test_function_docstring(self)
|
ProReNata/django-castle
|
setup.py
|
Python
|
bsd-3-clause
| 1,596
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import django_castle
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
version = django_castle.__version__
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
os.system('python setup.py bdist_wheel upload')
sys.exit()
if sys.argv[-1] == 'tag':
print("Tagging the version on github:")
os.system("git tag -a %s -m 'version %s'" % (version, version))
os.system("git push --tags")
sys.exit()
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
setup(
name='django-castle',
version=version,
description="""A django integration for the castle.io service""",
long_description=readme + '\n\n' + history,
author='Jens Alm',
author_email='[email protected]',
url='https://github.com/ulmus/django-castle',
packages=[
'django_castle',
],
    include_package_data=True,
install_requires=[
],
license="BSD",
zip_safe=False,
keywords='django-castle',
classifiers=[
        'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
redpawfx/massiveImporter
|
python/ns/bridge/io/WReader.py
|
Python
|
mit
| 2,920
| 0.037329
|
# The MIT License
#
# Copyright (c) 2008 James Piechota
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
import os.path
# Node definition
class WReader:
def __init__(self):
self._fullName = ""
self._path = ""
self._maxInfluences = 0
self.deformers = []
self.weights = []
def name(self):
return self._fullName
def read( self, fullName ):
'''Load skin weights from a Massive .w (weights) file'''
try:
if not os.path.isfile(fullName):
return
self._fullName = fullName
self._path = os.path.dirname( fullName )
fileHandle = open(self._fullName, "r")
deformers = []
tokens = []
weights = []
maxInfluences = 0
for line in fileHandle:
tokens = line.strip().split()
if tokens:
if tokens[0][0] == "#":
# Comment
continue
elif tokens[0] == "deformer":
id = int(tokens[1])
numDeformers = len(self.deformers)
if id >= numDeformers:
self.deformers.extend([ "" ] * (id - numDeformers + 1))
self.deformers[id] = tokens[2]
else:
# TODO: see if storing 0s for joints that have
# no influence is a problem. Storing the influences
# sparsely may make applying the weights later more
# complex
#
numTokens = len(tokens)
vtx = int(tokens[0][:-1])
influences = [0] * len(self.deformers)
count = 0
for i in range(1, numTokens, 2):
influences[int(tokens[i])] = float(tokens[i+1])
count += 1
# keep track of the maximum number of influences on a
# given vertex so we can use it to optimize the skin
# deformers later
#
if count > self._maxInfluences:
self._maxInfluences = count
self.weights.append(influences)
fileHandle.close()
except:
print >> sys.stderr, "Error reading Weights file: %s" % self._fullName
raise
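# Hypothetical usage sketch (the .w path is illustrative):
#   reader = WReader()
#   reader.read('/path/to/agent.w')
#   reader.deformers    # deformer names indexed by id
#   reader.weights      # one per-vertex influence list, aligned with deformers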
|
seakers/daphne_brain
|
daphne_context/migrations/0011_auto_20201109_1100.py
|
Python
|
mit
| 1,011
| 0.002967
|
# Generated by Django 2.2.11 on 2020-11-09 17:00
import daphne_context.utils
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('daphne_context', '0010_userinformation_mycroft_connection'),
]
operations = [
migrations.RemoveField(
model_name='userinformation',
name='mycroft_session',
),
migrations.CreateModel(
name='MycroftUser',
fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mycroft_session', models.CharField(default=daphne_context.utils.generate_mycroft_session, max_length=9)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
emory-libraries/findingaids
|
findingaids/fa/migrations/0001_initial.py
|
Python
|
apache-2.0
| 1,937
| 0.004646
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Archive',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('label', models.CharField(help_text=b'Short label to identify an archive', max_length=10)),
('name', models.CharField(help_text=b'repository name (subarea) in EAD to identify finding aids associated with this archive', max_length=255)),
('svn', models.URLField(help_text=b'URL to subversion repository containing EAD for this archive', verbose_name=b'Subversion Repository')),
                ('slug', models.SlugField(help_text=b'shorthand id\n (auto-generated from label; do not modify after initial archive definition)')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Deleted',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('eadid', models.CharField(unique=True, max_length=50, verbose_name=b'EAD Identifier')),
('title', models.CharField(max_length=200)),
('date', models.DateTimeField(auto_now_add=True, verbose_name=b'Date removed')),
('note', models.CharField(help_text=b'Optional: Enter the reason this document is being deleted. These comments will be displayed to anyone who had the finding aid bookmarked and returns after it is gone.', max_length=400, blank=True)),
],
options={
'verbose_name': 'Deleted Record',
},
bases=(models.Model,),
),
]
|
SiLab-Bonn/pyBAR
|
pybar/scans/scan_crosstalk.py
|
Python
|
bsd-3-clause
| 6,325
| 0.004111
|
import logging
import inspect
import numpy as np
from pybar.analysis.analyze_raw_data import AnalyzeRawData
from pybar.fei4.register_utils import invert_pixel_mask, make_xtalk_mask, make_pixel_mask
from pybar.fei4_run_base import Fei4RunBase
from pybar.fei4.register_utils import scan_loop
from pybar.run_manager import RunManager
from pybar.analysis.plotting.plotting import plot_occupancy
class CrosstalkScan(Fei4RunBase):
'''Crosstalk Scan
Implementation of a crosstalk scan. Injection in long edge pixels (row - 1, row + 1).
    Crosstalk exists when a threshold higher than 0 can be measured (s-curve fit successful).
'''
_default_run_conf = {
"broadcast_commands": True,
"threaded_scan": False,
"mask_steps": 6, # number of injections per PlsrDAC step
"n_injections": 100, # number of injections per PlsrDAC step
"scan_parameters": [('PlsrDAC', [None, 800])], # the PlsrDAC range
"step_size": 10, # step size of the PlsrDAC during scan
"use_enable_mask": False, # if True, use Enable mask during scan, if False, all pixels will be enabled
"enable_shift_masks": ["Enable"], # enable masks shifted during scan
"disable_shift_masks": [], # disable masks shifted during scan
"xtalk_shift_mask": ["C_High", "C_Low"], # crosstalk mask derived from enable_shift_masks
"pulser_dac_correction": False # PlsrDAC correction for each double column
}
def configure(self):
commands = []
commands.extend(self.register.get_commands("ConfMode"))
# C_Low
if "C_Low".lower() in map(lambda x: x.lower(), self.enable_shift_masks):
self.register.set_pixel_register_value('C_Low', 1)
commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='C_Low'))
else:
self.register.set_pixel_register_value('C_Low', 0)
commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='C_Low'))
# C_High
if "C_High".lower() in map(lambda x: x.lower(), self.enable_shift_masks):
self.register.set_pixel_register_value('C_High', 1)
commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='C_High'))
else:
self.register.set_pixel_register_value('C_High', 0)
commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='C_High'))
commands.extend(self.register.get_commands("RunMode"))
self.register_utils.send_commands(commands)
def scan(self):
scan_parameter_range = [0, (2 ** self.register.global_registers['PlsrDAC']['bitlength'])]
if self.scan_parameters.PlsrDAC[0]:
scan_parameter_range[0] = self.scan_parameters.PlsrDAC[0]
if self.scan_parameters.PlsrDAC[1]:
scan_parameter_range[1] = self.scan_parameters.PlsrDAC[1]
scan_parameter_range = range(scan_parameter_range[0], scan_parameter_range[1] + 1, self.step_size)
logging.info("Scanning %s from %d to %d", 'PlsrDAC', scan_parameter_range[0], scan_parameter_range[-1])
def set_xtalk_mask():
frame = inspect.currentframe()
if frame.f_back.f_locals['index'] == 0:
mask = make_pixel_mask(steps=self.mask_steps, shift=frame.f_back.f_locals['mask_step'])
mask = make_xtalk_mask(mask)
map(lambda mask_name: self.register.set_pixel_register_value(mask_name, mask), self.disable_shift_masks)
commands = []
commands.append(self.register.get_commands("ConfMode")[0])
commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name=self.xtalk_shift_mask, joint_write=True))
commands.append(self.register.get_commands("RunMode")[0])
self.register_utils.send_commands(commands, concatenate=True)
for scan_parameter_value in scan_parameter_range:
if self.stop_run.is_set():
break
commands = []
commands.extend(self.register.get_commands("ConfMode"))
self.register.set_global_register_value('PlsrDAC', scan_parameter_value)
commands.extend(self.register.get_commands("WrRegister", name=['PlsrDAC']))
self.register_utils.send_commands(commands)
with self.readout(PlsrDAC=scan_parameter_value):
cal_lvl1_command = self.register.get_commands("CAL")[0] + self.register.get_commands("zeros", length=40)[0] + self.register.get_commands("LV1")[0]
scan_loop(self, cal_lvl1_command, repeat_command=self.n_injections, use_delay=True, mask_steps=self.mask_steps, enable_mask_steps=None, enable_double_columns=None, same_mask_for_all_dc=False, fast_dc_loop=False, bol_function=set_xtalk_mask, eol_function=None, digital_injection=False, enable_shift_masks=self.enable_shift_masks, disable_shift_masks=self.disable_shift_masks, restore_shift_masks=False, mask=invert_pixel_mask(self.register.get_pixel_register_value('Enable')) if self.use_enable_mask else None, double_column_correction=self.pulser_dac_correction)
def analyze(self):
with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
analyze_raw_data.create_tot_hist = False
analyze_raw_data.create_fitted_threshold_hists = True
analyze_raw_data.create_threshold_mask = True
analyze_raw_data.n_injections = 100
analyze_raw_data.interpreter.set_warning_output(False) # so far the data structure in a threshold scan was always bad, too many warnings given
analyze_raw_data.interpret_word_table()
analyze_raw_data.interpreter.print_summary()
analyze_raw_data.plot_histograms()
thr_hist = analyze_raw_data.out_file_h5.root.HistThresholdFitted[:, :].T
xtalk_mask = np.zeros(shape=thr_hist.shape, dtype=np.dtype('>u1'))
xtalk_mask[thr_hist > 0.0] = 1
plot_occupancy(xtalk_mask.T, title='Crosstalk', z_max=1, filename=analyze_raw_data.output_pdf)
if __name__ == "__main__":
with RunManager('configuration.yaml') as runmngr:
runmngr.run_run(CrosstalkScan)
|
alexisbellido/programming-in-python
|
parse_file.py
|
Python
|
bsd-3-clause
| 1,192
| 0.00755
|
#!/usr/bin/env python
"""
Parse a file and write output to another.
"""
from optparse import OptionParser
import re
from collections import OrderedDict
parser = OptionParser()
parser.add_option("-i", "--input", dest="input_filepath", help="input filepath")
parser.add_option("-o", "--output", dest="output_filepath", help="output filepath")
(options, args) = parser.parse_args()
#print options
#print args
input_filepath = options.input_filepath
output_filepath = options.output_filepath
lines = {}
pattern_key = re.compile(r'ednKey="(.*?)"')
pattern_value = re.compile(r'ednvalue="(.*?)"')
with open(input_filepath, 'r') as input_file:
for line in input_file:
line = line.strip()
key = pattern_key.search(line)
value = pattern_value.search(line)
if (key and value):
lines[key.group(1)] = value.group(1)
ordered_lines = OrderedDict(sorted(lines.items(), key = lambda t: int(t[0])))
with open(output_filepath, 'w') as output_file:
for line in ordered_lines.items():
#output_file.write('%s,%s\n' % (line[0], line[1]))
output_file.write("{0} => __( '{1}', 'ev' ),\n".format(line[0], line[1]))
print "Completed"
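# Illustrative check of the two patterns against a made-up input line:
#   line = '<item ednKey="3" ednvalue="Hello" />'
#   pattern_key.search(line).group(1)    -> '3'
#   pattern_value.search(line).group(1)  -> 'Hello'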
|
selvagit/experiments
|
nptel/nptel_programming_data_structure/week_1/q3.py
|
Python
|
gpl-3.0
| 97
| 0.082474
|
def f(m,n):
    ans = 1
    while (m - n >= 0):
        (ans,m) = (ans*2,m-n)
    return(ans)
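# Quick sanity check: ans doubles once per full subtraction of n, so for
# non-negative m and positive n this computes 2 ** (m // n), e.g.:
#   f(10, 3) == 8   # three subtractions -> 2 ** 3
#   f(3, 3) == 2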
|
bitcraft/pyglet
|
tests/interactive/window/event_resize.py
|
Python
|
bsd-3-clause
| 801
| 0
|
"""Test that resize event works correctly.
Expected behaviour:
One window will be opened. Resize the window and ensure that the
dimensions printed to the terminal are correct. You should see
a green border inside the window but no red.
Close the window or press ESC to end the test.
"""
import unittest
from pyglet import window
from tests.interactive.window import window_util
class EVENT_RESIZE(unittest.TestCase):
def on_resize(self, width, height):
print('Window resized to %dx%d.' % (width, height))
def test_resize(self):
        w = window.Window(200, 200, resizable=True)
w.push_handlers(self)
while not w.has_exit:
w.dispatch_events()
window_util.draw_client_border(w)
w.flip()
w.close()
|
uwosh/uwosh.themebase
|
uwosh/themebase/browser/interfaces.py
|
Python
|
gpl-2.0
| 138
| 0.014493
|
from zope.interface import Interface
class IUWOshThemeLayer(Interface):
"""
    Marker interface that defines a browser layer
"""
|
anandology/pyjamas
|
library/gwt/ui/UIObject.py
|
Python
|
apache-2.0
| 10,085
| 0.002776
|
# Copyright 2006 James Tauber and contributors
# Copyright (C) 2009 Luke Kenneth Casson Leighton <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyjamas import DOM
from pyjamas import Factory
from pyjamas import Window
from pyjamas.ui import Applier
def setStyleName(element, style, add):
oldStyle = DOM.getAttribute(element, "className")
if oldStyle is None:
oldStyle = ""
idx = oldStyle.find(style)
# Calculate matching index
lastPos = len(oldStyle)
while idx != -1:
if idx == 0 or (oldStyle[idx - 1] == " "):
last = idx + len(style)
if (last == lastPos) or ((last < lastPos) and (oldStyle[last] == " ")):
break
idx = oldStyle.find(style, idx + 1)
if add:
if idx == -1:
DOM.setAttribute(element, "className", oldStyle + " " + style)
else:
if idx != -1:
if idx == 0:
begin = ''
else:
begin = oldStyle[:idx-1]
end = oldStyle[idx + len(style):]
DOM.setAttribute(element, "className", begin + end)
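# Worked example of the bookkeeping above, starting from
# className == "gwt-Button gwt-Button-up":
#   setStyleName(el, "disabled", True)       -> "gwt-Button gwt-Button-up disabled"
#   setStyleName(el, "gwt-Button-up", False) -> "gwt-Button"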
class UIObject(Applier):
_props = [ ("visible", "Visibility", "Visible", None),
("element", "Element", "Element", None),
("stylename", "Style name", "StyleName", None),
("width", "Width", "Width", None),
("height", "Height", "Height", None),
("size", "Size", "Size", None),
("title", "Title", "Title", None),
("zindex", "Z Index", "zIndex", None),
]
@classmethod
def _getProps(self):
return Applier._getProps() + self._props
def __init__(self, **kwargs):
# do not initialise element, here, to None, whatever you do.
# there are circumstances where UIObject.__init__ is the last
# thing that is done in derived classes, where self.setElement
# will _already_ have been called.
Applier.__init__(self, **kwargs)
def getAbsoluteLeft(self):
return DOM.getAbsoluteLeft(self.getElement())
def getAbsoluteTop(self):
return DOM.getAbsoluteTop(self.getElement())
def getElement(self):
"""Get the DOM element associated with the UIObject, if any"""
return self.element
def getOffsetHeight(self):
return DOM.getIntAttribute(self.element, "offsetHeight")
def getOffsetWidth(self):
return DOM.getIntAttribute(self.element, "offsetWidth")
def getStyleName(self):
return DOM.getAttribute(self.element, "className")
    def getStylePrimaryName(self):
        """Return the first className if there are multiples"""
fullClassName = self.getStyleName()
if fullClassName: return fullClassName.split()[0]
def getStyleAttribute(self, attribute):
""" can be called with two forms:
getStyleAttribute(self, attr) - returns value
            getStyleAttribute(self, (attr1,attr2,...)) - returns dictionary
                of attr:value pairs
"""
if isinstance(attribute, basestring):
return DOM.getStyleAttribute(self.getElement(), attribute)
# if attribute is not a string, assume it is iterable,
# and return the multi-attribute form
el = self.getElement()
result = {}
        for attr in attribute:
result[attr] = DOM.getStyleAttribute(el,attr)
return result
def getTitle(self):
return DOM.getAttribute(self.element, "title")
def setElement(self, element):
"""Set the DOM element associated with the UIObject."""
self.element = element
def setHeight(self, height):
"""Set the height of the element associated with this UIObject. The
value should be given as a CSS value, such as 100px, 30%, or 50pi
"""
if height is None:
height = ""
DOM.setStyleAttribute(self.element, "height", str(height))
def getHeight(self):
return DOM.getStyleAttribute(self.element, "height")
def setPixelSize(self, width, height):
"""Set the width and height of the element associated with this UIObject
in pixels. Width and height should be numbers.
"""
if width >= 0:
self.setWidth("%dpx" % width)
if height >= 0:
self.setHeight("%dpx" % height)
def setSize(self, width, height):
"""Set the width and height of the element associated with this
UIObject. The values should be given as a CSS value,
such as 100px, 30%, or 50pi
"""
self.setWidth(width)
self.setHeight(height)
def addStyleName(self, style):
"""Append a style to the element associated with this UIObject.
This is a CSS class name. It will be added after any
already-assigned CSS class for the element.
"""
self.setStyleName(self.element, style, True)
def addStyleDependentName(self, styleSuffix):
"""Adds a secondary or dependent style name to this element.
For example if the primary stylename is gwt-TextBox,
self.addStyleDependentName("readonly") will return
gwt-TextBox-readonly.
"""
self.addStyleName(self.getStylePrimaryName()+"-"+styleSuffix)
def removeStyleName(self, style):
"""Remove a style from the element associated with this UIObject. This is
a CSS class name."""
self.setStyleName(self.element, style, False)
def removeStyleDependentName(self, styleSuffix):
"""Remove a dependent style name by specifying the style name's suffix.
"""
self.removeStyleName(self.getStylePrimaryName()+"-"+styleSuffix)
# also callable as: setStyleName(self, style)
def setStyleName(self, element, style=None, add=True):
"""When called with a single argument, this replaces all the CSS
classes associated with this UIObject's element with the given
parameter. Otherwise, this is assumed to be a worker function
for addStyleName and removeStyleName.
"""
# emulate setStyleName(self, style)
if style is not None:
setStyleName(element, style, add)
return
style = element
DOM.setAttribute(self.element, "className", style)
def setStyleAttribute(self, attribute, value=None):
""" can be called with two forms:
single attr: setStyleAttribute(self, attr, value)
multi attr: setStyleAttribute(self, {attr1:val1, attr2:val2, ...})
"""
if value is not None: # assume single attr form
DOM.setStyleAttribute(self.getElement(), attribute, value)
return
# assume multi value form
el = self.getElement()
for attr, val in attribute.items():
DOM.setStyleAttribute(el, attr, val)
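    # Both calling forms accepted above (hypothetical widget `obj`):
    #   obj.setStyleAttribute("width", "100px")
    #   obj.setStyleAttribute({"width": "100px", "height": "50px"})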
def setTitle(self, title):
DOM.setAttribute(self.element, "title", title)
def setWidth(self, width):
"""Set the width of the element associated with this UIObject. The
value should be given as a CSS value, such as 100px, 30%, or 50pi
"""
if width is None:
width = ""
DOM.setStyleAttribute(self.element, "width", str(width))
def getWidth(self):
return DOM.getStyleAttribute(self.element, "width")
def sinkEvents(self, eventBitsToAdd):
"""Request that the given events be delivered to the event handler
for this element. The event bits passed are added (using inclusive
OR) to the
|
johnson1228/pymatgen
|
pymatgen/io/lammps/tests/test_sets.py
|
Python
|
mit
| 2,130
| 0.000469
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import division, print_function, unicode_literals, \
absolute_import
import os
import unittest
from pymatgen.io.lammps.sets import LammpsInputSet
__author__ = 'Kiran Mathew'
__email__ = '[email protected]'
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..",
"test_files", "lammps")
class TestLammpsInputSet(unittest.TestCase):
def setUp(self):
template_file = os.path.join(test_dir, "in.peptide.template")
data_file = os.path.join(test_dir, "data.peptide")
        self.data_filename = "test_data.peptide"
self.input_filename = "test_input.peptide"
self.settings = {
"pair_style": "lj/charmm/coul/long 8.0 10.0 10.0",
"kspace_style": "pppm 0.0001",
"fix_1": "1 all nvt temp 275.0 275.0 100.0 tchain 1",
"fix_2": "2 all shake 0.0001 10 100 b 4 6 8 10 12 14 18 a 31"
}
self.lammps_input_set = LammpsInputSet.from_file(
"test", template_file, self.settings, lammps_data=data_file,
data_filename=self.data_filename)
def test_input(self):
self.assertEqual(self.lammps_input_set.lammps_input.settings["data_file"],
self.data_filename)
for k, v in self.settings.items():
self.assertEqual(self.lammps_input_set.lammps_input.settings[k], v)
def test_write_input_set(self):
self.lammps_input_set.write_input(self.input_filename)
self.assertTrue(os.path.exists(self.input_filename))
self.assertTrue(os.path.exists(self.data_filename))
os.remove(self.input_filename)
os.remove(self.data_filename)
# now change both input and data filenames
self.lammps_input_set.write_input("xxxx.input", "yyy.data")
self.assertTrue(os.path.exists("xxxx.input"))
self.assertTrue(os.path.exists("yyy.data"))
os.remove("xxxx.input")
os.remove("yyy.data")
if __name__ == "__main__":
unittest.main()
|
lucalianas/opendata_gym
|
odatagym_app/datasets_handler/views.py
|
Python
|
mit
| 1,158
| 0.002591
|
from csv import DictReader
import os
from rest_framework import status
from rest_framework.viewsets import ViewSet
from rest_framework.exceptions import NotFound
from rest_framework.response import Response
import odatagym_app.settings as ods
import logging
logger = logging.getLogger('odata_gym')
class DatasetsHandler(ViewSet):
def get(self, request, dataset_folder, dataset_name, format=None):
DELIMITERS_MAP = {
'c': ',',
'sc': ';',
'sp': ' '
}
        dataset_path = os.path.join(ods.DATASETS_DIR, dataset_folder, dataset_name)
print dataset_path
if os.path.exists(dataset_path):
print request.query_params
delimiter = request.GET.get('file_delimiter', 'c')
print 'Delimiter is %s' % delimiter
with open(dataset_path) as dataset:
reader = DictReader(dataset, delimiter=DELIMITERS_MAP[delimiter])
data = [x for x in reader]
return Response(data, status=status.HTTP_200_OK)
else:
raise NotFound('There is no dataset %s for %s' % (dataset_name, dataset_folder))
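# Hypothetical client call, assuming the view is routed at /datasets/<folder>/<name>:
#   GET /datasets/demo/cities.csv?file_delimiter=sc
# parses the file with ';' and returns a JSON list with one object per CSV row.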
|
fulfilio/trytond-waiting-customer-shipment-report
|
setup.py
|
Python
|
bsd-3-clause
| 4,152
| 0
|
#!/usr/bin/env python
import re
import os
import time
import sys
import unittest
import ConfigParser
from setuptools import setup, Command
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
class SQLiteTest(Command):
"""
Run the tests on SQLite
"""
description = "Run tests on SQLite"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
os.environ['TRYTOND_DATABASE_URI'] = 'sqlite://'
os.environ['DB_NAME'] = ':memory:'
from tests import suite
test_result = unittest.TextTestRunner(verbosity=3).run(suite())
if test_result.wasSuccessful():
sys.exit(0)
sys.exit(-1)
class PostgresTest(Command):
"""
Run the tests on Postgres.
"""
description = "Run tests on Postgresql"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
os.environ['TRYTOND_DATABASE_URI'] = 'postgresql://'
os.environ['DB_NAME'] = 'test_' + str(int(time.time()))
from tests import suite
test_result = unittest.TextTestRunner(verbosity=3).run(suite())
if test_result.wasSuccessful():
sys.exit(0)
sys.exit(-1)
config = ConfigParser.ConfigParser()
config.readfp(open('tryton.cfg'))
info = dict(config.items('tryton'))
for key in ('depends', 'extras_depend', 'xml'):
if key in info:
info[key] = info[key].strip().splitlines()
major_version, minor_version, _ = info.get('version', '0.0.1').split('.', 2)
major_version = int(major_version)
minor_version = int(minor_version)
requires = []
MODULE2PREFIX = {
'report_webkit': 'openlabs'
}
MODULE = "waiting_customer_shipment_report"
PREFIX = "fio"
for dep in info.get('depends', []):
if not re.match(r'(ir|res|webdav)(\W|$)', dep):
requires.append(
'%s_%s >= %s.%s, < %s.%s' % (
MODULE2PREFIX.get(dep, 'trytond'), dep,
major_version, minor_version, major_version,
minor_version + 1
)
)
requires.append(
'trytond >= %s.%s, < %s.%s' % (
major_version, minor_version, major_version, minor_version + 1
)
)
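# For illustration, with hypothetical version '3.4.0.1' and
# depends ['party', 'report_webkit'], the loop above yields:
#   requires == ['trytond_party >= 3.4, < 3.5',
#                'openlabs_report_webkit >= 3.4, < 3.5',
#                'trytond >= 3.4, < 3.5']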
setup(
name='%s_%s' % (PREFIX, MODULE),
version=info.get('version', '0.0.1'),
description="",
author="Fulfil.IO Inc., Openlabs Technologies and Consulting (P) Ltd.",
author_email='[email protected]',
url='http://www.fulfil.io/',
package_dir={'trytond.modules.%s' % MODULE: '.'},
packages=[
'trytond.modules.%s' % MODULE,
'trytond.modules.%s.tests' % MODULE,
],
package_data={
'trytond.modules.%s' % MODULE: info.get('xml', []) +
info.get('translation', []) +
['tryton.cfg', 'locale/*.po', 'tests/*.rst', 'reports/*.odt'] +
['view/*.xml', 'reports/*.html', 'reports/css/bootstrap/css/*'] +
['reports/css/bootstrap/fonts/*', 'reports/css/font-awesome/css/*'] +
['reports/css/font-awesome/fonts/*', 'reports/js/*.js']
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Tryton',
'Topic :: Office/Business',
],
long_description=open('README.rst').read(),
license='BSD',
install_requires=requires,
zip_safe=False,
entry_points="""
[trytond.modules]
%s = trytond.modules.%s
""" % (MODULE, MODULE),
test_suite='tests',
test_loader='trytond.test_loader:Loader',
cmdclass={
'test': SQLiteTest,
'test_on_postgres': PostgresTest,
}
)
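# Invocation sketch, based on the cmdclass mapping above:
#   python setup.py test               # runs the suite on in-memory SQLite
#   python setup.py test_on_postgres   # runs against a throwaway Postgres database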
|
monovertex/ygorganizer
|
manage.py
|
Python
|
mit
| 247
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings.prod')
from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
Panos512/invenio
|
modules/webaccess/lib/external_authentication_oauth1.py
|
Python
|
gpl-2.0
| 8,849
| 0.007232
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
This module contains functions and methods to authenticate with OAuth1
providers.
"""
__revision__ = \
"$Id$"
from invenio.containerutils import get_substructure
from invenio.dbquery import run_sql
from invenio.external_authentication import ExternalAuth
class ExternalOAuth1(ExternalAuth):
"""
    Contains methods to authenticate with an OAuth1 provider.
"""
@staticmethod
def __init_req(req):
req.g['oauth1_provider_name'] = ''
req.g['oauth1_debug'] = 0
req.g['oauth1_msg'] = ''
req.g['oauth1_debug_msg'] = ''
req.g['oauth1_response'] = None
def auth_user(self, username, password, req=None):
"""
Tries to find email and identity of the user from OAuth1 provider. If it
doesn't find any of them, returns (None, None)
@param username: Isn't used in this function
@type username: str
@param password: Isn't used in this function
@type password: str
@param req: request
@type req: invenio.webinterface_handler_wsgi.SimulatedModPythonRequest
@rtype: str|NoneType, str|NoneType
"""
from invenio.access_control_config import CFG_OAUTH1_CONFIGURATIONS
from invenio.access_control_config import CFG_OAUTH1_PROVIDERS
from invenio.webinterface_handler import wash_urlargd
from rauth.service import OAuth1Service
self.__init_req(req)
args = wash_urlargd(req.form, {'provider': (str, ''),
'login_method': (str, ''),
'oauth_token': (str, ''),
'oauth_verifier': (str, ''),
'denied': (str, '')
})
provider_name = req.g['oauth1_provider_name'] = args['provider']
if not provider_name in CFG_OAUTH1_PROVIDERS:
req.g['oauth1_msg'] = 22
return None, None
# Load the configurations to construct OAuth1 service
config = CFG_OAUTH1_CONFIGURATIONS[args['provider']]
req.g['oauth1_debug'] = config.get('debug', 0)
if not args['oauth_token']:
# In case of an error, display corresponding message
if args['denied']:
req.g['oauth1_msg'] = 21
return None, None
else:
req.g['oauth1_msg'] = 22
return None, None
provider = OAuth1Service(
name = req.g['oauth1_provider_name'],
consumer_key = config['consumer_key'],
consumer_secret = config['consumer_secret'],
request_token_url = config['request_token_url'],
access_token_url = config['access_token_url'],
authorize_url = config['authorize_url'],
header_auth = True)
# Get the request token secret from database and exchange it with the
# access token.
query = """SELECT secret FROM oauth1_storage WHERE token = %s"""
params = (args['oauth_token'],)
try:
# If the request token is already used, return
request_token_secret = run_sql(query, params)[0][0]
except IndexError:
req.g['oauth1_msg'] = 22
return None, None
response = provider.get_access_token(
'GET',
request_token = args['oauth_token'],
request_token_secret = request_token_secret,
params = {
'oauth_verifier': args['oauth_verifier']
}
)
if req.g['oauth1_debug']:
req.g['oauth1_debug_msg'] = str(response.content) + "<br/>"
# Some providers send the identity and access token together.
email, identity = self._get_user_email_and_id(response.content, req)
if not identity and config.has_key('request_url'):
# For some providers, to reach user profile we need to make request
# to a specific url.
params = config.get('request_parameters', {})
response = provider.get(config['request_url'],
params = params,
access_token = response.content['oauth_token'],
access_token_secret = response.content['oauth_token_secret']
)
            if req.g['oauth1_debug']:
req.g['oauth1_debug_msg'] += str(response.content) + "<br/>"
email, identity = self._get_user_email_and_id(response.content, req)
if identity:
# If identity is found, add the name of the provider at the
# beginning of the identity because different providers may have
# different users with same id.
identity = "%s:%s" % (req.g['oauth1_provider_name'], identity)
else:
req.g['oauth1_msg'] = 23
# Delete the token saved in the database since it is useless now.
query = """
DELETE FROM oauth1_storage
WHERE token=%s
OR date_creation < DATE_SUB(NOW(), INTERVAL 1 HOUR)
"""
params = (args['oauth_token'],)
run_sql(query, params)
if req.g['oauth1_debug']:
req.g['oauth1_msg'] = "<code>%s</code>" % req.g['oauth1_debug_msg'].replace("\n", "<br/>")
return None, None
return email, identity
def fetch_user_nickname(self, username, password=None, req=None):
"""
        Queries the OAuth1 provider for the nickname of the user. If it
        doesn't find one, returns None.
        This function doesn't need username, password or req. They exist only
        because this class is derived from ExternalAuth.
@param username: Isn't used in this function
@type username: str
@param password: Isn't used in this function
@type password: str
@param req: Isn't used in this function
@type req: invenio.webinterface_handler_wsgi.SimulatedModPythonRequest
@rtype: str or NoneType
"""
from invenio.access_control_config import CFG_OAUTH1_CONFIGURATIONS
        if req.g['oauth1_provider_name']:
path = None
if CFG_OAUTH1_CONFIGURATIONS[req.g['oauth1_provider_name']].has_key(
'nickname'
):
path = CFG_OAUTH1_CONFIGURATIONS[req.g['oauth1_provider_name']]['nickname']
if path:
                return get_substructure(req.g['oauth1_response'], path)
else:
return None
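    # For illustration, a provider entry in CFG_OAUTH1_CONFIGURATIONS might map
    # 'nickname' to a substructure path such as ['screen_name'] (assumed shape);
    # get_substructure() then walks that path through the parsed response.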
def _get_user_email_and_id(self, container, req):
"""
        Returns the external identity and email address together. Since the
        identity is essential for OAuth1 authentication, it returns None, None
        when no external identity is found.
@param container: container which contains email and id
@type container: list|dict
@rtype str|NoneType, str|NoneType
"""
from invenio.access_control_config import CFG_OAUTH1_CONFIGURATIONS
identity
|
EthereumWebhooks/blockhooks
|
lib/ethereum/tests/bintrie.py
|
Python
|
apache-2.0
| 8,042
| 0.000373
|
# All nodes are of the form [path1, child1, path2, child2]
# or <value>
from ethereum import utils
from ethereum.db import EphemDB, ListeningDB
import rlp, sys
import copy
hashfunc = utils.sha3
HASHLEN = 32
# 0100000101010111010000110100100101001001 -> ASCII
def decode_bin(x):
return ''.join([chr(int(x[i:i+8], 2)) for i in range(0, len(x), 8)])
# ASCII -> 0100000101010111010000110100100101001001
def encode_bin(x):
o = ''
for c in x:
c = ord(c)
p = ''
for i in range(8):
p = str(c % 2) + p
c /= 2
o += p
return o
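# Round-trip sketch (illustrative, not part of the original module):
#   >>> encode_bin('A')          # ord('A') == 65
#   '01000001'
#   >>> decode_bin('01000001')
#   'A'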
# Encodes a binary list [0,1,0,1,1,0] of any length into bytes
def encode_bin_path(li):
if li == []:
return ''
b = ''.join([str(x) for x in li])
b2 = '0' * ((4 - len(b)) % 4) + b
prefix = ['00', '01', '10', '11'][len(b) % 4]
if len(b2) % 8 == 4:
return decode_bin('00' + prefix + b2)
else:
return decode_bin('100000' + prefix + b2)
# Decodes bytes into a binary list
def decode_bin_path(p):
if p == '':
return []
p = encode_bin(p)
if p[0] == '1':
p = p[4:]
assert p[0:2] == '00'
L = ['00', '01', '10', '11'].index(p[2:4])
p = p[4+((4 - L) % 4):]
return [(1 if x == '1' else 0) for x in p]
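# The two path codecs are inverses of each other (illustrative property check):
#   assert decode_bin_path(encode_bin_path([0, 1, 0, 1, 1, 0])) == [0, 1, 0, 1, 1, 0]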
# Get a node from a database if needed
def dbget(node, db):
if len(node) == HASHLEN:
return rlp.decode(db.get(node))
return node
# Place a node into a database if needed
def dbput(node, db):
r = rlp.encode(node)
if len(r) == HASHLEN or len(r) > HASHLEN * 2:
h = hashfunc(r)
db.put(h, r)
return h
return node
# Get a value from a tree
def get(node, db, key):
node = dbget(node, db)
if key == []:
return node[0]
elif len(node) == 1 or len(node) == 0:
return ''
else:
sub = dbget(node[key[0]], db)
if len(sub) == 2:
subpath, subnode = sub
else:
subpath, subnode = '', sub[0]
subpath = decode_bin_path(subpath)
if key[1:len(subpath)+1] != subpath:
return ''
return get(subnode, db, key[len(subpath)+1:])
# Get length of shared prefix of inputs
def get_shared_length(l1, l2):
i = 0
while i < len(l1) and i < len(l2) and l1[i] == l2[i]:
i += 1
return i
# Replace ['', v] with [v] and compact nodes into hashes
# if needed
def contract_node(n, db):
if len(n[0]) == 2 and n[0][0] == '':
n[0] = [n[0][1]]
if len(n[1]) == 2 and n[1][0] == '':
n[1] = [n[1][1]]
if len(n[0]) != 32:
n[0] = dbput(n[0], db)
if len(n[1]) != 32:
n[1] = dbput(n[1], db)
return dbput(n, db)
# Update a trie
def update(node, db, key, val):
node = dbget(node, db)
# Unfortunately this particular design does not allow
# a node to have one child, so at the root for empty
# tries we need to add two dummy children
if node == '':
node = [dbput([encode_bin_path([]), ''], db),
dbput([encode_bin_path([1]), ''], db)]
if key == []:
node = [val]
elif len(node) == 1:
raise Exception("DB must be prefix-free")
else:
assert len(node) == 2, node
sub = dbget(node[key[0]], db)
if len(sub) == 2:
_subpath, subnode = sub
else:
_subpath, subnode = '', sub[0]
subpath = decode_bin_path(_subpath)
sl = get_shared_length(subpath, key[1:])
        if sl == len(subpath):
node[key[0]] = [_subpath, update(subnode, db, key[sl+1:], val)]
else:
subpath_next = subpath[sl]
n = [0, 0]
n[subpath_next] = [encode_bin_path(subpath[sl+1:]), subnode]
n[(1 - subpath_next)] = [encode_bin_path(key[sl+2:]), [val]]
n = contract_node(n, db)
node[key[0]] = dbput([encode_bin_path(subpath[:sl]), n], db)
return contract_node(node, db)
# Compression algorithm specialized for merkle proof databases
# The idea is similar to standard compression algorithms, where
# you replace an instance of a repeat with a pointer to the repeat,
# except that here you replace an instance of a hash of a value
# with a pointer to that value. This is useful since merkle branches
# usually include nodes which contain hashes of each other
magic = '\xff\x39'
def compress_db(db):
out = []
values = db.kv.values()
keys = [hashfunc(x) for x in values]
assert len(keys) < 65300
for v in values:
o = ''
pos = 0
while pos < len(v):
done = False
if v[pos:pos+2] == magic:
o += magic + magic
done = True
pos += 2
for i, k in enumerate(keys):
if v[pos:].startswith(k):
o += magic + chr(i // 256) + chr(i % 256)
done = True
pos += len(k)
break
if not done:
o += v[pos]
pos += 1
out.append(o)
return rlp.encode(out)
def decompress_db(ins):
ins = rlp.decode(ins)
vals = [None] * len(ins)
def decipher(i):
if vals[i] is None:
v = ins[i]
o = ''
pos = 0
while pos < len(v):
if v[pos:pos+2] == magic:
if v[pos+2:pos+4] == magic:
o += magic
else:
ind = ord(v[pos+2]) * 256 + ord(v[pos+3])
o += hashfunc(decipher(ind))
pos += 4
else:
o += v[pos]
pos += 1
vals[i] = o
return vals[i]
for i in range(len(ins)):
decipher(i)
o = EphemDB()
for v in vals:
o.put(hashfunc(v), v)
return o
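# Round-trip sketch (mirrors the assertion used in test() below):
#   blob = compress_db(some_db)              # some_db: a populated EphemDB
#   assert decompress_db(blob).kv == some_db.kv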
# Convert a merkle branch directly into RLP (ie. remove
# the hashing indirection). As it turns out, this is a
# really compact way to represent a branch
def compress_branch(db, root):
o = dbget(copy.copy(root), db)
def evaluate_node(x):
for i in range(len(x)):
if len(x[i]) == HASHLEN and x[i] in db.kv:
x[i] = evaluate_node(dbget(x[i], db))
elif isinstance(x, list):
x[i] = evaluate_node(x[i])
return x
o2 = rlp.encode(evaluate_node(o))
return o2
def decompress_branch(branch):
branch = rlp.decode(branch)
db = EphemDB()
def evaluate_node(x):
if isinstance(x, list):
x = [evaluate_node(n) for n in x]
x = dbput(x, db)
return x
evaluate_node(branch)
return db
# Test with n nodes and k branch picks
def test(n, m=100):
assert m <= n
db = EphemDB()
x = ''
for i in range(n):
k = hashfunc(str(i))
v = hashfunc('v'+str(i))
x = update(x, db, [int(a) for a in encode_bin(rlp.encode(k))], v)
print(x)
    print(sum([len(val) for key, val in db.kv.items()]))
l1 = ListeningDB(db)
o = 0
p = 0
q = 0
ecks = x
for i in range(m):
x = copy.deepcopy(ecks)
k = hashfunc(str(i))
v = hashfunc('v'+str(i))
l2 = ListeningDB(l1)
v2 = get(x, l2, [int(a) for a in encode_bin(rlp.encode(k))])
assert v == v2
o += sum([len(val) for key, val in l2.kv.items()])
cdb = compress_db(l2)
p += len(cdb)
assert decompress_db(cdb).kv == l2.kv
cbr = compress_branch(l2, x)
q += len(cbr)
dbranch = decompress_branch(cbr)
assert v == get(x, dbranch, [int(a) for a in encode_bin(rlp.encode(k))])
# for k in l2.kv:
# assert k in dbranch.kv
o = {
'total_db_size': sum([len(val) for key, val in l1.kv.items()]),
        'avg_proof_size': (o // min(n, m)),
'avg_compressed_proof_size': (p // min(n, m)),
'avg_branch_size': (q // min(n, m)),
'compressed_db_size': len(compress_db(l1))
}
return o
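# Example run (assumed): build a 1000-key trie, verify and compress 100 random
# membership proofs, and return the size-statistics dict.
#   stats = test(1000, 100)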
|
rxuriguera/bibtexIndexMaker
|
src/bibim/gui/custom_widgets.py
|
Python
|
gpl-3.0
| 3,769
| 0.006898
|
# Copyright 2010 Ramon Xuriguera
#
# This file is part of BibtexIndexMaker.
#
# BibtexIndexMaker is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BibtexIndexMaker is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BibtexIndexMaker. If not, see <http://www.gnu.org/licenses/>.
from PyQt4 import QtCore, QtGui #@UnresolvedImport
from bibim.gui.ui.ui_file_chooser import Ui_FileChooser
from bibim.gui.ui.ui_new_collection_dialog import Ui_NewWrapperCollection
class FileChooser(QtGui.QWidget):
DIR = 0
FILE = 1
pathChanged = QtCore.pyqtSignal()
def __init__(self):
super(FileChooser, self).__init__()
# Setup ui
self.ui = Ui_FileChooser()
self.ui.setupUi(self)
self.path = QtCore.QString()
self.mode = self.DIR
# Connect signals and slots
#self.connect(self.ui.browseButton, QtCore.SIGNAL('clicked()'), self.chooseFile)
self.ui.browseButton.clicked.connect(self.chooseFile)
def get_path(self):
return self.__path
def set_path(self, value):
self.__path = value
        self.pathChanged.emit()
path = QtCore.pyqtProperty(QtCore.QString, get_path, set_path)
@QtCore.pyqtSlot()
def chooseFile(self):
if self.mode == self.DIR:
self.path = QtGui.QFileDialog.getExistingDirectory(self)
else:
self.path = QtGui.QFileDialog.getOpenFileName(self)
if self.path:
self.ui.pathLine.setText(self.path)
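    # Usage sketch (assumed): consumers react to new selections through the
    # pathChanged signal, e.g.
    #   chooser = FileChooser()
    #   chooser.pathChanged.connect(on_path_changed)  # on_path_changed: a hypothetical slot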
class LogsTextEdit(QtGui.QTextEdit):
colors = {'DEBUG':QtGui.QColor(100, 100, 100),
              'INFO':QtGui.QColor(0, 0, 0),
'WARNING':QtGui.QColor(222, 145, 2),
'ERROR':QtGui.QColor(191, 21, 43),
'CRITICAL':QtGui.QColor(191, 21, 43)}
def __init__(self, parent):
QtGui.QTextEdit.__init__(self, parent)
self.setReadOnly(True)
@QtCore.pyqtSlot(QtCore.QString, QtCore.QString)
def updateText(self, message, level='INFO'):
self.setTextColor(self.colors[str(level)])
self.append(message)
class WrapperCollectionBox(QtGui.QDialog):
def __init__(self, parent=None):
super(WrapperCollectionBox, self).__init__()
self.ui = Ui_NewWrapperCollection()
self.ui.setupUi(self)
self.setModal(True)
# OK Button disabled until both url and field are not empty
self.ok_button = self.ui.buttonBox.button(QtGui.QDialogButtonBox.Ok)
self.ok_button.setEnabled(False)
self.ui.urlLine.textChanged.connect(self._enable_ok_button)
self.ui.fieldLine.textChanged.connect(self._enable_ok_button)
def _enable_ok_button(self):
if not (self.ui.urlLine.text() and self.ui.fieldLine.text()):
self.ok_button.setEnabled(False)
else:
self.ok_button.setEnabled(True)
class ConfirmMessageBox(QtGui.QMessageBox):
def __init__(self, parent=None):
super(ConfirmMessageBox, self).__init__(parent)
self.setModal(True)
self.setStandardButtons(QtGui.QMessageBox.Ok |
QtGui.QMessageBox.Cancel)
self.setDefaultButton(QtGui.QMessageBox.Cancel)
self.setIcon(QtGui.QMessageBox.Question)
|
jcmcclurg/serverpower
|
utilities/intel_pcm/pmu-query.py
|
Python
|
gpl-2.0
| 3,641
| 0.014556
|
#!/usr/bin/python
import urllib2
import json, csv
import subprocess
import sys
import platform
import getopt
all_flag = False
download_flag = False
filename=None
offcore_events=[]
try:
    opts, args = getopt.getopt(sys.argv[1:],'af:d',['all','file=','download'])
for o, a in opts:
if o in ('-a','--all'):
all_flag=True
if o in ('-f','--file'):
filename=a
if o in ('-d','--download'):
download_flag=True
except getopt.GetoptError, err:
print("parse error: %s\n" %(str(err)))
exit(-2)
if filename == None:
map_file_raw=urllib2.urlopen('https://download.01.org/perfmon/mapfile.csv')
map_dict = csv.DictReader(map_file_raw)
map_file = []
    core_path = ''
offcore_path = ''
while True:
try:
map_file.append(map_dict.next())
except StopIteration:
break
if platform.system() == 'CYGWIN_NT-6.1':
p = subprocess.Popen(['./pcm-core.exe -c'],stdout=subprocess.PIPE,shell=True)
elif platform.system() == 'Windows':
p = subprocess.Popen(['pcm-core.exe -c'],stdout=subprocess.PIPE,shell=True)
else:
p = subprocess.Popen(['./pcm-core.x -c'],stdout=subprocess.PIPE,shell=True)
(output, err) = p.communicate()
p_status = p.wait()
for model in map_file:
if model['Family-model'] in output:
if(model['EventType'] == 'core'):
core_path = model['Filename']
elif(model['EventType'] == 'offcore'):
offcore_path = model['Filename']
print (model)
if core_path != '':
json_core_data=urllib2.urlopen('https://download.01.org/perfmon'+core_path)
core_events=json.load(json_core_data)
if(download_flag == True):
with open(core_path.split('/')[-1],'w') as outfile:
json.dump(core_events, outfile, sort_keys=True, indent=4)
else:
print ('no core event found for %s CPU, program abort...' % (output))
exit(-1)
if offcore_path != '':
json_offcore_data=urllib2.urlopen('https://download.01.org/perfmon'+offcore_path)
offcore_events=json.load(json_offcore_data)
if(download_flag == True):
with open(offcore_path.split('/')[-1],'w') as outfile:
json.dump(offcore_events, outfile, sort_keys=True, indent=4)
else:
core_events=json.load(open(filename))
if all_flag == True:
for event in core_events+offcore_events:
if event.has_key('EventName') and event.has_key('BriefDescription'):
print (event['EventName']+':'+event['BriefDescription'])
sys.exit(0)
name=raw_input("Event to query (empty enter to quit):")
while(name != ''):
for event in core_events+offcore_events:
if event.has_key('EventName') and name.lower() in event['EventName'].lower():
print (event['EventName']+':'+event['BriefDescription'])
for ev_code in event['EventCode'].split(', '):
print ('cpu/umask=%s,event=%s,name=%s%s%s%s%s/' % (
event['UMask'], ev_code, event['EventName'],
(',offcore_rsp=%s' % (event['MSRValue'])) if event['MSRValue'] != '0' else '',
(',inv=%s' % (event['Invert'])) if event['Invert'] != '0' else '',
(',any=%s' % (event['AnyThread'])) if event['AnyThread'] != '0' else '',
(',edge') if event['EdgeDetect'] != '0' else ''))
name=raw_input("Event to query (empty enter to quit):")
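# Sample output line (hypothetical event fields): an entry with UMask '0x01',
# EventCode '0x3C' and no MSR/inv/any/edge modifiers would print
#   cpu/umask=0x01,event=0x3C,name=CPU_CLK_UNHALTED.THREAD/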
|
lzw120/django
|
tests/regressiontests/admin_custom_urls/tests.py
|
Python
|
bsd-3-clause
| 3,056
| 0.002291
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.template.response import TemplateResponse
from django.test import TestCase
from django.test.utils import override_settings
from .models import Action
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminCustomUrlsTest(TestCase):
fixtures = ['users.json', 'actions.json']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def testBasicAddGet(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get('/custom_urls/admin/admin_custom_urls/action/!add/')
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def testAddWithGETArgs(self):
response = self.client.get('/custom_urls/admin/admin_custom_urls/action/!add/', {'name': 'My Action'})
self.assertEqual(response.status_code, 200)
self.assertTrue(
'value="My Action"' in response.content,
"Couldn't find an input with the right value in the response."
)
def testBasicAddPost(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
'_popup': u'1',
"name": u'Action added through a popup',
"description": u"Description of added action",
}
response = self.client.post('/custom_urls/admin/admin_custom_urls/action/!add/', post_data)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'dismissAddAnotherPopup')
self.assertContains(response, 'Action added through a popup')
def testAdminUrlsNoClash(self):
"""
Test that some admin URLs work correctly. The model has a CharField
PK and the add_view URL has been customized.
"""
# Should get the change_view for model instance with PK 'add', not show
# the add_view
response = self.client.get('/custom_urls/admin/admin_custom_urls/action/add/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Change action')
        # Ditto, but use reverse() to build the URL
path = reverse('admin:%s_action_change' % Action._meta.app_label,
args=('add',))
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Change action')
# Should correctly get the change_view for the model instance with the
# funny-looking PK
        path = reverse('admin:%s_action_change' % Action._meta.app_label,
                       args=("path/to/html/document.html",))
args=("path/to/html/document.html",))
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Change action')
self.assertContains(response, 'value="path/to/html/document.html"')
|