| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
ConstellationApps/Forms
|
constellation_forms/models/log.py
|
Python
|
isc
| 1,879
| 0
|
from .formSubmission import FormSubmission
from django.contrib.auth.models import User
from django.db
|
import models
from django.template.defaultfilters import slugify
class Log(models.Model):
"""
Form Submission Log Database Model
Attributes:
* owner - user submitting the m
|
essage
* submission - form submission associated
* timestamp - time of submission entry
* private - display to non-owners?
* message - log entry
* mtype - type of log entry
* 1 - user message (default)
* 2 - system action
* 3 - form status change
* 4 - attached file
* file - attached file entry
"""
owner = models.ForeignKey(User, blank=True, null=True)
submission = models.ForeignKey(FormSubmission)
timestamp = models.DateTimeField(auto_now_add=True)
private = models.BooleanField(default=False)
message = models.TextField(blank=True)
mtype = models.IntegerField(default=1)
file = models.FileField(upload_to='private/constellation_forms/log_files/')
class Meta:
db_table = "form_log"
ordering = ("timestamp",)
@property
def extension(self):
return self.file.name.split(".")[-1]
@property
def content_type(self):
if self.extension == "pdf":
return "application/pdf"
if self.extension == "txt":
return "text/plain"
if self.extension == "png":
return "image/png"
if self.extension == "jpeg" or self.extension == "jpg":
return "image/jpeg"
if self.extension == "gif":
return "image/gif"
return "application/force-download"
@property
def file_name(self):
return slugify("{0}_{1}_{2}".format(self.submission.form.name, self.pk,
self.owner.username)) + "." + \
self.extension
|
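For illustration, the extension-to-MIME mapping in the `content_type` property of the `Log` model above can also be written as a dictionary lookup. This is a sketch only; the helper name and dict are not part of the original model.

```python
# Hypothetical helper, equivalent to the if-chain in Log.content_type.
CONTENT_TYPES = {
    "pdf": "application/pdf",
    "txt": "text/plain",
    "png": "image/png",
    "jpeg": "image/jpeg",
    "jpg": "image/jpeg",
    "gif": "image/gif",
}

def content_type_for(extension):
    # Fall back to forcing a download for unknown extensions.
    return CONTENT_TYPES.get(extension, "application/force-download")
```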
beanbaginc/django-evolution
|
django_evolution/utils/datastructures.py
|
Python
|
bsd-3-clause
| 2,717
| 0
|
"""Utilities for working with data structures.
Version Added:
2.1
"""
from __future__ import unicode_literals
from collections import OrderedDict
from django_evolution.compat import six
def filter_dup_list_items(items):
"""Return list items with duplicates filtered out.
The order of items will be preserved, but only the first occurrence of
any given item will remain in the list.
Version Added:
2.1
Args:
items (list):
The list of items.
Returns:
list:
The resulting de-duplicated list of items.
"""
return list(six.iterkeys(OrderedDict(
(item, True)
for item in items
)))
def merge_dicts(dest, source):
"""Merge two dictionaries together.
This will recursively merge a source dictionary into a destination
dictionary with the following rules:
* Any keys in the source that aren't in the destination will be placed
directly to the destination (using the same instance of the value, not
a
|
copy).
* Any lists that are in both the source and destination will be combined
by appending the source list to the destination list (and this will not
recurse into lists).
* Any dictionaries that are in both the source and destination will be
merged using this function.
* Any keys that are not a list or
|
dictionary that exist in both
dictionaries will result in a :py:exc:`TypeError`.
Version Added:
2.1
Args:
dest (dict):
The destination dictionary to merge into.
source (dict):
The source dictionary to merge into the destination.
Raises:
TypeError:
A key was present in both dictionaries with a type that could not
be merged.
"""
for key, value in six.iteritems(source):
if key in dest:
if isinstance(value, list):
if not isinstance(dest[key], list):
raise TypeError(
'Cannot merge a list into a %r for key "%s".'
% (type(dest[key]), key))
dest[key] += value
elif isinstance(value, dict):
if not isinstance(dest[key], dict):
raise TypeError(
'Cannot merge a dictionary into a %r for key "%s".'
% (type(dest[key]), key))
merge_dicts(dest[key], value)
else:
raise TypeError(
'Key "%s" was not an expected type (found %r) '
'when merging dictionaries.'
% (key, type(value)))
else:
dest[key] = value
|
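A minimal usage sketch for `merge_dicts`, assuming the function above is importable; the input values here are invented for illustration, not taken from the package's tests.

```python
dest = {"apps": ["a"], "settings": {"debug": True}}
source = {"apps": ["b"], "settings": {"level": 2}, "extra": 1}
merge_dicts(dest, source)
# dest == {"apps": ["a", "b"],
#          "settings": {"debug": True, "level": 2},
#          "extra": 1}
```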
UB-Heidelberg/UBHD-OMPArthistorikum
|
controllers/home.py
|
Python
|
gpl-3.0
| 318
| 0.006289
|
# -*- coding: utf-8 -*-
'''
Copyright (c) 2015 Heidelbe
|
rg University Library
Distributed under the GNU GPL v3. For full terms see the file
LICENSE.md
'''
from ompannouncements import Announcements
def index():
a = Announcemen
|
ts(myconf, db, locale)
news_list = a.create_announcement_list()
return locals()
|
edio/randrctl
|
randrctl/xrandr.py
|
Python
|
gpl-3.0
| 10,440
| 0.002969
|
import os
from functools import reduce, lru_cache
import logging
import re
import subprocess
from randrctl import DISPLAY, XAUTHORITY
from randrctl.exception import XrandrException, ParseException
from randrctl.model import Profile, Viewport, XrandrConnection, Display
logger = logging.getLogger(__name__)
class Xrandr:
"""
Interface to the xrandr application. Provides methods for calling xrandr and operating on Python objects such as
randrctl.profile.Profile
"""
EXECUTABLE = "/usr/bin/xrandr"
OUTPUT_KEY = "--output"
MODE_KEY = "--mode"
POS_KEY = "--pos"
ROTATE_KEY = "--rotate"
PANNING_KEY = "--panning"
RATE_KEY = "--rate"
SCALE_KEY = "--scale"
PRIMARY_KEY = "--primary"
CRTC_KEY = "--crtc"
QUERY_KEY = "-q"
VERBOSE_KEY = "--verbose"
OFF_KEY = "--off"
OUTPUT_DETAILS_REGEX = re.compile(
r'(?P<primary>primary )?(?P<geometry>[\dx\+]+) (?:(?P<rotate>\w+) )?.*?(?:panning (?P<panning>[\dx\+]+))?$')
MODE_REGEX = re.compile(r"(\d+x\d+)\+(\d+\+\d+)")
CURRENT_MODE_REGEX = re.compile(r"\s*(\S+)\s+([0-9\.]+)(.*$)")
def __init__(self, display: str, xauthority: str):
env = dict(os.environ)
if display:
env[DISPLAY] = display
if xauthority:
env[XAUTHORITY] = xauthority
self.env = env
def apply(self, profile: Profile):
"""
Apply given profile by calling xrandr
"""
logger.debug("Applying profile %s", profile.name)
args = self._compose_mode_args(profile, self.get_all_outputs())
self._xrandr(*args)
@lru_cache()
def _xrandr(self, *args):
"""
Perform call to xrandr executable with passed arguments.
Returns subprocess.Popen object
"""
args = list(args)
logger.debug("Calling xrandr with args %s", args)
args.insert(0, self.EXECUTABLE)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, env=self.env)
err = p.stderr.readlines()
if err:
# close descriptors
p.stderr.close()
p.stdout.close()
err_str = ''.join(map(lambda x: x.decode(), err)).strip()
raise XrandrException(err_str, args)
out = list(map(lambda x: x.decode(), p.stdout.readlines()))
if out:
out.pop(0) # remove first line. It describes Screen
return out
def _compose_mode_args(self, profile: Profile, xrandr_connections: list):
"""
Composes list of arguments to xrandr to apply profile settings and disable the other outputs
"""
args = []
active_names = []
for name, o in profile.outputs.items():
active_names.append(name)
args.append(self.OUTPUT_KEY)
args.append(name)
args.append(self.MODE_KEY)
args.append(o.mode)
args.append(self.POS_KEY)
args.append(o.pos)
args.append(self.ROTATE_KEY)
args.append(o.rotate)
args.append(self.PANNING_KEY)
args.append(o.panning)
args.append(self.SCALE_KEY)
args.append(o.scale)
if o.rate:
args.append(self.RATE_KEY)
args.append(str(o.rate))
if name == profile.primary:
args.append(self.PRIMARY_KEY)
if o.crtc is not None:
args.append(self.CRTC_KEY)
args.append(str(o.crtc))
# turn off the others
for c in xrandr_connections:
if active_names.count(c.name) == 0:
args.append(self.OUTPUT_KEY)
args.append(c.name)
args.append(self.OFF_KEY)
return args
def get_all_outputs(self):
"""
Query xrandr for all supported outputs.
Performs call to xrandr with -q key and parses output.
Returns list of outputs with some properties missing (only name and status are guaranteed)
"""
outputs = []
items = self._xrandr(self.QUERY_KEY)
items = self._group_query_result(items)
logger.debug("Detected total %d outputs", len(items))
crtcs = self._get_verbose_fields('CRTC')
for i in items:
o = self._parse_xrandr_connection(i)
o.crtc = int(crtcs[o.name]) if o.name in crtcs and len(crtcs[o.name]) else None
outputs.append(o)
return outputs
def get_connected_outputs(self):
"""
Query xrandr and return list of connected outputs.
Performs call to xrandr with -q and --verbose keys.
Returns list of connected outputs with all properties set
"""
outputs = list(filter(lambda o: o.display is not None, self.get_all_outputs()))
edids = self._get_verbose_fields('EDID')
for o in outputs:
o.display.edid = edids[o.name]
if logger.isEnabledFor(logging.DEBUG):
logger.debug("Connected outputs: %s", list(map(lambda o: o.name, outputs)))
return outputs
def _get_verbose_fields(self, field):
"""
Get particular field of all connected displays.
Return dictionary of {"connection_name": field_value}
"""
ret = dict()
items = self._xrandr(self.QUERY_KEY, self.VERBOSE_KEY)
items = self._group_query_result(items)
items = filter(lambda x: x[0].find(' connected') > 0, items)
for i in items:
name_idx = i[0].find(' ')
name = i[0][:name_idx]
ret[name] = self._field_from_query_item(i, field)
return ret
def _field_from_query_item(self, item_lines: list, field: str):
"""
Extracts display field from xrandr --verbose output
"""
val = ''
indent = ''
in_field = False
lines_collected = 0
for i, line in enumerate(item_lines):
m = re.match(r'(\s+)(.*):\s*(.*)$', line)
if m and m.group(2).lower() == field.lower():
indent = m.group(1)
in_fie
|
ld = True
val = m.group(3).strip()
elif in_field and m and (len(indent) >= len(
|
m.group(1)) or m.group(1) == indent):
return val
elif in_field and not line.startswith(indent):
return val
elif in_field:
val += line.strip()
lines_collected += 1
if field == 'EDID' and lines_collected >= 8:
return val
return val
def _parse_xrandr_connection(self, item_lines: list):
"""
Creates XrandrConnection from lines returned by xrandr --query.
Example:
LVDS1 connected primary 1366x768+0+312 (normal left inverted right x axis y axis) 277mm x 156mm
1366x768 60.02*+
1024x768 60.00
"""
connection_info = item_lines[0]
name, status, state = connection_info.split(' ', 2)
if status != 'connected':
# We are not connected, do not parse the rest.
return XrandrConnection(name)
# We are connected; parse the connected display.
display = self._parse_display(item_lines[1:])
if not display.is_on():
# inactive output
return XrandrConnection(name, display)
parsed = self.OUTPUT_DETAILS_REGEX.match(state)
if parsed is None:
raise ParseException(name, status, state)
primary = parsed.group('primary') is not None
rotate = parsed.group('rotate')
panning = parsed.group('panning')
geometry = parsed.group('geometry')
size, pos = self._parse_geometry(geometry)
is_rotated = rotate in ['left', 'right']
if is_rotated:
size = 'x'.join(size.split('x')[::-1])
scale = '1x1'
if size != display.mode:
dw, dh = map(lambda s: int(s), display.mode.split('x'))
vw, vh = map(lambda s: int(s), size.split('x'))
sw, sh = vw / dw, vh / dh
if is_rotated:
sw, sh = sh, sw
|
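`_parse_geometry` is truncated out of the sample above. As a rough sketch of the split it performs, the `MODE_REGEX` pattern shown earlier separates an xrandr geometry string into a size and a position; the function name below is hypothetical, not the actual implementation.

```python
import re

MODE_REGEX = re.compile(r"(\d+x\d+)\+(\d+\+\d+)")

def parse_geometry_sketch(geometry):
    # "1366x768+0+312" -> ("1366x768", "0+312")
    m = MODE_REGEX.match(geometry)
    return m.group(1), m.group(2)

size, pos = parse_geometry_sketch("1366x768+0+312")
```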
sunil07t/e-mission-server
|
bin/debug/load_timeline_for_day_and_user.py
|
Python
|
bsd-3-clause
| 1,612
| 0.008685
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import *
import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-r", "--
|
retain", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose", type=int,
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
|
print(fn)
print("Loading file " + fn)
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if not args.retain:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print("About to save %s" % entry)
tsdb.save(entry)
|
Vastra-Gotalandsregionen/verifierad.nu
|
helper.py
|
Python
|
mit
| 10,385
| 0.003467
|
# coding: utf-8
""" This file is where things are stuffed away. Probably you don't ever need to alter these definitions.
"""
import sys
import os.path
import uuid
import dateutil.parser
import datetime
from bs4 import BeautifulSoup
from urllib.parse import urlparse, urljoin
import gzip
import requests
import json
# internal
import _privatekeys as privatekeys
i = 0 # global iterator
def writeFile(file, content):
"""Writes a file at given location
Attributes: file for location, content for the file's contents
"""
f = open(file, 'w')
f.write(content)
f.close()
def delete_file(file):
os.remove(file)
def getUniqueId(length=5):
return str(uuid.uuid1()).replace('-', '')[:length]
def getKey(item):
return item[0]
def fetchUrlsFromSitemap(url, limit=None):
"""Given a URL of a sitemap or sitemapindex the contained URLs are returned as a list with tuples. Optional to limit the age of URLs.
Attributes: url (string), limit (datetime)
"""
# Documentation for sitemaps - https://www.sitemaps.org
found_urls = list()
sitemap = httpRequestGetContent(url)
global i
if limit is not None:
limit = dateutil.parser.parse(limit).replace(tzinfo=None) # converts to same format
if ('<sitemapindex' in str(sitemap)): # is the sitemap itself an index of sitemaps
sitemap_content = BeautifulSoup(sitemap, "html.parser")
for url in sitemap_content.findAll("loc"):
print("Siteindex found. Including URL:s from sitemap: '{0}'".format(url.text))
# fetching sitemap
sitemap_from_index = httpRequestGetContent(url.text)
sitemap_iteration = BeautifulSoup(sitemap_from_index, "html.parser")
for lvl1_url in sitemap_iteration.findAll("url"):
date = None
if (".pdf" not in lvl1_url.text.lower()) and (
".jpg" not in lvl1_url.text.lower()) and (
".mp4" not in lvl1_url.text.lower()) and (
".mp3" not in lvl1_url.text.lower()) and (
".txt" not in lvl1_url.text.lower()) and (
".png" not in lvl1_url.text.lower()) and (
".gif" not in lvl1_url.text.lower()) and (
".svg" not in lvl1_url.text.lower()) and (
".eps" not in lvl1_url.text.lower()) and (
".doc" not in lvl1_url.text.lower()) and (
".docx" not in lvl1_url.text.lower()) and (
".xls" not in lvl1_url.text.lower()) and (
".js" not in lvl1_url.text.lower()) and (
".css" not in lvl1_url.text.lower()) and (
".xlsx" not in lvl1_url.text.lower()) and (
".ttf" not in lvl1_url.text.lower()) and (
".eot" not in lvl1_url.text.lower()) and (
".bak" not in lvl1_url.text.lower()) and (
".woff"
|
not in lvl1_url.text.lower()) and (
"javascript:" not in lvl1_url.text.lower()) and (
"tel:" not in lvl1_url.text.lower()) and (
"mailto:" not in lvl1_url.text.lower()) and (
"#" not in lvl1_url.text.lower()):
if lvl1_url.lastmod is not None:
date = dateutil.pars
|
er.parse(lvl1_url.lastmod.string).replace(tzinfo=None)
if limit is not None and date is not None and date > limit:
date_and_url = (lvl1_url.lastmod.string, lvl1_url.loc.string)
found_urls.append(
date_and_url) # if date (lastmod) is missing the URL will not be checked
print(
'Found {0} URLs from multiple sitemaps in the siteindex you provided.'.format(
len(found_urls)))
return sorted(found_urls, key=getKey, reverse=True)
else:
soup = BeautifulSoup(sitemap, "html.parser")
for url in soup.findAll("url"):
date = None
if url.lastmod is not None:
date = dateutil.parser.parse(url.lastmod.string).replace(tzinfo=None)
if limit is not None and date is not None and date > limit:
date_and_url = (url.lastmod.string, url.loc.string)
found_urls.append(
date_and_url) # if date (lastmod) is missing the URL will not be checked
print('Found {0} URLs in the sitemap you provided.'.format(len(found_urls)))
return sorted(found_urls, key=getKey, reverse=True)
def fetchUrlsFromPage(url, num_limit=None, local_only=True):
"""Given a URL contained URLs are returned as a list with tuples. Optional to number of URLs and if to only include URLs within the local website.
Attributes: url (string), num_limit (integer), local_only (bool)
"""
main_url = urlparse(url)
found_urls = list()
page = httpRequestGetContent(url)
soup = BeautifulSoup(page, "html.parser")
i = 0
for the_url in soup.find_all('a', href=True):
if (".pdf" not in the_url['href'].lower()) and (
".jpg" not in the_url['href'].lower()) and (
".mp4" not in the_url['href'].lower()) and (
".mp3" not in the_url['href'].lower()) and (
".txt" not in the_url['href'].lower()) and (
".png" not in the_url['href'].lower()) and (
".gif" not in the_url['href'].lower()) and (
".svg" not in the_url['href'].lower()) and (
".eps" not in the_url['href'].lower()) and (
".doc" not in the_url['href'].lower()) and (
".docx" not in the_url['href'].lower()) and (
".xls" not in the_url['href'].lower()) and (
".js" not in the_url['href'].lower()) and (
".css" not in the_url['href'].lower()) and (
".xlsx" not in the_url['href'].lower()) and (
".ttf" not in the_url['href'].lower()) and (
".eot" not in the_url['href'].lower()) and (
".bak" not in the_url['href'].lower()) and (
".woff" not in the_url['href'].lower()) and (
"javascript:" not in the_url['href'].lower()) and (
"tel:" not in the_url['href'].lower()) and (
"callto:" not in the_url['href'].lower()) and (
"mailto:" not in the_url['href'].lower()) and (
"#" not in the_url['href'].lower()):
found_url = urlparse(the_url['href'])
if local_only and (len(found_url.netloc) == 0 or found_url.netloc == main_url.netloc):
if len(found_url.netloc) == 0:
found_url = urljoin(url, found_url.geturl())
if found_url not in found_urls: # making the entries unique
found_urls.append(found_url)
i+=1
if num_limit is not None:
found_urls = found_urls[:num_limit]
print('Found {0} URLs on the page you provided, returning {1} of them.'.format(i, len(found_urls)))
return found_urls[:num_limit]
def getGzipedContentFromUrl(url):
"""
Fetches a gzipped file from the Internet, unpacks it, and returns its contents.
"""
unique_id = getUniqueId(5)
file_name = 'tmp/file-{0}.gz'.format(unique_id)
try:
r = requests.get(url, stream=True)
with open(file_name, 'wb') as fd:
for chunk in r.iter_content(chunk_size=128):
fd.write(chunk)
with gzip.open(file_name, 'rb') as f:
file_content = f.read()
return file_content
except requests.exceptions.SSLError:
if 'http://' in url: # trying the same URL over SSL/TLS
return getGzipedContentFromUrl(url.replace('http://', 'https://'))
else:
return None
|
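The long chains of `not in` checks in `fetchUrlsFromSitemap` and `fetchUrlsFromPage` above could be collapsed into a single helper. This is an illustrative sketch, not part of helper.py; the names are hypothetical.

```python
SKIPPED_FRAGMENTS = (
    ".pdf", ".jpg", ".mp4", ".mp3", ".txt", ".png", ".gif", ".svg",
    ".eps", ".doc", ".docx", ".xls", ".js", ".css", ".xlsx", ".ttf",
    ".eot", ".bak", ".woff", "javascript:", "tel:", "callto:",
    "mailto:", "#",
)

def is_checkable_url(url):
    # True if the URL contains none of the skipped fragments.
    lowered = url.lower()
    return not any(fragment in lowered for fragment in SKIPPED_FRAGMENTS)
```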
birdsarah/bokeh
|
bokeh/models/plots.py
|
Python
|
bsd-3-clause
| 15,281
| 0.001701
|
""" Mo
|
dels for representing top-level plot objects.
"""
from __future__ import absolute_import
from six import string_types
from ..enums import Location
from ..mixins import LineProps, TextProps
from ..plot_object import PlotObject
from ..properties import Bool, Int, String, Color, Enum, Auto, Instance, Either, List, Dict, Include
from ..query import find
from ..util.string impo
|
rt nice_join
from .glyphs import Glyph
from .ranges import Range, Range1d
from .renderers import Renderer, GlyphRenderer
from .sources import DataSource, ColumnDataSource
from .tools import Tool, ToolEvents
from .widget import Widget
def _select_helper(args, kwargs):
"""
Allow flexible selector syntax.
Returns:
a dict
"""
if len(args) > 1:
raise TypeError("select accepts at most ONE positional argument.")
if len(args) > 0 and len(kwargs) > 0:
raise TypeError("select accepts EITHER a positional argument, OR keyword arguments (not both).")
if len(args) == 0 and len(kwargs) == 0:
raise TypeError("select requires EITHER a positional argument, OR keyword arguments.")
if args:
arg = args[0]
if isinstance(arg, dict):
selector = arg
elif isinstance(arg, string_types):
selector = dict(name=arg)
elif issubclass(arg, PlotObject):
selector = {"type" : arg}
else:
raise RuntimeError("Selector must be a dictionary, string or plot object.")
else:
selector = kwargs
return selector
class PlotContext(PlotObject):
""" A container for multiple plot objects.
``PlotContext`` objects are a source of confusion. Their purpose
is to collect together different top-level objects (e.g., ``Plot``
or layout widgets). The reason for this is that different plots may
need to share ranges or data sources between them. A ``PlotContext``
is a container in which such sharing can occur between the contained
objects.
"""
children = List(Instance(PlotObject), help="""
A list of top level objects in this ``PlotContext`` container.
""")
# TODO (bev) : is this used anywhere?
class PlotList(PlotContext):
# just like plot context, except plot context has special meaning
# everywhere, so plotlist is the generic one
pass
class Plot(Widget):
""" Model representing a plot, containing glyphs, guides, annotations.
"""
def __init__(self, **kwargs):
if "tool_events" not in kwargs:
kwargs["tool_events"] = ToolEvents()
super(Plot, self).__init__(**kwargs)
def select(self, *args, **kwargs):
''' Query this object and all of its references for objects that
match the given selector.
There are a few different ways to call the ``select`` method.
The most general is to supply a JSON-like query dictionary as the
single argument or as keyword arguments:
Args:
selector (JSON-like) : some sample text
Keyword Arguments:
kwargs : query dict key/values as keyword arguments
For convenience, queries on just names can be made by supplying
the ``name`` string as the single parameter:
Args:
name (str) : the name to query on
Also queries on just type can be made simply by supplying the
``PlotObject`` subclass as the single parameter:
Args:
type (PlotObject) : the type to query on
Returns:
seq[PlotObject]
Examples:
.. code-block:: python
# These two are equivalent
p.select({"type": HoverTool})
p.select(HoverTool)
# These two are also equivalent
p.select({"name": "mycircle"})
p.select("mycircle")
# Keyword arguments can be supplied in place of selector dict
p.select({"name": "foo", "type": HoverTool})
p.select(name="foo", type=HoverTool)
'''
selector = _select_helper(args, kwargs)
# Want to pass selector that is a dictionary
from ..plotting_helpers import _list_attr_splat
return _list_attr_splat(find(self.references(), selector, {'plot': self}))
def row(self, row, gridplot):
''' Return whether this plot is in a given row of a GridPlot.
Args:
row (int) : index of the row to test
gridplot (GridPlot) : the GridPlot to check
Returns:
bool
'''
return self in gridplot.row(row)
def column(self, col, gridplot):
''' Return whether this plot is in a given column of a GridPlot.
Args:
col (int) : index of the column to test
gridplot (GridPlot) : the GridPlot to check
Returns:
bool
'''
return self in gridplot.column(col)
def add_layout(self, obj, place='center'):
''' Adds an object to the plot in a specified place.
Args:
obj (Renderer) : the object to add to the Plot
place (str, optional) : where to add the object (default: 'center')
Valid places are: 'left', 'right', 'above', 'below', 'center'.
Returns:
None
'''
valid_places = ['left', 'right', 'above', 'below', 'center']
if place not in valid_places:
raise ValueError(
"Invalid place '%s' specified. Valid place values are: %s" % (place, nice_join(valid_places))
)
if hasattr(obj, 'plot'):
if obj.plot is not None:
raise ValueError("object to be added already has 'plot' attribute set")
obj.plot = self
self.renderers.append(obj)
if place != 'center':
getattr(self, place).append(obj)
def add_tools(self, *tools):
''' Adds tools to the plot.
Args:
*tools (Tool) : the tools to add to the Plot
Returns:
None
'''
if not all(isinstance(tool, Tool) for tool in tools):
raise ValueError("All arguments to add_tool must be Tool subclasses.")
for tool in tools:
if tool.plot is not None:
raise ValueError("tool %s to be added already has 'plot' attribute set" % tool)
tool.plot = self
self.tools.append(tool)
def add_glyph(self, source_or_glyph, glyph=None, **kw):
''' Adds a glyph to the plot with associated data sources and ranges.
This function will take care of creating and configuring a Glyph object,
and then adding it to the plot's list of renderers.
Args:
source (DataSource) : a data source for the glyphs to all use
glyph (Glyph) : the glyph to add to the Plot
Keyword Arguments:
Any additional keyword arguments are passed on as-is to the
Glyph initializer.
Returns:
glyph : Glyph
'''
if glyph is not None:
source = source_or_glyph
else:
source, glyph = ColumnDataSource(), source_or_glyph
if not isinstance(source, DataSource):
raise ValueError("'source' argument to add_glyph() must be DataSource subclass")
if not isinstance(glyph, Glyph):
raise ValueError("'glyph' argument to add_glyph() must be Glyph subclass")
g = GlyphRenderer(data_source=source, glyph=glyph, **kw)
self.renderers.append(g)
return g
x_range = Instance(Range, help="""
The (default) data range of the horizontal dimension of the plot.
""")
y_range = Instance(Range, help="""
The (default) data range of the vertical dimension of the plot.
""")
x_mapper_type = Either(Auto, String, help="""
What kind of mapper to use to convert x-coordinates in data space
into x-coordinates in screen space.
Typically this can be determined automatically, but this property
can be useful to, e.g., show datetime values as floating point
"seconds since epoch" instead of format
|
heyman/locust
|
locust/test/test_util.py
|
Python
|
mit
| 1,232
| 0.000812
|
import unittest
from locust.util.timespan import parse_timespan
from locust.util.rounding import proper_round
class TestParseTimespan(unittest.TestCase):
def test_parse_timespan_invalid_values(self):
self.assertRaises(ValueError, parse_timespan, None)
self.assertRaises(ValueError, parse_timespan, "")
self.assertRaises(ValueError, parse_timespan, "q")
def test_parse_timespan(self):
self.assertEqual(7, parse_timespan("7"))
self.assertEqual(7, parse_timespan("7s"))
self.assertEqual(60, parse_timespan("1m"))
self.assertEqual(7200, parse_timespan("2h"))
self.assertEqual(3787, parse_timespan("1h3m7s"))
class TestRounding(unittest.TestCase):
def test_rounding_down(self):
self.assertEqual(1, proper_round(1.499999999))
self.assertEqual(5, proper_round(5.499999999))
self.assertEqual(2, proper_round(2.05))
self.assertEqual(3, proper_round(3.05))
def test_rounding_up(self):
self.assertEqual(2, proper_round(1.5))
self.as
|
sertEqual(3, proper_round(2.5))
self.assertEqual(4, proper_round(3.5))
self.assertEqual(5, proper_round(4.5))
self.assertEqual(6, pro
|
per_round(5.5))
|
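The expected value 3787 in `test_parse_timespan` follows from plain arithmetic, as this small check illustrates:

```python
# Why parse_timespan("1h3m7s") should be 3787 seconds:
hours, minutes, seconds = 1, 3, 7
assert hours * 3600 + minutes * 60 + seconds == 3787
```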
dw/scratch
|
tcp_ka2.py
|
Python
|
mit
| 658
| 0
|
import socket
import sys
def set_keepalive(sock, interval=1, probes=5):
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, interval)
if hasattr(socket, 'TCP_KEEPCNT'):
sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPCNT, probes)
if hasattr(socket, 'TCP_KEEPIDLE'):
sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPIDLE, interval)
if hasattr(socket, 'TCP_KEEPINTVL'):
sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPINTVL, interval)
s = socket.socket()
s.bind(('', 0))
print s.getsockname
|
()
set_keepalive(s)
s.listen(1)
while True:
csock, addr = s.acc
|
ept()
set_keepalive(csock)
print csock.recv(512)
|
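The tcp_ka2.py sample above is Python 2. A minimal Python 3 sketch of the same keepalive setup, assuming a platform such as Linux that exposes the TCP_KEEPIDLE/TCP_KEEPINTVL/TCP_KEEPCNT options, might look like this:

```python
import socket

def set_keepalive_py3(sock, interval=1, probes=5):
    # Enable keepalive, then tune idle time, probe interval and probe count
    # where the platform exposes the options.
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
    for opt_name, value in (("TCP_KEEPIDLE", interval),
                            ("TCP_KEEPINTVL", interval),
                            ("TCP_KEEPCNT", probes)):
        if hasattr(socket, opt_name):
            sock.setsockopt(socket.IPPROTO_TCP, getattr(socket, opt_name), value)

server = socket.socket()
server.bind(("", 0))
set_keepalive_py3(server)
print(server.getsockname())
```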
Eric89GXL/vispy
|
vispy/visuals/transforms/base_transform.py
|
Python
|
bsd-3-clause
| 7,578
| 0.001715
|
# -*- coding: utf-8 -*-
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
API Issues to work out:
- MatrixTransform and STTransform both have 'scale' and 'translate'
attributes, but they are used in very different ways. It would be nice
to keep this consistent, but how?
- Need a transform.map_rect function that returns the bounding rectangle of
a rect after transformation. Non-linear transforms might need to work
harder at this, but we can provide a default implementation that
works by mapping a selection of points across a grid within the original
rect.
"""
from __future__ import division
from ..shaders import Function
from ...util.event import EventEmitter
class BaseTransform(object):
"""
BaseTransform is a base class that defines a pair of complementary
coordinate mapping functions in both python and GLSL.
All BaseTransform subclasses define map() and imap() methods that map
an object through the forward or inverse transformation, respectively.
The two class variables glsl_map and glsl_imap are instances of
shaders.Function that define the forward- and inverse-mapping GLSL
function code.
Optionally, an inverse() method returns a new transform performing the
inverse mapping.
Note that although all classes should define both map() and imap(), it
is not necessarily the case that imap(map(x)) == x; there may be instances
where the inverse mapping is ambiguous or otherwise meaningless.
"""
glsl_map = None # Must be GLSL code
glsl_imap = None
# Flags used to describe the transformation. Subclasses should define each
# as True or False.
# (usually used for making optimization decisions)
# If True, then for any 3 colinear points, the
# transformed points will also be colinear.
Linear = None
# The transformation's effect on one axis is independent
# of the input position along any other axis.
Orthogonal = None
# If True, then the distance between two points is the
# same as the distance between the transformed points.
NonScaling = None
# Scale factors are applied equally to all axes.
Isometric = None
def __init__(self):
self._inverse = None
self._dynamic = False
self.changed = EventEmitter(source=self, type='transform_changed')
if self.glsl_map is not None:
self._shader_map = Function(self.glsl_map)
if self.glsl_imap is not None:
self._shader_imap = Function(self.glsl_imap)
def map(self, obj):
"""
Return *obj* mapped through the forward transformation.
Parameters
----------
obj : tuple (x,y) or (x,y,z)
array with shape (..., 2) or (..., 3)
"""
raise NotImplementedError()
def imap(self, obj):
"""
Return *obj* mapped through the inverse transformation.
Parameters
----------
obj : tuple (x,y) or (x,y,z)
array with shape (..., 2) or (..., 3)
"""
raise NotImplementedError()
@property
def inverse(self):
""" The inverse of this transform.
"""
if self._inverse is None:
self._inverse = InverseTransform(self)
return self._inverse
@property
def dynamic(self):
"""Boolean flag that indicates whether this transform is expected to
change frequently.
Transforms that are flagged as dynamic will not be collapsed in
``ChainTransform.simplified``. This allows changes to the transform
to propagate through the chain without requiring the chain to be
re-simplified.
"""
return self._dynamic
@dynamic.setter
def dynamic(self, d):
self._dynamic = d
def shader_map(self):
"""
Return a shader Function that accepts only a single vec4 argument
and defines new attributes / uniforms supplying the Function with
any static input.
"""
return self._shader_map
def shader_imap(self):
"""
see shader_map.
"""
return self._shader_imap
def _shader_object(self):
""" This method allows transforms to be assigned directly t
|
o shader
template variables.
Example::
code = 'void main() { gl_Position = $transform($position); }'
func = shaders.Function(code)
tr =
|
STTransform()
func['transform'] = tr # use tr's forward mapping for $function
"""
return self.shader_map()
def update(self, *args):
"""
Called to inform any listeners that this transform has changed.
"""
self.changed(*args)
def __mul__(self, tr):
"""
Transform multiplication returns a new transform that is equivalent to
the two operands performed in series.
By default, multiplying two Transforms `A * B` will return
ChainTransform([A, B]). Subclasses may redefine this operation to
return more optimized results.
To ensure that both operands have a chance to simplify the operation,
all subclasses should follow the same procedure. For `A * B`:
1. A.__mul__(B) attempts to generate an optimized transform product.
2. If that fails, it must:
* return super(A).__mul__(B) OR
* return NotImplemented if the superclass would return an
invalid result.
3. When BaseTransform.__mul__(A, B) is called, it returns
NotImplemented, which causes B.__rmul__(A) to be invoked.
4. B.__rmul__(A) attempts to generate an optimized transform product.
5. If that fails, it must:
* return super(B).__rmul__(A) OR
* return ChainTransform([B, A]) if the superclass would return
an invalid result.
6. When BaseTransform.__rmul__(B, A) is called, ChainTransform([A, B])
is returned.
"""
# switch to __rmul__ attempts.
# Don't use the "return NotImplemted" trick, because that won't work if
# self and tr are of the same type.
return tr.__rmul__(self)
def __rmul__(self, tr):
return ChainTransform([tr, self])
def __repr__(self):
return "<%s at 0x%x>" % (self.__class__.__name__, id(self))
def __del__(self):
# we can remove ourselves from *all* events in this situation.
self.changed.disconnect()
class InverseTransform(BaseTransform):
def __init__(self, transform):
BaseTransform.__init__(self)
self._inverse = transform
self.map = transform.imap
self.imap = transform.map
@property
def Linear(self):
return self._inverse.Linear
@property
def Orthogonal(self):
return self._inverse.Orthogonal
@property
def NonScaling(self):
return self._inverse.NonScaling
@property
def Isometric(self):
return self._inverse.Isometric
@property
def shader_map(self):
return self._inverse.shader_imap
@property
def shader_imap(self):
return self._inverse.shader_map
def __repr__(self):
return ("<Inverse of %r>" % repr(self._inverse))
# import here to avoid import cycle; needed for BaseTransform.__mul__.
from .chain import ChainTransform # noqa
|
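As the BaseTransform docstring explains, subclasses supply `map()` and `imap()` (and optionally GLSL mappings). A hypothetical minimal subclass, leaving `glsl_map`/`glsl_imap` as None and assuming the BaseTransform above is importable, might look like this sketch; it is not part of vispy.

```python
class OffsetTransform(BaseTransform):
    """Illustrative only: shifts coordinates by a fixed 2D offset."""
    Linear = True
    Orthogonal = True
    NonScaling = True
    Isometric = True

    def __init__(self, offset=(0.0, 0.0)):
        super(OffsetTransform, self).__init__()
        self._offset = offset

    def map(self, obj):
        # Forward mapping: add the offset component-wise.
        return tuple(c + o for c, o in zip(obj, self._offset))

    def imap(self, obj):
        # Inverse mapping: subtract the offset component-wise.
        return tuple(c - o for c, o in zip(obj, self._offset))
```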
liampauling/flumine
|
tests/test_config.py
|
Python
|
mit
| 865
| 0
|
import unittest
from flumine import config
class ConfigTest(unittest.TestCase):
def test_init(self):
self.assertFalse(config.simulated)
self.assertTrue(config.simulated_stra
|
tegy_isolation)
self.assertIsInstance(config.customer_strategy_ref, str)
self.assertIsInstance(con
|
fig.process_id, int)
self.assertIsNone(config.current_time)
self.assertFalse(config.raise_errors)
self.assertEqual(config.max_execution_workers, 32)
self.assertFalse(config.async_place_orders)
self.assertEqual(config.place_latency, 0.120)
self.assertEqual(config.cancel_latency, 0.170)
self.assertEqual(config.update_latency, 0.150)
self.assertEqual(config.replace_latency, 0.280)
self.assertEqual(config.order_sep, "-")
self.assertEqual(config.execution_retry_attempts, 10)
|
tochikuji/pyPyrTools
|
pyrtools/mkRamp.py
|
Python
|
mit
| 1,617
| 0.002474
|
import numpy
import math
def mkRamp(*args):
''' mkRamp(SIZE, DIRECTION, SLOPE, INTERCEPT, ORIGIN)
Compute a matrix of dimension SIZE (a [Y X] 2-vector, or a scalar)
containing samples of a ramp function, with given gradient DIRECTION
(radians, CW from X-axis, default = 0), SLOPE (per pixel, default =
1), and a value of INTERCEPT (default = 0) at the ORIGIN (default =
(size+1)/2, [1 1] = upper left). All but the first argument are
optional '''
if len(args) == 0:
print("mkRamp(SIZE, DIRECTION, SLOPE, INTERCEPT, ORIGIN)")
print("first argument is required")
exit(1)
else:
sz = args[0]
if isinstance(sz, (int)):
sz = (sz, sz)
elif not isinstance(sz, (tuple)):
print("first argument must be a two element tuple or an integer")
exit(1)
# OPTIONAL args:
if len(args) > 1:
direction = args[1]
else:
direction = 0
if len(args) >
|
2:
slope = args[2]
|
else:
slope = 1
if len(args) > 3:
intercept = args[3]
else:
intercept = 0
if len(args) > 4:
origin = args[4]
else:
origin = (float(sz[0] - 1) / 2.0, float(sz[1] - 1) / 2.0)
#--------------------------
xinc = slope * math.cos(direction)
yinc = slope * math.sin(direction)
[xramp, yramp] = numpy.meshgrid(xinc * (numpy.array(list(range(sz[1]))) - origin[1]),
yinc * (numpy.array(list(range(sz[0]))) - origin[0]))
res = intercept + xramp + yramp
return res
|
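A small usage sketch for `mkRamp`, with values chosen here for illustration: with the default direction of 0 radians the ramp increases left to right at one unit per pixel around the centre of the array.

```python
import numpy

ramp = mkRamp((4, 4))   # 4x4 ramp, direction 0, slope 1, intercept 0
# Each row is [-1.5, -0.5, 0.5, 1.5]: x offsets from the default origin.
print(ramp)
```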
Acimaz/Google_Apple_Financial_Reporter
|
AppleReporter.py
|
Python
|
mit
| 4,732
| 0.005283
|
from subprocess import *
import gzip
import string
import os
import time
import ApplePythonReporter
class ApplePythonReport:
vendorId = YOUR_VENDOR_ID
userId = 'YOUR_ITUNES_CONNECT_ACCOUNT_MAIL'
password = 'ITUNES_CONNECT_PASSWORD'
account = 'ACCOUNT_ID'
mode = 'Robot.XML'
dateType = 'Daily'
eventIndex = 1
activeSubscriberIndex = 16
quantityIndex = 25
subscribers = 0
cancellations = 0
activeSubscribers = 0
maxAttempts = 5
def __init__(self, reportDate):
self.DownloadSubscriptionEventReport(reportDate)
self.DownloadSubscriptionReport(reportDate)
self.FetchSubscriptionEventData(reportDate)
self.FetchSubscriptionData(reportDate)
self.CleanUp(reportDate)
def DownloadSubscriptionEventReport(self, date):
print 'Downloading Apple Financial Report for Subscriptions (' + date + ')..'
credentials = (self.userId, self.password, self.account, self.mode)
command = 'Sales.getReport, {0},SubscriptionEvent,Summary,{1},{2}'.format(self.vendorId, self.dateType, date)
try:
ApplePythonReporter.output_result(ApplePythonReporter.post_request(ApplePythonReporter.ENDPOINT_SALES,
credentials, command))
except Exception:
pass
#return iter(p.stdout.readline, b'')
def DownloadSubscriptionReport(self, date):
print 'Downloading Apple Financial Report for Active Users (' + date + ')..'
credentials = (self.userId, self.password, self.account, self.mode)
command = 'Sales.getReport, {0},Subscription,Summary,{1},{2}'.format(self.vendorId, self.dateType, date)
try:
ApplePythonReporter.output_result(ApplePythonReporter.post_request(ApplePythonReporter.ENDPOINT_SALES,
credentials, command))
except:
pass
#return iter(p.stdout.readline, b'')
#Uncompress and extract needed values (cancellations and new subscribers)
def FetchSubscriptionEventData(self, date):
fileName = 'Subscription_Event_'+self.vendorId+'_' + date + '.txt'
attempts = 0
while not os.path.isfile(fileName):
|
if(attempts >= self.maxAttempts):
break
attempts += 1
time.sleep(1)
if os.path.isfile(fileName):
print 'Fetching SubscriptionEvents..'
with open(fileName, 'rb') as inF:
text = inF.read().splitlines
|
()
for row in text[1:]:
line = string.split(row, '\t')
# print line[self.eventIndex].__str__()
if line[0].__str__().endswith(date[-2:]):
if line[self.eventIndex] == 'Cancel':
self.cancellations += int(line[self.quantityIndex])
if line[self.eventIndex] == 'Subscribe':
self.subscribers += int(line[self.quantityIndex])
else:
print 'SubscriptionEvent: There were no sales for the date specified'
# Uncompress and extract needed values (active users)
def FetchSubscriptionData(self, date):
fileName = 'Subscription_'+self.vendorId+'_' + date + '.txt'
attempts = 0
while not os.path.isfile(fileName):
if (attempts >= self.maxAttempts):
break
attempts += 1
time.sleep(1)
if os.path.isfile(fileName):
print 'Fetching Subscriptions..'
with open(fileName, 'rb') as inF:
text = inF.read().splitlines()
for row in text[1:]:
line = string.split(row, '\t')
# print line[0].__str__()
self.activeSubscribers += int(line[self.activeSubscriberIndex])
else:
print 'Subscription: There were no sales for the date specified'
def CleanUp(self, date):
if os.path.isfile('Subscription_'+self.vendorId.__str__() +'_' + date + '.txt'):
os.remove('Subscription_'+self.vendorId.__str__()+'_' + date + '.txt')
else:
print 'Subscription_'+self.vendorId.__str__()+'_' + date + '.txt doesnt exist: Maybe there were no Sales at the specified date'
if os.path.isfile('Subscription_Event_'+self.vendorId.__str__()+'_' + date + '.txt'):
os.remove('Subscription_Event_'+self.vendorId.__str__()+'_' + date + '.txt')
else:
print 'Subscription_Event_'+self.vendorId.__str__()+'_' + date + '.txt doesnt exist: Maybe there were no Sales at the specified date'
|
markreidvfx/pyaaf2
|
examples/import_media.py
|
Python
|
mit
| 17,403
| 0.008045
|
#!/usr/bin/env python
from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
import aaf2
import traceback
import subprocess
import json
import os
import datetime
import sys
import tempfile
import shutil
import time
import fractions
from aaf2 import auid
from pprint import pprint
FFMPEG_EXEC = "ffmpeg"
FFPROBE_EXEC = "ffprobe"
Audio_Profiles = aaf2.audio.pcm_profiles
Video_Profiles = aaf2.video.dnx_profiles
# FFMPEG_EXEC = "/Users/mark/Dev/ffmpeg/ffmpeg_g"
# FFPROBE_EXEC = "/Users/mark/Dev/ffmpeg/ffprobe_g"
def probe(path, show_packets=False):
cmd = [FFPROBE_EXEC, '-of','json','-show_format','-show_streams', path]
if show_packets:
cmd.extend(['-show_packets',])
print(subprocess.list2cmdline(cmd))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout,stderr = p.communicate()
if p.returncode != 0:
raise subprocess.CalledProcessError(p.returncode, subprocess.list2cmdline(cmd), stderr)
return json.loads(stdout)
def timecode_to_seconds(time_string):
try:
return float(time_string)
except:
pass
for format in ("%H:%M:%S.%f", "%H:%M:%S", "%M:%S.%f","%M:%S"):
try:
t = datetime.datetime.strptime(time_string, format)
seconds = 0
if t.minute:
seconds += 60*t.minute
if t.hour:
seconds += 60 * 60 * t.hour
seconds += t.second
seconds += float(t.strftime(".%f"))
return seconds
except:
#print traceback.format_exc()
pass
raise ValueError("invalid time format: %s" % time_string)
def seconds_to_timecode(seconds)
|
:
format = "%S.%f"
t = datetime.timedelta(seconds=float(seconds))
return str(t)
def has_alpha(stream):
if stream['pix_fmt'] in ('yuva444p10le','rgba'):
return True
return False
def c
|
onform_media(path,
output_dir,
start=None,
end=None,
duration=None,
width=None,
height=None,
frame_rate=None,
video_profile_name=None,
audio_profile_name=None,
ignore_alpha=False):
if not video_profile_name:
video_profile_name = 'dnx_1080p_36_23.97'
if not audio_profile_name:
audio_profile_name = 'pcm_48000_s16le'
video_profile = Video_Profiles[video_profile_name]
audio_profile = Audio_Profiles[audio_profile_name]
format = probe(path)
out_files = []
cmd = [FFMPEG_EXEC,'-y', '-nostdin']
# cmd.extend(['-loglevel', 'debug'])
if end:
duration = timecode_to_seconds(end) - timecode_to_seconds(start)
duration = seconds_to_timecode(duration)
end = None
if start:
start_seconds = timecode_to_seconds(start)
fast_start = max(0,int(start_seconds-30))
if fast_start:
start = seconds_to_timecode(start_seconds - fast_start)
cmd.extend(['-ss', seconds_to_timecode(fast_start)])
frame_rate = video_profile['frame_rate']
pix_fmt = video_profile['pix_fmt']
bitrate = video_profile['bitrate']
dnxhd_profile = video_profile.get("video_profile", None)
if format['format']['format_name'] == "image2":
frame_rate = frame_rate or "24000/1001"
cmd.extend([ '-framerate', frame_rate])
cmd.extend(['-i', path,])
if video_profile['size']:
width, height = video_profile['size']
else:
width = None
height = None
interlaced = video_profile['interlaced']
#sample_rate =44100
sample_rate = audio_profile['sample_rate']
for stream in format['streams']:
#pprint(stream)
stream_index = stream['index']
if stream['codec_type'] == 'video':
out_meta = {}
# pprint(stream)
alpha = has_alpha(stream)
passes = 1
if alpha and not ignore_alpha:
passes = 2
for i in range(passes):
if i == 1:
cmd.extend(['-an', '-f', 'rawvideo', '-pix_fmt', 'gray'])
if frame_rate:
cmd.extend(['-r', frame_rate])
else:
cmd.extend(['-an','-vcodec', 'dnxhd', '-pix_fmt', pix_fmt])
if dnxhd_profile:
cmd.extend(['-profile:v', dnxhd_profile])
if bitrate:
cmd.extend(['-vb', '%dM' % bitrate])
if frame_rate:
cmd.extend(['-r', frame_rate])
if not start is None:
cmd.extend(['-ss', str(start)])
if not duration is None:
cmd.extend(['-t', str(duration)])
vfilter = []
if i == 1:
vfilter.append("alphaextract")
if width and height:
out_width = width
out_height = height
input_width = stream['width']
input_height = stream['height']
max_width = width
max_height = height
scale = min(max_width/ float(input_width), max_height/float(input_height) )
scale_width = int(input_width*scale)
scale_height = int(input_height*scale)
padding_ofs_x = (max_width - scale_width)//2
padding_ofs_y = (max_height - scale_height)//2
vfilter.append("scale=%d:%d,pad=%d:%d:%d:%d" % (scale_width,scale_height,
max_width,max_height, padding_ofs_x,padding_ofs_y))
else:
out_width = stream['width']
out_height = stream['height']
if vfilter:
cmd.extend(['-vf', ','.join(vfilter)])
# cmd.extend(['-s', "%dx%d" % (width, height)])
if i == 1:
out_file = os.path.join(output_dir, 'out_%d.alpha' % (stream_index))
out_meta['path_alpha'] = out_file
else:
out_rate = frame_rate or str(stream['avg_frame_rate'])
out_file = os.path.join(output_dir, 'out_%d.dnxhd' % (stream_index))
out_meta = {'path':out_file, 'frame_rate':out_rate, 'type': 'video', 'profile':video_profile_name}
out_meta['width'] = out_width
out_meta['height'] = out_height
cmd.extend([out_file])
#pprint(stream)
print("USING FRAMREATE", out_rate, str(stream['avg_frame_rate']))
out_files.append(out_meta)
elif stream['codec_type'] == 'audio':
input_sample_rate = int(stream['sample_rate'])
channels = stream['channels']
cmd.extend(['-vn', '-acodec', 'pcm_s16le', '-ar', str(sample_rate)])
# afilter = ['-af', "aresample=async=1:first_pts=0"]
# cmd.extend(afilter)
if not start is None:
cmd.extend(['-ss', str(start)])
if not duration is None:
cmd.extend(['-t', str(duration)])
out_file = os.path.join(output_dir, 'out_%d_%d_%d.wav' % (stream_index, sample_rate, channels))
cmd.extend([out_file])
out_files.append({'path':out_file, 'sample_rate':sample_rate, 'channels':channels,'type': 'audio'})
print(subprocess.list2cmdline(cmd))
subprocess.check_call(cmd)
return out_files
def create_matte_key_definition(f):
opdef = f.create.OperationDef(auid.AUID("0c864774-e428-3b2d-8115-1c736806191a"), 'MatteKey_2')
opdef['IsTimeWarp'].value = False
opdef['OperationCategory'].value = 'OperationCategory_Effect'
opdef['NumberInputs'].value = 3
opdef['Bypass'].value = 2
opdef.media_kind = "picture"
f.dictionary.register_def(opdef)
return opdef
def import_video
|
arfc/moltres
|
property_file_dir/cnrs-benchmark/feedback.py
|
Python
|
lgpl-2.1
| 2,292
| 0
|
import numpy as np
def extrapolate(xs_name):
"""Extrapolate cross section based on thermal salt expansion feedback.
Extrapolates cross section data at 900 K to 1500 K at 50 K intervals
based on the thermal salt expansion feedback formula from [1]. Writes
the extrapolated data back into the .txt cross section files in the
Moltres-compatible format.
Parameters
----------
xs_name : list of str
Names of cross sections to be extrapolated.
Returns
-------
None
References
----------
[1] Tiberga et al., "Results from a multi-physics numerical benchmark for
|
codes dedicated to molten salt fast reactors," Annals of Nuclear Energy,
vol. 142, July 2020, 107428.
"""
rho_900 = 2.0e3 # Density at 900 K [kg m-3]
alpha = 2.0e-4 # Thermal expansion coeff [K-1]
input_file = "benchmark_" + xs_name + ".txt"
# Setup temperature values to extrapolate to
temp = np.linspace(950, 1500, 12)
# Read cross section data at 900K
|
f = open(input_file, 'r+')
lines = f.readlines()
data_900 = list(lines[0].split())
f.close()
# Setup space separated data to be written back into txt
s = " "
xs = [s.join(data_900) + "\n"]
h = open(input_file, 'w')
for i in range(len(temp)):
# Calculate density at temp[i]
rho = rho_900 * (1 - alpha * (temp[i]-900))
# Apply extrapolation formula at temp[i]
data_next = [0, ] * len(data_900)
data_next[0] = str(temp[i])
for j in range(1, len(data_900)):
if xs_name == "DIFFCOEF":
data = float(data_900[j]) / rho * rho_900
data_next[j] = '{:0.5e}'.format(data)
else:
data = float(data_900[j]) * rho / rho_900
data_next[j] = '{:0.5e}'.format(data)
data_next = s.join(data_next) + "\n"
xs.append(data_next)
# Write cross section data into txt file
h = open(input_file, 'w')
h.writelines(xs)
h.close()
return
def main():
"""Runs extrapolate() for the relevant cross sections.
"""
xs_names = ["DIFFCOEF", "FISS", "NSF", "REMXS", "SP0"]
for i in xs_names:
extrapolate(i)
return
if __name__ == "__main__":
main()
|
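A worked sketch of the density feedback used by `extrapolate` above, evaluating the script's constants at one example temperature:

```python
rho_900 = 2.0e3   # density at 900 K [kg m-3]
alpha = 2.0e-4    # thermal expansion coefficient [K-1]

T = 1000.0                                 # example temperature [K]
rho = rho_900 * (1 - alpha * (T - 900))    # = 1960 kg m-3
# Most cross sections scale with density:  xs_T = xs_900 * rho / rho_900
# DIFFCOEF scales inversely with density:  D_T  = D_900 * rho_900 / rho
```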
particl/particl-core
|
test/functional/feature_maxtipage.py
|
Python
|
mit
| 1,997
| 0.002003
|
#!/usr/bin/env python3
# Copyright (c) 2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test logic for setting nMaxTipAge on command line.
Nodes don't consider themselves out of "initial block download" as long as
their best known block header time is more than nMaxTipAge in the past.
"""
import time
from test_framework.test_framework import BitcoinTestFra
|
mework
from test_framework.util import assert_equal
DEFAULT_MAX_TIP_AGE = 24 * 60 * 60
class MaxTipAgeTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def test_maxtipage(self, maxtipage, set_parameter=True):
node_miner = self.nodes[0]
node_ibd = self.nodes[1]
self.restart_node(1, [f'-maxtipage={maxtipage}'] if set_parameter else None)
self.con
|
nect_nodes(0, 1)
# tips older than maximum age -> stay in IBD
cur_time = int(time.time())
node_ibd.setmocktime(cur_time)
for delta in [5, 4, 3, 2, 1]:
node_miner.setmocktime(cur_time - maxtipage - delta)
self.generate(node_miner, 1)
assert_equal(node_ibd.getblockchaininfo()['initialblockdownload'], True)
# tip within maximum age -> leave IBD
node_miner.setmocktime(cur_time - maxtipage)
self.generate(node_miner, 1)
assert_equal(node_ibd.getblockchaininfo()['initialblockdownload'], False)
def run_test(self):
self.log.info("Test IBD with maximum tip age of 24 hours (default).")
self.test_maxtipage(DEFAULT_MAX_TIP_AGE, set_parameter=False)
for hours in [20, 10, 5, 2, 1]:
maxtipage = hours * 60 * 60
self.log.info(f"Test IBD with maximum tip age of {hours} hours (-maxtipage={maxtipage}).")
self.test_maxtipage(maxtipage)
if __name__ == '__main__':
MaxTipAgeTest().main()
|
liqd/adhocracy4
|
tests/forms/test_forms.py
|
Python
|
agpl-3.0
| 1,198
| 0
|
import pytest
from dateutil.parser import parse
from django import forms
from adhocracy4.forms.fields import DateTimeField
class DateTimeForm(forms.Form):
date = DateTimeField(
time_format='%H:%M',
required=False,
require_all_fields=False,
)
@pytest.mark.django_db
def test_datetimefield_valid(user):
data = {'date_0': '2023-01-01', 'date_1': '12:30'}
form = DateTimeForm(data=data)
assert form.is_valid()
assert form.cleaned_data['date'] == \
parse('2023-01-01 12:30:00 UTC')
@pytest.mark.django_db
def test_datetimefield_invalid(user):
data = {'date_0': 'not a date', 'date_1': '12:30'}
form = DateTimeForm(data=data)
assert not form.is_valid()
@pytest.mark.django_db
def test_datetimefield_empty_none(user):
data = {'date_0': '', 'date_1': ''}
form = DateTimeForm(data=data)
assert form.is_valid()
assert form.cleaned_data['date'] is None
@pytest.mark.django_db
def test_datetimefield_default_time(user):
da
|
ta = {'date_0': '2023-01-01', 'date_1': ''}
form = DateTimeForm(data=data)
assert form.is_valid()
assert form.cleaned_
|
data['date'] == \
parse('2023-01-01 00:00:00 UTC')
|
redhat-openstack/manila
|
manila/tests/api/v1/test_share_types.py
|
Python
|
apache-2.0
| 8,424
| 0
|
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import mock
from oslo_utils import timeutils
import webob
from manila.api.v1 import share_types as types
from manila.api.views import types as views_types
from manila.common import constants
from manila import exception
from manila import policy
from manila.share import share_types
from manila import test
from manila.tests.api import fakes
def stub_share_type(id):
specs = {
"key1": "value1",
"key2": "value2",
"key3": "value3",
"key4": "value4",
"key5": "value5",
constants.ExtraSpecs.DRIVER_HANDLES_SHARE_SERVERS: "true",
}
return dict(
id=id,
name='share_type_%s' % str(id),
extra_specs=specs,
required_extra_specs={
constants.ExtraSpecs.DRIVER_HANDLES_SHARE_SERVERS: "true",
}
)
def return_share_types_get_all_types(context, search_opts=None):
return dict(
share_type_1=stub_share_type(1),
share_type_2=stub_share_type(2),
share_type_3=stub_share_type(3)
)
def return_empty_share_types_get_all_types(context, search_opts=None):
return {}
def return_share_types_get_share_type(context, id=1):
if id == "777":
raise exception.ShareTypeNotFound(share_type_id=id)
return stub_share_type(int(id))
def return_share_types_get_by_name(context, name):
if name == "777":
raise exception.ShareTypeNotFoundByName(share_type_name=name)
return stub_share_type(int(name.split("_")[2]))
@ddt.ddt
class ShareTypesApiTest(test.TestCase):
def setUp(self):
super(ShareTypesApiTest, self).setUp()
self.controller = types.ShareTypesController()
self.mock_object(policy, 'check_policy',
mock.Mock(return_value=True))
@ddt.data(True, False)
def test_share_types_index(self, admin):
self.mock_object(share_types, 'get_all_types',
return_share_types_get_all_types)
req = fakes.HTTPRequest.blank('/v2/fake/types',
use_admin_context=admin)
res_dict = self.controller.index(req)
self.assertEqual(3, len(res_dict['share_types']))
expected_names = ['share_type_1', 'share_type_2', 'share_type_3']
actual_names = map(lambda e: e['name'], res_dict['share_types'])
self.assertEqual(set(actual_names), set(expected_names))
for entry in res_dict['share_types']:
if admin:
self.assertEqual('value1', entry['extra_specs'].get('key1'))
else:
self.assertIsNone(entry['extra_specs'].get('key1'))
self.assertTrue('required_extra_specs' in entry)
required_extra_spec = entry['required_extra_specs'].get(
constants.ExtraSpecs.DRIVER_HANDLES_SHARE_SERVERS, '')
self.assertEqual('true', required_extra_spec)
policy.check_policy.assert_called_once_with(
req.environ['manila.context'], types.RESOURCE_NAME, 'index')
def test_share_types_index_no_data(self):
self.mock_object(share_types, 'get_all_types',
return_empty_share_types_get_all_types)
req = fakes.HTTPRequest.blank('/v2/fake/types')
res_dict = self.controller.index(req)
self.assertEqual(0, len(res_dict['share_types']))
policy.check_policy.assert_called_once_with(
req.environ['manila.context'], types.RESOURCE_NAME, 'index')
def test_share_types_show(self):
self.mock_object(share_types, 'get_share_type',
return_share_types_get_share_type)
req = fakes.HTTPRequest.blank('/v2/fake/types/1')
res_dict = self.controller.show(req, 1)
self.assertEqual(2, len(res_dict))
self.assertEqual('1', res_dict['share_type']['id'])
self.assertEqual('share_type_1', res_dict['share_type']['name'])
policy.check_policy.assert_called_once_with(
req.environ['manila.context'], types.RESOURCE_NAME, 'show')
def test_share_types_show_not_found(self):
self.mock_object(share_types, 'get_share_type',
return_share_types_get_share_type)
req = fakes.HTTPRequest.blank('/v2/fake/types/777')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.show,
req, '777')
policy.check_policy.assert_called_once_with(
req.environ['manila.context'], types.RESOURCE_NAME, 'show')
def test_share_types_default(self):
self.mock_object(share_types, 'get_default_share_type',
return_share_types_get_share_type)
req = fakes.HTTPRequest.blank('/v2/fake/types/default')
res_dict = self.controller.default(req)
self.assertEqual(2, len(res_dict))
self.assertEqual('1', res_dict['share_type']['id'])
self.assertEqual('share_type_1', res_dict['share_type']['name'])
policy.check_policy.assert_called_once_with(
req.environ['manila.context'], types.RESOURCE_NAME, 'default')
def test_share_types_default_not_found(self):
self.mock_object(share_types, 'get_default_share_type',
mock.Mock(side_effect=exception.ShareTypeNotFound(
share_type_id="fake")))
req = fakes.HTTPRequest.blank('/v2/fake/types/default')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.default, req)
policy.check_policy.assert_called_once_with(
req.environ['manila.context'], types.RESOURCE_NAME, 'default')
def test_view_builder_show(self):
view_builder = views_types.ViewBuilder()
now = timeutils.isotime()
raw_share_type = dict(
name='new_type',
deleted=False,
created_at=now,
updated_at=now,
extra_specs={},
deleted_at=None,
required_extra_specs={},
id=42,
)
request = fakes.HTTPRequest.blank("/v2")
output = view_builder.show(request, raw_share_type)
self.assertIn('share_type', output)
expected_share_type = dict(
name='new_type',
extra_specs={},
required_extra_specs={},
id=42,
)
self.assertDictMatch(output['share_type'], expected_share_type)
def test_view_builder_list(self):
view_builder = views_types.ViewBuilder()
now = timeutils.isotime()
raw_share_types = []
for i in range(0, 10):
raw_share_types.append(
dict(
name='new_type',
deleted=False,
created_at=now,
updated_at=now,
extra_specs={},
required_extra_specs={},
deleted_at=None,
id=42 + i
)
)
request = fakes.HTTPRequest.blank("/v2")
output = view_builder.index(request, raw_share_types)
self.assertIn('share_types', output)
for i in range(0, 10):
expected_share_type = dict(
name='new_type',
extra_specs={},
required_extra_specs={},
id=42 + i
)
            self.assertDictMatch(output['share_types'][i],
                                 expected_share_type)
@ddt.data(None, True, 'true', 'false', 'all')
def test_parse_is_public_valid(self, value):
result = self.controller._parse_is_public(value)
se
|
brettwooldridge/buck
|
scripts/artificialproject/file_path_generator.py
|
Python
|
apache-2.0
| 7,481
| 0.000535
|
# Copyright 2018-present Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import os
from artificialproject.field_generators import (
GenerationFailedException,
StringGenerator,
)
from artificialproject.random import weighted_choice
class FilePathGenerator:
BUILD_FILE_NAME = "BUCK"
def __init__(self):
self._component_generator = StringGenerator()
self._file_samples = collections.defaultdict(
lambda: collections.defaultdict(set)
)
self._file_samples_dirty = False
self._package_depths = collections.Counter()
self._file_depths_in_package = collections.Counter()
self._sizes_by_depth = collections.defaultdict(collections.Counter)
self._sizes_by_depth_in_package = collections.defaultdict(collections.Counter)
self._build_file_sizes = collections.Counter()
self._root = {}
self._package_paths = {}
self._available_directories = {}
self._last_package_path = None
self._last_package_remaining_targets = None
def analyze_project_data(self, project_data):
dir_entries = collections.defaultdict(set)
build_file_entries = collections.defaultdict(set)
for target_data in project_data.values():
base_path = target_data["buck.base_path"]
build_file_entries[base_path].add(target_data["name"])
components = self._split_path_into_components(base_path)
# TODO(jakubzika): Targets in the root of the repo are ignored
# because _generate_path does not handle depth == 0.
if components:
self._package_depths.update([len(components)])
for component in components:
self._component_generator.add_string_sample(component)
for i, name in enumerate(components):
prefix = components[:i]
dir_entries[tuple(prefix)].add(name)
for base_path, names in build_file_entries.items():
self._build_file_sizes.update([len(names)])
for path, entries in dir_entries.items():
self._sizes_by_depth[len(path)].update([len(entries)])
def add_package_file_sample(self, package_path, relative_path):
components = self._split_path_into_components(relative_path)
self._file_depths_in_package.update([len(components)])
for i, name in enumerate(components):
prefix = components[:i]
self._file_samples[package_path][tuple(prefix)].add(name)
self._file_samples_dirty = True
def generate_package_path(self):
if self._last_package_path is not None:
path = self._last_package_path
self._last_package_remaining_targets -= 1
if self._last_package_remaining_targets <= 0:
self._last_package_path = None
return path
depth = weighted_choice(self._package_depths)
path, parent_dir = self._generate_path(
"//", self._root, depth, self._sizes_by_depth, self._component_generator
)
directory = {self.BUILD_FILE_NAME.lower(): None}
parent_dir[os.path.basename(path).lower()] = directory
        self._last_package_path = path
        self._last_package_remaining_targets = (
            weighted_choice(self._build_file_sizes) - 1
        )
return path
def generate_path_in_package(
self, package_path, depth, component_generator, extension
):
if depth == 0:
return ""
if self._file_samples_dirty:
self._sizes_by_depth_in_package.clear()
for dir_entries in self._file_samples.values():
for path, entries in dir_entries.items():
self._sizes_by_depth_in_package[len(path)].update([len(entries)])
self._file_samples_dirty = False
root = self._root
components = self._split_path_into_components(package_path)
for component in components:
root = root[component.lower()]
path, parent_dir = self._generate_path(
package_path,
root,
depth,
self._sizes_by_depth_in_package,
component_generator,
extension,
)
parent_dir[os.path.basename(path).lower()] = None
return path
def register_path(self, path):
directory = self._root
existed = True
for component in self._split_path_into_components(path):
if component not in directory:
directory[component] = {}
existed = False
directory = directory[component]
if directory is None:
raise GenerationFailedException()
if existed:
raise GenerationFailedException()
def _split_path_into_components(self, path):
components = []
while path:
path, component = os.path.split(path)
components.append(component)
return components[::-1]
def _generate_path(
self,
package_key,
root,
depth,
sizes_by_depth,
component_generator,
extension=None,
):
assert depth >= 1
parent_path, parent_dir = self._generate_parent(
package_key, root, depth - 1, sizes_by_depth, component_generator
)
name = self._generate_name(parent_dir, component_generator, extension)
return os.path.join(parent_path, name), parent_dir
def _generate_parent(
self, package_key, root, depth, sizes_by_depth, component_generator
):
if depth == 0:
return "", root
key = (package_key, depth)
value = self._available_directories.get(key)
if value is not None:
key_found = True
path, directory, size = value
else:
key_found = False
parent_path, parent_dir = self._generate_parent(
package_key, root, depth - 1, sizes_by_depth, component_generator
)
name = self._generate_name(parent_dir, component_generator)
path = os.path.join(parent_path, name)
directory = {}
parent_dir[name.lower()] = directory
size = weighted_choice(sizes_by_depth[depth])
size -= 1
if size > 0:
self._available_directories[key] = (path, directory, size)
elif key_found:
del self._available_directories[key]
return path, directory
def _generate_name(self, directory, generator, extension=None):
for i in range(1000):
name = generator.generate_string()
if extension is not None:
name += extension
if (
name.lower() not in directory
and name.lower() != self.BUILD_FILE_NAME.lower()
):
return name
raise GenerationFailedException()
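# Hedged usage sketch (illustrative, not part of the original module): the generator is
# first trained on existing project data and then asked for fresh package paths. The two
# targets below are assumptions standing in for a real project_data mapping.
def _example_generate_package_paths():
    generator = FilePathGenerator()
    generator.analyze_project_data({
        "//foo/bar:lib": {"buck.base_path": "foo/bar", "name": "lib"},
        "//foo/baz:app": {"buck.base_path": "foo/baz", "name": "app"},
    })
    return [generator.generate_package_path() for _ in range(3)]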
|
ukBaz/ble_beacon
|
tests/data/pkt_capture.py
|
Python
|
gpl-2.0
| 39,109
| 0.006648
|
data = [
b'\x04\x0e\x04\x01\x05 \x00',
b'\x04\x0e\x04\x01\x0b \x00',
b'\x04\x0e\x04\x01\x0c \x00',
b'\x04>+\x02\x01\x03\x01\x97\xe7/s\x18b\x1f\x1e\xff\x06\x00\x01\t \x02[=cdI\xb9kQl\x977W\xc2V?\xa2k\xe7\x1c\xf4\x9d\xd7\x85\xc9',
b'\x04>\x1a\x02\x01\x00\x01\x07\xbb\xd8!p\\\x0e\x02\x01\x06\n\xffL\x00\x10\x05\x0b\x1c\xfd\xf3\xc6\xad',
b'\x04>\x0c\x02\x01\x04\x01\x07\xbb\xd8!p\\\x00\xae',
b'\x04>\x1a\x02\x01\x00\x01\xd1e\xa9\x85\x0bI\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x03\x18\x9bF\x86\xa7',
b'\x04>(\x02\x01\x02\x01\xc9\x9b1\xca\x82i\x1c\x1b\xff\xff\xff\xbe\xacH%>Yr$Dc\xb9\xb8\x03?\xfa\xb5\x81\x04\x00{\x01A\xbc\x00\xb2',
b'\x04>\x0c\x02\x01\x04\x01\xc9\x9b1\xca\x82i\x00\xb3',
b'\x04>\x1e\x02\x01\x00\x01\x1bQm\xb7Qd\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06\x03\x1e\xa0\xdeI?\xac',
b'\x04>\x0c\x02\x01\x04\x01\x1bQm\xb7Qd\x00\xad',
b"\x04>'\x02\x01\x02\x01\n\t9\x1b\xf6y\x1b\x1a\xffL\x00\x02\x15j\xb1|\x17\xf4{MA\x806Rj\xee\xd2/s\x01\x16\x03h\xbf\xb6",
b'\x04>\x0c\x02\x01\x04\x01\n\t9\x1b\xf6y\x00\xb6',
b"\x04>\x1f\x02\x01\x02\x01\x9c\xa0\xd0L'P\x13\x03\x03\xaa\xfe\x0e\x16\xaa\xfe\x10\xbd\x01firstuk\x01\xb4",
b"\x04>\x0c\x02\x01\x04\x01\x9c\xa0\xd0L'P\x00\xb4",
b'\x04>(\x02\x01\x02\x01\xb9\xf6\x0f\xfd\xe2\\\x1c\x03\x03\x9f\xfe\x17\x16\x9f\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xab',
b'\x04>\x0c\x02\x01\x04\x01\xd1e\xa9\x85\x0bI\x00\xa8',
b'\x04>\x16\x02\x01\x04\x01\xb9\xf6\x0f\xfd\xe2\\\n\t\xff\xe0\x00\x01z\xca\x86\xa1\xca\xaa',
b'\x04>(\x02\x01\x03\x00k\xa0\xd0.\x04\xf8\x1c\x1b\xffu\x00B\x04\x01\x80\xac\xf8\x04.\xd0\xa0k\xfa\x04.\xd0\xa0j\x01\x17@\x00\x00\x00\x00\xa8',
b'\x04>\x1a\x02\x01\x00\x01\xc7\xaf\x92\x15!b\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x13\x1c\x0c\xb2G\xa4',
b'\x04>\x0c\x02\x01\x04\x01\xc7\xaf\x92\x15!b\x00\xa5',
b'\x04>&\x02\x01\x02\x01\xf9u\xa8r\x14r\x1a\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xbf\xfb5\xfd\r\x17i\x1dd\xaa\x90\xab\xcd\xef\t\x87e\xc0',
b'\x04\x0e\x04\x01\x0c \x00',
b'\x04\x0f\x04\x00\x01\x01\x04',
b'\x04\x0e\x04\x01\x05 \x00',
b'\x04\x0e\x04\x01\x0b \x00',
b'\x04\x0e\x04\x01\x0c \x00',
b'\x04>+\x02\x01\x03\x01\x97\xe7/s\x18b\x1f\x1e\xff\x06\x00\x01\t \x02[=cdI\xb9kQl\x977W\xc2V?\xa2k\xe7\x1c\xf4\x9d\xd7\x85\xc9',
b'\x04>(\x02\x01\x02\x01\xc9\x9b1\xca\x82i\x1c\x1b\xff\xff\xff\xbe\xacH%>Yr$Dc\xb9\xb8\x03?\xfa\xb5\x81\x04\x00{\x01A\xbc\x00\xb1',
b'\x04>\x0c\x02\x01\x04\x01\xc9\x9b1\xca\x82i\x00\xb1',
b'\x04>\x1a\x02\x01\x00\x01\xd1e\xa9\x85\x0bI\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x03\x18\x9bF\x86\xab',
b'\x04>&\x02\x01\x02\x01\xf9u\xa8r\x14r\x1a\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xbf\xfb5\xfd\r\x17i\x1dd\xaa\x90\xab\xcd\xef\t\x87e\xb9',
b'\x04>\x0c\x02\x01\x04\x01\xd1e\xa9\x85\x0bI\x00\xac',
b'\x04>\x1e\x02\x01\x00\x01\x1bQm\xb7Qd\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06\x03\x1e\xa0\xdeI?\xae',
b'\x04>\x0c\x02\x01\x04\x01\x1bQm\xb7Qd\x00\xac',
b'\x04>(\x02\x01\x02\x01\xb9\xf6\x0f\xfd\xe2\\\x1c\x03\x03\x9f\xfe\x17\x16\x9f\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xaf',
b'\x04>\x16\x02\x01\x04\x01\xb9\xf6\x0f\xfd\xe2\\\n\t\xff\xe0\x00\x01z\xca\x86\xa1\xca\xb0',
b"\x04>\x1f\x02\x01\x02\x01\x9c\xa0\xd0L'P\x13\x03\x03\xaa\xfe\x0e\x16\xaa\xfe\x10\xbd\x01firstuk\x01\xb2",
b"\x04>\x0c\x02\x01\x04\x01\x9c\xa0\xd0L'P\x00\xb1"
b"\x04>'\x02\x01\x02\x01\n\t9\x1b\xf6y\x1b\x1a\xffL\x00\x02\x15j\xb1|\x17\xf4{MA\x806Rj\xee\xd2/s\x01\x16\x03h\xbf\xb6"
b'\x04>\x0c\x02\x01\x04\x01\n\t9\x1b\xf6y\x00\xb5',
b'\x04>\x1a\x02\x01\x00\x01\x07\xbb\xd8!p\\\x0e\x02\x01\x06\n\xffL\x00\x10\x05\x0b\x1c\xfd\xf3\xc6\xa8',
b'\x04>)\x02\x01\x03\x01\xbeC\xe75\x82\xde\x1d\x02\x01\x06\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x10\xf6\x03ukbaz.github.io\xbb',
b'\x04>\x0c\x02\x01\x04\x01\x07\xbb\xd8!p\\\x00\xae',
]
beacon_only = [
b"\x04>\x1f\x02\x01\x02\x01\x9c\xa0\xd0L'P\x13\x03\x03\xaa\xfe\x0e\x16\xaa\xfe\x10\xbd\x01firstuk\x01\xb4",
b"\x04>\x1f\x02\x01\x02\x01\x9c\xa0\xd0L'P\x13\x03\x03\xaa\xfe\x0e\x16\xaa\xfe\x10\xbd\x01firstuk\x01\xb2",
b'\x04>)\x02\x01\x03\x01\xbeC\xe75\x82\xde\x1d\x02\x01\x06\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x10\xf6\x03ukbaz.github.io\xbd',
b'\x04>)\x02\x01\x03\x01\xbeC\xe75\x82\xde\x1d\x02\x01\x06\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x10\xf6\x03ukbaz.github.io\xbb',
b'\x04>&\x02\x01\x02\x01\xf9u\xa8r\x14r\x1a\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xbf\xfb5\xfd\r\x17i\x1dd\xaa\x90\xab\xcd\xef\t\x87e\xc0',
b'\x04>&\x02\x01\x02\x01\xf9u\xa8r\x14r\x1a\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xbf\xfb5\xfd\r\x17i\x1dd\xaa\x90\xab\xcd\xef\t\x87e\xbc',
b"\x04>'\x02\x01\x02\x01\n\t9\x1b\xf6y\x1b\x1a\xffL\x00\x02\x15j\xb1|\x17\xf4{MA\x806Rj\xee\xd2/s\x01\x16\x03h\xbf\xb6",
b"\x04>'\x02\x01\x02\x01\n\t9\x1b\xf6y\x1b\x1a\xffL\x00\x02\x15j\xb1|\x17\xf4{MA\x806Rj\xee\xd2/s\x01\x16\x03h\xbf\xb6",
b'\x04>(\x02\x01\x02\x01\xc9\x9b1\xca\x82i\x1c\x1b\xff\xff\xff\xbe\xacH%>Yr$Dc\xb9\xb8\x03?\xfa\xb5\x81\x04\x00{\x01A\xbc\x00\xb2',
b'\x04>(\x02\x01\x02\x01\xc9\x9b1\xca\x82i\x1c\x1b\xff\xff\xff\xbe\xacH%>Yr$Dc\xb9\xb8\x03?\xfa\xb5\x81\x04\x00{\x01A\xbc\x00\xb1',
]
bytes_only = [
# Eddystone URL with secondary URL encoding (e.g. \x01 = .org
b'\x04\x3e\x1f\x02\x01\x02\x01\x9c\xa0\xd0\x4c\x27\x50\x13\x03\x03\xaa\xfe\x0e\x16\xaa\xfe\x10\xbd\x01\x66\x69\x72\x73\x74\x75\x6b\x01\xb4',
b'\x04\x3e\x1f\x02\x01\x02\x01\x9c\xa0\xd0\x4c\x27\x50\x13\x03\x03\xaa\xfe\x0e\x16\xaa\xfe\x10\xbd\x01\x66\x69\x72\x73\x74\x75\x6b\x01\xb2',
# Eddystone URL without secondary URL encoding
b'\x04\x3e\x29\x02\x01\x03\x01\xbe\x43\xe7\x35\x82\xde\x1d\x02\x01\x06\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x10\xf6\x03\x75\x6b\x62\x61\x7a\x2e\x67\x69\x74\x68\x75\x62\x2e\x69\x6f\xbd',
b'\x04\x3e\x29\x02\x01\x03\x01\xbe\x43\xe7\x35\x82\xde\x1d\x02\x01\x06\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x10\xf6\x03\x75\x6b\x62\x61\x7a\x2e\x67\x69\x74\x68\x75\x62\x2e\x69\x6f\xbb',
# Eddystone UID
b'\x04\x3e\x26\x02\x01\x02\x01\xf9u\xa8r\x14r\x1a\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xbf\xfb\x35\xfd\r\x17\x69\x1d\x64\xaa\x90\xab\xcd\xef\t\x87\x65\xc0',
b'\x04\x3e\x26\x02\x01\x02\x01\xf9u\xa8r\x14r\x1a\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xbf\xfb\x35\xfd\r\x17\x69\x1d\x64\xaa\x90\xab\xcd\xef\t\x87\x64\xbc',
# iBeacon
b'\x04\x3e\x27\x02\x01\x02\x01\x0a\x09\x39\x1b\xf6\x79\x1b\x1a\xff\x4c\x00\x02\x15\x6a\xb1\x7c\x17\xf4\x7b\x4d\x41\x80\x36\x52\x6a\xee\xd2\x2f\x73\x01\x16\x03\x68\xbf\xb6',
b'\x04\x3e\x27\x02\x01\x02\x01\x0a\x09\x39\x1b\xf6\x79\x1b\x1a\xff\x4c\x00\x02\x15\x6a\xb1\x7c\x17\xf4\x7b\x4d\x41\x80\x36\x52\x6a\xee\xd2\x2f\x73\x01\x16\x03\x68\xbf\xb6',
# Alt Beacon
b'\x04\x3e\x28\x02\x01\x02\x01\xc9\x9b\x31\xca\x82\x69\x1c\x1b\xff\xff\xff\xbe\xac\x48\x25\x3e\x59\x72\x24\x44\x63\xb9\xb8\x03\x3f\xfa\xb5\x81\x04\x00\x7b\x01\x41\xbc\x00\xb2',
b'\x04\x3e\x28\x02\x01\x02\x01\xc9\x9b\x31\xca\x82\x69\x1c\x1b\xff\xff\xff\xbe\xac\x48\x25\x3e\x59\x72\x24\x44\x63\xb9\xb8\x03\x3f\xfa\xb5\x81\x04\x00\x7b\x01\x41\xbc\x00\xb1',
]
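# Hedged sketch (not part of the original capture): decoding the first iBeacon entry above.
# The offsets assume the HCI LE Advertising Report layout used throughout this file; the
# helper name is an invention for illustration only.
def _example_parse_ibeacon(pkt=bytes_only[6]):
    import binascii
    import struct
    uuid = binascii.hexlify(pkt[20:36])            # 16-byte proximity UUID
    major, minor = struct.unpack('>HH', pkt[36:40])
    tx_power = struct.unpack('b', pkt[40:41])[0]   # calibrated TX power (signed)
    return uuid, major, minor, tx_power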
more_beacons = [
b'\x04\x0e\x04\x01\x05 \x00',
b'\x04\x0e\x04\x01\x0b \x00',
b'\x04\x0e\x04\x01\x0c \x00',
b'\x04>\x1e\x02\x01\x00\x01\x95\xfdu\xa2>N\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06\x03\x1e\x86\xdc\xb98\xac',
b'\x04>\x0c\x02\x01\x04\x01\x95\xfdu\xa2>N\x00\xab',
b'\x04>(\x02\x01\x03\x00iX\xf1\xf4\xc3\x00\x1c\x1b\xffu\x00B\x04\x01\x80`\x00\xc3\xf4\xf1Xi\x02\xc3\xf4\xf1Xh\x01\x00\x00\x00\x00\x00\x00\xa7',
b'\x04>\x1f\x02\x01\x02\x01^\x89\xf3\x12\xfa_\x13\x03\x03\xaa\xfe\x0e\x16\xaa\xfe\x10\xbd\x01firstuk\x01\xb9',
b'\x04>\x0c\x02\x01\x04\x01^\x89\xf3\x12\xfa_\x00\xb9',
b"\x04>'\x02\x01\x02\x01\x8eml\xe2\x83b\x1b\x1a\xffL\x00\x02\x15j\xb1|\x17{\x00MA\x806Rj\xee\xd2/s\x01\x16\x03h\xbf\xb5",
b'\x04>\x0c\x02\x01\x04\x01\x8eml\xe2\x83b\x00\xb5',
b'\x04>\x1e\x02\x01\x00\x01W\xc32c!K\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06G\x1d\x96[\x97\x80\xac',
b'\x04>\x0c\x02\x01\x04\x01W\xc32c!K\x00\xac',
b'\x04>\x1a\x02\x01\x00\x01\x07\xbb\xd8!p\\\x0e\x02\x01\x06\n\xffL\x00\x10\x05\x01\x10\xfd\xf3\xc6\xa4
|
atumanov/ray
|
python/ray/rllib/models/torch_action_dist.py
|
Python
|
apache-2.0
| 1,516
| 0
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
try:
import torch
except ImportError:
pass # soft dep
from ray.rllib.models.action_dist import ActionDistribution
from ray.rllib.utils.annotations import override
class TorchDistributionWrapper(ActionDistribution):
"""Wrapper class for torch.distributions."""
@override(ActionDistribution)
def logp(self, actions):
return self.dist.log_prob(actions)
@override(ActionDistribution)
def entropy(self):
return self.dist.entropy()
@override(ActionDistribution)
def kl(self, other):
return torch.distributions.kl.kl_divergence(self.dist, other)
@override(ActionDistribution)
def sample(self):
return self.dist.sample()
class TorchCategorical(TorchDistributionWrapper):
"""Wrapper class for PyTorch Categorical distribution."""
@override(ActionDistribution)
def __init__(self, inputs):
self.dist = torch.distributions.categorical.Categorical(logits=inputs)
class TorchDiagGaussian(TorchDistributionWrapper):
"""Wrapper class for PyTorch Normal distribution."""
@override(ActionDistribution)
def __init__(self, inputs):
mean, log_std = torch.chunk(inputs, 2, dim=1)
self.dist = torch.distributions.normal.Normal(mean, torch.exp(log_std))
@override(TorchDistributionWrapper)
def logp(self, actions):
return TorchDistributionWrapper.logp(self, actions).sum(-1)
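# Hedged usage sketch (illustrative, not part of the original module): wrapping raw policy
# logits in TorchCategorical to sample an action and score it.
def _example_torch_categorical():
    logits = torch.tensor([[2.0, 0.5, -1.0]])
    dist = TorchCategorical(logits)
    action = dist.sample()                      # tensor of shape (1,)
    return action, dist.logp(action), dist.entropy()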
|
cans/tappy-pkg
|
tap/tests/__init__.py
|
Python
|
bsd-2-clause
| 105
| 0
|
# Copyright (c) 2015, Matt Layman
"""Tests for tappy"""
from tap.tests.testcase import TestCase  # NOQA
|
rhoml/lemur
|
lemur/auth/views.py
|
Python
|
apache-2.0
| 8,442
| 0.002961
|
"""
.. module: lemur.auth.views
:platform: Unix
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
.. moduleauthor:: Kevin Glisson <[email protected]>
"""
import jwt
import base64
import requests
from flask import g, Blueprint, current_app
from flask.ext.restful import reqparse, Resource, Api
from flask.ext.principal import Identity, identity_changed
from lemur.common.utils import get_psuedo_random_string
from lemur.users import service as user_service
from lemur.roles import service as role_service
from lemur.auth.service import create_token, fetch_token_header, get_rsa_public_key
mod = Blueprint('auth', __name__)
api = Api(mod)
class Login(Resource):
"""
Provides an endpoint for Lemur's basic authentication. It takes a username and password
combination and returns a JWT token.
    This token is required for each API request and must be provided in the Authorization header of the request.
::
Authorization:Bearer <token>
    Tokens have a set expiration date. You can inspect the token expiration by base64 decoding the token and inspecting
    its contents.
    .. note:: It is recommended that the token expiration is fairly short lived (hours, not days). This will largely depend \
    on your use cases. It is important to note that there is currently no built-in method to revoke a user's token \
    and force re-authentication.
"""
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(Login, self).__init__()
def post(self):
"""
.. http:post:: /auth/login
Login with username:password
**Example request**:
.. sourcecode:: http
POST /auth/login HTTP/1.1
Host: example.com
Accept: application/json, text/javascript
{
"username": "test",
"password": "test"
}
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"token": "12343243243"
}
:arg username: username
:arg password: password
:statuscode 401: invalid credentials
:statuscode 200: no error
"""
self.reqparse.add_argument('username', type=str, required=True, location='json')
self.reqparse.add_argument('password', type=str, required=True, location='json')
args = self.reqparse.parse_args()
if '@' in args['username']:
user = user_service.get_by_email(args['username'])
else:
user = user_service.get_by_username(args['username'])
if user and user.check_password(args['password']):
# Tell Flask-Principal the identity changed
identity_changed.send(current_app._get_current_object(),
identity=Identity(user.id))
return dict(token=create_token(user))
return dict(message='The supplied credentials are invalid'), 401
def get(self):
return {'username': g.current_user.username, 'roles': [r.name for r in g.current_user.roles]}
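# Hedged client-side sketch (illustrative, not part of the original module): obtaining the
# JWT described in the Login docstring and sending it back in the Authorization header.
# The host name and credentials below are placeholders.
def _example_login_client(base_url='https://lemur.example.com'):
    token = requests.post(base_url + '/auth/login',
                          json={'username': 'test', 'password': 'test'}).json()['token']
    headers = {'Authorization': 'Bearer {0}'.format(token)}
    return requests.get(base_url + '/auth/login', headers=headers).json()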
class Ping(Resource):
"""
This class serves as an example of how one might implement an SSO provider for use with Lemur. In
    this example we use an OpenID Connect authentication flow, which is essentially OAuth2 underneath. If you have an
    OAuth2 provider you want to use with Lemur, there are two steps:
    1. Define your own class that inherits from :class:`flask.ext.restful.Resource` and create the HTTP methods the \
    provider uses for its callbacks.
2. Add or change the Lemur AngularJS Configuration to point to your new provider
"""
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(Ping, self).__init__()
def post(self):
self.reqparse.add_argument('clientId', type=str, required=True, location='json')
self.reqparse.add_argument('redirectUri', type=str, required=True, location='json')
self.reqparse.add_argument('code', type=str, required=True, location='json')
args = self.reqparse.parse_args()
# take the information we have received from the provider to create a new request
params = {
'client_id': args['clientId'],
'grant_type': 'authorization_code',
'scope': 'openid email profile address',
'redirect_uri': args['redirectUri'],
'code': args['code']
}
# you can either discover these dynamically or simply configure them
access_token_url = current_app.config.get('PING_ACCESS_TOKEN_URL')
user_api_url = current_app.config.get('PING_USER_API_URL')
        # the secret and clientId will be given to you when you sign up for the provider
basic = base64.b64encode('{0}:{1}'.format(args['clientId'], current_app.config.get("PING_SECRET")))
headers = {'Authorization': 'Basic {0}'.format(basic)}
# exchange authorization code for access token.
r = requests.post(access_token_url, headers=headers, params=params)
id_token = r.json()['id_token']
access_token = r.json()['access_token']
# fetch token public key
header_data = fetch_token_header(id_token)
jwks_url = current_app.config.get('PING_JWKS_URL')
# retrieve the key material as specified by the token header
r = requests.get(jwks_url)
for key in r.json()['keys']:
if key['kid'] == header_data['kid']:
secret = get_rsa_public_key(key['n'], key['e'])
algo = header_data['alg']
break
else:
return dict(message='Key not found'), 403
# validate your token based on the key it was signed with
try:
jwt.decode(id_token, secret, algorithms=[algo], audience=args['clientId'])
except jwt.DecodeError:
return dict(message='Token is invalid'), 403
except jwt.ExpiredSignatureError:
return dict(message='Token has expired'), 403
except jwt.InvalidTokenError:
return dict(message='Token is invalid'), 403
user_params = dict(access_token=access_token, schema='profile')
# retrieve information about the current user.
r = requests.get(user_api_url, params=user_params)
profile = r.json()
user = user_service.get_by_email(profile['email'])
# update their google 'roles'
roles = []
for group in profile['googleGroups']:
role = role_service.get_by_name(group)
if not role:
role = role_service.create(group, description='This is a google group based role created by Lemur')
roles.append(role)
        # if we get an SSO user, create an account for them
        # we still pick a random password in case SSO is down
if not user:
# every user is an operator (tied to a default role)
if current_app.config.get('LEMUR_DEFAULT_ROLE'):
v = role_service.get_by_name(current_app.config.get('LEMUR_DEFAULT_ROLE'))
if v:
roles.append(v)
user = user_service.create(
profile['email'],
get_psuedo_random_string(),
profile['email'],
True,
profile.get('thumbnailPhotoUrl'),
roles
)
else:
# we add 'lemur' specific roles, so they do not get marked as removed
for ur in user.roles:
if ur.authority_id:
roles.append(ur)
# update any changes to the user
user_service.update(
user.id,
profile['email'],
profile['email'],
True,
                profile.get('thumbnailPhotoUrl'),  # in case profile isn't google+ enabled
                roles
)
# Tell Flask-Principal the identity
|
RedHatInsights/insights-core
|
insights/parsers/pluginconf_d.py
|
Python
|
apache-2.0
| 3,141
| 0
|
"""
pluginconf.d configuration file - Files
=======================================
Shared mappers for parsing and extracting data from
``/etc/yum/pluginconf.d/*.conf`` files. Parsers contained
in this module are:
PluginConfD - files ``/etc/yum/pluginconf.d/*.conf``
---------------------------------------------------
PluginConfDIni - files ``/etc/yum/pluginconf.d/*.conf``
-------------------------------------------------------
"""
from insights.core import IniConfigFile, LegacyItemAccess, Parser
from insights.core.plugins import parser
from insights.parsers import get_active_lines
from insights.specs import Specs
from insights.util import deprecated
@parser(Specs.pluginconf_d)
class PluginConfD(LegacyItemAccess, Parser):
"""
.. warning::
This parser is deprecated, please use
:py:class:`insights.parsers.pluginconf_d.PluginConfDIni` instead
Class to parse configuration file under ``pluginconf.d``
Sample configuration::
[main]
enabled = 0
gpgcheck = 1
timeout = 120
# You can specify options per channel, e.g.:
#
#[rhel-i386-server-5]
#enabled = 1
#
#[some-unsigned-custom-channel]
#gpgcheck = 0
"""
def parse_content(self, content):
deprecated(PluginConfD, "Deprecated. Use 'PluginConfDIni' instead.")
        plugin_dict = {}
section_dict = {}
key = None
for line in get_active_lines(content):
if line.startswith('['):
section_dict = {}
plugin_dict[line[1:-1]] = section_dict
elif '=' in line:
key, _, value = line.partition("=")
key = key.strip()
section_dict[key] = value.strip()
else:
if key:
section_dict[key] = ','.join([section_dict[key], line])
self.data = plugin_dict
def __iter__(self):
for sec in self.data:
yield sec
@parser(Specs.pluginconf_d)
class PluginConfDIni(IniConfigFile):
"""
Read yum plugin config files, in INI format, using the standard INI file
parser class.
Sample configuration::
[main]
enabled = 0
gpgcheck = 1
timeout = 120
# You can specify options per channel, e.g.:
#
#[rhel-i386-server-5]
#enabled = 1
#
#[some-unsigned-custom-channel]
#gpgcheck = 0
[test]
test_multiline_config = http://example.com/repos/test/
http://mirror_example.com/repos/test/
Examples:
>>> type(conf)
<class 'insights.parsers.pluginconf_d.PluginConfDIni'>
>>> conf.sections()
['main', 'test']
>>> conf.has_option('main', 'gpgcheck')
True
>>> conf.get("main", "enabled")
'0'
>>> conf.getint("main", "timeout")
120
>>> conf.getboolean("main", "enabled")
False
>>> conf.get("test", "test_multiline_config")
'http://example.com/repos/test/ http://mirror_example.com/repos/test/'
"""
pass
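# Hedged usage sketch (assumption: run with the context_wrap helper from insights.tests,
# outside the normal insights collection pipeline).
def _example_pluginconf_ini():
    from insights.tests import context_wrap
    content = "[main]\nenabled = 0\ngpgcheck = 1\ntimeout = 120"
    conf = PluginConfDIni(context_wrap(content, path='/etc/yum/pluginconf.d/example.conf'))
    return conf.sections(), conf.get('main', 'enabled')   # (['main'], '0')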
|
zuun77/givemegoogletshirts
|
leetcode/python/839_similar-string-groups.py
|
Python
|
apache-2.0
| 1,451
| 0.009649
|
import collections
class Solution:
def numSimilarGroups(self, A):
UF = {}
for i in range(len(A)): UF[i] = i
def find(x):
if x != UF[x]:
UF[x] = find(UF[x])
return UF[x]
def union(x, y):
UF.setdefault(x, x)
UF.setdefault(y, y)
UF[find(x)] = find(y)
def match(s1, s2):
i = 0
j = -1
while i<len(s1):
if s1[i] != s2[i]:
if j == -1: j = i
else: break
i += 1
return s1[i+1:] == s2[i+1:]
N, W = len(A), len(A[0])
if N < W*W:
for i in range(len(A)): UF[i] = i
for i in range(len(A)):
                for j in range(i+1, len(A)):
if match(A[i], A[j]):
union(i, j)
else:
d = collections.defaultdict(set)
for idx, w in enumerate(A):
lw = list(w)
for i in range(W):
for j in range(i+1, W):
lw[i], lw[j] = lw[j], lw[i]
d["".join(lw)].add(idx)
lw[i], lw[j] = lw[j], lw[i]
for i, w in enumerate(A):
for j in d[w]:
union(i, j)
return len({find(x) for x in UF})
print(Solution().numSimilarGroups(["tars","rats","arts","star"]))
|
zoucaitou/azeroth-spider
|
azeroth_spider/dytt.py
|
Python
|
mit
| 310
| 0.006452
|
# -*- coding: utf-8 -*-
from queue.producer import Producer
from queue.consumer import Consumer
from queue.bloom_filter import BloomFilter
class Dytt:
    @staticmethod
    def main():
        for i in range(15):
            # Producer().start()
            Consumer().start()
if __name__ == '__main__':
    Dytt.main()
| |
ESOedX/edx-platform
|
lms/djangoapps/utils.py
|
Python
|
agpl-3.0
| 368
| 0
|
"""
Helper Methods
"""
import six
def _get_key(key_or_id, key_cls):
"""
Helper method to get a course/usage key either from a string or a key_cls,
where the key_cls (CourseKey or UsageKey) will simply be returned.
"""
return (
key_cls.from_string(key_or_id)
if isinstance(key_or_id, six.string_types)
else key_or_id
)
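# Hedged usage sketch (assumption: opaque_keys is importable, as it is in edx-platform).
def _example_get_key():
    from opaque_keys.edx.keys import CourseKey
    course_key = _get_key('course-v1:edX+DemoX+Demo_Course', CourseKey)
    # Passing an already-parsed key returns it unchanged.
    return course_key == _get_key(course_key, CourseKey)   # True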
|
giupo/flypwd
|
flypwd/keys.py
|
Python
|
bsd-3-clause
| 658
| 0.00304
|
# -*- coding:utf-8 -*-
import logging
import warnings
from flypwd.config import config
with warnings.catch_warnings():
    warnings.simplefilter("ignore")
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_v1_5
log = logging.getLogger(__name__)
def check_key(keyfile):
"""
checks the RSA key file
raises ValueError if not valid
"""
with open(keyfile, 'r') as f:
return RSA.importKey(f.read(), passphrase="")
def gen_key():
    return RSA.generate(config.getint('keys', 'dimension'))
def encrypt_with_pub(pwd, pub):
cipher = PKCS1_v1_5.new(pub)
return cipher.encrypt(pwd.encode('utf-8'))
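# Hedged round-trip sketch (illustrative only): gen_key() reads the key size from the
# flypwd config, so this assumes a valid [keys] section is available.
def _example_roundtrip(pwd='s3cret'):
    key = gen_key()
    ciphertext = encrypt_with_pub(pwd, key.publickey())
    return PKCS1_v1_5.new(key).decrypt(ciphertext, None) == pwd.encode('utf-8')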
|
alexforencich/hdg2000
|
fpga/tb/test_wb_mcb_32.py
|
Python
|
mit
| 10,990
| 0.012648
|
#!/usr/bin/env python2
"""
Copyright (c) 2015 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from myhdl import *
import os
import wb
import mcb
module = 'wb_mcb_32'
srcs = []
srcs.append("../rtl/%s.v" % module)
srcs.append("test_%s.v" % module)
src = ' '.join(srcs)
build_cmd = "iverilog -o test_%s.vvp %s" % (module, src)
def dut_wb_mcb_32(clk,
rst,
current_test,
wb_adr_i,
wb_dat_i,
wb_dat_o,
wb_we_i,
wb_sel_i,
wb_stb_i,
wb_ack_o,
wb_cyc_i,
mcb_cmd_clk,
mcb_cmd_en,
mcb_cmd_instr,
mcb_cmd_bl,
mcb_cmd_byte_addr,
mcb_cmd_empty,
mcb_cmd_full,
mcb_wr_clk,
mcb_wr_en,
mcb_wr_mask,
mcb_wr_data,
mcb_wr_empty,
mcb_wr_full,
mcb_wr_underrun,
mcb_wr_count,
mcb_wr_error,
mcb_rd_clk,
mcb_rd_en,
mcb_rd_data,
mcb_rd_empty,
mcb_rd_full,
mcb_rd_overflow,
mcb_rd_count,
mcb_rd_error):
if os.system(build_cmd):
raise Exception("Error running build command")
return Cosimulation("vvp -m myhdl test_%s.vvp -lxt2" % module,
clk=clk,
rst=rst,
current_test=current_test,
wb_adr_i=wb_adr_i,
wb_dat_i=wb_dat_i,
wb_dat_o=wb_dat_o,
wb_we_i=wb_we_i,
wb_sel_i=wb_sel_i,
wb_stb_i=wb_stb_i,
wb_ack_o=wb_ack_o,
wb_cyc_i=wb_cyc_i,
mcb_cmd_clk=mcb_cmd_clk,
mcb_cmd_en=mcb_cmd_en,
mcb_cmd_instr=mcb_cmd_instr,
mcb_cmd_bl=mcb_cmd_bl,
mcb_cmd_byte_addr=mcb_cmd_byte_addr,
mcb_cmd_empty=mcb_cmd_empty,
mcb_cmd_full=mcb_cmd_full,
mcb_wr_clk=mcb_wr_clk,
mcb_wr_en=mcb_wr_en,
mcb_wr_mask=mcb_wr_mask,
mcb_wr_data=mcb_wr_data,
mcb_wr_empty=mcb_wr_empty,
mcb_wr_full=mcb_wr_full,
mcb_wr_underrun=mcb_wr_underrun,
mcb_wr_count=mcb_wr_count,
mcb_wr_error=mcb_wr_error,
mcb_rd_clk=mcb_rd_clk,
mcb_rd_en=mcb_rd_en,
mcb_rd_data=mcb_rd_data,
mcb_rd_empty=mcb_rd_empty,
mcb_rd_full=mcb_rd_full,
mcb_rd_overflow=mcb_rd_overflow,
mcb_rd_count=mcb_rd_count,
mcb_rd_error=mcb_rd_error)
def bench():
# Parameters
# Inputs
clk = Signal(bool(0))
rst = Signal(bool(0))
current_test = Signal(intbv(0)[8:])
wb_adr_i = Signal(intbv(0)[32:])
wb_dat_i = Signal(intbv(0)[32:])
wb_we_i = Signal(bool(0))
wb_sel_i = Signal(intbv(0)[4:])
wb_stb_i = Signal(bool(0))
wb_cyc_i = Signal(bool(0))
mcb_cmd_empty = Signal(bool(0))
mcb_cmd_full = Signal(bool(0))
mcb_wr_empty = Signal(bool(0))
mcb_wr_full = Signal(bool(0))
mcb_wr_underrun = Signal(bool(0))
mcb_wr_count = Signal(intbv(0)[7:])
mcb_wr_error = Signal(bool(0))
mcb_rd_data = Signal(intbv(0)[32:])
mcb_rd_empty = Signal(bool(0))
mcb_rd_full = Signal(bool(0))
mcb_rd_overflow = Signal(bool(0))
mcb_rd_count = Signal(intbv(0)[7:])
mcb_rd_error = Signal(bool(0))
# Outputs
wb_dat_o = Signal(intbv(0)[32:])
wb_ack_o = Signal(bool(0))
mcb_cmd_clk = Signal(bool(0))
mcb_cmd_en = Signal(bool(0))
mcb_cmd_instr = Signal(intbv(0)[3:])
mcb_cmd_bl = Signal(intbv(0)[6:])
mcb_cmd_byte_addr = Signal(intbv(0)[32:])
mcb_wr_clk = Signal(bool(0))
mcb_wr_en = Signal(bool(0))
mcb_wr_mask = Signal(intbv(0)[4:])
mcb_wr_data = Signal(intbv(0)[32:])
mcb_rd_clk = Signal(bool(0))
mcb_rd_en = Signal(bool(1))
# WB master
wbm_inst = wb.WBMaster()
wbm_logic = wbm_inst.create_logic(clk,
adr_o=wb_adr_i,
dat_i=wb_dat_o,
dat_o=wb_dat_i,
we_o=wb_we_i,
sel_o=wb_sel_i,
stb_o=wb_stb_i,
ack_i=wb_ack_o,
cyc_o=wb_cyc_i,
name='master')
# MCB model
mcb_inst = mcb.MCB(2**16)
mcb_controller = mcb_inst.create_controller(clk, rst)
mcb_port0 = mcb_inst.create_readwrite_port(cmd_clk=mcb_cmd_clk,
cmd_en=mcb_cmd_en,
cmd_instr=mcb_cmd_instr,
cmd_bl=mcb_cmd_bl,
cmd_byte_addr=mcb_cmd_byte_addr,
cmd_empty=mcb_cmd_empty,
cmd_full=mcb_cmd_full,
wr_clk=mcb_wr_clk,
wr_en=mcb_wr_en,
wr_mask=mcb_wr_mask,
wr_data=mcb_wr_data,
wr_empty=mcb_wr_empty,
wr_full=mcb_wr_full,
wr_underrun=mcb_wr_underrun,
wr_count=mcb_wr_count,
wr_error=mcb_wr_error,
rd_clk=mcb_rd_clk,
rd_en=mcb_rd_en,
rd_data=mcb_rd_data,
rd_empty=mcb_rd_empty,
rd_full=mcb_rd_full,
rd_overflow=mcb_rd_overflow,
rd_count=mcb_rd_count,
rd_error=mcb_rd_error,
name='port0')
# DUT
dut = dut_wb_mcb_32(clk,
rst,
current_test,
wb_adr_i,
wb_dat_i,
wb_dat_o,
wb_we_i,
wb_sel_i,
wb_stb_i,
wb_ack_o,
wb_cyc_i,
mcb_cmd_clk,
mcb_cmd_en,
mcb_cmd_instr,
mcb_cmd_bl,
mcb_cmd_byte_addr,
mcb_cmd_empty,
mcb_cmd_full,
mcb_wr_clk,
mcb_wr_en,
mcb_wr_mask,
mcb_wr_data,
mcb_wr_empty,
mcb_wr_full,
mcb_wr_underrun,
mcb_wr_count,
mcb_wr_error,
|
sgkang/PhysPropIP
|
codes/ZarcFit2016-01-26.py
|
Python
|
mit
| 44,212
| 0.011626
|
#### ZarcFit.py #### for interactive model fitting of spectral electrical impedance observations.
# Seogi Kang and Randy Enkin, developed starting November 2015.
# Based on ZarcFit.vi, written in LabView by Randy Enkin, Geological Survey of Canada
# Using Python version 3.4 and QT version 4.8
#
# requires files ZarcFit2016-01-26.ui, ZarcfitCalculations.py, whichsystem.py
import numpy as np
import sys, glob, os, time
from PyQt4 import QtGui, QtCore
from PyQt4.uic import loadUiType
import matplotlib
matplotlib.use('Qt4Agg')
from matplotlib.figure import Figure
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
from matplotlib.backends.backend_qt4agg import (
FigureCanvasQTAgg as FigureCanvas,
NavigationToolbar2QT as NavigationToolbar)
from ZarcfitCalculations import *
from whichsystem import whichsystem
matplotlib.rcParams['axes.facecolor']="white"
Ui_MainWindow, QMainWindow = loadUiType('ZarcFit2016-01-26.ui')
class PathPicker(QtGui.QWidget):
pathNameStr = None
def __init__(self, ZarcFitWindow, parent=None):
# create GUI
super(PathPicker, self).__init__()
self.setWindowTitle('path picker')
# Set the window dimensions
self.resize(300,75)
# vertical layout for widgets
self.vbox = QtGui.QVBoxLayout()
self.setLayout(self.vbox)
# Create a label which displays the path to our chosen path
self.lbl = QtGui.QLabel('No path selected')
self.vbox.addWidget(self.lbl)
# Create a push button labelled 'choose' and add it to our layout
btn = QtGui.QPushButton('Choose path', self)
self.vbox.addWidget(btn)
# Connect the clicked signal to the getPathName handler
self.connect(btn, QtCore.SIGNAL('clicked()'), self.getPathName)
# Create a push button labelled 'Return' and add it to our layout
btn1 = QtGui.QPushButton('Return to main window', self)
self.vbox.addWidget(btn1)
# Connect the clicked signal to the getPathName handler
self.connect(btn1, QtCore.SIGNAL('clicked()'), self.close)
# Connect to ZarcFitWindow
self.ZarcFitWindow = ZarcFitWindow
def getPathName(self):
"""
Handler called when 'choose path' is clicked
"""
        # When you call getExistingDirectory, a path picker dialog is created
        # and if the user selects a path, its path is returned, and if not
        # (i.e., the user cancels the operation) an empty value is returned
fname = QtGui.QFileDialog.getExistingDirectory(self, "Select Path")
self.pathNameStr = str(fname)
if fname:
self.lbl.setText(fname)
self.ZarcFitWindow.pathNameStr = fname
self.ZarcFitWindow.lineEditPath.setText(fname)
self.ZarcFitWindow.getObsFName()
with open(scriptPath+mysys.filesep+"ZarcFit.ini", "w") as ini_file:
print(fname, file=ini_file)
else:
self.lbl.setText('No path selected')
class Main(QMainWindow, Ui_MainWindow):
fwdType = "series"
plotType = "bode"
axComplexReal = None
axComplexImag = None
obsFName = None
nFreq = None
freqIndLow = None
freqIndHigh = None
frequencyOrig = None
obsOrig = None
t0 = None
forcePlot = False
thresholdtime = 0.2
def __init__(ZarcFitWindow, pathNameStr, zarc, obs, frequency):
super(Main, ZarcFitWindow).__init__()
ZarcFitWindow.setupUi(ZarcFitWindow)
ZarcFitWindow.zarc = zarc
ZarcFitWindow.obs = obs
ZarcFitWindow.obsorig = obs.copy()
ZarcFitWindow.PathPickerWindow = PathPicker(ZarcFitWindow)
# Set-up frequency range
ZarcFitWindow.frequency = frequency
ZarcFitWindow.frequencyorig = frequency.copy()
ZarcFitWindow.nfreq = ZarcFitWindow.frequency.size
ZarcFitWindow.spinBoxHighFreq.setValue(0)
ZarcFitWindow.labelHighFreq.setText("{:,}".format(ZarcFitWindow.frequencyorig[0])+" Hz")
ZarcFitWindow.freqindlow = 0
ZarcFitWindow.spinBoxLowFreq.setValue(frequencyN-1)
        ZarcFitWindow.labelLowFreq.setText("{:,}".format(ZarcFitWindow.frequencyorig[-1])+" Hz")
ZarcFitWindow.freqindhigh = ZarcFitWindow.nfreq
ZarcFitWindow.initializeFigure()
ZarcFitWindow.addmplCole()
ZarcFitWindow.t0 = time.time()
# super(Main, ZarcFitWindow).__init__()
# ZarcFitWindow.setupUi(ZarcFitWindow)
# ZarcFitWindow.t0 = time.time()
# ZarcFitWindow.zarc = zarc
# ZarcFitWindow.obs = obs
# ZarcFitWindow.obsOrig = obs.copy()
# ZarcFitWindow.PathPickerWindow = PathPicker(ZarcFitWindow)
# # Set-up frequency range
# ZarcFitWindow.frequency = frequency
# ZarcFitWindow.frequencyOrig = frequency.copy()
# ZarcFitWindow.nFreq = ZarcFitWindow.frequency.size
# ZarcFitWindow.spinBoxHighFreq.setValue(0)
# ZarcFitWindow.labelHighFreq.setText("{:,}".format(ZarcFitWindow.frequencyOrig[0])+" Hz")
# ZarcFitWindow.freqIndLow = 0
# ZarcFitWindow.spinBoxLowFreq.setValue(frequencyN-1)
# ZarcFitWindow.labelLowFreq.setText("{:,}".format(ZarcFitWindow.frequencyOrig[-1])+" Hz")
# ZarcFitWindow.freqIndHigh = ZarcFitWindow.nFreq
# ZarcFitWindow.initializeFigure()
#Read in Obs files from path in ZarcFit.ini and plot first file
ZarcFitWindow.pathNameStr = pathNameStr
ZarcFitWindow.lineEditPath.setText(pathNameStr)
ZarcFitWindow.getObsFName()
# Observed Data File Events
ZarcFitWindow.actionSelect_Path.triggered.connect(ZarcFitWindow.PickPath)
ZarcFitWindow.actionSelect_Parameter_File.triggered.connect(ZarcFitWindow.SelectParameterFile)
ZarcFitWindow.actionObs_File_Type.triggered.connect(ZarcFitWindow.SelectObsFileType)
ZarcFitWindow.actionNext_Obs_File.triggered.connect(ZarcFitWindow.NextObsFile)
ZarcFitWindow.pushButtonNextFile.clicked.connect(ZarcFitWindow.NextObsFile)
ZarcFitWindow.actionPrev_Obs_File.triggered.connect(ZarcFitWindow.PrevObsFile)
ZarcFitWindow.pushButtonPrevFile.clicked.connect(ZarcFitWindow.PrevObsFile)
ZarcFitWindow.spinBoxObsFileNumber.valueChanged.connect(ZarcFitWindow.ReadObsFile)
#Model Fitting Events
ZarcFitWindow.actionF1_Fit_Spectrum_Cartesian_Cole.triggered.connect(ZarcFitWindow.FitCole)
ZarcFitWindow.pushButtonFitCole.clicked.connect(ZarcFitWindow.FitCole)
ZarcFitWindow.actionF2_Fit_Spectrum_Polar_Bode.triggered.connect(ZarcFitWindow.FitBode)
ZarcFitWindow.pushButtonFitBode.clicked.connect(ZarcFitWindow.FitBode)
#Frequency Range Events
ZarcFitWindow.spinBoxHighFreq.valueChanged.connect(ZarcFitWindow.updateHighFreq)
ZarcFitWindow.spinBoxLowFreq.valueChanged.connect(ZarcFitWindow.updateLowFreq)
ZarcFitWindow.actionF3_All_Freq_s.triggered.connect(ZarcFitWindow.AllFreqs)
ZarcFitWindow.pushButtonAllFreqs.clicked.connect(ZarcFitWindow.AllFreqs)
#Parameter File Events
ZarcFitWindow.actionF7_Read_Parameters.triggered.connect(ZarcFitWindow.ReadParameters)
ZarcFitWindow.pushButtonReadParams.clicked.connect(ZarcFitWindow.ReadParameters)
ZarcFitWindow.actionF8_Default_Start_Model.triggered.connect(ZarcFitWindow.DefaultStartModel)
ZarcFitWindow.pushButtonDefaultParams.clicked.connect(ZarcFitWindow.DefaultStartModel)
ZarcFitWindow.actionWrite_Header.triggered.connect(ZarcFitWindow.WriteHeader)
ZarcFitWindow.actionF4_Write_Fit.triggered.connect(ZarcFitWindow.WriteParam)
ZarcFitWindow.pushButtonWriteParam.clicked.connect(ZarcFitWindow.WriteParam)
ZarcFitWindow.actionOptions.triggered.connect(ZarcFitWindow.Options)
#Help Events
ZarcFitWindow.actionZarcFit_Help.triggered.connect(ZarcFitWindow.ZarcFitHelp)
ZarcFitWindow.actionAbout_ZarcFit.triggered.connect(ZarcFitWindow.AboutZarc
|
uudiin/bleachbit
|
bleachbit/GuiPreferences.py
|
Python
|
gpl-3.0
| 18,737
| 0.000907
|
# vim: ts=4:sw=4:expandtab
# -*- coding: UTF-8 -*-
# BleachBit
# Copyright (C) 2008-2015 Andrew Ziem
# http://bleachbit.sourceforge.net
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Preferences dialog
"""
import gtk
import os
import sys
import traceback
from Common import _, _p, online_update_notification_enabled
from Options import options
import GuiBasic
if 'nt' == os.name:
import Windows
if 'posix' == os.name:
import Unix
LOCATIONS_WHITELIST = 1
LOCATIONS_CUSTOM = 2
class PreferencesDialog:
"""Present the preferences dialog and save changes"""
def __init__(self, parent, cb_refresh_operations):
self.cb_refresh_operations = cb_refresh_operations
self.parent = parent
self.dialog = gtk.Dialog(title=_("Preferences"),
parent=parent,
flags=gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
self.dialog.set_default_size(300, 200)
notebook = gtk.Notebook()
notebook.append_page(self.__general_page(), gtk.Label(_("General")))
notebook.append_page(self.__locations_page(
LOCATIONS_CUSTOM), gtk.Label(_("Custom")))
notebook.append_page(self.__drives_page(), gtk.Label(_("Drives")))
if 'posix' == os.name:
notebook.append_page(
self.__languages_page(), gtk.Label(_("Languages")))
notebook.append_page(self.__locations_page(
LOCATIONS_WHITELIST), gtk.Label(_("Whitelist")))
self.dialog.vbox.pack_start(notebook, True)
self.dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE)
def __toggle_callback(self, cell, path):
"""Callback function to toggle option"""
options.toggle(path)
if online_update_notification_enabled:
self.cb_beta.set_sensitive(options.get('check_online_updates'))
if 'nt' == os.name:
self.cb_winapp2.set_sensitive(
options.get('check_online_updates'))
if 'auto_hide' == path:
self.cb_refresh_operations()
if 'auto_start' == path:
if 'nt' == os.name:
swc = Windows.start_with_computer
if 'posix' == os.name:
swc = Unix.start_with_computer
try:
swc(options.get(path))
except:
traceback.print_exc()
dlg = gtk.MessageDialog(self.parent,
type=gtk.MESSAGE_ERROR,
buttons=gtk.BUTTONS_OK,
message_format=str(sys.exc_info()[1]))
dlg.run()
dlg.destroy()
def __general_page(self):
"""Return a widget containing the general page"""
if 'nt' == os.name:
swcc = Windows.start_with_computer_check
if 'posix' == os.name:
swcc = Unix.start_with_computer_check
options.set('auto_start', swcc())
vbox = gtk.VBox()
if online_update_notification_enabled:
cb_updates = gtk.CheckButton(
_("Check periodically for software updates via the Internet"))
cb_updates.set_active(options.get('check_online_updates'))
cb_updates.connect(
'toggled', self.__toggle_callback, 'check_online_updates')
cb_updates.set_tooltip_text(
_("If an update is found, you will be given the option to view information about it. Then, you may manually download and install the update."))
vbox.pack_start(cb_updates, False)
updates_box = gtk.VBox()
updates_box.set_border_width(10)
self.cb_beta = gtk.CheckButton(_("Check for new beta releases"))
self.cb_beta.set_active(options.get('check_beta'))
            self.cb_beta.set_sensitive(options.get('check_online_updates'))
self.cb_beta.connect(
'toggled', self.__toggle_callback, 'check_beta')
updates_box.pack_start(self.cb_beta, False)
if 'nt' == os.name:
self.cb_winapp2 = gtk.CheckButton(
_("Download and update cleaners from community (winapp2.ini)"))
self.cb_winapp2.set_active(options.get('update_winapp2'))
self.cb_winapp2.set_sensitive(
options.get('check_online_updates'))
self.cb_winapp2.connect(
'toggled', self.__toggle_callback, 'update_winapp2')
updates_box.pack_start(self.cb_winapp2, False)
vbox.pack_start(updates_box, False)
# TRANSLATORS: This means to hide cleaners which would do
# nothing. For example, if Firefox were never used on
# this system, this option would hide Firefox to simplify
# the list of cleaners.
cb_auto_hide = gtk.CheckButton(_("Hide irrelevant cleaners"))
cb_auto_hide.set_active(options.get('auto_hide'))
cb_auto_hide.connect('toggled', self.__toggle_callback, 'auto_hide')
vbox.pack_start(cb_auto_hide, False)
# TRANSLATORS: Overwriting is the same as shredding. It is a way
# to prevent recovery of the data. You could also translate
# 'Shred files to prevent recovery.'
cb_shred = gtk.CheckButton(_("Overwrite files to hide contents"))
cb_shred.set_active(options.get('shred'))
cb_shred.connect('toggled', self.__toggle_callback, 'shred')
cb_shred.set_tooltip_text(
_("Overwriting is ineffective on some file systems and with certain BleachBit operations. Overwriting is significantly slower."))
vbox.pack_start(cb_shred, False)
cb_start = gtk.CheckButton(_("Start BleachBit with computer"))
cb_start.set_active(options.get('auto_start'))
cb_start.connect('toggled', self.__toggle_callback, 'auto_start')
vbox.pack_start(cb_start, False)
# Close the application after cleaning is complete.
cb_exit = gtk.CheckButton(_("Exit after cleaning"))
cb_exit.set_active(options.get('exit_done'))
cb_exit.connect('toggled', self.__toggle_callback, 'exit_done')
vbox.pack_start(cb_exit, False)
# Disable delete confirmation message.
cb_popup = gtk.CheckButton(_("Confirm before delete"))
cb_popup.set_active(options.get('delete_confirmation'))
cb_popup.connect(
'toggled', self.__toggle_callback, 'delete_confirmation')
vbox.pack_start(cb_popup, False)
return vbox
def __drives_page(self):
"""Return widget containing the drives page"""
def add_drive_cb(button):
"""Callback for adding a drive"""
title = _("Choose a folder")
pathname = GuiBasic.browse_folder(self.parent, title,
multiple=False, stock_button=gtk.STOCK_ADD)
if pathname:
liststore.append([pathname])
pathnames.append(pathname)
options.set_list('shred_drives', pathnames)
def remove_drive_cb(button):
"""Callback for removing a drive"""
treeselection = treeview.get_selection()
(model, _iter) = treeselection.get_selected()
if None == _iter:
# nothing selected
return
pathname = model[_iter][0]
liststore.remove(_iter)
pathnames.remove(pathname)
options.set_list('shred_drives', pathnames)
|
hzlf/openbroadcast
|
website/tools/dgs2/discogs_client/models.py
|
Python
|
gpl-3.0
| 21,790
| 0.000964
|
from dgs2.discogs_client.exceptions import HTTPError
from dgs2.discogs_client.utils import parse_timestamp, update_qs, omit_none
class SimpleFieldDescriptor(object):
"""
An attribute that determines its value using the object's fetch() method.
If transform is a callable, the value will be passed through transform when
read. Useful for strings that should be ints, parsing timestamps, etc.
Shorthand for:
@property
def foo(self):
return self.fetch('foo')
"""
def __init__(self, name, writable=False, transform=None):
self.name = name
self.writable = writable
self.transform = transform
def __get__(self, instance, owner):
if instance is None:
return self
value = instance.fetch(self.name)
if self.transform:
value = self.transform(value)
return value
def __set__(self, instance, value):
if self.writable:
|
instance.changes[self.name] = value
|
return
raise AttributeError("can't set attribute")
class ObjectFieldDescriptor(object):
"""
An attribute that determines its value using the object's fetch() method,
and passes the resulting value through an APIObject.
If optional = True, the value will be None (rather than an APIObject
instance) if the key is missing from the response.
If as_id = True, the value is treated as an ID for the new APIObject rather
than a partial dict of the APIObject.
Shorthand for:
@property
def baz(self):
return BazClass(self.client, self.fetch('baz'))
"""
def __init__(self, name, class_name, optional=False, as_id=False):
self.name = name
self.class_name = class_name
self.optional = optional
self.as_id = as_id
def __get__(self, instance, owner):
if instance is None:
return self
wrapper_class = CLASS_MAP[self.class_name.lower()]
response_dict = instance.fetch(self.name)
if self.optional and not response_dict:
return None
if self.as_id:
# Response_dict wasn't really a dict. Make it so.
response_dict = {'id': response_dict}
return wrapper_class(instance.client, response_dict)
def __set__(self, instance, value):
raise AttributeError("can't set attribute")
class ListFieldDescriptor(object):
"""
An attribute that determines its value using the object's fetch() method,
and passes each item in the resulting list through an APIObject.
Shorthand for:
@property
def bar(self):
return [BarClass(self.client, d) for d in self.fetch('bar', [])]
"""
def __init__(self, name, class_name):
self.name = name
self.class_name = class_name
def __get__(self, instance, owner):
if instance is None:
return self
wrapper_class = CLASS_MAP[self.class_name.lower()]
return [wrapper_class(instance.client, d) for d in instance.fetch(self.name, [])]
def __set__(self, instance, value):
raise AttributeError("can't set attribute")
class ObjectCollectionDescriptor(object):
"""
An attribute that determines its value by fetching a URL to a paginated
list of related objects, and passes each item in the resulting list through
an APIObject.
Shorthand for:
@property
def frozzes(self):
return PaginatedList(self.client, self.fetch('frozzes_url'), 'frozzes', FrozClass)
"""
def __init__(self, name, class_name, url_key=None, list_class=None):
self.name = name
self.class_name = class_name
if url_key is None:
url_key = name + '_url'
self.url_key = url_key
if list_class is None:
list_class = PaginatedList
self.list_class = list_class
def __get__(self, instance, owner):
if instance is None:
return self
wrapper_class = CLASS_MAP[self.class_name.lower()]
return self.list_class(instance.client, instance.fetch(self.url_key), self.name, wrapper_class)
def __set__(self, instance, value):
raise AttributeError("can't set attribute")
class Field(object):
"""
A placeholder for a descriptor. Is transformed into a descriptor by the
APIObjectMeta metaclass when the APIObject classes are created.
"""
_descriptor_class = None
def __init__(self, *args, **kwargs):
self.key = kwargs.pop('key', None)
self.args = args
self.kwargs = kwargs
def to_descriptor(self, attr_name):
return self._descriptor_class(self.key or attr_name, *self.args, **self.kwargs)
class SimpleField(Field):
"""A field that just returns the value of a given JSON key."""
_descriptor_class = SimpleFieldDescriptor
class ListField(Field):
"""A field that returns a list of APIObjects."""
_descriptor_class = ListFieldDescriptor
class ObjectField(Field):
"""A field that returns a single APIObject."""
_descriptor_class = ObjectFieldDescriptor
class ObjectCollection(Field):
"""A field that returns a paginated list of APIObjects."""
_descriptor_class = ObjectCollectionDescriptor
class APIObjectMeta(type):
def __new__(cls, name, bases, dict_):
for k, v in dict_.iteritems():
if isinstance(v, Field):
dict_[k] = v.to_descriptor(k)
return super(APIObjectMeta, cls).__new__(cls, name, bases, dict_)
class APIObject(object):
__metaclass__ = APIObjectMeta
class PrimaryAPIObject(APIObject):
"""A first-order API object that has a canonical endpoint of its own."""
def __init__(self, client, dict_):
self.data = dict_
self.client = client
self._known_invalid_keys = []
self.changes = {}
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.id == other.id
return NotImplemented
def __ne__(self, other):
equal = self.__eq__(other)
return NotImplemented if equal is NotImplemented else not equal
def refresh(self):
if self.data.get('resource_url'):
data = self.client._get(self.data['resource_url'])
self.data.update(data)
self.changes = {}
def save(self):
if self.data.get('resource_url'):
# TODO: This should be PATCH
self.client._post(self.data['resource_url'], self.changes)
# Refresh the object, in case there were side-effects
self.refresh()
def delete(self):
if self.data.get('resource_url'):
self.client._delete(self.data['resource_url'])
def fetch(self, key, default=None):
if key in self._known_invalid_keys:
return default
try:
# First, look in the cache of pending changes
return self.changes[key]
except KeyError:
pass
try:
# Next, look in the potentially incomplete local cache
return self.data[key]
except KeyError:
pass
# Now refresh the object from its resource_url.
# The key might exist but not be in our cache.
self.refresh()
try:
return self.data[key]
except:
self._known_invalid_keys.append(key)
return default
# This is terribly cheesy, but makes the client API more consistent
class SecondaryAPIObject(APIObject):
"""
An object that wraps parts of a response and doesn't have its own
endpoint.
"""
def __init__(self, client, dict_):
self.client = client
self.data = dict_
def fetch(self, key, default=None):
return self.data.get(key, default)
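# Hedged sketch (illustrative, not part of the original module): how the Field placeholders
# above are typically assembled into a concrete API model. The class and field names are
# assumptions; the real models define their own sets and register them in CLASS_MAP so the
# object-valued descriptors can resolve them.
class _ExampleArtist(PrimaryAPIObject):
    id = SimpleField()
    name = SimpleField()
    profile = SimpleField()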
class BasePaginatedResponse(object):
"""Base class for lists of objects spread across many URLs."""
def __init__(self, client, url):
self.client = client
self.url = url
self._num_pages = None
self._num_items = None
self._pages = {}
self._per_page = 50
self._list_key = 'items'
|
jie-lin/libvmi
|
tools/pyvmi/examples/process-list.py
|
Python
|
gpl-3.0
| 1,982
| 0.001009
|
#!/usr/bin/env python
"""
The LibVMI Library is an introspection library that simplifies access to
memory in a target virtual machine or in a file containing a dump of
a system's physical memory. LibVMI is based on the XenAccess Library.
Copyright 2011 Sandia Corporation. Under the terms of Contract
DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government
retains certain rights in this software.
Author: Bryan D. Payne ([email protected])
This file is part of LibVMI.
LibVMI is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
LibVMI is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
License for more details.
You should have received a copy of the GNU Lesser General Public License
along with LibVMI. If not, see <http://www.gnu.org/licenses/>.
"""
import pyvmi
import sys
def get_processes(vmi):
tasks_offset = vmi.get_offset("
|
win_tasks")
name_offset = vmi.get_offset("win_pname") - tasks_offset
pid_offset = vmi.get_offset("win_pid") - tasks_offset
list_head = vmi.read_addr_ksym("PsInitialSystemProcess")
next_process = vmi.read_addr_va(list_head + tasks_offset, 0)
list_head = next_process
while True:
procname = vmi.read_str_va(next_process + name_offset, 0)
pid = vmi.read_32_va(next_process + pid_offset, 0)
next_process = vmi.read_addr_va(next_process, 0)
if (pid < 1<<16):
yield pid, procname
if (list_head == next_process):
break
def main(argv):
vmi = pyvmi.init(argv[1], "complete")
for pid, procname in get_processes(vmi):
print "[%5d] %s" % (pid, procname)
if __name__ == "__main__":
main(sys.argv)
|
cgarrard/osgeopy-code
|
Chapter12/listing12_1.py
|
Python
|
mit
| 378
| 0.002646
|
# Function to stack raster bands.
import numpy as np
from osgeo import gdal
def stack_bands(filenames):
"""Returns a 3D array containing all band data from all files."""
bands = []
for fn in filenames:
ds = gdal.Open(fn)
for i in range(1, ds.RasterCount + 1):
bands.append(ds.GetRasterBand(i).ReadAsArray())
return np.dstack(bands)
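# Hedged usage sketch: the file names below are placeholders for GDAL-readable rasters
# with matching dimensions.
def _example_stack(filenames=('band1.tif', 'band2.tif', 'band3.tif')):
    data = stack_bands(list(filenames))
    return data.shape   # (rows, cols, total number of bands)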
|
|
WayneDW/Sentiment-Analysis-in-Event-Driven-Stock-Price-Movement-Prediction
|
util.py
|
Python
|
mit
| 13,305
| 0.006238
|
#!/usr/bin/env python3
import os
import sys
import copy
import re
import time
import datetime
from urllib.request import urlopen
import numpy as np
import nltk
from nltk.stem.wordnet import WordNetLemmatizer
from nltk.stem.porter import PorterStemmer
import json
import torch
import torch.autograd as autograd
import torch.nn as nn
import torch.nn.functional as F
# training with SGLD with annealing and save models
def train(X_train, y_train, X_valid, y_valid, X_test, y_test, model, args):
model.train()
batch = args.batch_size
parameters = [parameter for parameter in model.parameters()]
set_scale = [parameter.data.std().item() for parameter in model.parameters()]
set_scale = [scale / max(set_scale) for scale in set_scale] # normalize
for epoch in range(1, args.epochs+1):
corrects = 0
epsilon = args.lr * ((epoch * 1.0) ** (-0.333)) # optimal decay rate
for idx in range(int(X_train.shape[0]/batch) + 1):
feature = torch.LongTensor(X_train[(idx*batch):(idx*batch+batch),])
target = torch.LongTensor(y_train[(idx*batch):(idx*batch+batch)])
if args.cuda:
feature, target = feature.cuda(), target.cuda()
logit = model(feature)
loss = F.cross_entropy(logit, target)
model.zero_grad()
loss.backward()
for layer_no, param in enumerate(model.parameters()):
if args.static and layer_no == 0: # fixed embedding layer cannot update
continue
# by default I assume you train the models using GPU
noise = torch.cuda.FloatTensor(param.data.size()).normal_() * np.sqrt(epsilon / args.t)
#noise = torch.cuda.FloatTensor(param.data.size()).normal_() * set_scale[layer_no]
parameters[layer_no].data += (- epsilon / 2 * param.grad + noise)
corrects += (torch.max(logit, 1)[1].view(target.size()).data == target.data).sum().item()
accuracy = 100.0 * corrects / batch / (idx + 1)
sys.stdout.write('\rEpoch[{}] Batch[{}] - loss: {:.4f} acc: {:.2f}%({}/{}) tempreture: {}'.format(
epoch, idx, loss.item(), accuracy, corrects, batch * (idx + 1), int(args.t)))
args.t = args.t + 1 # annealing
if epoch % 5 != 0:
continue
'''
try:
set_scale = [parameter.grad.data.std().item() for parameter in model.parameters()]
set_scale = [scale / max(set_scale) for scale in set_scale] # normalize
except:
set_scale = [parameter.data.std().item() for parameter in model.parameters()]
set_scale = [scale / max(set_scale) for scale in set_scale] # normalize
'''
save(model, args.save_dir, epoch)
print()
eval(X_valid, y_valid, model, 'Validation', args)
eval(X_test, y_test, model, 'Testing ', args)
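# --- Editor's sketch (illustrative, not from the original file): the SGLD-style
# update performed inside the training loop above, isolated on a single parameter
# tensor. `epsilon` is the decayed step size and `t` the annealing temperature.
def sgld_step(param, grad, epsilon, t):
    noise = torch.randn_like(param) * np.sqrt(epsilon / t)
    return param - epsilon / 2 * grad + noise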
def eval(X, y, model, term, args):
model.eval()
corrects, TP, avg_loss = 0, 0, 0
    correct_part, total_part = {0.2:0, 0.4:0}, {0.2:1e-16, 0.4:1e-16}
batch = args.batch_size
for idx in range(int(X.shape[0]/batch) + 1):
feature = torch.LongTensor(X[(idx*batch):(idx*batch+batch),])
target = torch.LongTensor(y[(idx*batch):(idx*batch+batch)])
if args.cuda:
feature, target = feature.cuda(), target.cuda()
        logit = model(feature)
loss = F.cross_entropy(logit, target, size_average=False)
avg_loss += loss.data.item()
predictor = torch.exp(logit[:, 1]) / (torch.exp(logit[:, 0]) + torch.exp(logit[:, 1]))
for xnum in range(1, 3):
thres = round(0.2 * xnum, 1)
idx_thres = (predictor > 0.5 + thres) + (predictor < 0.5 - thres)
correct_part[thres] += (torch.max(logit, 1)[1][idx_thres] == target.data[idx_thres]).sum().item()
total_part[thres] += idx_thres.sum().item()
corrects += (torch.max(logit, 1)[1] == target.data).sum().item()
TP += (((torch.max(logit, 1)[1] == target.data).int() + (torch.max(logit, 1)[1]).int()) == 2).sum().item()
size = y.shape[0]
avg_loss /= size
accuracy = 100.0 * corrects / size
# TP, TN: True Positive/True Negative
print(' {} - loss: {:.4f} acc: {:.2f}%({}/{}) {:.2f}%({}/{}) {:.2f}%({}/{}) TP/TN: ({}/{}) \n'.format(term,
avg_loss, accuracy, corrects, size, 100.0 * correct_part[0.2] / total_part[0.2], correct_part[0.2], int(total_part[0.2]),
100.0 * correct_part[0.4] / total_part[0.4], correct_part[0.4], int(total_part[0.4]), TP, corrects - TP))
return accuracy
def bma_eval(X, y, mymodels, term, args):
corrects, TP, avg_loss = 0, 0, 0
correct_part, total_part = {0.2:0, 0.4:0}, {0.2:1e-16,0.4:1e-16}
batch = args.batch_size
for model in mymodels:
model.eval()
for idx in range(int(X.shape[0]/batch) + 1):
feature = torch.LongTensor(X[(idx*batch):(idx*batch+batch),])
target = torch.LongTensor(y[(idx*batch):(idx*batch+batch)])
if args.cuda:
feature, target = feature.cuda(), target.cuda()
logit = model(feature)
loss = F.cross_entropy(logit, target, size_average=False)
avg_loss += loss.data.item() / (len(mymodels) * 1.0)
predictor = torch.exp(logit[:, 1]) / (torch.exp(logit[:, 0]) + torch.exp(logit[:, 1]))
for xnum in range(1, 3):
thres = round(0.2 * xnum, 1)
idx_thres = (predictor > 0.5 + thres) + (predictor < 0.5 - thres)
correct_part[thres] += (torch.max(logit, 1)[1][idx_thres] == target.data[idx_thres]).sum().item() / (len(mymodels) * 1.0)
total_part[thres] += idx_thres.sum().item() / (len(mymodels) * 1.0)
corrects += (torch.max(logit, 1)[1] == target.data).sum().item() / (len(mymodels) * 1.0)
TP += (((torch.max(logit, 1)[1] == target.data).int() + (torch.max(logit, 1)[1]).int()) == 2).sum().item()
size = y.shape[0]
avg_loss /= size
accuracy = 100.0 * corrects / size
TP = TP * 1.0 / (len(mymodels) * 1.0)
print(' {} - loss: {:.4f} acc: {:.2f}%({}/{}) {:.2f}%({}/{}) {:.2f}%({}/{}) TP/TN: ({}/{}) \n'.format(term,
avg_loss, accuracy, corrects, size, 100.0 * correct_part[0.2] / total_part[0.2], correct_part[0.2], int(total_part[0.2]),
100.0 * correct_part[0.4] / total_part[0.4], correct_part[0.4], int(total_part[0.4]), TP, corrects - TP))
return accuracy
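# --- Editor's sketch (illustrative): the model-averaged class-1 probability that
# bma_eval above averages implicitly, written out for a single feature batch.
def bma_probability(feature, mymodels):
    probs = []
    for model in mymodels:
        model.eval()
        logit = model(feature)
        probs.append(torch.exp(logit[:, 1]) / (torch.exp(logit[:, 0]) + torch.exp(logit[:, 1])))
    return torch.stack(probs).mean(dim=0)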
def predictor_preprocess(cnn, args):
# load trained thinning samples (Bayesian CNN models) from input/models/
mymodels = []
for num, each_model in enumerate(os.listdir(args.save_dir)):
print(args.save_dir + each_model)
if args.cuda:
cnn.load_state_dict(torch.load(args.save_dir + each_model))
else:
cnn.load_state_dict(torch.load(args.save_dir + each_model, map_location=lambda storage, loc: storage))
mymodels.append(copy.deepcopy(cnn))
if num > 30: # in case memory overloads
break
with open('./input/word2idx', 'r') as file:
word2idx = json.load(file)
stopWords = set()
with open('./input/stopWords') as file:
for word in file:
stopWords.add(word.strip())
return(mymodels, word2idx, stopWords)
def predict(sentence, mymodels, word2idx, stopWords, args):
tokens = tokenize_news(sentence, stopWords)
tokens = [word2idx[t] if t in word2idx else word2idx['UNKNOWN'] for t in tokens]
if len(tokens) < 5 or tokens == [word2idx['UNKNOWN']] * len(tokens): # tokens cannot be too short or unknown
signal = 'Unknown'
else:
feature = torch.LongTensor([tokens])
logits = []
for model in mymodels:
model.eval()
if args.cuda:
feature = feature.cuda()
logit = model(feature)
predictor = torch.exp(logit[:, 1]) / (torch.exp(logit[:, 0]) + torch.exp(logit[:, 1]))
logits.append(predictor.item())
signal = signals(np.mean(logits))
return(signal)
de
|
alexpilotti/python-keystoneclient
|
keystoneclient/middleware/s3_token.py
|
Python
|
apache-2.0
| 10,573
| 0
|
# Copyright 2012 OpenStack Foundation
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011,2012 Akira YOSHIYAMA <[email protected]>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This source code is based ./auth_token.py and ./ec2_token.py.
# See them for their copyright.
"""
S3 TOKEN MIDDLEWARE
This WSGI component:
* Get a request from the swift3 middleware with an S3 Authorization
access key.
* Validate s3 token in Keystone.
* Transform the account name to AUTH_%(tenant_name).
"""
import logging
from oslo.serialization import jsonutils
import requests
import six
from six.moves import urllib
import webob
PROTOCOL_NAME = 'S3 Token Authentication'
# TODO(kun): remove it after oslo merge this.
def split_path(path, minsegs=1, maxsegs=None, rest_with_last=False):
"""Validate and split the given HTTP request path.
**Examples**::
['a'] = split_path('/a')
['a', None] = split_path('/a', 1, 2)
['a', 'c'] = split_path('/a/c', 1, 2)
['a', 'c', 'o/r'] = split_path('/a/c/o/r', 1, 3, True)
:param path: HTTP Request path to be split
:param minsegs: Minimum number of segments to be extracted
:param maxsegs: Maximum number of segments to be extracted
:param rest_with_last: If True, trailing data will be returned as part
of last segment. If False, and there is
trailing data, raises ValueError.
:returns: list of segments with a length of maxsegs (non-existent
segments will return as None)
:raises: ValueError if given an invalid path
"""
if not maxsegs:
maxsegs = minsegs
if minsegs > maxsegs:
raise ValueError('minsegs > maxsegs: %d > %d' % (minsegs, maxsegs))
if rest_with_last:
segs = path.split('/', maxsegs)
minsegs += 1
maxsegs += 1
count = len(segs)
if (segs[0] or count < minsegs or count > maxsegs or
'' in segs[1:minsegs]):
raise ValueError('Invalid path: %s' % urllib.parse.quote(path))
else:
minsegs += 1
maxsegs += 1
segs = path.split('/', maxsegs)
count = len(segs)
if (segs[0] or count < minsegs or count > maxsegs + 1 or
'' in segs[1:minsegs] or
(count == maxsegs + 1 and segs[maxsegs])):
raise ValueError('Invalid path: %s' % urllib.parse.quote(path))
segs = segs[1:maxsegs]
segs.extend([None] * (maxsegs - 1 - len(segs)))
return segs
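# --- Editor's note (illustrative): __call__ below relies on the 4-segment form, e.g.
#   version, account, container, obj = split_path('/v1/AUTH_acct/cont/obj', 1, 4, True)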
class ServiceError(Exception):
pass
class S3Token(object):
"""Auth Middleware that handles S3 authenticating client calls."""
    def __init__(self, app, conf):
        """Common initialization code."""
self.app = app
self.logger = logging.getLogger(conf.get('log_name', __name__))
self.logger.debug('Starting the %s component', PROTOCOL_NAME)
self.logger.warning(
'This middleware module is deprecated as of v0.11.0 in favor of '
            'keystonemiddleware.s3_token - please update your WSGI pipeline '
'to reference the new middleware package.')
self.reseller_prefix = conf.get('reseller_prefix', 'AUTH_')
# where to find the auth service (we use this to validate tokens)
auth_host = conf.get('auth_host')
auth_port = int(conf.get('auth_port', 35357))
auth_protocol = conf.get('auth_protocol', 'https')
self.request_uri = '%s://%s:%s' % (auth_protocol, auth_host, auth_port)
# SSL
insecure = conf.get('insecure', False)
cert_file = conf.get('certfile')
key_file = conf.get('keyfile')
if insecure:
self.verify = False
elif cert_file and key_file:
self.verify = (cert_file, key_file)
elif cert_file:
self.verify = cert_file
else:
self.verify = None
def deny_request(self, code):
error_table = {
'AccessDenied': (401, 'Access denied'),
'InvalidURI': (400, 'Could not parse the specified URI'),
}
resp = webob.Response(content_type='text/xml')
resp.status = error_table[code][0]
error_msg = ('<?xml version="1.0" encoding="UTF-8"?>\r\n'
'<Error>\r\n <Code>%s</Code>\r\n '
'<Message>%s</Message>\r\n</Error>\r\n' %
(code, error_table[code][1]))
if six.PY3:
error_msg = error_msg.encode()
resp.body = error_msg
return resp
def _json_request(self, creds_json):
headers = {'Content-Type': 'application/json'}
try:
response = requests.post('%s/v2.0/s3tokens' % self.request_uri,
headers=headers, data=creds_json,
verify=self.verify)
except requests.exceptions.RequestException as e:
self.logger.info('HTTP connection exception: %s', e)
resp = self.deny_request('InvalidURI')
raise ServiceError(resp)
if response.status_code < 200 or response.status_code >= 300:
self.logger.debug('Keystone reply error: status=%s reason=%s',
response.status_code, response.reason)
resp = self.deny_request('AccessDenied')
raise ServiceError(resp)
return response
def __call__(self, environ, start_response):
"""Handle incoming request. authenticate and send downstream."""
req = webob.Request(environ)
self.logger.debug('Calling S3Token middleware.')
try:
parts = split_path(req.path, 1, 4, True)
version, account, container, obj = parts
except ValueError:
msg = 'Not a path query, skipping.'
self.logger.debug(msg)
return self.app(environ, start_response)
# Read request signature and access id.
if 'Authorization' not in req.headers:
msg = 'No Authorization header. skipping.'
self.logger.debug(msg)
return self.app(environ, start_response)
token = req.headers.get('X-Auth-Token',
req.headers.get('X-Storage-Token'))
if not token:
msg = 'You did not specify an auth or a storage token. skipping.'
self.logger.debug(msg)
return self.app(environ, start_response)
auth_header = req.headers['Authorization']
try:
access, signature = auth_header.split(' ')[-1].rsplit(':', 1)
except ValueError:
msg = 'You have an invalid Authorization header: %s'
self.logger.debug(msg, auth_header)
return self.deny_request('InvalidURI')(environ, start_response)
# NOTE(chmou): This is to handle the special case with nova
# when we have the option s3_affix_tenant. We will force it to
# connect to another account than the one
# authenticated. Before people start getting worried about
# security, I should point that we are connecting with
# username/token specified by the user but instead of
# connecting to its own account we will force it to go to an
# another account. In a normal scenario if that user don't
# have the reseller right it will just fail but since the
# reseller account can connect to every account it is allowed
# by the swift_auth middleware.
force_tenant = None
if ':' in access:
acce
|
karthik-sethuraman/ONFOpenTransport
|
RI/flask_server/tapi_server/models/tapi_oam_mip_ref.py
|
Python
|
apache-2.0
| 2,530
| 0.000395
|
# coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from tapi_server.models.base_model_ import Model
from tapi_server.models.tapi_oam_meg_ref import TapiOamMegRef # noqa: F401,E501
from tapi_server import util
class TapiOamMipRef(Model):
"""NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
Do not edit the class manually.
"""
    def __init__(self, meg_uuid=None, mip_local_id=None):  # noqa: E501
"""TapiOamMipRef - a model defined in OpenAPI
        :param meg_uuid: The meg_uuid of this TapiOamMipRef.  # noqa: E501
:type meg_uuid: str
:param mip_local_id: The mip_local_id of this TapiOamMipRef. # noqa: E501
:type mip_local_id: str
"""
self.openapi_types = {
'meg_uuid': str,
'mip_local_id': str
}
self.attribute_map = {
'meg_uuid': 'meg-uuid',
'mip_local_id': 'mip-local-id'
}
self._meg_uuid = meg_uuid
self._mip_local_id = mip_local_id
@classmethod
def from_dict(cls, dikt) -> 'TapiOamMipRef':
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The tapi.oam.MipRef of this TapiOamMipRef. # noqa: E501
:rtype: TapiOamMipRef
"""
return util.deserialize_model(dikt, cls)
@property
def meg_uuid(self):
"""Gets the meg_uuid of this TapiOamMipRef.
none # noqa: E501
:return: The meg_uuid of this TapiOamMipRef.
:rtype: str
"""
return self._meg_uuid
@meg_uuid.setter
def meg_uuid(self, meg_uuid):
"""Sets the meg_uuid of this TapiOamMipRef.
none # noqa: E501
:param meg_uuid: The meg_uuid of this TapiOamMipRef.
:type meg_uuid: str
"""
self._meg_uuid = meg_uuid
@property
def mip_local_id(self):
"""Gets the mip_local_id of this TapiOamMipRef.
none # noqa: E501
:return: The mip_local_id of this TapiOamMipRef.
:rtype: str
"""
return self._mip_local_id
@mip_local_id.setter
def mip_local_id(self, mip_local_id):
"""Sets the mip_local_id of this TapiOamMipRef.
none # noqa: E501
:param mip_local_id: The mip_local_id of this TapiOamMipRef.
:type mip_local_id: str
"""
self._mip_local_id = mip_local_id
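# --- Editor's usage sketch (illustrative; assumes the generated tapi_server
# package is importable). The attribute_map above means dict keys are hyphenated:
#   mip_ref = TapiOamMipRef.from_dict({'meg-uuid': 'meg-1', 'mip-local-id': 'mip-7'})
#   print(mip_ref.meg_uuid, mip_ref.mip_local_id)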
|
coder0xff/Plange
|
documentation/syntax-cgi.py
|
Python
|
bsd-3-clause
| 5,936
| 0.012298
|
#!/usr/bin/python
import yaml
import pprint
import os
import pdb
import re
import cgi
import codecs
import sys
import cgitb
cgitb.enable()
if (sys.stdout.encoding is None):
print >> sys.stderr, "please set python env PYTHONIOENCODING=UTF-8, example: export PYTHONIOENCODING=UTF-8, when write to stdout."
exit(1)
specsFile = open('../source/syntax.yml')
specs = yaml.safe_load(specsFile)
specsFile.close()
htmlTest = re.compile("(?i)<\/?\w+((\s+\w+(\s*=\s*(?:\".*?\"|'.*?'|[^'\">\s]+))?)+\s*|\s*)\/?>")
def paragraphy(text):
if htmlTest.match(text):
return text
else:
return "\t\t<p>" + cgi.escape(text).strip() + "\n\t\t</p>\n"
def loadExample(example):
if ("annotation" in example):
result = paragraphy(example["annotation"])
else:
result = ""
result = result + "\t\t<div class=\"code2\">\n\t\t\t<p>Example</p>\n\t\t\t<pre>\n"
if type(example) is str or type(example) is unicode:
result = result + cgi.escape(example).strip()
elif type(example) is dict:
if ("example" in example):
result += cgi.escape(example["example"]).strip()
else:
raise ValueError("every entry must contain an example element")
else:
raise ValueError("unrecognized type for example data")
result = result + "\n</pre>\n\t\t</div>"
return result
indexPageContents = "<meta charset='utf-8'/>\n<meta name=\"viewport\" content=\"width=device-width, initial-scale=0.6\">\n<html>\n\t<head>\n\t\t<title>Syntax Listing - Plange</title>\n\t\t<link rel=StyleSheet href='../css/general.css' type='text/css' />\n\t</head>\n\t<body>\n\t\t<?php require('../header.php') ?>\n\n\n\t\t<p>This page is generated from the <a href='/source/syntax.yml'>syntax specification</a>. Tags, identities, and all {IC} (optional whitespace and comment regions) are filtered from the syntax display to improve readability. See the specification for the full grammar specification. The root production of the grammar is \"STATEMENT_SCOPE\".</p>\n\t\t<h2>Subpage Listing</h2>\n\t\t<table>\n"
names = specs.keys()
names.sort()
regexs = {name: re.compile("\\b" + name + "\\b") for name in names}
stripRegex = re.compile("(\\$)|(%[_\w0-9]+)|(\\{IC\\})")
openParenSpaceRegex = re.compile("\\( ")
openBraceSpaceRegex = re.compile("\\{ ")
openBracketSpaceREgex = re.compile("\\[ ")
spaceCloseParenRegex = re.compile(" \\)")
spaceCloseBraceRegex = re.compile(" \\}")
spaceCloseBracketRegex = re.compile(" \\]")
def simplifySyntaxString(syntax):
syntax = cgi.escape(syntax).strip()
syntax = stripRegex.sub("", syntax)
syntax = openParenSpaceRegex.sub("(", syntax)
syntax = openBraceSpaceRegex.sub("{", syntax)
syntax = openBracketSpaceREgex.sub("[", syntax)
syntax = spaceCloseParenRegex.sub(")", syntax)
syntax = spaceCloseBraceRegex.sub("}", syntax)
syntax = spaceCloseBracketRegex.sub("]", syntax)
syntax = spaceCloseBracketRegex.sub("]", syntax)
return syntax
def simplifySyntaxStringAddAnchors(syntax):
syntax = simplifySyntaxString(syntax)
for refName in names:
if refName == name:
continue
syntax = regexs[refName].sub("<a href=\"/documentation/syntax.php#" + refName + "\">" + refName + "</a>", syntax)
return syntax
def simplifySyntaxStringAddLinks(syntax):
syntax = simplifySyntaxString(syntax)
for refName in names:
if refName == name:
continue
syntax = regexs[refName].sub("<a href=\"/documentation/syntax.php?name=" + refName + "\">" + refName + "</a>", syntax)
return syntax
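# --- Editor's note (illustrative): quick manual check of the cleanup rules above;
# simplifySyntaxString drops '$' markers, '%identity' tags and '{IC}' regions and
# tightens spacing inside brackets, e.g.
#   print simplifySyntaxString("$ADD {IC} EXPRESSION {IC} '+' {IC} EXPRESSION")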
if len(sys.argv) == 1: # output the table for the syntax listing page
print "\n\t\t<table>\n"
for name in names:
details = specs[name]
syntaxString = simplifySyntaxStringAddAnchors(details["syntax"])
print "\t\t\t<tr>\n"
print "\t\t\t\t<td><a id=\"" + name + "\" href=\"/documentation/syntax.php?name=" + name + "\">" + name + "</a></td>\n"
if "doc" in details:
print "\t\t\t\t<td>" + details["doc"].strip() + "</td>\n"
else:
print "\t\t\t\t<td>no doc string</td>\n"
print "\t\t\t\t<td>" + syntaxString + "</td>\n"
print "\t\t\t</tr>\n"
print "\t\t</table>\n"
else:
name = sys.argv[1]
details = specs[name]
if "doc" in details:
print "\t\t<p>" + details["doc"].strip() + "</p>\n\n"
	if "syntax" in details:
syntaxString = simplifySyntaxStringAddLinks(details["syntax"])
title = "syntax"
if "assoc" in details:
title = title + " (associativity: " + details["assoc"] + ")"
print "\t\t<div class=\"syntax\">\n\t\t\t<p>" + title + "</p>\n\t\t\t<div>" + syntaxString + "</div>\n\t\t</div>\n"
else:
		raise ValueError("every entry must contain a syntax element")
if "example" in details:
print loadExample(details["example"])
if "examples" in details:
for example in details["examples"]:
print loadExample(example)
if "notes" in details:
print "\t\t<h2>Notes</h2>\n\t\t" + paragraphy(details["notes"])
if "see" in details:
print "\t\t<p>See:"
for i in details["see"]:
print " <a href=\"syntax.php?name=" + i + "\">" + i + "</a>"
print "\n\t\t</p>\n"
|
tensorflow/docs
|
tools/tensorflow_docs/api_generator/doc_controls.py
|
Python
|
apache-2.0
| 12,723
| 0.006445
|
# Lint as: python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Documentation control decorators."""
from typing import Iterable, Optional, TypeVar
T = TypeVar("T")
_DEPRECATED = "_tf_docs_deprecated"
def set_deprecated(obj: T) -> T:
"""Explicitly tag an object as deprecated for the doc generator."""
setattr(obj, _DEPRECATED, None)
return obj
def is_deprecated(obj) -> bool:
return hasattr(obj, _DEPRECATED)
_INHERITABLE_HEADER = "_tf_docs_inheritable_header"
def inheritable_header(text: str):
def _wrapped(cls):
setattr(cls, _INHERITABLE_HEADER, text)
return cls
return _wrapped
def get_inheritable_header(cls) -> Optional[str]:
return getattr(cls, _INHERITABLE_HEADER, None)
_NO_SEARCH_HINTS = "_tf_docs_no_search_hints"
def hide_from_search(obj: T) -> T:
  """Marks an object so metadata search hints will not be included on its page.
The page is set to "noindex" to hide it from search.
Note: This only makes sense to apply to functions, classes and modules.
Constants, and methods do not get their own pages.
Args:
obj: the object to hide.
Returns:
The object.
"""
setattr(obj, _NO_SEARCH_HINTS, None)
return obj
def should_hide_from_search(obj) -> bool:
"""Returns true if metadata search hints should not be included."""
return hasattr(obj, _NO_SEARCH_HINTS)
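# --- Editor's usage sketch (illustrative): every decorator in this module follows
# the same pattern - set a sentinel attribute, then test for it with hasattr():
#   @hide_from_search
#   def experimental_helper():
#     pass
#   assert should_hide_from_search(experimental_helper)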
_CUSTOM_PAGE_BUILDER_CLS = "_tf_docs_custom_page_builder_cls"
def set_custom_page_builder_cls(obj, cls):
"""Replace most of the generated page with custom content."""
setattr(obj, _CUSTOM_PAGE_BUILDER_CLS, cls)
def get_custom_page_builder_cls(obj):
"""Gets custom page content if available."""
return getattr(obj, _CUSTOM_PAGE_BUILDER_CLS, None)
_DO_NOT_DOC = "_tf_docs_do_not_document"
def do_not_generate_docs(obj: T) -> T:
"""A decorator: Do not generate docs for this object.
For example the following classes:
```
class Parent(object):
def method1(self):
pass
def method2(self):
pass
class Child(Parent):
def method1(self):
pass
def method2(self):
pass
```
Produce the following api_docs:
```
/Parent.md
# method1
# method2
/Child.md
# method1
# method2
```
This decorator allows you to skip classes or methods:
```
@do_not_generate_docs
class Parent(object):
def method1(self):
pass
def method2(self):
pass
class Child(Parent):
@do_not_generate_docs
def method1(self):
pass
def method2(self):
pass
```
This will only produce the following docs:
```
/Child.md
# method2
```
Note: This is implemented by adding a hidden attribute on the object, so it
cannot be used on objects which do not allow new attributes to be added. So
this decorator must go *below* `@property`, `@classmethod`,
or `@staticmethod`:
```
class Example(object):
@property
@do_not_generate_docs
def x(self):
return self._x
```
Args:
obj: The object to hide from the generated docs.
Returns:
obj
"""
setattr(obj, _DO_NOT_DOC, None)
return obj
_DO_NOT_DOC_INHERITABLE = "_tf_docs_do_not_doc_inheritable"
def do_not_doc_inheritable(obj: T) -> T:
"""A decorator: Do not generate docs for this method.
This version of the decorator is "inherited" by subclasses. No docs will be
generated for the decorated method in any subclass. Even if the sub-class
overrides the method.
For example, to ensure that `method1` is **never documented** use this
decorator on the base-class:
```
class Parent(object):
@do_not_doc_inheritable
def method1(self):
pass
def method2(self):
pass
class Child(Parent):
def method1(self):
pass
def method2(self):
pass
```
This will produce the following docs:
```
/Parent.md
# method2
/Child.md
# method2
```
  When generating docs for a class's attributes, the `__mro__` is searched and
the attribute will be skipped if this decorator is detected on the attribute
on any class in the `__mro__`.
Note: This is implemented by adding a hidden attribute on the object, so it
cannot be used on objects which do not allow new attributes to be added. So
this decorator must go *below* `@property`, `@classmethod`,
or `@staticmethod`:
```
class Example(object):
@property
@do_not_doc_inheritable
def x(self):
return self._x
```
Args:
obj: The class-attribute to hide from the generated docs.
Returns:
obj
"""
setattr(obj, _DO_NOT_DOC_INHERITABLE, None)
return obj
_FOR_SUBCLASS_IMPLEMENTERS = "_tf_docs_tools_for_subclass_implementers"
def for_subclass_implementers(obj: T) -> T:
"""A decorator: Only generate docs for this method in the defining class.
  Also group this method's docs with an `@abstractmethod` in the class's docs.
  No docs will be generated for this class attribute in sub-classes.
The canonical use case for this is `tf.keras.layers.Layer.call`: It's a
public method, essential for anyone implementing a subclass, but it should
never be called directly.
Works on method, or other class-attributes.
  When generating docs for a class's attributes, the `__mro__` is searched and
the attribute will be skipped if this decorator is detected on the attribute
on any **parent** class in the `__mro__`.
For example:
```
class Parent(object):
@for_subclass_implementers
def method1(self):
pass
def method2(self):
pass
class Child1(Parent):
def method1(self):
pass
def method2(self):
pass
class Child2(Parent):
def method1(self):
pass
def method2(self):
pass
```
This will produce the following docs:
```
/Parent.md
# method1
# method2
/Child1.md
# method2
/Child2.md
# method2
```
Note: This is implemented by adding a hidden attribute on the object, so it
cannot be used on objects which do not allow new attributes to be added. So
this decorator must go *below* `@property`, `@classmethod`,
or `@staticmethod`:
```
class Example(object):
@property
@for_subclass_implementers
def x(self):
return self._x
```
Args:
obj: The class-attribute to hide from the generated docs.
Returns:
obj
"""
setattr(obj, _FOR_SUBCLASS_IMPLEMENTERS, None)
return obj
do_not_doc_in_subclasses = for_subclass_implementers
_DOC_PRIVATE = "_tf_docs_doc_private"
def doc_private(obj: T) -> T:
"""A decorator: Generates docs for private methods/functions.
For example:
```
class Try:
@doc_controls.doc_private
def _private(self):
...
```
As a rule of thumb, private(beginning with `_`) methods/functions are
not documented.
  This decorator allows you to force documentation of a private method/function.
Args:
obj: The class-attribute to hide from the generated docs.
Returns:
obj
"""
setattr(obj, _DOC_PRIVATE, None)
return obj
def should_doc_private(obj) -> bool:
return hasattr(obj, _DOC_PRIVATE)
_DOC_IN_CURRENT_AND_SUBCLASSES = "_tf_docs_doc_in_current_and_subclasses"
def doc_in_current_and_subclasses(obj: T) -> T:
"""Overrides `do_not_doc_in_subclasses` decorator.
If this decorator is set on a child class's method whose parent's method
  contains `do_not_doc_in_subclasses`, then that will be overridden and the
  child method will get documented. All classes inheriting from the child will
also document that method.
For example:
```
class Parent:
@do_not_doc_in_subclas
|
kengz/python-structure
|
setup.py
|
Python
|
mit
| 976
| 0.004098
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
from structure import __version__
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# the setup
setup(
name='structure',
version=__version__,
    description='A demonstration of PyPI.',
# long_description=read('README'),
url='https://github.com/kengz/structure',
author='kengz',
author_email='[email protected]',
license='MIT',
    keywords='example pypi tutorial',
packages=find_packages(exclude=('docs', 'tests', 'env', 'index.py')),
include_package_data=True,
install_requires=[
],
extras_require={
'dev': [],
'docs': [],
'testing': [],
},
classifiers=[],
)
|
amitsela/incubator-beam
|
sdks/python/apache_beam/io/localfilesystem.py
|
Python
|
apache-2.0
| 8,015
| 0.005989
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Local File system implementation for accessing files on disk."""
from __future__ import absolute_import
import glob
import os
import shutil
from apache_beam.io.filesystem import BeamIOError
from apache_beam.io.filesystem import CompressedFile
from apache_beam.io.filesystem import CompressionTypes
from apache_beam.io.filesystem import FileMetadata
from apache_beam.io.filesystem import FileSystem
from apache_beam.io.filesystem import MatchResult
class LocalFileSystem(FileSystem):
"""A Local ``FileSystem`` implementation for accessing files on disk.
"""
def mkdirs(self, path):
"""Recursively create directories for the provided path.
Args:
path: string path of the directory structure that should be created
Raises:
IOError if leaf directory already exists.
"""
try:
os.makedirs(path)
except OSError as err:
raise IOError(err)
def match(self, patterns, limits=None):
"""Find all matching paths to the pattern provided.
Args:
patterns: list of string for the file path pattern to match against
limits: list of maximum number of responses that need to be fetched
Returns: list of ``MatchResult`` objects.
Raises:
``BeamIOError`` if any of the pattern match operations fail
"""
if limits is None:
limits = [None] * len(patterns)
else:
err_msg = "Patterns and limits should be equal in length"
assert len(patterns) == len(limits), err_msg
def _match(pattern, limit):
"""Find all matching paths to the pattern provided.
"""
files = glob.glob(pattern)
metadata = [FileMetadata(f, os.path.getsize(f)) for f in files[:limit]]
return MatchResult(pattern, metadata)
exceptions = {}
result = []
for pattern, limit in zip(patterns, limits):
try:
result.append(_match(pattern, limit))
except Exception as e: # pylint: disable=broad-except
exceptions[pattern] = e
if exceptions:
raise BeamIOError("Match operation failed", exceptions)
return result
def _path_open(self, path, mode, mime_type='application/octet-stream',
compression_type=CompressionTypes.AUTO):
"""Helper functions to open a file in the provided mode.
"""
compression_type = FileSystem._get_compression_type(path, compression_type)
raw_file = open(path, mode)
if compression_type == CompressionTypes.UNCOMPRESSED:
return raw_file
else:
return CompressedFile(raw_file, compression_type=compression_type)
def create(self, path, mime_type='application/octet-stream',
compression_type=CompressionTypes.AUTO):
"""Returns a write channel for the given file path.
Args:
path: string path of the file object to be written to the system
mime_type: MIME type to specify the type of content in the file object
compression_type: Type of compression to be used for this object
Returns: file handle with a close function for the user to use
"""
return self._path_open(path, 'wb', mime_type, compression_type)
def open(self, path, mime_type='application/octet-stream',
compression_type=CompressionTypes.AUTO):
"""Returns a read channel for the given file path.
Args:
path: string path of the file object to be written to the system
mime_type: MIME type to specify the type of content in the file object
compression_type: Type of compression to be used for this object
Returns: file handle with a close function for the user to use
"""
return self._path_open(path, 'rb', mime_type, compression_type)
def copy(self, source_file_names, destination_file_names):
"""Recursively copy the file tree from the source to the destination
Args:
source_file_names: list of source file objects that needs to be copied
destination_file_names: list of destination of the new object
Raises:
``BeamIOError`` if any of the copy operations fail
"""
err_msg = ("source_file_names and destination_file_names should "
"be equal in length")
assert len(source_file_names) == len(destination_file_names), err_msg
def _copy_path(source, destination):
"""Recursively copy the file tree from the source to the destination
"""
try:
if os.path.exists(destination):
if os.path.isdir(destination):
shutil.rmtree(destination)
else:
os.remove(destination)
if os.path.isdir(source):
shutil.copytree(source, destination)
else:
shutil.copy2(source, destination)
except OSError as err:
raise IOError(err)
exceptions = {}
for source, destination in zip(source_file_names, destination_file_names):
try:
_copy_path(source, destination)
except Exception as e: # pylint: disable=broad-except
exceptions[(source, destination)] = e
if exceptions:
raise BeamIOError("Copy operation failed", exceptions)
def rename(self, source_file_names, destination_file_names):
"""Rename the files at the source list to the destination list.
Source and destination lists should be of the same size.
Args:
source_file_names: List of file paths that need to be moved
destination_file_names: List of destination_file_names for the files
Raises:
``BeamIOError`` if any of the rename operations fail
"""
err_msg = ("source_file_names and destination_file_names should "
"be equal in length")
assert len(source_file_names) == len(destination_file_names), err_msg
def _rename_file(source, destination):
"""Rename a single file object"""
try:
os.rename(source, destination)
except OSError as err:
raise IOError(err)
exceptions = {}
for source, destination in zip(source_file_names, destination_file_names):
try:
_rename_file(source, destination)
except Exception as e: # pylint: disable=broad-except
exceptions[(source, destination)] = e
if exceptions:
      raise BeamIOError("Rename operation failed", exceptions)
def exists(self, path):
"""Check if the provided path exists on the FileSystem.
Args:
path: string path that needs to be checked.
Returns: boolean flag indicating if path exists
"""
return os.path.exists(path)
  def delete(self, paths):
    """Deletes files or directories at the provided paths.
Directories will be deleted recursively.
Args:
paths: list of paths that give the file objects to be deleted
Raises:
``BeamIOError`` if any of the delete operations fail
"""
def _delete_path(path):
"""Recursively delete the file or directory at the provided path.
"""
try:
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
except OSError as err:
raise IOError(err)
exceptions = {}
for path in paths:
try:
_delete_path(path)
except Exception as e: # pylint: disable=broad-except
exceptions[path] = e
if exceptions:
raise BeamIOError("Delete operation failed", exceptions)
|
repotvsupertuga/tvsupertuga.repository
|
script.module.openscrapers/lib/openscrapers/sources_openscrapers/de/tata.py
|
Python
|
gpl-2.0
| 8,804
| 0.005793
|
# -*- coding: UTF-8 -*-
# ..#######.########.#######.##....#..######..######.########....###...########.#######.########..######.
# .##.....#.##.....#.##......###...#.##....#.##....#.##.....#...##.##..##.....#.##......##.....#.##....##
# .##.....#.##.....#.##......####..#.##......##......##.....#..##...##.##.....#.##......##.....#.##......
# .##.....#.########.######..##.##.#..######.##......########.##.....#.########.######..########..######.
# .##.....#.##.......##......##..###.......#.##......##...##..########.##.......##......##...##........##
# .##.....#.##.......##......##...##.##....#.##....#.##....##.##.....#.##.......##......##....##.##....##
# ..#######.##.......#######.##....#..######..######.##.....#.##.....#.##.......#######.##.....#..######.
#######################################################################
# ----------------------------------------------------------------------------
# "THE BEER-WARE LICENSE" (Revision 42):
# @Daddy_Blamo wrote this file. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return. - Muad'Dib
# ----------------------------------------------------------------------------
#######################################################################
# Addon Name: Placenta
# Addon id: plugin.video.placenta
# Addon Provider: Mr.Blamo
import base64
import json
import re
import urllib
import urlparse
from openscrapers.modules import cleantitle
from openscrapers.modules import client
from openscrapers.modules import directstream
from openscrapers.modules import dom_parser
from openscrapers.modules import source_utils
class source:
def __init__(self):
self.priority = 1
self.language = ['de']
self.domains = ['tata.to']
self.base_link = 'http://tata.to'
self.search_link = '/filme?suche=%s&type=alle'
self.ajax_link = '/ajax/stream/%s'
def movie(self, imdb, title, localtitle, aliases, year):
try:
url = self.__search_movie(imdb, year)
return url if url else None
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'localtvshowtitle': localtvshowtitle,
'aliases': aliases, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
if not url:
return
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
tvshowtitle = data['tvshowtitle']
localtvshowtitle = data['localtvshowtitle']
aliases = source_utils.aliases_to_array(eval(data['aliases']))
year = re.findall('(\d{4})', premiered)
year = year[0] if year else data['year']
url = self.__search([localtvshowtitle] + aliases, year, season, episode)
if not url and tvshowtitle != localtvshowtitle:
url = self.__search([tvshowtitle] + aliases, year, season, episode)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
sources = []
try:
if not url:
return sources
ref = urlparse.urljoin(self.base_link, url)
url = urlparse.urljoin(self.base_link, self.ajax_link % re.findall('-(\w+)$', ref)[0])
headers = {'Referer': ref, 'User-Agent': client.randomagent()}
result = client.request(url, headers=headers, post='')
result = base64.decodestring(result)
result = json.loads(result).get('playinfo', [])
if isinstance(result, basestring):
result = result.replace('embed.html', 'index.m3u8')
base_url = re.sub('index\.m3u8\?token=[\w\-]+[^/$]*', '', result)
r = client.request(result, headers=headers)
r = [(i[0], i[1]) for i in
re.findall('#EXT-X-STREAM-INF:.*?RESOLUTION=\d+x(\d+)[^\n]+\n([^\n]+)', r, re.DOTALL) if i]
r = [(source_utils.label_to_quality(i[0]), i[1] + source_utils.append_headers(headers)) for i in r]
r = [{'quality': i[0], 'url': base_url + i[1]} for i in r]
for i in r: sources.append(
{'source': 'CDN', 'quality': i['quality'], 'language': 'de', 'url': i['url'], 'direct': True,
'debridonly': False})
elif result:
result = [i.get('link_mp4') for i in result]
result = [i for i in result if i]
for i in result:
                    try:
                        sources.append(
                            {'source': 'gvideo', 'quality': directstream.googletag(i)[0]['quality'], 'language': 'de',
'url': i, 'direct': True, 'debridonly': False})
except:
pass
return sources
except:
return
def resolve(self, url):
return url
def __search_movie(self, imdb, year):
try:
query = urlparse.urljoin(self.base_link, self.search_link % imdb)
y = ['%s' % str(year), '%s' % str(int(year) + 1), '%s' % str(int(year) - 1), '0']
r = client.request(query)
r = dom_parser.parse_dom(r, 'div', attrs={'class': 'container'})
r = dom_parser.parse_dom(r, 'div', attrs={'class': 'ml-item-content'})
r = [(dom_parser.parse_dom(i, 'a', attrs={'class': 'ml-image'}, req='href'),
dom_parser.parse_dom(i, 'ul', attrs={'class': 'item-params'})) for i in r]
r = [(i[0][0].attrs['href'], re.findall('calendar.+?>.+?(\d{4})', ''.join([x.content for x in i[1]]))) for i
in r if i[0] and i[1]]
r = [(i[0], i[1][0] if len(i[1]) > 0 else '0') for i in r]
r = sorted(r, key=lambda i: int(i[1]), reverse=True) # with year > no year
r = [i[0] for i in r if i[1] in y][0]
return source_utils.strip_domain(r)
except:
return
def __search(self, titles, year, season=0, episode=False):
try:
query = self.search_link % (urllib.quote_plus(cleantitle.query(titles[0])))
query = urlparse.urljoin(self.base_link, query)
t = [cleantitle.get(i) for i in set(titles) if i]
y = ['%s' % str(year), '%s' % str(int(year) + 1), '%s' % str(int(year) - 1), '0']
r = client.request(query)
r = dom_parser.parse_dom(r, 'div', attrs={'class': 'container'})
r = dom_parser.parse_dom(r, 'div', attrs={'class': 'ml-item-content'})
f = []
for i in r:
_url = dom_parser.parse_dom(i, 'a', attrs={'class': 'ml-image'}, req='href')[0].attrs['href']
_title = re.sub('<.+?>|</.+?>', '', dom_parser.parse_dom(i, 'h6')[0].content).strip()
try:
_title = re.search('(.*?)\s(?:staf+el|s)\s*(\d+)', _title, re.I).group(1)
except:
pass
_season = '0'
_year = re.findall('calendar.+?>.+?(\d{4})', ''.join(
[x.content for x in dom_parser.parse_dom(i, 'ul', attrs={'class': 'item-params'})]))
_year = _year[0] if len(_year) > 0 else '0'
if season > 0:
s = dom_parser.parse_dom(i, 'span', attrs={'class': 'season-label'})
s = dom_parser.parse_dom(s, 'span', attrs={'class': 'el-num'})
if s: _season = s[0].content.strip()
if cleantitle.get(_title) in t and _year in y and int(_season) == int(season):
f.append((_url, _year))
r = f
r = sorted(r, key=lambda i: int(i[1]), reverse=True) # with year > no year
|
jabesq/home-assistant
|
homeassistant/components/hive/__init__.py
|
Python
|
apache-2.0
| 2,196
| 0
|
"""Support for the Hive devices."""
import logging
from pyhiveapi import Pyhiveapi
import voluptuous as vol
from homeassistant.const import (
    CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'hive'
DATA_HIVE = 'data_hive'
DEVICETYPES = {
'binary_sensor': 'device_list_binary_sensor',
'climate': 'device_list_climate',
'water_heater': 'device_list_water_heater',
'light': 'device_list_light',
'switch': 'device_list_plug',
'sensor': 'device_list_sensor',
}
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_SCAN_INTERVAL, default=2): cv.positive_int,
})
}, extra=vol.ALLOW_EXTRA)
class HiveSession:
"""Initiate Hive Session Class."""
entities = []
core = None
heating = None
hotwater = None
light = None
sensor = None
switch = None
weather = None
attributes = None
def setup(hass, config):
"""Set up the Hive Component."""
session = HiveSession()
session.core = Pyhiveapi()
username = config[DOMAIN][CONF_USERNAME]
password = config[DOMAIN][CONF_PASSWORD]
update_interval = config[DOMAIN][CONF_SCAN_INTERVAL]
devicelist = session.core.initialise_api(
username, password, update_interval)
if devicelist is None:
_LOGGER.error("Hive API initialization failed")
return False
session.sensor = Pyhiveapi.Sensor()
session.heating = Pyhiveapi.Heating()
session.hotwater = Pyhiveapi.Hotwater()
session.light = Pyhiveapi.Light()
session.switch = Pyhiveapi.Switch()
session.weather = Pyhiveapi.Weather()
session.attributes = Pyhiveapi.Attributes()
hass.data[DATA_HIVE] = session
for ha_type, hive_type in DEVICETYPES.items():
for key, devices in devicelist.items():
if key == hive_type:
for hivedevice in devices:
load_platform(hass, ha_type, DOMAIN, hivedevice, config)
return True
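# --- Editor's note (illustrative): the configuration shape CONFIG_SCHEMA above
# accepts, written as the equivalent Python dict (values are placeholders):
#   EXAMPLE_CONFIG = {
#       'hive': {
#           'username': '[email protected]',
#           'password': 'hunter2',
#           'scan_interval': 2,  # optional, defaults to 2
#       }
#   }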
|
marcioweck/PSSLib
|
reference/deap/doc/code/tutorials/part_3/logbook.py
|
Python
|
lgpl-3.0
| 1,381
| 0.003621
|
import pickle
from deap import tools
from stats import record
logbook = tools.Logbook()
logbook.record(gen=0, evals=30, **record)
print(logbook)
gen, avg = logbook.select("gen", "avg")
pickle.dump(logbook, open("logbook.pkl", "w"))
# Cleaning the pickle file ...
import os
os.remove("logbook.pkl")
logbook.header = "gen", "avg", "spam"
print(logbook)
print(logbook.stream)
logbook.record(gen=1, evals=15, **record)
print(logbook.stream)
from multistats import record
logbook = tools.Logbook()
logbook.record(gen=0, evals=30, **record)
logbook.header = "gen", "evals", "fitness", "size"
logbook.chapters["fitness"].header = "min", "avg", "max"
logbook.chapters["size"].header = "min", "avg", "max"
print(logbook)
gen = logbook.select("gen")
fit_mins = logbook.chapters["fitness"].select("min")
size_avgs = logbook.chapters["size"].select("avg")
import matplotlib.pyplot as plt
fig, ax1 = plt.subplots()
line1 = ax1.plot(gen, fit_mins, "b-", label="Minimum Fitness")
ax1.set_xlabel("Generation")
ax1.set_ylabel("Fitness", color="b")
for tl in ax1.get_yticklabels():
tl.set_color("b")
ax2 = ax1.twinx()
line2 = ax2.plot(gen, size_avgs, "r-", label="Average Size")
ax2.set_ylabel("Size", color="r")
for tl in ax2.get_yticklabels():
tl.set_color("r")
lns = line1 + line2
labs = [l.get_label() for l in lns]
ax1.legend(lns, labs, loc="center right")
plt.show()
|
stuckj/dupeguru
|
hscommon/tests/table_test.py
|
Python
|
gpl-3.0
| 9,340
| 0.006852
|
# Created By: Virgil Dupras
# Created On: 2008-08-12
# Copyright 2015 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
from ..testutil import CallLogger, eq_
from ..gui.table import Table, GUITable, Row
class TestRow(Row):
def __init__(self, table, index, is_new=False):
Row.__init__(self, table)
self.is_new = is_new
self._index = index
def load(self):
pass
def save(self):
self.is_new = False
@property
def index(self):
return self._index
class TestGUITable(GUITable):
def __init__(self, rowcount):
GUITable.__init__(self)
self.view = CallLogger()
self.rowcount = rowcount
self.updated_rows = None
def _do_add(self):
return TestRow(self, len(self), is_new=True), len(self)
def _is_edited_new(self):
return self.edited is not None and self.edited.is_new
def _fill(self):
for i in range(self.rowcount):
self.append(TestRow(self, i))
def _update_selection(self):
self.updated_rows = self.selected_rows[:]
def table_with_footer():
table = Table()
table.append(TestRow(table, 0))
footer = TestRow(table, 1)
table.footer = footer
return table, footer
def table_with_header():
table = Table()
table.append(TestRow(table, 1))
header = TestRow(table, 0)
table.header = header
return table, header
#--- Tests
def test_allow_edit_when_attr_is_property_with_fset():
# When a row has a property that has a fset, by default, make that cell editable.
class TestRow(Row):
@property
def foo(self):
pass
@property
def bar(self):
pass
@bar.setter
def bar(self, value):
pass
row = TestRow(Table())
assert row.can_edit_cell('bar')
assert not row.can_edit_cell('foo')
assert not row.can_edit_cell('baz') # doesn't exist, can't edit
def test_can_edit_prop_has_priority_over_fset_checks():
    # When a row has a can_edit_* property, it's the result of that property that is used, not the
# result of a fset check.
class TestRow(Row):
@property
def bar(self):
pass
@bar.setter
def bar(self, value):
pass
can_edit_bar = False
row = TestRow(Table())
assert not row.can_edit_cell('bar')
def test_in():
# When a table is in a list, doing "in list" with another instance returns false, even if
# they're the same as lists.
table = Table()
some_list = [table]
assert Table() not in some_list
def test_footer_del_all():
# Removing all rows doesn't crash when doing the footer check.
table, footer = table_with_footer()
del table[:]
assert table.footer is None
def test_footer_del_row():
# Removing the footer row sets it to None
table, footer = table_with_footer()
del table[-1]
assert table.footer is None
eq_(len(table), 1)
def test_footer_is_appened_to_table():
# A footer is appended at the table's bottom
table, footer = table_with_footer()
eq_(len(table), 2)
assert table[1] is footer
def test_footer_remove():
# remove() on footer sets it to None
table, footer = table_with_footer()
table.remove(footer)
assert table.footer is None
def test_footer_replaces_old_footer():
table, footer = table_with_footer()
other = Row(table)
table.footer = other
assert table.footer is other
eq_(len(table), 2)
assert table[1] is other
def test_footer_rows_and_row_count():
# rows() and row_count() ignore footer.
table, footer = table_with_footer()
eq_(table.row_count, 1)
eq_(table.rows, table[:-1])
def test_footer_setting_to_none_removes_old_one():
table, footer = table_with_footer()
table.footer = None
assert table.footer is None
eq_(len(table), 1)
def test_footer_stays_there_on_append():
# Appending another row puts it above the footer
table, footer = table_with_footer()
table.append(Row(table))
eq_(len(table), 3)
assert table[2] is footer
def test_footer_stays_there_on_insert():
# Inserting another row puts it above the footer
table, footer = table_with_footer()
table.insert(3, Row(table))
eq_(len(table), 3)
assert table[2] is footer
def test_header_del_all():
# Removing all rows doesn't crash when doing the header check.
table, header = table_with_header()
del table[:]
assert table.header is None
def test_header_del_row():
# Removing the header row sets it to None
table, header = table_with_header()
del table[0]
assert table.header is None
eq_(len(table), 1)
def test_header_is_inserted_in_table():
# A header is inserted at the table's top
table, header = table_with_header()
eq_(len(table), 2)
assert table[0] is header
def test_header_remove():
# remove() on header sets it to None
table, header = table_with_header()
table.remove(header)
assert table.header is None
def test_header_replaces_old_header():
table, header = table_with_header()
other = Row(table)
table.header = other
assert table.header is other
eq_(len(table), 2)
assert table[0] is other
def test_header_rows_and_row_count():
# rows() and row_count() ignore header.
table, header = table_with_header()
eq_(table.row_count, 1)
eq_(table.rows, table[1:])
def test_header_setting_to_none_removes_old_one():
table, header = table_with_header()
table.header = None
assert table.header is None
eq_(len(table), 1)
def test_header_stays_there_on_insert():
# Inserting another row at the top puts it below the header
table, header = table_with_header()
table.insert(0, Row(table))
eq_(len(table), 3)
assert table[0] is header
def test_refresh_view_on_refresh():
# If refresh_view is not False, we refresh the table's view on refresh()
table = TestGUITable(1)
table.refresh()
table.view.check_gui_calls(['refresh'])
table.view.clear_calls()
table.refresh(refresh_view=False)
table.view.check_gui_calls([])
def test_restore_selection():
# By default, after a refresh, selection goes on the last row
table = TestGUITable(10)
table.refresh()
eq_(table.selected_indexes, [9])
def test_restore_selection_after_cancel_edits():
# _restore_selection() is called
|
after cancel_edits(). Previously, only _update_selection would
# be called.
class MyTable(TestGUITable):
        def _restore_selection(self, previous_selection):
self.selected_indexes = [6]
table = MyTable(10)
table.refresh()
table.add()
table.cancel_edits()
eq_(table.selected_indexes, [6])
def test_restore_selection_with_previous_selection():
# By default, we try to restore the selection that was there before a refresh
table = TestGUITable(10)
table.refresh()
table.selected_indexes = [2, 4]
table.refresh()
eq_(table.selected_indexes, [2, 4])
def test_restore_selection_custom():
    # After a _fill() call, the virtual _restore_selection() is called so that it's possible for a
# GUITable subclass to customize its post-refresh selection behavior.
class MyTable(TestGUITable):
def _restore_selection(self, previous_selection):
self.selected_indexes = [6]
table = MyTable(10)
table.refresh()
eq_(table.selected_indexes, [6])
def test_row_cell_value():
# *_cell_value() correctly mangles attrnames that are Python reserved words.
row = Row(Table())
row.from_ = 'foo'
eq_(row.get_cell_value('from'), 'foo')
row.set_cell_value('from', 'bar')
eq_(row.get_cell_value('from'), 'bar')
def test_sort_table_also_tries_attributes_without_underscores():
# When determining a sort key, after having unsuccessfully tried the attribute with the,
# underscore, try the
|
hzlf/openbroadcast
|
website/cms/test_utils/project/second_urls_for_apphook_tests.py
|
Python
|
gpl-3.0
| 696
| 0.005747
|
from django.conf import settings
from django.conf.urls.defaults import handler500, handler404, patterns, include, \
url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^jsi18n/(?P<packages>\S+?)/$', 'django.views.i18n.javascript_catalog'),
url(r'^media/cms/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.CMS_MEDIA_ROOT, 'show_indexes': True}),
    url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
url(r'^', include('cms.test_utils.project.second_cms_urls_for_apphook_tests')),
)
|
nanonyme/nanoplay
|
nanoplay/__init__.py
|
Python
|
mit
| 76
| 0
|
from nanoplay import PayloadProtocol, ControlProtocol, Player, CustomServer
|
hanakamer/eskisozluk-clone
|
App/eksi/manage.py
|
Python
|
gpl-2.0
| 247
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "eksi.settings")
from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
zhaochl/python-utils
|
utils/mail_util.py
|
Python
|
apache-2.0
| 3,970
| 0.013465
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import email
import mimetypes
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
from email.MIMEImage import MIMEImage
import smtplib
from time import sleep
def sendEmail(authInfo, fromAdd, toAdd, subject, plainText, htmlText):
strFrom = fromAdd
strTo = ', '.join(toAdd)
server = authInfo.get('server')
user = authInfo.get('user')
passwd = authInfo.get('password')
if not (server and user and passwd) :
print 'incomplete login info, exit now'
return
    # set up the MIME root message
msgRoot = MIMEMultipart('related')
msgRoot['Subject'] = subject
msgRoot['From'] = strFrom
msgRoot['To'] = strTo
msgRoot.preamble = 'This is a multi-part message in MIME format.'
# Encapsulate the plain and HTML versions of the message body in an
# 'alternative' part, so message agents can decide which they want to display.
|
msgAlternative = MIMEMultipart('alternative')
msgRoot.attach(msgAlternative)
    # set up the plain-text part
#msgText = MIMEText(plainText, 'plain', 'GB18030')
msgText = MIMEText(plainText, 'plain', 'utf-8')
msgAlternative.attach(msgText)
    # set up the HTML part
|
#msgText = MIMEText(htmlText, 'html', 'GB18030')
msgText = MIMEText(htmlText, 'html', 'utf-8')
msgAlternative.attach(msgText)
    # set up an inline image (optional)
#fp = open('test.jpg', 'rb')
#msgImage = MIMEImage(fp.read())
#fp.close()
#msgImage.add_header('Content-ID', '<image1>')
#msgRoot.attach(msgImage)
    # send the mail
smtp = smtplib.SMTP()
    # set the SMTP debug level as needed
# 1-open log-
#smtp.set_debuglevel(1)
# 0-close log
smtp.set_debuglevel(0)
smtp.connect(server)
smtp.login(user, passwd)
smtp.sendmail(strFrom, strTo, msgRoot.as_string())
smtp.quit()
return
def sendmail(_title,_content,_toUserList,_html=None):
#(authInfo, fromAdd, toAdd, subject, plainText, htmlText):
print "start to send mail start"
authInfo = {}
authInfo['server'] = 'smtp.exmail.qq.com'
authInfo['user'] = '[email protected]'
authInfo['password'] = '123'
fromAdd = '[email protected]'
#toAdd = ["[email protected]"]
subject = 'search exception category'
if(_title):
subject =_title
plainText = _content
    #plainText = 'server exception status alert'
htmlText = _html
for t in _toUserList:
#print t
tarr=[]
tarr.append(t)
sendEmail(authInfo, fromAdd, tarr, subject, plainText, htmlText)
sleep(2)
print 'send mail success.'
"""
by zcl at 2016.6.15
"""
def rendar_table(title,notice,rhead_list,rdata_list):
html ="""
<p class="section">{0}</p>
<p class="section">{1}</p>
<table cellpadding="5" cellspacing="0" border="1" bordercolor="#04B4AE" style="text-align: center; font-family: Arial; border-collapse: collapse; width: auto;">
<tbody>
<tr>
<td colspan="{2}"><div>{0}</div></td>
</tr>
<tr>
""".format(title,notice,str(len(rhead_list)))
for rhead in rhead_list:
rhead = rhead.encode('utf8')
tmp = """<th style="background-color: #04B4AE; color: #ffffff">{0}</th>
""".format(str(rhead))
html+=tmp
html+="</tr>"
for o in rdata_list:
line_html=''
line_html+="<tr>"
for key in rhead_list:
val = o[key]
key = key.encode('utf8')
line_html+="<td>"+str(val)+"</td>"
line_html+="</tr>"
html+=line_html
html+="""
</tbody>
</table>
<hr>
"""
return html
if __name__ == '__main__' :
toUserList = ['[email protected]']
sendmail('test','sorry to disturb, this mail is just for test',toUserList)
#sendmail('[热门行业统计]'+title,'',toUserList,html.encode('utf8'))
#sendEmail(authInfo, fromAdd, toAdd, subject, plainText, htmlText)
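A minimal usage sketch of the two helpers above (Python 2, matching the module; the import path, recipient address and table data are placeholders, and the hard-coded SMTP credentials inside sendmail would still need to be real for delivery to work):
# -*- coding: utf-8 -*-
from mail_util import rendar_table, sendmail
rhead = [u'category', u'count']
rdata = [{u'category': u'search', u'count': 12},
         {u'category': u'click', u'count': 7}]
# Build the HTML table body, then mail it as the HTML alternative part.
html = rendar_table('daily report', 'auto generated', rhead, rdata)
sendmail('[daily report]', 'see the HTML part', ['[email protected]'], _html=html)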
|
albireox/marvin
|
python/marvin/tests/utils/test_images.py
|
Python
|
bsd-3-clause
| 11,203
| 0.002856
|
# !usr/bin/env python2
# -*- coding: utf-8 -*-
#
# Licensed under a 3-clause BSD license.
#
# @Author: Brian Cherinka
# @Date: 2017-06-20 16:36:37
# @Last modified by: Brian Cherinka
# @Last Modified time: 2017-11-13 15:16:57
from __future__ import print_function, division, absolute_import
from marvin.utils.general.images import getImagesByList, getImagesByPlate, getRandomImages, getDir3d, showImage
from marvin.tests.conftest import Galaxy, tempafile
from marvin.tests import marvin_test_if
from marvin.core.exceptions import MarvinError, MarvinUserWarning
import pytest
import os
import warnings
try:
from sdss_access import RsyncAccess, AccessError
except ImportError:
Path = None
RsyncAccess = None
imagelist = ['8485-1901', '7443-12701', '7443-1901']
newgals = ['7495-1901']
@pytest.fixture(scope='function')
def rsync(mode):
''' fixture to create generic rsync object '''
rsync = RsyncAccess(label='marvin_getlist', verbose=False)
if mode != 'local':
rsync.remote()
yield rsync
rsync.reset()
rsync = None
localredux = os.getenv('MANGA_SPECTRO_REDUX')
remoteredux = 'https://[email protected]/sas/mangawork/manga/spectro/redux'
remoteurl = 'https://data.sdss.org/sas/mangawork/manga/spectro/redux'
bases = [localredux, remoteredux, remoteurl]
rmodes = ['full', 'url']
@pytest.fixture()
def base(mode, asurl):
if asurl is False:
return localredux
else:
if mode != 'local':
return remoteredux
else:
return remoteurl
@pytest.fixture(scope='session', params=newgals)
def newgalaxy(request, maindb, get_params, set_sasurl):
release, bintype, template = get_params
gal = Galaxy(request.param)
gal.set_params(bintype=bintype, template=template)
gal.set_filepaths()
yield gal
@pytest.fixture()
def get_cube(newgalaxy, rsync):
if not os.path.isfile(newgalaxy.cubepath):
rsync.add('mangacube', **newgalaxy.access_kwargs)
rsync.set_stream()
rsync.commit()
yield newgalaxy
@pytest.fixture(params=rmodes)
def asurl(request):
if request.param == 'full':
return False
elif request.param == 'url':
return True
@pytest.fixture()
def make_paths(request, rsync, mode, asurl, release):
inputs = request.param if hasattr(request, 'param') else None
rmode = 'url' if asurl else 'full'
fullpaths = []
inputs = inputs if inputs else imagelist
for plateifu in inputs:
gal = Galaxy(plateifu)
gal.set_params(release=release)
gal.set_filepaths()
if mode == 'local':
path = rsync.__getattribute__(rmode)('mangaimage', **gal.access_kwargs)
fullpaths.append(path)
else:
rsync.add('mangaimage', **gal.access_kwargs)
rsync.set_stream()
path = rsync.get_urls() if asurl else rsync.get_paths()
fullpaths.extend(path)
return fullpaths
class TestImagesGetDir3d(object):
@pytest.mark.parametrize('expval', [('stack')])
def test_getdir3d(self, galaxy, expval, mode, db):
dir3d = getDir3d(galaxy.plateifu, mode=mode, release=galaxy.release)
assert expval == dir3d
@pytest.mark.parametrize('expval', [('stack')])
def test_getdir3d_plate(self, galaxy, expval, mode, db):
dir3d = getDir3d(galaxy.plate, mode=mode, release=galaxy.release)
assert expval == dir3d
@pytest.mark.xfail()
@pytest.mark.timeout(40)
class TestImagesByList(object):
@pytest.mark.parametrize('imglist, mode, errmsg',
[('7495-1901', 'local', 'Input must be of type list or Numpy array'),
(['nogoodid'], 'local', 'Input must be of type plate-ifu or mangaid'),
(imagelist, 'notvalidmode', 'Mode must be either auto, local, or remote')],
ids=['notlist', 'badid', 'badmode'])
def test_failures(self, imglist, mode, errmsg, release):
with pytest.raises(AssertionError) as cm:
image = getImagesByList(imglist, mode=mode, release=release)
assert cm.type == AssertionError
assert errmsg in str(cm.value)
def test_get_imagelist(self, make_paths, mode, asurl, release):
images = getImagesByList(imagelist, mode=mode, as_url=asurl, release=release)
assert set(make_paths) == set(images)
# @pytest.mark.parametrize('make_paths', [(['7495-1901'])], indirect=True, ids=['newplateifu'])
# def test_download(self, monkeymanga, temp_scratch, get_cube):
# imgpath = tempafile(get_cube.imgpath, temp_scratch)
# #assert os.path.isfile(get_cube.imgpath) is False
# assert imgpath.check(file=0) is True
# image = getImagesByList([get_cube.plateifu], mode='remote', as_url=True, download=True, release=get_cube.release)
# #assert os.path.isfile(get_cube.imgpath) is True
# assert imgpath.check(file=1) is True
# assert image is None
# @pytest.mark.parametrize('make_paths', [(['7495-1901'])], indirect=True, ids=['newplateifu'])
# def test_download_fails(self, monkeymanga, temp_scratch, get_cube):
# imgpath = tempafile(get_cube.imgpath, temp_scratch)
# assert imgpath.check(file=0) is True
    #     errmsg = 'Download not available when in local mode'
# with warnings.catch_warnings(record=True) as cm:
# warnings.simplefilter('always')
# image = getImagesByList([get_cube.plateifu], mode='local', as_url=True, download=True)
# assert cm[-1].category is MarvinUserWarning
    #         assert errmsg in str(cm[-1].message)
class TestImagesByPlate(object):
@pytest.mark.parametrize('plateid, mode, errmsg',
[('8485abcd', 'local', 'Plateid must be a numeric integer value'),
(None, 'notvalidmode', 'Mode must be either auto, local, or remote')],
ids=['badid', 'badmode'])
def test_failures(self, galaxy, plateid, mode, errmsg):
plateid = plateid if plateid else galaxy.plate
with pytest.raises(AssertionError) as cm:
image = getImagesByPlate(plateid, mode=mode, release=galaxy.release)
assert cm.type == AssertionError
assert errmsg in str(cm.value)
@pytest.mark.parametrize('make_paths, plate', [(['8485-1901'], '8485')], indirect=['make_paths'], ids=['plateifu'])
def test_get_imageplate(self, make_paths, plate, mode, asurl, release):
images = getImagesByPlate(plate, mode=mode, as_url=asurl, release=release)
assert make_paths[0] in images
# @pytest.mark.parametrize('make_paths', [(['7495-1901'])], indirect=True, ids=['newplateifu'])
# def test_download(self, monkeymanga, temp_scratch, get_cube):
# imgpath = tempafile(get_cube.imgpath, temp_scratch)
# assert imgpath.check(file=0) is True
# image = getImagesByPlate(get_cube.plate, mode='remote', as_url=True, download=True)
# assert imgpath.check(file=1) is True
# assert image is None
# def test_get_images_download_local_fail(self, monkeymanga, temp_scratch, get_cube):
# imgpath = tempafile(get_cube.imgpath, temp_scratch)
# assert imgpath.check(file=0) is True
# errmsg = 'Download not available when in local mode'
# with warnings.catch_warnings(record=True) as cm:
# warnings.simplefilter("always")
# image = getImagesByPlate(self.new_plate, mode='local', as_url=True, download=True)
# self.assertIs(cm[-1].category, MarvinUserWarning)
# self.assertIn(errmsg, str(cm[-1].message))
class TestRandomImages(object):
@pytest.mark.parametrize('mode, errmsg',
[('notvalidmode', 'Mode must be either auto, local, or remote')],
ids=['badmode'])
def test_failures(self, mode, errmsg, release):
with pytest.raises(AssertionError) as cm:
image = getRandomImages(mode=mode, release=release)
assert cm.type == AssertionError
assert errmsg in str(cm.value)
@pytest.mark.parametrize('num', [(10), (5)], ids=['
|
tzuria/Shift-It-Easy
|
webApp/shift-it-easy-2015/web/pages/__init__.py
|
Python
|
mit
| 177
| 0.00565
|
# To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
| |
caphrim007/ansible
|
lib/ansible/modules/cloud/azure/azure_rm_loadbalancer.py
|
Python
|
gpl-3.0
| 36,330
| 0.002175
|
#!/usr/bin/python
# Copyright (c) 2016 Thomas Stringer, <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_loadbalancer
version_added: "2.4"
short_description: Manage Azure load balancers.
description:
- Create, update and delete Azure load balancers
options:
resource_group:
description:
- Name of a resource group where the load balancer exists or will be created.
required: true
name:
description:
- Name of the load balancer.
required: true
state:
description:
- Assert the state of the load balancer. Use C(present) to create/update a load balancer, or
C(absent) to delete one.
default: present
choices:
- absent
- present
location:
description:
- Valid azure location. Defaults to location of the resource group.
sku:
description:
The load balancer SKU.
choices:
- Basic
- Standard
version_added: 2.6
frontend_ip_configurations:
description: List of frontend IPs to be used
suboptions:
name:
description: Name of the frontend ip configuration.
required: True
public_ip_address:
description: Name of an existing public IP address object in the current resource group to associate with the security group.
private_ip_address:
description: The reference of the Public IP resource.
version_added: 2.6
private_ip_allocation_method:
description: The Private IP allocation method.
choices:
- Static
- Dynamic
version_added: 2.6
subnet:
description:
- The reference of the subnet resource.
- Should be an existing subnet's resource id.
version_added: 2.6
version_added: 2.5
backend_address_pools:
description: List of backend address pools
suboptions:
name:
description: Name of the backend address pool.
required: True
version_added: 2.5
probes:
description: List of probe definitions used to check endpoint health.
suboptions:
name:
description: Name of the probe.
required: True
port:
description: Probe port for communicating the probe. Possible values range from 1 to 65535, inclusive.
required: True
protocol:
description:
- The protocol of the end point to be probed.
- If 'Tcp' is specified, a received ACK is required for the probe to be successful.
- If 'Http' is specified, a 200 OK response from the specified URL is required for the probe to be successful.
choices:
- Tcp
- Http
default: Tcp
interval:
description:
- The interval, in seconds, for how frequently to probe the endpoint for health status.
- Slightly less than half the allocated timeout period, which allows two full probes before taking the instance out of rotation.
- The default value is 15, the minimum value is 5.
default: 15
fail_count:
description:
- The number of probes where if no response, will result in stopping further traffic from being delivered to the endpoint.
                    - This value allows endpoints to be taken out of rotation faster or slower than the typical times used in Azure.
default: 3
aliases:
- number_of_probes
request_path:
description:
- The URI used for requesting health status from the VM.
- Path is required if a protocol is set to http. Otherwise, it is not allowed.
version_added: 2.5
inbound_nat_pools:
description:
- Defines an external port range for inbound NAT to a single backend port on NICs associated with a load balancer.
- Inbound NAT rules are created automatically for each NIC associated with the Load Balancer using an external port from this range.
- Defining an Inbound NAT pool on your Load Balancer is mutually exclusive with defining inbound Nat rules.
            - Inbound NAT pools are referenced from virtual machine scale sets.
            - NICs that are associated with individual virtual machines cannot reference an inbound NAT pool.
- They have to reference individual inbound NAT rules.
suboptions:
name:
description: Name of the inbound NAT pool.
required: True
frontend_ip_configuration_name:
description: A reference to frontend IP addresses.
required: True
protocol:
description: IP protocol for the NAT pool
choices:
- Tcp
- Udp
- All
default: Tcp
frontend_port_range_start:
description:
- The first port in the range of external ports that will be used to provide inbound NAT to NICs associated with the load balancer.
- Acceptable values range between 1 and 65534.
required: True
frontend_port_range_end:
description:
- The last port in the range of external ports that will be used to provide inbound NAT to NICs associated with the load balancer.
- Acceptable values range between 1 and 65535.
required: True
backend_port:
description:
- The port used for internal connections on the endpoint.
- Acceptable values are between 1 and 65535.
version_added: 2.5
load_balancing_rules:
description:
- Object collection representing the load balancing rules Gets the provisioning.
suboptions:
name:
description: name of the load balancing rule.
required: True
frontend_ip_configuration:
description: A reference to frontend IP addresses.
required: True
backend_address_pool:
description: A reference to a pool of DIPs. Inbound traffic is randomly load balanced across IPs in the backend IPs.
required: True
probe:
description: The name of the load balancer probe this rule should use for health checks.
required: True
protocol:
description: IP protocol for the load balancing rule.
choices:
- Tcp
- Udp
- All
default: Tcp
load_distribution:
description:
- The session persistence policy for this rule; C(Default) is no persistence.
choices:
- Default
- SourceIP
- SourceIPProtocol
default: Default
frontend_port:
description:
- The port for the external endpoint.
- Frontend port numbers must be unique across all rules within the load balancer.
- Acceptable values are between 0 and 65534.
- Note that va
|
googleinterns/vm-network-migration
|
vm_network_migration/module_helpers/instance_group_helper.py
|
Python
|
apache-2.0
| 4,376
| 0.0016
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Helper class for creating an InstanceGroup object.
"""
from vm_network_migration.modules.instance_group_modules.regional_managed_instance_group import RegionalManagedInstanceGroup
from vm_network_migration.modules.instance_group_modules.unmanaged_instance_group import UnmanagedInstanceGroup
from vm_network_migration.modules.instance_group_modules.zonal_managed_instance_group import ZonalManagedInstanceGroup
from vm_network_migration.modules.instance_group_modules.instance_group import InstanceGroup
from vm_network_migration.utils import initializer
class InstanceGroupHelper:
@initializer
def __init__(self, compute, project, instance_group_name,
region, zone, network, subnetwork, preserve_instance_ip=False):
""" Initialize an instance group helper object
Args:
compute: google compute engine
project: project ID
instance_group_name: name of the instance group
            region: region of the instance group
zone: zone of the instance group
preserve_instance_ip: only valid for an unmanaged instance group
"""
def build_instance_group(self) -> InstanceGroup:
""" Build an object which is an instance of the InstanceGroup's subclass
"""
# try to build a zonal instance group
try:
instance_group_configs = self.get_instance_group_in_zone()
except Exception:
            # It is not a single zone instance group
pass
else:
if 'Instance Group Manager' not in instance_group_configs[
'description']:
return UnmanagedInstanceGroup(self.compute, self.project,
self.instance_group_name,
self.network,
self.subnetwork,
self.preserve_instance_ip,
self.zone)
else:
return ZonalManagedInstanceGroup(self.compute,
self.project,
self.instance_group_name,
self.network,
self.subnetwork,
self.preserve_instance_ip,
self.zone)
# try to build a regional instance group
try:
self.get_instance_group_in_region()
except Exception as e:
raise e
else:
return RegionalManagedInstanceGroup(self.compute, self.project,
self.instance_group_name,
self.network,
self.subnetwork,
self.preserve_instance_ip,
self.region)
def get_instance_group_in_zone(self) -> dict:
""" Get a zonal instance group's configurations
Returns: instance group's configurations
"""
return self.compute.instanceGroups().get(
project=self.project,
zone=self.zone,
instanceGroup=self.instance_group_name).execute()
def get_instance_group_in_region(self) -> dict:
""" Get a regional instance group's configurations
Returns: instance group's configurations
"""
return self.compute.regionInstanceGroups().get(
project=self.project,
region=self.region,
instanceGroup=self.instance_group_name).execute()
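A minimal usage sketch of the helper above, assuming a Compute Engine client built with google-api-python-client; the project, instance group, network and location names are placeholders:
from googleapiclient import discovery
from vm_network_migration.module_helpers.instance_group_helper import InstanceGroupHelper
# Build a Compute Engine API client and let the helper pick the right subclass.
compute = discovery.build('compute', 'v1')
helper = InstanceGroupHelper(compute, 'my-project', 'my-instance-group',
                             'us-central1', 'us-central1-a',
                             'target-network', 'target-subnetwork',
                             preserve_instance_ip=False)
instance_group = helper.build_instance_group()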
|
TheArchives/Nexus
|
core/plugins/respawn.py
|
Python
|
bsd-2-clause
| 1,072
| 0.012127
|
# The Nexus software is licensed under the BSD 2-Clause license.
#
# You should have recieved a copy of this license with the software.
# If you did not, you can find one at the following link.
#
# http://opensource.org/licenses/bsd-license.php
from core.plugins import ProtocolPlugin
from core.decorators import *
from core.constants import *
class FetchPlugin(ProtocolPlugin):
commands = {
"respawn": "commandRespawn",
}
@player_list
@mod_only
@only_username_command
def commandRespawn(self, username, fromloc, rankoverride):
"/respawn username - Mod\nRespawns the user."
|
if username in self.client.factory.usernames:
self.client.factory.usernames[username].respawn()
else:
self.client.sendServerMessage("%s is not on the server." % username)
return
        self.client.factory.usernames[username].sendServerMessage("You have been respawned by %s." % self.client.username)
self.client.sendServerMessage("%s respawned." % username)
|
plepe/pgmapcss
|
pgmapcss/types/image_png.py
|
Python
|
agpl-3.0
| 1,840
| 0.002717
|
from .default import default
import os
import re
class image_png(default):
def __init__(self, key, stat):
default.__init__(self, key, stat)
|
self.data = {}
def compile(self, prop):
if not os.path.exists(prop['value']):
print("Image '{}' not found.".format(prop['value']))
else:
# Convert SVG to PNG
m = re.search("\.svg$", prop['value'])
if m:
                from wand.image import Image
from wand.color import Color
from wand.api import library
dest = self.stat['icons_dir'] + "/" + prop['value'].replace('/', '_') + ".png"
print("svg icon detected. converting '{0}' to '{1}'".format(prop['value'], dest))
with Image() as img:
with Color('transparent') as bg_color:
library.MagickSetBackgroundColor(img.wand, bg_color.resource)
img.read(blob=open(prop['value'], 'rb').read())
dest_img = img.make_blob('png32')
with open(dest, 'wb') as out:
out.write(dest_img)
return repr(dest)
return repr(prop['value'])
def stat_value(self, prop):
if prop['value'] is None:
return prop['value']
if os.path.exists(prop['value']):
from wand.image import Image
img = Image(filename=prop['value'])
self.data[prop['value']] = img.size
if not prop['key'] in self.stat['global_data']:
self.stat['global_data'][prop['key']] = {}
self.stat['global_data'][prop['key']][prop['value']] = img.size
return prop['value']
def get_global_data(self):
self.stat.property_values(self.key)
return self.data
|
davidfarr/mg-gap
|
mg-gap/mg-gap-py/mg-gap/test_files/reduceVCF.py
|
Python
|
mit
| 1,210
| 0.005785
|
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 10 10:43:53 2019
@author: Heathro
Description: Reduces a vcf file to meta section and
one line for each chromosome number for testing and
debugging purposes.
"""
# Open files to read from and write to
vcfpath = open("D:/MG_GAP/Ali_w_767.vcf", "rU")
testvcf = open("REDUCED_ali.vcf", "w")
# Keep track of chromosome number so we can get one of each
temp_chrom = 0
counter = 0
for line_index, line in enumerate(vcfpath):
# Found a chromosome line
if line[0:8] == "sNNffold":
column = line.split('\t')
first_col = column[0].split('_')
current_chrom = first_col[1]
# Write up to 1000 lines of each chromosome
if current_chrom == temp_chrom:
counter = counter + 1
if counter < 1000:
testvcf.write(line)
        # If a new chromosome, write a line, start counter at 0
elif current_chrom != temp_chrom:
counter = 0
temp_chrom = current_chrom
testvcf.write(line)
# Include the meta lines and header line
else:
testvcf.write(line)
testvcf.close()
vcfpath.close()
|
hoehermann/wxpyWha
|
whastack.py
|
Python
|
gpl-3.0
| 3,522
| 0.010789
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""@package docstring
Yowsup connector for wxpyWha (a simple wxWidgets GUI wrapper atop yowsup).
Uses WhaLayer to build the Yowsup stack.
This is based on code from the yowsup echo example, the yowsup cli and pywhatsapp.
"""
SECONDS_RECONNECT_DELAY = 10
import sys
# from echo stack
from yowsup.stacks import YowStackBuilder
from yowsup.layers.auth import AuthError
from yowsup.layers.network import YowNetworkLayer
# from cli stack
try:
from yowsup.layers.axolotl.props import PROP_IDENTITY_AUTOTRUST #tgalal
except ImportError as ie:
sys.stderr.write("WARNING: PROP_IDENTITY_AUTOTRUST could not be imported from yowsup.layers.axolotl.props. Using hardcoded value instead.\n")
PROP_IDENTITY_AUTOTRUST = "org.openwhatsapp.yowsup.prop.axolotl.INDENTITY_AUTOTRUST" #as done by jlguardi
# from cli layer
from yowsup.layers import YowLayerEvent
# from http://stackoverflow.com/questions/3702675/how-to-print-the-full-traceback-without-halting-the-program
import traceback
# from https://github.com/tgalal/yowsup/issues/1069
import logging
try:
import queue
except ImportError: # python2 compatibility
import Queue as queue
from whalayer import WhaLayer
class WhaClient(object):
def __init__(self, credentials, encryptionEnabled = True):
stackBuilder = YowStackBuilder()
self.stack = stackBuilder\
.pushDefaultLayers(encryptionEnabled)\
.push(WhaLayer)\
.build()
self.stack.setCredentials(credentials)
self.stack.setProp(PROP_IDENTITY_AUTOTRUST, True) #not in jlguardi
self.wantReconnect = True
self.abortReconnectWait = queue.Queue()
def setYowsupEventHandler(self, handler):
interface = self.stack.getLayerInterface(WhaLayer)
interface.enventHandler = handler
    def sendMessage(self, outgoingMessage):
interface = self.stack.getLayerInterface(WhaLayer)
interface.sendMessage(outgoingMessage)
def disconnect(self):
interface = self.stack.getLayerInterface(WhaLayer)
interface.disconnect()
def start(self):
logging.basicConfig(level=logging.WARNING)
while (self.wantReconnect):
            self.stack.broadcastEvent(YowLayerEvent(YowNetworkLayer.EVENT_STATE_CONNECT))
try:
self.stack.loop()
except AuthError as e:
sys.stderr.write("Authentication Error\n")
except KeyboardInterrupt:
# This is only relevant if this is the main module
# TODO: disconnect cleanly
print("\nExit")
sys.exit(0)
except: # catch *all* exceptions
sys.stderr.write("Unhandled exception.\n")
traceback.print_exc()
# TODO: regard connection state in the GUI
sys.stderr.write("Yowsup WhaClient exited.\nYOU ARE NOW DISCONNECTED.\n")
if (self.wantReconnect):
sys.stderr.write("Auto-reconnect enabled. Waiting up to %d seconds before reconnecting...\n"%(SECONDS_RECONNECT_DELAY))
try:
self.abortReconnectWait.get(timeout=SECONDS_RECONNECT_DELAY)
except queue.Empty:
pass
def setEnableReconnect(self, b = True):
self.wantReconnect = b
self.abortReconnectWait.put(b)
if __name__ == "__main__":
client = WhaClient(("login","base64passwd"))
client.start()
|
alex/changes
|
changes/api/build_restart.py
|
Python
|
apache-2.0
| 1,841
| 0.000543
|
from sqlalchemy.orm import joinedload
from datetime import datetime
from changes.api.base import APIView
from changes.api.build_index import execute_build
from changes.config import db
from changes.constants import Result, Status
from changes.models import Build, Job, JobStep, ItemStat
class BuildRestartAPIView(APIView):
def post(self, build_id):
build = Build.query.options(
            joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source').joinedload('revision'),
).get(build_id)
if build is None:
return '', 404
if build.status != Status.finished:
return '', 400
# ItemStat doesnt cascade by itself
stat_ids = [build.id]
        job_ids = [
j[0] for j in
db.session.query(Job.id).filter(Job.build_id == build.id)
]
if job_ids:
step_ids = [
s[0] for s in
db.session.query(JobStep.id).filter(JobStep.job_id.in_(job_ids))
]
stat_ids.extend(job_ids)
stat_ids.extend(step_ids)
if stat_ids:
ItemStat.query.filter(
ItemStat.item_id.in_(stat_ids),
).delete(synchronize_session=False)
# remove any existing job data
# TODO(dcramer): this is potentially fairly slow with cascades
Job.query.filter(
Job.build_id == build.id
).delete(synchronize_session=False)
build.date_started = datetime.utcnow()
build.date_modified = build.date_started
build.date_finished = None
build.duration = None
build.status = Status.queued
build.result = Result.unknown
db.session.add(build)
execute_build(build=build)
return self.respond(build)
|
appeltel/AutoCMS
|
print_records.py
|
Python
|
mit
| 801
| 0.002497
|
"""Print all records in the pickle for the specified test"""
import sys
import argparse
from autocms.core import (load_configuration, load_records)
def main():
"""Print all records corresponding to test given as an argument"""
parser = argparse.ArgumentParser(description='Submit one or more jobs.')
parser.add_argument('testname', help='test directory')
parser.add_argument('-c', '--configfile', type=str,
default='autocms.cfg',
help='AutoCMS configuration file name')
args = parser.parse_args()
    config = load_configuration(args.configfile)
records = load_records(args.testname,config)
for job in records:
print str(job)+'\n'
return 0
if __name__ == '__main__':
status = main()
sys.exit(status)
|
ThomasYeoLab/CBIG
|
stable_projects/fMRI_dynamics/Kong2021_pMFM/part2_pMFM_control_analysis/Primary_gradients/scripts/CBIG_pMFM_step33_test_GradPC2Grad.py
|
Python
|
mit
| 4,115
| 0.000486
|
# /usr/bin/env python
'''
Written by Kong Xiaolu and CBIG under MIT license:
https://github.com/ThomasYeoLab/CBIG/blob/master/LICENSE.md
'''
import os
import numpy as np
import torch
import CBIG_pMFM_basic_functions as fc
def CBIG_mfm_test_desikan_main(gpu_index=0):
'''
This function is to implement the testing processes of mean field model.
The objective function is the summation of FC correlation cost and FCD KS statistics cost.
Args:
gpu_index: index of gpu used for optimization
input_path: input directory to load validation data
output_path: output directory for saving selected model parameters and costs on test set
Returns:
None
'''
input_path = '../output/rsfcpc2_rsfc/validation/'
output_path = '../output/rsfcpc2_rsfc/test/'
if not os.path.isdir(output_path):
os.makedirs(output_path)
torch.cuda.set_device(gpu_index)
torch.cuda.manual_seed(1)
n_set = 100
n_dup = 10
n_node = 68
vali_raw_all = np.zeros((3 * n_node + 1 + 8, 1))
print('Get data')
for i in range(1, 11):
load_file = 'random_seed_' + str(i) + '.csv'
load_path = os.path.join(input_path, load_file)
xmin = fc.csv_matrix_read(load_path)
index_mat = np.zeros((2, xmin.shape[1]))
index_mat[0, :] = i
index_mat[1, :] = np.arange(xmin.shape[1])
xmin = np.concatenate((index_mat, xmin), axis=0)
vali_raw_all = np.concatenate((vali_raw_all, xmin), axis=1)
vali_raw_all = vali_raw_all[:, 1:]
vali_index = np.argsort(vali_raw_all[7, :])
vali_sort_all = vali_raw_all[:, vali_index]
vali_sel_num = 10
i = 0
vali_sel = np.zeros((vali_raw_all.shape[0], vali_sel_num))
p = 0
p_set = np.zeros(vali_sel_num)
print('select data')
while i < vali_sel_num and p < vali_raw_all.shape[1]:
corr_t = np.zeros(vali_sel_num, dtype=bool)
corr_tr = np.zeros((vali_sel_num, 3))
for j in range(vali_sel_num):
w_corr = np.corrcoef(vali_sel[8:8 + n_node, j:j + 1].T,
vali_sort_all[8:8 + n_node, p:p + 1].T)
i_corr = np.corrcoef(
vali_sel[8 + n_node:8 + 2 * n_node, j:j + 1].T,
vali_sort_all[8 + n_node:8 + 2 * n_node, p:p + 1].T)
s_corr = np.corrcoef(vali_sel[9 + 2 * n_node:, j:j + 1].T,
vali_sort_all[9 + 2 * n_node:, p:p + 1].T)
corr_tr[j, 0] = w_corr[0, 1]
corr_tr[j, 1] = i_corr[0, 1]
corr_tr[j, 2] = s_corr[0, 1]
|
for k in range(vali_sel_num):
corr_t[k] = (corr_tr[k, :] > 0.98).all()
if not corr_t.any():
vali_sel[:, i] = vali_sort_all[:, p]
p_set[i] = p
i += 1
p += 1
result_save = np.zeros((3 * n_node + 1 + 11, vali_sel_num))
result_save[0:8, :] = vali_sel[0:8, :]
result_save[11:, :] = vali_sel[8:, :]
print('Start testing')
    for j in range(vali_sel_num):
test_cost = np.zeros((3, n_set))
for k in range(1):
arx = np.tile(vali_sel[8:, j:j + 1], [1, n_set])
total_cost, fc_cost, fcd_cost = fc.CBIG_combined_cost_test(
arx, n_dup)
test_cost[0, n_set * k:n_set * (k + 1)] = fc_cost
test_cost[1, n_set * k:n_set * (k + 1)] = fcd_cost
test_cost[2, n_set * k:n_set * (k + 1)] = total_cost
test_file = os.path.join(output_path,
'test_num_' + str(j + 1) + '.csv')
np.savetxt(test_file, test_cost, delimiter=',')
result_save[8, j] = np.nanmean(test_cost[0, :])
result_save[9, j] = np.nanmean(test_cost[1, :])
result_save[10, j] = np.nanmean(test_cost[2, :])
print('**************** finish top ' + str(j + 1) +
' test ****************')
test_file_all = os.path.join(output_path, 'test_all.csv')
np.savetxt(test_file_all, result_save, delimiter=',')
if __name__ == '__main__':
CBIG_mfm_test_desikan_main(gpu_index=0)
|
dendory/scripts
|
wikipedia_define.py
|
Python
|
mit
| 347
| 0.028818
|
#!/usr/bin/env python3
# Uses the wikipedia module to define words on the command line
import wikipedia
import sys
sys.argv.pop(0)
for word in sys.argv:
try:
if word[0] != '-':
|
if '-full' in sys.argv:
print(wikipedia.summary(word))
else:
print(wikipedia.summary(word, sentences=1))
except:
print("* Unknown word: " + word)
|
bengland2/fsstress
|
fsd_log.py
|
Python
|
apache-2.0
| 1,486
| 0.004038
|
import os
import logging
# standardize use of logging module in fs-drift
def start_log(prefix, verbosity=0):
log = logging.getLogger(prefix)
if os.getenv('LOGLEVEL_DEBUG') != None or verbosity != 0:
log.setLevel(logging.DEBUG)
else:
log.setLevel(logging.INFO)
log_format = prefix + ' %(asctime)s - %(levelname)s - %(message)s'
|
formatter = logging.Formatter(log_format)
    h = logging.StreamHandler()
h.setFormatter(formatter)
h.setLevel(logging.INFO)
log.addHandler(h)
h2 = logging.FileHandler('/var/tmp/fsd.%s.log' % prefix)
h2.setFormatter(formatter)
log.addHandler(h2)
log.info('starting log')
return log
# assumptions:
# - there is only 1 FileHandler associated with logger
# - you don't want to change loglevel of StreamHandler
def change_loglevel(logger, loglevel):
for h in logger.handlers:
if isinstance(h, logging.FileHandler):
logger.info('changing log level of FileHandler to %s' % loglevel)
h.setLevel(loglevel)
if __name__ == '__main__':
log = start_log('fsd_log_test')
log.error('level %s', 'error')
log.warn('level %s', 'warn')
log.info('level %s', 'info')
log.debug('level %s', 'debug')
change_loglevel(log, logging.DEBUG)
log.debug('level %s', 'debug - should see this one in the log file /var/tmp/fsd.fsd_log_test.log')
change_loglevel(log, logging.INFO)
log.debug('level %s', 'debug - should NOT see this one there')
|
williamFalcon/pytorch-lightning
|
tests/models/test_hooks.py
|
Python
|
apache-2.0
| 38,377
| 0.002528
|
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import partial
from inspect import getmembers, isfunction
from unittest import mock
from unittest.mock import ANY, PropertyMock
import pytest
import torch
from torch.utils.data import DataLoader
from pytorch_lightning import __version__, Callback, LightningDataModule, LightningModule, Trainer
from tests.helpers import BoringDataModule, BoringModel, RandomDataset
from tests.helpers.runif import RunIf
@pytest.mark.parametrize("max_steps", [1, 2, 3])
def test_on_before_zero_grad_called(tmpdir, max_steps):
class CurrentTestModel(BoringModel):
on_before_zero_grad_called = 0
def on_before_zero_grad(self, optimizer):
self.on_before_zero_grad_called += 1
model = CurrentTestModel()
trainer = Trainer(default_root_dir=tmpdir, max_steps=max_steps, max_epochs=2)
assert 0 == model.on_before_zero_grad_called
trainer.fit(model)
assert max_steps == model.on_before_zero_grad_called
model.on_before_zero_grad_called = 0
trainer.test(model)
assert 0 == model.on_before_zero_grad_called
def test_training_epoch_end_metrics_collection(tmpdir):
"""Test that progress bar metrics also get collected at the end of an epoch."""
num_epochs = 3
class CurrentModel(BoringModel):
def training_step(self, *args, **kwargs):
output = super().training_step(*args, **kwargs)
self.log_dict({"step_metric": torch.tensor(-1), "shared_metric": 100}, logger=False, prog_bar=True)
return output
def training_epoch_end(self, outputs):
epoch = self.current_epoch
# both scalar tensors and Python numbers are accepted
self.log_dict(
{f"epoch_metric_{epoch}": torch.tensor(epoch), "shared_metric": 111}, logger=False, prog_bar=True
)
model = CurrentModel()
trainer = Trainer(max_epochs=num_epochs, default_root_dir=tmpdir, overfit_batches=2)
trainer.fit(model)
assert trainer.state.finished, f"Training failed with {trainer.state}"
metrics = trainer.progress_bar_dict
# metrics added in training step should be unchanged by epoch end method
assert metrics["step_metric"] == -1
# a metric shared in both methods gets overwritten by epoch_end
assert metrics["shared_metric"] == 111
# metrics are kept after each epoch
for i in range(num_epochs):
assert metrics[f"epoch_metric_{i}"] == i
def test_training_epoch_end_metrics_collection_on_override(tmpdir):
"""Test that batch end metrics are collected when training_epoch_end is overridden at the end of an epoch."""
class OverriddenModel(BoringModel):
def __init__(self):
super().__init__()
self.len_outputs = 0
def on_train_epoch_start(self):
self.num_train_batches = 0
def training_epoch_end(self, outputs):
self.len_outputs = len(outputs)
def on_train_batch_end(self, outputs, batch, batch_idx, dataloader_idx):
self.num_train_batches += 1
class NotOverriddenModel(BoringModel):
def on_train_epoch_start(self):
self.num_train_batches = 0
        def on_train_batch_end(self, outputs, batch, batch_idx, dataloader_idx):
self.num_train_batches += 1
overridden_model = OverriddenModel()
not_overridden_model = NotOverriddenModel()
not_overridden_model.training_epoch_end = None
    trainer = Trainer(max_epochs=1, default_root_dir=tmpdir, overfit_batches=2)
trainer.fit(overridden_model)
assert overridden_model.len_outputs == overridden_model.num_train_batches
@RunIf(min_gpus=1)
@mock.patch("pytorch_lightning.accelerators.accelerator.Accelerator.lightning_module", new_callable=PropertyMock)
def test_apply_batch_transfer_handler(model_getter_mock):
expected_device = torch.device("cuda", 0)
class CustomBatch:
def __init__(self, data):
self.samples = data[0]
self.targets = data[1]
class CurrentTestModel(BoringModel):
rank = 0
transfer_batch_to_device_hook_rank = None
on_before_batch_transfer_hook_rank = None
on_after_batch_transfer_hook_rank = None
def on_before_batch_transfer(self, batch, dataloader_idx):
assert dataloader_idx == 0
self.on_before_batch_transfer_hook_rank = self.rank
self.rank += 1
batch.samples += 1
return batch
def on_after_batch_transfer(self, batch, dataloader_idx):
assert dataloader_idx == 0
assert batch.samples.device == batch.targets.device == expected_device
self.on_after_batch_transfer_hook_rank = self.rank
self.rank += 1
batch.targets *= 2
return batch
def transfer_batch_to_device(self, batch, device, dataloader_idx):
assert dataloader_idx == 0
self.transfer_batch_to_device_hook_rank = self.rank
self.rank += 1
batch.samples = batch.samples.to(device)
batch.targets = batch.targets.to(device)
return batch
model = CurrentTestModel()
batch = CustomBatch((torch.zeros(5, 32), torch.ones(5, 1, dtype=torch.long)))
trainer = Trainer(gpus=1)
# running .fit() would require us to implement custom data loaders, we mock the model reference instead
model_getter_mock.return_value = model
batch_gpu = trainer.accelerator.batch_to_device(batch, expected_device)
assert model.on_before_batch_transfer_hook_rank == 0
assert model.transfer_batch_to_device_hook_rank == 1
assert model.on_after_batch_transfer_hook_rank == 2
assert batch_gpu.samples.device == batch_gpu.targets.device == expected_device
assert torch.allclose(batch_gpu.samples.cpu(), torch.ones(5, 32))
assert torch.allclose(batch_gpu.targets.cpu(), torch.ones(5, 1, dtype=torch.long) * 2)
@RunIf(min_gpus=2, special=True)
def test_transfer_batch_hook_ddp(tmpdir):
"""
Test custom data are properly moved to the right device using ddp
"""
class CustomBatch:
def __init__(self, data):
self.samples = data[0]
def to(self, device, **kwargs):
self.samples = self.samples.to(device, **kwargs)
return self
def collate_fn(batch):
return CustomBatch(batch)
class TestModel(BoringModel):
def training_step(self, batch, batch_idx):
assert batch.samples.device == self.device
assert isinstance(batch_idx, int)
def train_dataloader(self):
return torch.utils.data.DataLoader(RandomDataset(32, 64), collate_fn=collate_fn)
model = TestModel()
model.validation_step = None
model.training_epoch_end = None
trainer = Trainer(
default_root_dir=tmpdir,
limit_train_batches=2,
limit_val_batches=0,
max_epochs=1,
weights_summary=None,
accelerator="ddp",
gpus=2,
)
trainer.fit(model)
def get_members(cls):
return {h for h, _ in getmembers(cls, predicate=isfunction) if not h.startswith("_")}
class HookedCallback(Callback):
def __init__(self, called):
def call(hook, fn, *args, **kwargs):
out = fn(*args, **kwargs)
d = {"name": f"Callback.{hook}"}
if args:
d["args"] = args
if kwargs:
d["kwargs"] = kwargs
called.append(d)
return out
for h in get_members(Callback):
attr = getattr(self, h)
setattr(self, h, partial(call,
|
laosiaudi/tensorflow
|
tensorflow/python/util/deprecation.py
|
Python
|
apache-2.0
| 12,098
| 0.004877
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tensor utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import inspect
import re
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import decorator_utils
def _add_deprecated_function_notice_to_docstring(doc, date, instructions):
"""Adds a deprecation notice to a docstring for deprecated functions."""
return decorator_utils.add_notice_to_docstring(
doc, instructions,
'DEPRECATED FUNCTION',
'(deprecated)', [
'THIS FUNCTION IS DEPRECATED. It will be removed after %s.' % date,
'Instructions for updating:'])
def _add_deprecated_arg_notice_to_docstring(doc, date, instructions):
"""Adds a deprecation notice to a docstring for deprecated arguments."""
return decorator_utils.add_notice_to_docstring(
doc, instructions,
'DEPRECATED FUNCTION ARGUMENTS',
'(deprecated arguments)', [
'SOME ARGUMENTS ARE DEPRECATED. '
'They will be removed after %s.' % date,
'Instructions for updating:'])
def _validate_deprecation_args(date, instructions):
if not date:
raise ValueError('Tell us what date this will be deprecated!')
if not re.match(r'20\d\d-[01]\d-[0123]\d', date):
raise ValueError('Date must be YYYY-MM-DD.')
if not instructions:
raise ValueError('Don\'t deprecate things without conversion instructions!')
def _call_location(level=2):
"""Returns call location given level up from current call."""
stack = inspect.stack()
# Check that stack has enough elements.
  if len(stack) > level:
location = stack[level]
return '%s:%d in %s.' % (location[1], location[2], location[3])
return '<unknown>'
def deprecated(date, instructions):
"""Decorator for marking functions or methods deprecated.
  This decorator logs a deprecation warning whenever the decorated function is
called. It has the following format:
<function> (from <module>) is deprecated and will be removed after <date>.
Instructions for updating:
<instructions>
<function> will include the class name if it is a method.
It also edits the docstring of the function: ' (deprecated)' is appended
to the first line of the docstring and a deprecation notice is prepended
to the rest of the docstring.
Args:
date: String. The date the function is scheduled to be removed. Must be
ISO 8601 (YYYY-MM-DD).
instructions: String. Instructions on how to update code using the
deprecated function.
Returns:
Decorated function or method.
Raises:
ValueError: If date is not in ISO 8601 format, or instructions are empty.
"""
_validate_deprecation_args(date, instructions)
def deprecated_wrapper(func):
"""Deprecation wrapper."""
decorator_utils.validate_callable(func, 'deprecated')
@functools.wraps(func)
def new_func(*args, **kwargs):
logging.warning(
'From %s: %s (from %s) is deprecated and will be removed '
'after %s.\n'
'Instructions for updating:\n%s',
_call_location(), decorator_utils.get_qualified_name(func),
func.__module__, date, instructions)
return func(*args, **kwargs)
new_func.__doc__ = _add_deprecated_function_notice_to_docstring(
func.__doc__, date, instructions)
return new_func
return deprecated_wrapper
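# Illustrative sketch (an editorial example, not part of the original module):
# applying @deprecated logs a warning on every call and appends a
# '(deprecated)' notice to the wrapped function's docstring.
@deprecated('2017-01-01', 'Use the replacement op instead.')
def _deprecated_example():
  """Example function used only to illustrate the decorator."""
  return None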
DeprecatedArgSpec = collections.namedtuple(
'DeprecatedArgSpec', ['position', 'has_ok_value', 'ok_value'])
def deprecated_args(date, instructions, *deprecated_arg_names_or_tuples):
"""Decorator for marking specific function arguments as deprecated.
This decorator logs a deprecation warning whenever the decorated function is
called with the deprecated argument. It has the following format:
Calling <function> (from <module>) with <arg> is deprecated and will be
removed after <date>. Instructions for updating:
<instructions>
<function> will include the class name if it is a method.
It also edits the docstring of the function: ' (deprecated arguments)' is
appended to the first line of the docstring and a deprecation notice is
prepended to the rest of the docstring.
Args:
date: String. The date the function is scheduled to be removed. Must be
ISO 8601 (YYYY-MM-DD).
instructions: String. Instructions on how to update code using the
deprecated function.
*deprecated_arg_names_or_tuples: String. or 2-Tuple(String,
[ok_vals]). The string is the deprecated argument name.
Optionally, an ok-value may be provided. If the user provided
argument equals this value, the warning is suppressed.
Returns:
Decorated function or method.
Raises:
ValueError: If date is not in ISO 8601 format, instructions are
empty, the deprecated arguments are not present in the function
signature, or the second element of a deprecated_tuple is not a
list.
"""
_validate_deprecation_args(date, instructions)
if not deprecated_arg_names_or_tuples:
raise ValueError('Specify which argument is deprecated.')
def _get_arg_names_to_ok_vals():
"""Returns a dict mapping arg_name to DeprecatedArgSpec w/o position."""
d = {}
for name_or_tuple in deprecated_arg_names_or_tuples:
if isinstance(name_or_tuple, tuple):
d[name_or_tuple[0]] = DeprecatedArgSpec(-1, True, name_or_tuple[1])
else:
d[name_or_tuple] = DeprecatedArgSpec(-1, False, None)
return d
def _get_deprecated_positional_arguments(names_to_ok_vals, arg_spec):
"""Builds a dictionary from deprecated arguments to thier spec.
Returned dict is keyed by argument name.
Each value is a DeprecatedArgSpec with the following fields:
position: The zero-based argument position of the argument
within the signature. None if the argument isn't found in
the signature.
ok_values: Values of this argument for which warning will be
suppressed.
Args:
names_to_ok_vals: dict from string arg_name to a list of values,
possibly empty, which should not elicit a warning.
arg_spec: Output from inspect.getargspec on the called function.
Returns:
Dictionary from arg_name to DeprecatedArgSpec.
"""
arg_name_to_pos = dict(
(name, pos) for (pos, name) in enumerate(arg_spec.args))
deprecated_positional_args = {}
for arg_name, spec in iter(names_to_ok_vals.items()):
if arg_name in arg_name_to_pos:
pos = arg_name_to_pos[arg_name]
deprecated_positional_args[arg_name] = DeprecatedArgSpec(
pos, spec.has_ok_value, spec.ok_value)
return deprecated_positional_args
def deprecated_wrapper(func):
"""Deprecation decorator."""
decorator_utils.validate_callable(func, 'deprecated_args')
deprecated_arg_names = _get_arg_names_to_ok_vals()
arg_spec = inspect.getargspec(func)
deprecated_positions = _get_deprecated_positional_arguments(
deprecated_arg_names, arg_spec)
is_varargs_deprecated = arg_spec.varargs in deprecated_arg_names
is_kwargs_deprecated = arg_spec.keywords in deprecated_arg_names
if (len(deprecated_positions) + is_varargs_deprecated + is_kwargs_deprecated
!= len(deprecated_arg_names_or_tuples)):
known_args = arg_spec.args + [arg_spec.varargs, arg_spec.keywords]
missing_args = [arg_name for arg_name in deprecated_arg_names
|
otuncelli/turkish-stemmer-python
|
TurkishStemmer/transitions/__init__.py
|
Python
|
apache-2.0
| 516
| 0.007782
|
__all__ = ["Transition"]
class Transition(object):
def __init__(self, startState, nextState, word, suffix, marked):
self.startState = startState
self.nextState = nextState
self.word = word
self.suffix = suffix
        self.marked = False
def similarTransitions(self, transitions):
for transition in transitions:
if (self.startState == transition.startState and
self.nextState == transition.nextState):
|
yield transition
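A small usage sketch of the class above; the states, words and suffixes are made-up placeholders:
from TurkishStemmer.transitions import Transition
t1 = Transition(1, 2, "kitap", "lar", False)
t2 = Transition(1, 2, "kalem", "ler", False)
t3 = Transition(2, 3, "kalem", "i", False)
# Only t2 shares both startState and nextState with t1, so it is the only match.
same = list(t1.similarTransitions([t2, t3]))
assert same == [t2]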
|
yehzhang/RapidTest
|
tests/test_by_examples.py
|
Python
|
mit
| 1,535
| 0.001954
|
from unittest import TestCase
EXAMPLES_PATH = '../examples'
SKIPPED_EXAMPLES = {472, 473, 477}
def _set_test_class():
import re
from imp import load_module, find_module, PY_SOURCE
from pathlib import Path
def _load_module(name, file, pathname, description):
try:
load_module(name, file, pathname, description)
finally:
if file:
file.close()
def make_method(module_name, module_tuple):
def _m(self):
print('Running: {}'.format(module_name))
            _load_module(module_name, *module_tuple)
return _m
sols_module_name = 'solutions'
_load_module(sols_module_name, *find_module(sols_module_name, [EXAMPLES_PATH]))
pat_example = re.compile(r'\d+\. .+\.py')
attrs = {}
    for i, example_path in enumerate(Path(EXAMPLES_PATH).iterdir()):
if not re.match(pat_example, example_path.name):
continue
module_name = example_path.stem
if int(module_name.split('. ')[0]) in SKIPPED_EXAMPLES:
continue
module_tuple = open(str(example_path), 'rb'), example_path.stem, ('.py', 'rb', PY_SOURCE)
func_name = module_name.replace(' ', '_').replace('.', '').lower()
func_name = 'test_' + ''.join(c for c in func_name if c.isalnum() or c == '_')
attrs[func_name] = make_method(module_name, module_tuple)
class_name = 'TestByExamples'
globals()[class_name] = type(class_name, (TestCase,), attrs)
_set_test_class()
del _set_test_class
|
ahmetalpbalkan/permalinker
|
application/downloader.py
|
Python
|
apache-2.0
| 140
| 0
|
# coding=utf-8
import requests
def download(url):
resp = requests.get(url) # TODO add retries
return resp.content, resp.headers
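A minimal usage sketch of the function above (the URL and output filename are placeholders; the TODO about retries still applies):
if __name__ == "__main__":
    # Fetch a resource and persist its body; headers stay available for inspection.
    content, headers = download("https://example.com/logo.png")
    with open("logo.png", "wb") as f:
        f.write(content)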
| |
nuxis/p0sX-server
|
p0sx/pos/migrations/0002_itemingredient_exclusive.py
|
Python
|
mit
| 448
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-03-19 02:09
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
        ('pos', '0001_initial'),
]
operations = [
migrations.AddField(
|
model_name='itemingredient',
name='exclusive',
field=models.BooleanField(default=False),
),
]
|
lincoln-lil/flink
|
flink-python/pyflink/table/tests/test_environment_settings_completeness.py
|
Python
|
apache-2.0
| 2,417
| 0.002482
|
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
from pyflink.table import EnvironmentSettings
from pyflink.testing.test_case_utils import PythonAPICompletenessTestCase, PyFlinkTestCase
class EnvironmentSettingsCompletenessTests(PythonAPICompletenessTestCase, PyFlinkTestCase):
"""
Tests whether the Python :class:`EnvironmentSettings` is consistent with
Java `org.apache.flink.table.api.EnvironmentSettings`.
"""
@classmethod
def python_class(cls):
return EnvironmentSettings
@classmethod
def java_class(cls):
return "org.apache.flink.table.api.EnvironmentSettings"
@classmethod
def excluded_methods(cls):
# internal interfaces, no need to expose to users.
return {'getPlanner', 'getExecutor'}
class EnvironmentSettingsBuilderCompletenessTests(PythonAPICompletenessTestCase, PyFlinkTestCase):
"""
    Tests whether the Python :class:`EnvironmentSettings.Builder` is consistent with
Java `org.apache.flink.table.api.EnvironmentSettings$Builder`.
"""
@classmethod
    def python_class(cls):
return EnvironmentSettings.Builder
@classmethod
def java_class(cls):
return "org.apache.flink.table.api.EnvironmentSettings$Builder"
if __name__ == '__main__':
import unittest
try:
import xmlrunner
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports')
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
|
sahlinet/fastapp
|
fastapp/plugins/singleton.py
|
Python
|
mit
| 1,074
| 0.003724
|
import logging
logger = logging.getLogger(__name__)
class Singleton(type):
def __init__(cls, name, bases, dict):
super(Singleton, cls).__init__(name, bases, dict)
cls.instance = None
def __call__(cls, keep=True, *args, **kwargs):
logger.debug("Handle singleton instance for %s with args (keep=%s): %s, %s" % (cls, keep, args, kwargs))
if keep:
if cls.instance is None:
logger.debug("Return and keep si
|
ngleton instance for %s with args (keep=%s): %s, %s" % (cls, keep, args, kwargs))
cls.instance = super(Singleton, cls).__call__(*args, **kwargs)
return cls.instance
else:
logger.debug("Return cached singleton instance for %s with args (keep=%s): %s, %s" % (cls, keep, args, kwargs))
return cls.instance
else:
logger.debug("Return new singleton insta
|
nce for %s with args (keep=%s): %s, %s" % (cls, keep, args, kwargs))
return super(Singleton, cls).__call__(*args, **kwargs)
return None
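A minimal usage sketch of the metaclass above (Python 2 class syntax, matching the module); the Connection class and its dsn argument are made-up placeholders. Because keep is the first positional parameter of __call__, constructor arguments are best passed as keywords:
class Connection(object):
    __metaclass__ = Singleton
    def __init__(self, dsn):
        self.dsn = dsn
a = Connection(dsn="sqlite://db1")              # created and cached
b = Connection(dsn="ignored")                   # the cached instance is returned
c = Connection(keep=False, dsn="sqlite://tmp")  # bypasses the cache
assert a is b and a is not c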
|
jalaziz/validictory
|
validictory/tests/test_defaults.py
|
Python
|
mit
| 1,074
| 0
|
from unittest import TestCase
import validictory
class TestItems(TestCase):
def test_property(self):
schema = {
"type": "object",
"properties": {
"foo": {
"default": "bar"
},
"baz": {
"type": "integer"
}
}
}
data = {'baz': 2}
result = validictory.validate(data, schema, required_by_default=False)
self.assertEqual(result, {"foo": "bar", "baz": 2})
|
def test_item(self):
schema = {
'type': 'object',
'type': 'array',
'items': [
{
'type': 'any'
},
{
'type': 'string'
},
{
'default': 'baz'
},
]
}
data = ['foo', 'bar']
result = validictory.validate(data, schema, required_by_default=False)
self.assertEqual(result, ["foo", "bar", "baz"])
|
TwilioDevEd/airtng-flask
|
airtng_flask/__init__.py
|
Python
|
mit
| 801
| 0.002497
|
import os
from airtng_flask.config import config_env_files
from flask import Flask
from flask_bcrypt import Bcrypt
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
db = SQLAlchemy()
bcrypt = Bcrypt()
login_manager = LoginManager()
def create_app(config_name='development', p_db=db, p_bcrypt=bcrypt, p_login_manager=login_manager):
new_app = Flask(__name__)
config_app(config_name, new_app)
    new_app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
p_db.init_app(new_app)
p_bcrypt.init_app(new_app)
p_login_manager.init_app(new_app)
p_login_manager.login_view = 'register'
return new_app
def config_app(config_name, new_app):
new_app.config.from_object(config_env_files[config_name])
app = create_app()
import airtng_flask.views
|
jmjj/messages2json
|
messages2json/__init__.py
|
Python
|
mit
| 140
| 0
|
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 2 10:56:34 2016
@author: jmjj (Jari Juopperi, [email protected])
"""
from .main import *
|
ChawalitK/odoo
|
addons/web_editor/controllers/main.py
|
Python
|
gpl-3.0
| 10,264
| 0.00341
|
# -*- coding: utf-8 -*-
from openerp.http import request, STATIC_CACHE
from openerp.addons.web import http
import json
import io
from PIL import Image, ImageFont, ImageDraw
from openerp import tools
import cStringIO
import werkzeug.wrappers
import time
import logging
logger = logging.getLogger(__name__)
class Web_Editor(http.Controller):
#------------------------------------------------------
# Backend snippet
#------------------------------------------------------
@http.route('/web_editor/snippets', type='json', auth="user")
def snippets(self, **kwargs):
return request.env.ref('web_editor.snippets').render(None)
#------------------------------------------------------
# Backend html field
#------------------------------------------------------
@http.route('/web_editor/field/html', type='http', auth="user")
def FieldTextHtml(self, model=None, res_id=None, field=None, callback=None, **kwargs):
cr, uid, context = request.cr, request.uid, request.context
kwargs.update(
model=model,
res_id=res_id,
field=field,
datarecord=json.loads(kwargs['datarecord']),
debug='debug' in kwargs)
for k in kwargs:
if isinstance(kwargs[k], basestring) and kwargs[k].isdigit():
kwargs[k] = int(kwargs[k])
trans = dict(
lang=kwargs.get('lang', context.get('lang')),
translatable=kwargs.get('translatable'),
edit_translations=kwargs.get('edit_translations'),
editable=kwargs.get('enable_editor'))
context.update(trans)
kwargs.update(trans)
record = None
if model and kwargs.get('res_id'):
record = request.registry[model].browse(cr, uid, kwargs.get('res_id'), context)
kwargs.update(content=record and getattr(record, field) or "")
return request.render(kwargs.get("template") or "web_editor.FieldTextHtml", kwargs, uid=request.uid)
#------------------------------------------------------
# Backend html field in inline mode
#------------------------------------------------------
@http.route('/web_editor/field/html/inline', type='http', auth="user")
def FieldTextHtmlInline(self, model=None, res_id=None, field=None, callback=None, **kwargs):
kwargs['inline_mode'] = True
kwargs['dont_load_assets'] = not kwargs.get('enable_editor') and not kwargs.get('edit_translations')
return self.FieldTextHtml(model, res_id, field, callback, **kwargs)
#------------------------------------------------------
# convert font into picture
#------------------------------------------------------
@http.route([
'/web_editor/font_to_img/<icon>',
'/web_editor/font_to_img/<icon>/<color>',
'/web_editor/font_to_img/<icon>/<color>/<int:size>',
'/web_editor/font_to_img/<icon>/<color>/<int:size>/<int:alpha>',
], type='http', auth="none")
def export_icon_to_png(self, icon, color='#000', size=100, alpha=255, font='/web/static/lib/fontawesome/fonts/fontawesome-webfont.ttf'):
""" This method converts an unicode character to an image (using Font
Aw
|
esome font by default) and is used only for mass mailing because
custom fonts are not supported in mail.
:param icon : decimal encoding of unicode character
:param color : RGB code of the color
:param size : Pixels in integer
:param alpha : transparency of the image from 0 to 255
:param font : font path
:returns PNG image converted from given font
"""
# Make sure we have at least size=1
size = max(1, size)
# Initialize font
addons_path = http.addons_manifest['web']['addons_path']
font_obj = ImageFont.truetype(addons_path + font, size)
# if received character is not a number, keep old behaviour (icon is character)
icon = unichr(int(icon)) if icon.isdigit() else icon
# Determine the dimensions of the icon
image = Image.new("RGBA", (size, size), color=(0, 0, 0, 0))
draw = ImageDraw.Draw(image)
boxw, boxh = draw.textsize(icon, font=font_obj)
draw.text((0, 0), icon, font=font_obj)
left, top, right, bottom = image.getbbox()
# Create an alpha mask
imagemask = Image.new("L", (boxw, boxh), 0)
drawmask = ImageDraw.Draw(imagemask)
drawmask.text((-left, -top), icon, font=font_obj, fill=alpha)
# Create a solid color image and apply the mask
if color.startswith('rgba'):
color = color.replace('rgba', 'rgb')
color = ','.join(color.split(',')[:-1])+')'
iconimage = Image.new("RGBA", (boxw, boxh), color)
iconimage.putalpha(imagemask)
# Create output image
outimage = Image.new("RGBA", (boxw, size), (0, 0, 0, 0))
outimage.paste(iconimage, (left, top))
# output image
output = io.BytesIO()
outimage.save(output, format="PNG")
response = werkzeug.wrappers.Response()
response.mimetype = 'image/png'
response.data = output.getvalue()
response.headers['Cache-Control'] = 'public, max-age=604800'
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'GET, POST'
response.headers['Connection'] = 'close'
response.headers['Date'] = time.strftime("%a, %d-%b-%Y %T GMT", time.gmtime())
response.headers['Expires'] = time.strftime("%a, %d-%b-%Y %T GMT", time.gmtime(time.time()+604800*60))
return response
#------------------------------------------------------
# add attachment (images or link)
#------------------------------------------------------
@http.route('/web_editor/attachment/add', type='http', auth='user', methods=['POST'])
def attach(self, func, upload=None, url=None, disable_optimization=None, **kwargs):
# the upload argument doesn't allow us to access the files if more than
# one file is uploaded, as upload references the first file
# therefore we have to recover the files from the request object
Attachments = request.registry['ir.attachment'] # registry for the attachment table
uploads = []
message = None
if not upload: # no image provided, storing the link and the image name
name = url.split("/").pop() # recover filename
attachment_id = Attachments.create(request.cr, request.uid, {
'name': name,
'type': 'url',
'url': url,
'public': True,
'res_model': 'ir.ui.view',
}, request.context)
uploads += Attachments.read(request.cr, request.uid, [attachment_id], ['name', 'mimetype', 'checksum', 'url'], request.context)
else: # images provided
try:
attachment_ids = []
for c_file in request.httprequest.files.getlist('upload'):
data = c_file.read()
try:
image = Image.open(cStringIO.StringIO(data))
w, h = image.size
if w*h > 42e6: # Nokia Lumia 1020 photo resolution
raise ValueError(
u"Image size excessive, uploaded images must be smaller "
u"than 42 million pixel")
if not disable_optimization and image.format in ('PNG', 'JPEG'):
data = tools.image_save_for_web(image)
except IOError, e:
pass
attachment_id = Attachments.create(request.cr, request.uid, {
'name': c_file.filename,
'datas': data.encode('base64'),
'datas_fname': c_file.filename,
'public': True,
'res_model': 'ir.ui.vie
|
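The export_icon_to_png controller above renders a font glyph into a transparent PNG by drawing the character, building an alpha mask, and pasting a solid colour through it. A minimal standalone sketch of the same technique, assuming some locally available TrueType font path (the path, colour, and function name below are illustrative, not the controller's FontAwesome defaults):

from io import BytesIO
from PIL import Image, ImageDraw, ImageFont

def glyph_to_png(char, font_path, size=100, color=(135, 90, 123), alpha=255):
    # Draw the glyph into a greyscale mask, then push a solid colour through it.
    font = ImageFont.truetype(font_path, size)
    mask = Image.new("L", (size, size), 0)
    ImageDraw.Draw(mask).text((0, 0), char, font=font, fill=alpha)
    out = Image.new("RGBA", (size, size), color + (0,))
    out.putalpha(mask)
    buf = BytesIO()
    out.save(buf, format="PNG")
    return buf.getvalue()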
jbalm/ActuarialCashFlowModel
|
esg/credit_risk/JLT.py
|
Python
|
gpl-3.0
| 10,706
| 0.014413
|
## Progam packages
from .credit_model_classes import credit_model_base
from ...asset.Asset_data import Asset_data
from ..generator_correlated_variables import generator_correlated_variables
from ...core_math.function_optim import function_optim
from ...core_math.functions_credit import generator_matrix, exp_matrix
## Python packages
from scipy.linalg import inv, norm
from numpy import linalg as la
from scipy.optimize import minimize
import numpy as np
class JLT(credit_model_base):
"""
The JLT model is implemented here
Attributes:
==========
Input:
_______
1. pi_0 : initial value of the risk premium
Type : float
2. mu : long term average parameter
Type : float
3. alpha : speed of adjustment parameter
Type : float
4. recovery_rate : recovery rate in case of default
Type : float
5. sigma : volatility parameter
Type : float
6. market_name : market name
Type : string
Output:
_______
1. RN_migration_matrix : risk-neutral migration matrix
Type : matrix 7x7
2. spreads : credit spreads
Type : vector of length 7
Methods:
_______
1. add_time_horizon
2. get_spread
3. get_hist_transition_matrix
4. calibrate_spread
5. calibrate_price
6. generate_spreads_and_matrix
7. test_diffusion_pi
"""
def __init__(self, pi_0= None, mu= None, alpha= None, sigma= None, recovery_rate= None, market_name= None):
self.time_horizon=0
self.recovery_rate = recovery_rate
# initiate
self.market_spread = None
self.eigenval_hist_gen= None
self.eigenvect_hist_gen= None
self.historical_transition_matrix = None
self.RN_migration_matrix=[]
self.spreads=[]
self.mu=mu
self.alpha=alpha
self.sigma=sigma
self.corr_matrix= None
self.fixed_seed = None
self.num_instrument = 0
self.pi_0=pi_0
self.market_name=market_name
# takes IR_model as input, or not... We also defined a method that retrieves the zero-coupon rates from an IR model
# here, we may only need to initialize everything empty
# the point of this definition is that the object is ready to use, which is simpler
# and a method allows attributes to be modified/accessed from outside.
def getMatrixJLT(self,t,T):
out = None
d = self.eigenval_hist_gen
if self.sigma !=0:
v = np.sqrt(self.alpha**2 - 2*d*self.sigma**2)
denominator = (v+self.alpha)*(np.exp(v*(T-t))-1)+2*v
A = (2*self.alpha*self.mu)/(self.sigma**2)*np.log((2*v*np.exp(0.5*(self.alpha+v)*(T-t)))/denominator)
B = - (2*d*(np.exp(v*(T-t))-1))/denominator
value = np.exp(A - B*self.risk_premium[t])
out = np.diag(value)
else:
temp = (self.risk_premium[t]+np.exp(-self.alpha*t))*(T-t) + 1/(self.alpha)*(np.exp(-self.alpha*T)-np.exp(-self.alpha*t))
value = np.exp(d*temp)
out = np.diag(value)
return out
def add_time_horizon(self,time_horizon):
"""
Method : add_time_horizon
Function : add the time horizon
Parameter :
1. time_horizon
Type : int
Function : correspond to the time horizon
"""
self.time_horizon = time_horizon
def get_spread(self,asset_data):
"""
Method : get_spread
Function : retrieve the spread from the pickle file
Parameter : None
"""
# read the market spread data ''of time 0''
market = asset_data.get_list_market(self.market_name)
spread_list = market.spread_list
col_index = market.col_index
row_index = market.row_index
self.market_spread = spread_list, col_index, row_index
def get_hist_transition_matrix(self, asset_data):
"""
Method : get_hist_transition_matrix
Function : retrieve the historical transition matrix from the pickle file and then deduce the generator matrix, its eigenvectors and its eigenvalues.
Parameter : None
"""
market = asset_data.get_list_market(self.market_name)
historical_transition_matrix = market.historical_transition_matrix
self.historical_transition_matrix = historical_transition_matrix
self.historical_generator_matrix = generator_matrix(self.historical_transition_matrix)
w, v = la.eig(self.historical_generator_matrix)
eigenval_hist_gen = w.real
eigenvect_hist_gen = (v.T).real
for l in range(len(eigenvect_hist_gen)):
eigenvect_hist_gen[l] = eigenvect_hist_gen[l]/norm(eigenvect_hist_gen[l])
eigenvect_hist_gen = eigenvect_hist_gen.T
self.eigenval_hist_gen= eigenval_hist_gen
self.eigenvect_hist_gen= eigenvect_hist_gen
def calibrate_spread(self, asset_data, AAA_AA):
"""
Method : calibrate_spread
Function : calibrate the model on the market data of spread
Parameter :
1. asset_data
Type : instance of Asset_data class
Function : see class Asset_data for more details.
2. AAA_AA
Type : boolean
Function : if it is true, then only spreads of AAA and AA ratings are used for the calibration
"""
market = asset_data.get_list_market(self.market_name)
if self.mu is None:
self.mu = market.JLT_mu
if self.sigma is None:
self.sigma = market.JLT_sigma
if self.alpha is None:
self.alpha = market.JLT_alpha
if self.pi_0 is None:
self.pi_0 = market.JLT_pi
if self.recovery_rate is None:
self.recovery_rate = market.recovery_rate
spread_list, col_index, row_index = self.market_spread
def f(pi_0):
return function_optim(pi_0, self.alpha, self.mu, self.sigma, self.recovery_rate,
self.eigenvect_hist_gen, self.eigenval_hist_gen,
row_index, col_index, spread_list,AAA_AA)
bds = [(0.001,None)]
res = minimize(f,x0=2, bounds=bds )
self.pi_0 = res.x[0]
return self.pi_0
def calibrate_price(self, asset_data):
"""
Method : calibrate_price
Function : calibrate the model on the market data of bonds' price
Parameter :
1. asset_data
Type : instance of Asset_data class
Function : see class Asset_data for more details.
"""
market = asset_data.get_list_market(self.market_name)
if self.mu is None:
self.mu = market.JLT_mu
if self.sigma is None:
self.sigma = market.JLT_sigma
if self.alpha is None:
self.alpha = market.JLT_alpha
if self.pi_0 is None:
self.pi_0 = market.JLT_pi
if self.recovery_rate is None:
self.recovery_rate = market.recovery_rate
spread_list, col_index, row_index = self.market_spread
def f(pi_0):
return function_optim(pi_0, self.alpha, self.mu, self.sigma,
self.recovery_rate, self.eigenvect_hist_gen, self
|
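getMatrixJLT above applies a CIR-style affine transform to each eigenvalue of the historical generator, scaled by the stochastic risk premium. A scalar sketch of that A/B closed form, with illustrative (uncalibrated) parameter values:

import numpy as np

def affine_factor(d, pi_t, alpha, mu, sigma, tau):
    # Same closed form as getMatrixJLT, for one eigenvalue d and maturity tau = T - t.
    v = np.sqrt(alpha ** 2 - 2.0 * d * sigma ** 2)
    denom = (v + alpha) * (np.exp(v * tau) - 1.0) + 2.0 * v
    A = (2.0 * alpha * mu) / sigma ** 2 * np.log(2.0 * v * np.exp(0.5 * (alpha + v) * tau) / denom)
    B = -(2.0 * d * (np.exp(v * tau) - 1.0)) / denom
    return np.exp(A - B * pi_t)

# Illustrative values only: a small negative eigenvalue and a unit risk premium.
print(affine_factor(d=-0.05, pi_t=1.0, alpha=0.3, mu=1.0, sigma=0.2, tau=5.0))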
firebase/grpc-SwiftPM
|
setup.py
|
Python
|
apache-2.0
| 15,931
| 0.006403
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A setup module for the GRPC Python package."""
from distutils import cygwinccompiler
from distutils import extension as _extension
from distutils import util
import os
import os.path
import pkg_resources
import platform
import re
import shlex
import shutil
import sys
import sysconfig
import setuptools
from setuptools.command import egg_info
import subprocess
from subprocess import PIPE
# Redirect the manifest template from MANIFEST.in to PYTHON-MANIFEST.in.
egg_info.manifest_maker.template = 'PYTHON-MANIFEST.in'
PY3 = sys.version_info.major == 3
PYTHON_STEM = os.path.join('src', 'python', 'grpcio')
CORE_INCLUDE = ('include', '.',)
ABSL_INCLUDE = (os.path.join('third_party', 'abseil-cpp'),)
ADDRESS_SORTING_INCLUDE = (os.path.join('third_party', 'address_sorting', 'include'),)
CARES_INCLUDE = (
os.path.join('third_party', 'cares'),
os.path.join('third_party', 'cares', 'cares'),)
if 'darwin' in sys.platform:
CARES_INCLUDE += (os.path.join('third_party', 'cares', 'config_darwin'),)
if 'freebsd' in sys.platform:
CARES_INCLUDE += (os.path.join('third_party', 'cares', 'config_freebsd'),)
if 'linux' in sys.platform:
CARES_INCLUDE += (os.path.join('third_party', 'cares', 'config_linux'),)
if 'openbsd' in sys.platform:
CARES_INCLUDE += (os.path.join('third_party', 'cares', 'config_openbsd'),)
SSL_INCLUDE = (os.path.join('third_party', 'boringssl-with-bazel', 'src', 'include'),)
UPB_INCLUDE = (os.path.join('third_party', 'upb'),)
UPB_GRPC_GENERATED_INCLUDE = (os.path.join('src', 'core', 'ext', 'upb-generated'),)
ZLIB_INCLUDE = (os.path.join('third_party', 'zlib'),)
README = os.path.join(PYTHON_STEM, 'README.rst')
# Ensure we're in the proper directory whether or not we're being used by pip.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.abspath(PYTHON_STEM))
# Break import-style to ensure we can actually find our in-repo dependencies.
import _parallel_compile_patch
import _spawn_patch
import commands
import grpc_core_dependencies
import grpc_version
_parallel_compile_patch.monkeypatch_compile_maybe()
_spawn_patch.monkeypatch_spawn()
LICENSE = 'Apache License 2.0'
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'License :: OSI Approved :: Apache Software License',
]
# Environment variable to determine whether or not the Cython extension should
# *use* Cython or use the generated C files. Note that this requires the C files
# to have been generated by building first *with* Cython support. Even if this
# is set to false, if the script detects that the generated `.c` file isn't
# present, then it will still attempt to use Cython.
BUILD_WITH_CYTHON = os.environ.get('GRPC_PYTHON_BUILD_WITH_CYTHON', False)
# Export this variable to use the system installation of openssl. You need to
# have the header files installed (in /usr/include/openssl) and during
# runtime, the shared library must be installed
BUILD_WITH_SYSTEM_OPENSSL = os.environ.get('GRPC_PYTHON_BUILD_SYSTEM_OPENSSL',
False)
# Export this variable to use the system installation of zlib. You need to
# have the header files installed (in /usr/include/) and during
# runtime, the shared library must be installed
BUILD_WITH_SYSTEM_ZLIB = os.environ.get('GRPC_PYTHON_BUILD_SYSTEM_ZLIB',
False)
# Export this variable to use the system installation of cares. You need to
# have the header files installed (in /usr/include/) and during
# runtime, the shared library must be installed
BUILD_WITH_SYSTEM_CARES = os.environ.get('GRPC_PYTHON_BUILD_SYSTEM_CARES',
False)
# For local development use only: This skips building gRPC Core and its
# dependencies, including protobuf and boringssl. This allows "incremental"
# compilation by first building gRPC Core using make, then building only the
# Python/Cython layers here.
#
# Note that this requires libboringssl.a in the libs/{dbg,opt}/ directory, which
# may require configuring make to not use the system openssl implementation:
#
# make HAS_SYSTEM_OPENSSL_ALPN=0
#
# TODO(ericgribkoff) Respect the BUILD_WITH_SYSTEM_* flags alongside this option
USE_PREBUILT_GRPC_CORE = os.environ.get(
'GRPC_PYTHON_USE_PREBUILT_GRPC_CORE', False)
# If this environmental variable is set, GRPC will not try to be compatible with
# libc versions old than the one it was compiled against.
DISABLE_LIBC_COMPATIBILITY = os.environ.get('GRPC_PYTHON_DISABLE_LIBC_COMPATIBILITY', False)
# Environment variable to determine whether or not to enable coverage analysis
# in Cython modules.
ENABLE_CYTHON_TRACING = os.environ.get(
'GRPC_PYTHON_ENABLE_CYTHON_TRACING', False)
# Environment variable specifying whether or not there's interest in setting up
# documentation building.
ENABLE_DOCUMENTATION_BUILD = os.environ.get(
'GRPC_PYTHON_ENABLE_DOCUMENTATION_BUILD', False)
def check_linker_need_libatomic():
"""Test if linker on system needs libatomic."""
code_test = (b'#include <atomic>\n' +
b'int main() { return std::atomic<int64_t>{}; }')
cc_test = subprocess.Popen(['cc', '-x', 'c++', '-std=c++11', '-'],
stdin=PIPE,
stdout=PIPE,
stderr=PIPE)
cc_test.communicate(input=code_test)
return cc_test.returncode != 0
# There are some situations (like on Windows) where CC, CFLAGS, and LDFLAGS are
# entirely ignored/dropped/forgotten by distutils and its Cygwin/MinGW support.
# We use these environment variables to thus get around that without locking
# ourselves in w.r.t. the multitude of operating systems this ought to build on.
# We can also use these variables as a way to inject environment-specific
# compiler/linker flags. We assume GCC-like compilers and/or MinGW as a
# reasonable default.
EXTRA_ENV_COMPILE_ARGS = os.environ.get('GRPC_PYTHON_CFLAGS', None)
EXTRA_ENV_LINK_ARGS = os.environ.get('GRPC_PYTHON_LDFLAGS', None)
if EXTRA_ENV_COMPILE_ARGS is None:
EXTRA_ENV_COMPILE_ARGS = ' -std=c++11'
if 'win32' in sys.platform:
if sys.version_info < (3, 5):
EXTRA_ENV_COMPILE_ARGS += ' -D_hypot=hypot'
# We use define flags here and don't directly add to DEFINE_MACROS below to
# ensure that the expert user/builder has a way of turning it off (via the
# envvars) without adding yet more GRPC-specific envvars.
# See https://sourceforge.net/p/mingw-w64/bugs/363/
if '32' in platform.architecture()[0]:
EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime32 -D_timeb=__timeb32 -D_ftime_s=_ftime32_s'
else:
EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime64 -D_timeb=__timeb64'
else:
# We need to statically link the C++ Runtime, only the C runtime is
# available dynamically
EXTRA_ENV_COMPILE_ARGS += ' /MT'
elif "linux" in sys.platform:
EXTRA_ENV_COMPILE_ARGS += ' -std=gnu99 -fvisibility=hidden -fno-wrapv -fno-exceptions'
elif "darwin" in sys.platform:
EXTRA_ENV_COMPILE_ARGS += ' -stdlib=libc++ -fvisibility=hidden -fno-wrapv -fno-exceptions'
if EXTRA_ENV_LINK_ARGS is None:
EXTRA_ENV_LINK_ARGS = ''
if "linux" in sys.platform or "darwin"
|
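All of the build switches above are plain environment variables read at install time, so they are normally exported before invoking pip on a source checkout. A small sketch of driving such a build from Python; the flag shown is one of those documented above, and any of the others can be set the same way:

import os
import subprocess
import sys

env = dict(os.environ)
env["GRPC_PYTHON_BUILD_WITH_CYTHON"] = "1"  # rebuild the Cython extension instead of using pre-generated C files
subprocess.check_call([sys.executable, "-m", "pip", "install", "."], env=env)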
Net-ng/kansha
|
kansha/card_addons/due_date/view.py
|
Python
|
bsd-3-clause
| 2,112
| 0.001894
|
#--
# Copyright (c) 2012-2014 Net-ng.
# All rights reserved.
#
# This software is licensed under the BSD License, as described in
# the file LICENSE.txt, which you should have received as part of
# this distribution.
#--
import peak
import datetime
from nagare import presentation, security, ajax, i18n
from nagare.i18n import _, format_date
from .comp import DueDate
@peak.rules.when(ajax.py2js, (datetime.date,))
def py2js(value, h):
"""Generic method to transcode a Datetime
In:
- ``value`` -- the datetime object
- ``h`` -- the current renderer
Return:
- transcoded javascript
"""
dt = i18n.to_timezone(value)
return 'new Date("%s", "%s", "%s")' % (
dt.year, dt.month - 1, dt.day)
@peak.rules.when(ajax.py2js, (DueDate,))
def py2js(value, h):
if value.due_date:
return ajax.py2js(value.due_date, h)
return None
@presentation.render_for(DueDate)
def render_DueDate(self, h, comp, model):
return h.root
@presentation.render_for(DueDate, model='badge')
def render_DueDate_badge(self, h, *args):
"""Gallery badge for the card"""
if self.due_date:
with h.span(class_='badge'):
h << h.span(h.i(class_='icon-alarm'), ' ', self.get_days_count(), class_='label due-date ' + self.get_class(), title=format_date(self.due_date, 'full'))
return h.root
@presentation.render_for(DueDate, model='action')
def render_DueDate_button(self, h, comp, *args):
if security.has_permissions('due_date', self.card):
self._init_calendar()
id_ = h.generate_id()
if self.due_date:
classes = ['btn', 'btn-due-date', self.get_class()]
with h.a(class_=u' '.join(classes), id_=id_).action(self.calendar().toggle):
h << h.i(class_='icon-alarm duedate-icon')
h << format_date(self.due_date, 'short')
else:
with h.a(class_='btn', id_=id_).action(self.calendar().toggle):
h << h.i(class_='icon-alarm')
h << _('Due date')
h << self.calendar.on_answer(self.set_value)
return h.root
|
alxgu/ansible
|
lib/ansible/modules/network/f5/bigiq_application_fastl4_udp.py
|
Python
|
gpl-3.0
| 21,905
| 0.00137
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigiq_application_fastl4_udp
short_description: Manages BIG-IQ FastL4 UDP applications
description:
- Manages BIG-IQ applications used for load balancing a UDP-based application
with a FastL4 profile.
version_added: 2.6
options:
name:
description:
- Name of the new application.
type: str
required: True
description:
description:
- Description of the application.
type: str
servers:
description:
- A list of servers that the application is hosted on.
- If you are familiar with other BIG-IP setting, you might also refer to this
list as the list of pool members.
- When creating a new application, at least one server is required.
suboptions:
address:
description:
- The IP address of the server.
type: str
required: True
port:
description:
- The port of the server.
- When creating a new application and specifying a server, if this parameter
is not provided, the default of C(8000) will be used.
type: str
default: 8000
type: list
inbound_virtual:
description:
- Settings to configure the virtual which will receive the inbound connection.
suboptions:
address:
description:
- Specifies destination IP address information to which the virtual server
sends traffic.
- This parameter is required when creating a new application.
type: str
required: True
netmask:
description:
- Specifies the netmask to associate with the given C(destination).
- This parameter is required when creating a new application.
type: str
required: True
port:
description:
- The port that the virtual listens for connections on.
- When creating a new application, if this parameter is not specified, the
default value of C(53) will be used.
type: str
default: 53
service_environment:
description:
- Specifies the name of service environment that the application will be
deployed to.
- When creating a new application, this parameter is required.
- The service environment type will be discovered by this module automatically.
Therefore, it is crucial that you maintain unique names for items in the
different service environment types.
- SSGs are not supported for this type of application.
type: str
add_analytics:
description:
- Collects statistics of the BIG-IP that the application is deployed to.
- This parameter is only relevant when specifying a C(service_environment) which
is a BIG-IP; not an SSG.
type: bool
default: no
state:
description:
- The state of the resource on the system.
- When C(present), guarantees that the resource exists with the provided attributes.
- When C(absent), removes the resource from the system.
type: str
choices:
- absent
- present
default: present
wait:
description:
- If the module should wait for the application to be created, deleted or updated.
type: bool
default: yes
extends_documentation_fragment: f5
notes:
- This module does not support updating of your application (whether deployed or not).
If you need to update the application, the recommended practice is to remove and
re-create.
- This module will not work on BIGIQ version 6.1.x or greater.
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Load balance a UDP-based application with a FastL4 profile
bigiq_application_fastl4_udp:
name: my-app
description: My description
service_environment: my-bigip-device
servers:
- address: 1.2.3.4
port: 8080
- address: 5.6.7.8
port: 8080
inbound_virtual:
name: foo
address: 2.2.2.2
netmask: 255.255.255.255
port: 53
provider:
password: secret
server: lb.mydomain.com
user: admin
state: present
delegate_to: localhost
'''
RETURN = r'''
description:
description: The new description of the application of the resource.
returned: changed
type: str
sample: My application
service_environment:
description: The environment which the service was deployed to.
returned: changed
type: str
sample: my-ssg1
inbound_virtual_destination:
description: The destination of the virtual that was created.
returned: changed
type: str
sample: 6.7.8.9
inbound_virtual_netmask:
description: The network mask of the provided inbound destination.
returned: changed
type: str
sample: 255.255.255.0
inbound_virtual_port:
description: The port the inbound virtual address listens on.
returned: changed
type: int
sample: 80
servers:
description: List of servers, and their ports, that make up the application.
type: complex
returned: changed
contains:
address:
description: The IP address of the server.
returned: changed
type: str
sample: 2.3.4.5
port:
description: The port that the server listens on.
returned: changed
type: int
sample: 8080
sample: hash/dictionary of values
'''
import time
from distutils.version import LooseVersion
from ansible.module_utils.basic import AnsibleModule
try:
from library.module_utils.network.f5.bigiq import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.ipaddress import is_valid_ip
from library.module_utils.network.f5.icontrol import bigiq_version
except ImportError:
from ansible.module_utils.network.f5.bigiq import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.ipaddress import is_valid_ip
from ansible.module_utils.network.f5.icontrol import bigiq_version
class Parameters(AnsibleF5Parameters):
api_map = {
'templateReference': 'template_reference',
'subPath': 'sub_path',
'configSetName': 'config_set_name',
'defaultDeviceReference': 'default_device_reference',
'addAnalytics': 'add_analytics'
}
api_attributes = [
'resources', 'description', 'configSetName', 'subPath', 'templateReference',
'defaultDeviceReference', 'addAnalytics'
]
returnables = [
'resources', 'description', 'config_set_name', 'sub_path', 'template_reference',
'default_device_reference', 'servers', 'inbound_virtual', 'add_analytics'
]
updatables = [
'resources', 'description', 'config_set_name', 'sub_path', 'template_reference',
'default_device_reference', 'servers', 'add_analytics'
]
class ApiParameters(Parameters):
pass
class ModuleParameters(Parameters):
@property
def http_profile(self):
return "profile_http"
@property
def config_set_name(self):
return self.name
@property
def sub_path(self):
return self.name
@property
def template_reference(self):
filter = "name+eq+'Default-f5-FastL4-UDP-lb-template'"
uri = "https://{0}:{1}/mgmt/cm/global/templates/?$filter={2}&$top=1&$select=selfLink".format(
self.client.provider['server'],
self.client.provider['server_port'],
filter
)
resp = self.client.api.get(uri)
try:
|
jesseops/i3-lemonbar
|
i3_lemonbar.py
|
Python
|
mit
| 370
| 0.002703
|
#!/usr/bin/env python3
import os
from subprocess import check_output  # check_output is used below but not otherwise imported in this file
from i3_lemonbar_conf import *
cwd = os.path.dirname(os.path.abspath(__file__))
lemon = "lemonbar -p -f '%s' -f '%s' -g '%s' -B '%s' -F '%s'" % (font, iconfont, geometry, color_back, color_fore)
feed = "python3 -c 'import i3_lemonbar_feeder; i3_lemonbar_feeder.run()'"
check_output('cd %s; %s | %s' % (cwd, feed, lemon), shell=True)
| |
passren/Roxd
|
member/models.py
|
Python
|
gpl-2.0
| 727
| 0.005502
|
from __future__ import unicode_literals
from django.db import transaction
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class UserProfile(models.Model):
user = models.OneToOneField(User, unique=True, verbose_name=('user'))
phone = models.CharField(max_length=20)
USER_SOURCE = (
('LO', 'Local'),
('WB', 'Weibo'),
('QQ', 'QQ'),
)
source = models.CharField(max_length=2, choices=USER_SOURCE, default='LO')
created_date = models.DateTimeField(auto_now_add=True)
last_updated_date = models.DateTimeField(auto_now=True)
@transaction.atomic
def createUser(self):
self.user.save()
self.save()
|
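Because createUser saves the linked User and the profile in a single atomic transaction, callers build both objects first and then call it once. A hypothetical usage sketch (usernames and field values are illustrative):

from django.contrib.auth.models import User

user = User(username="alice")
user.set_password("s3cret")
profile = UserProfile(user=user, phone="555-0100", source="WB")
profile.createUser()  # saves the user and the profile together, or neither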
Krozark/django-slider
|
slider/utils.py
|
Python
|
bsd-2-clause
| 3,032
| 0.005937
|
import os, unicodedata
from django.utils.translation import ugettext_lazy as _
from django.core.files.storage import FileSystemStorage
from django.db.models.fields.files import FileField
from django.core.files.storage import default_storage
from django.conf import settings
from django.utils.safestring import mark_safe
class AdminThumbnailMixin(object):
thumbnail_options = {'size': (60, 60)}
thumbnail_image_field_name = 'image'
thumbnail_alt_field_name = None
def _thumb(self, image, options={'size': (60, 60)}, alt=None):
from easy_thumbnails.files import get_thumbnailer
media = getattr(settings, 'THUMBNAIL_MEDIA_URL', settings.MEDIA_URL)
attrs = []
try:
src = "%s%s" % (media, get_thumbnailer(image).get_thumbnail(options))
except:
src = ""
if alt is not None: attrs.append('alt="%s"' % alt)
return mark_safe('<img src="%s" %s />' % (src, " ".join(attrs)))
def thumbnail(self, obj):
kwargs = {'options': self.thumbnail_options}
if self.thumbnail_alt_field_name:
kwargs['alt'] = getattr(obj, self.thumbnail_alt_field_name)
return self._thumb(getattr(obj, self.thumbnail_image_field_name), **kwargs)
thumbnail.allow_tags = True
thumbnail.short_description = _('Thumbnail')
def file_cleanup(sender, **kwargs):
"""
File cleanup callback used to emulate the old delete
behavior using signals. Initially django deleted linked
files when an object containing a File/ImageField was deleted.
Usage:
>>> from django.db.models.signals import post_delete
>>> post_delete.connect(file_cleanup, sender=MyModel, dispatch_uid="mymodel.file_cleanup")
"""
for fieldname in sender._meta.get_all_field_names():
try:
field = sender._meta.get_field(fieldname)
except:
field = None
if field and isinstance(field, FileField):
inst = kwargs['instance']
f = getattr(inst, fieldname)
m = inst.__class__._default_manager
if hasattr(f, 'path') and os.path.exists(f.path) \
and not m.filter(**{'%s__exact' % fieldname: getattr(inst, fieldname)})\
.exclude(pk=inst._get_pk_val()):
try:
#os.remove(f.path)
default_storage.delete(f.path)
except:
pass
class ASCIISafeFileSystemStorage(FileSystemStorage):
"""
Same as FileSystemStorage, but converts unicode characters
in file name to ASCII characters before saving the file. This
is mostly useful for the non-English world.
Usage (settings.py):
>>> DEFAULT_FILE_STORAGE = 'webcore.utils.storage.ASCIISafeFileSystemStorage'
"""
def get_valid_name(self, name):
name = unicodedata.normalize('NFKD', unicode(name.replace(' ', '_'))).encode('ascii', 'ignore')
return super(ASCIISafeFileSystemStorage, self).get_valid_name(name)
|
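AdminThumbnailMixin is intended to be mixed into a ModelAdmin so the thumbnail column can appear in the change list. A hypothetical registration sketch (the Slide model and its fields are illustrative, not part of this file):

from django.contrib import admin
from slider.models import Slide  # illustrative model

@admin.register(Slide)
class SlideAdmin(AdminThumbnailMixin, admin.ModelAdmin):
    thumbnail_image_field_name = 'image'
    list_display = ('title', 'thumbnail')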
nim65s/MarkDownBlog
|
dmdb/sitemaps.py
|
Python
|
gpl-3.0
| 320
| 0
|
from django.contrib.sitemaps import Sitemap
from .models import BlogEntry
class BlogEntrySitemap(Sitemap):
changefreq = "yearly"
priority = 0.6
protocol = 'https'
def items(self):
return BlogEntry.on_site.filter(is_visible=True)
def lastmod(self, item):
return item.modification
| |
h4wkmoon/shinken
|
shinken/objects/contact.py
|
Python
|
agpl-3.0
| 13,143
| 0.004337
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
# Gregory Starck, [email protected]
# Hartmut Goebel, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
from item import Item, Items
from shinken.util import strip_and_uniq
from shinken.property import BoolProp, IntegerProp, StringProp
from shinken.log import logger, naglog_result
_special_properties = ('service_notification_commands', 'host_notification_commands',
'service_notification_period', 'host_notification_period',
'service_notification_options', 'host_notification_options',
'host_notification_commands', 'contact_name')
_simple_way_parameters = ('service_notification_period', 'host_notification_period',
'service_notification_options', 'host_notification_options',
'service_notification_commands', 'host_notification_commands',
'min_business_impact')
class Contact(Item):
id = 1 # zero is always special in database, so we do not take risk here
my_type = 'contact'
properties = Item.properties.copy()
properties.update({
'contact_name': StringProp(fill_brok=['full_status']),
'alias': StringProp(default='none', fill_brok=['full_status']),
'contactgroups': StringProp(default='', fill_brok=['full_status']),
'host_notifications_enabled': BoolProp(default='1', fill_brok=['full_status']),
'service_notifications_enabled': BoolProp(default='1', fill_brok=['full_status']),
'host_notification_period': StringProp(fill_brok=['full_status']),
'service_notification_period': StringProp(fill_brok=['full_status']),
'host_notification_options': StringProp(fill_brok=['full_status']),
'service_notification_options': StringProp(fill_brok=['full_status']),
'host_notification_commands': StringProp(fill_brok=['full_status']),
'service_notification_commands': StringProp(fill_brok=['full_status']),
'min_business_impact': IntegerProp(default='0', fill_brok=['full_status']),
'email': StringProp(default='none', fill_brok=['full_status']),
'pager': StringProp(default='none', fill_brok=['full_status']),
'address1': StringProp(default='none', fill_brok=['full_status']),
'address2': StringProp(default='none', fill_brok=['full_status']),
'address3': StringProp(default='none', fill_brok=['full_status']),
'address4': StringProp(default='none', fill_brok=['full_status']),
'address5': StringProp(default='none', fill_brok=['full_status']),
'address6': StringProp(default='none', fill_brok=['full_status']),
'can_submit_commands': BoolProp(default='0', fill_brok=['full_status']),
'is_admin': BoolProp(default='0', fill_brok=['full_status']),
'retain_status_information': BoolProp(default='1', fill_brok=['full_status']),
'notificationways': StringProp(default='', fill_brok=['full_status']),
'password': StringProp(default='NOPASSWORDSET', fill_brok=['full_status']),
})
running_properties = Item.running_properties.copy()
running_properties.update({
'modified_attributes': IntegerProp(default=0L, fill_brok=['full_status'], retention=True),
'downtimes': StringProp(default=[], fill_brok=['full_status'], retention=True),
})
# This tab is used to transform old parameters name into new ones
# so from Nagios2 format, to Nagios3 ones.
# Or Shinken deprecated names like criticity
old_properties = {
'min_criticity': 'min_business_impact',
}
macros = {
'CONTACTNAME': 'contact_name',
'CONTACTALIAS': 'alias',
'CONTACTEMAIL': 'email',
'CONTACTPAGER': 'pager',
'CONTACTADDRESS1': 'address1',
'CONTACTADDRESS2': 'address2',
'CONTACTADDRESS3': 'address3',
'CONTACTADDRESS4': 'address4',
'CONTACTADDRESS5': 'address5',
'CONTACTADDRESS6': 'address6',
'CONTACTGROUPNAME': 'get_groupname',
'CONTACTGROUPNAMES': 'get_groupnames'
}
# For debugging purpose only (nice name)
def get_name(self):
try:
return self.contact_name
except AttributeError:
return 'UnnamedContact'
# Search for notification_options with state and if t is
# in service_notification_period
def want_service_notification(self, t, state, type, business_impact, cmd=None):
if not self.service_notifications_enabled:
return False
# If we are in downtime, we do not want a notification
for dt in self.downtimes:
if dt.is_in_effect:
return False
# Now the rest is for sub notificationways. If one is OK, we are ok
# We will filter in another phase
for nw in self.notificationways:
nw_b = nw.want_service_notification(t, state, type, business_impact, cmd)
if nw_b:
return True
# Oh... no one is ok for it? so no, sorry
return False
# Search for notification_options with state and if t is in
# host_notification_period
def want_host_notification(self, t, state, type, business_impact, cmd=None):
if not self.host_notifications_enabled:
return False
# If we are in downtime, we do not want a notification
for dt in self.downtimes:
if dt.is_in_effect:
return False
# Now it's all for sub notificationways. If one is OK, we are OK
# We will filter in another phase
for nw in self.notificationways:
nw_b = nw.want_host_notification(t, state, type, business_impact, cmd)
if nw_b:
return True
# Oh, nobody..so NO :)
return False
# Call to get our commands to launch a Notification
def get_notification_commands(self, type):
r = []
# service_notification_commands for service
notif_commands_prop = type + '_notification_commands'
for nw in self.notificationways:
r.extend(getattr(nw, notif_commands_prop))
return r
# Check if the required properties are set:
# contacts OR contactgroups is needed
def is_correct(self):
state = True
cls = self.__class__
# All of the above are checks in the notificationways part
for prop, entry in cls.properties.items():
if prop not in _special_properties:
if not hasattr(self, prop) and entry.required:
logger.error("[contact::%s] %s property not set", self.get_name(), prop)
state = False # Bad boy...
# There is a case where there is no nw: when there is not special_prop defined
# at all!!
if self.notificationways == []:
for p in _special_properties:
if not hasattr(self, p):
logger.error("[contact::%s] %s property is missing", self.get_name(), p)
state = False
if hasattr(self, 'contact_name'):
for c in cls.illegal_object_name_chars:
if c in self.contact_name:
logger.error("[contact::%s] %s character not allowed in contact_name", self.get_name(),
|
huggingface/transformers
|
utils/test_module/custom_modeling.py
|
Python
|
apache-2.0
| 772
| 0
|
import torch
from transformers import PreTrainedModel
from .custom_configuration import CustomConfig, NoSuperInitConfig
class CustomModel(PreTrainedModel):
config_class = CustomConfig
def __init__(self, config):
super().__init__(config)
self.linear = torch.nn.Linear(config.hidden_size, config.hidden_size)
def forward(self, x):
return self.linear(x)
def _init_weights(self, module):
pass
class NoSuperInitModel(PreTrainedModel):
config_class = NoSuperInitConfig
def __init__(self, config):
super().__init__(config)
self.linear = torch.nn.Linear(config.attribute, config.attribute)
def forward(self, x):
return self.linear(x)
def _init_weights(self, module):
pass
|
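A brief usage sketch for the custom test model above, assuming CustomConfig accepts a hidden_size argument (its constructor lives in custom_configuration, which is not shown here):

import torch

config = CustomConfig(hidden_size=32)  # assumed constructor argument
model = CustomModel(config)
out = model(torch.randn(2, 32))        # one linear layer, so the output shape is (2, 32)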
knoppo/pi3bar
|
pi3bar/tests/plugins/test_uptime.py
|
Python
|
mit
| 1,771
| 0
|
import unittest
try:
from unittest import mock
except ImportError:
import mock
from pi3bar.plugins.uptime import get_uptime_seconds, uptime_format, Uptime
class GetUptimeSecondsTestCase(unittest.TestCase):
def test(self):
m = mock.mock_open(read_data='5')
m.return_value.readline.return_value = '5' # py33
with mock.patch('pi3bar.plugins.uptime.open', m, create=True):
seconds = get_uptime_seconds()
self.assertEqual(5, seconds)
class UptimeFormatTestCase(unittest.TestCase):
def test_seconds(self):
s = uptime_format(5)
self.assertEqual('0:00:00:05', s)
def test_minutes(self):
s = uptime_format(3540)
self.assertEqual('0:00:59:00', s)
def test_hours(self):
s = uptime_format(49020)
self.assertEqual('0:13:37:00', s)
def test_days(self):
s = uptime_format(135420)
self.assertEqual('1:13:37:00', s)
def test_format_days_applied_to_hours(self):
s = uptime_format(135420, '%H:%M:%S')
self.assertEqual('37:37:00', s)
def test_format_hours_applied_to_minutes(self):
s = uptime_format(49020, '%M:%S')
self.assertEqual('817:00', s)
class UptimeTestCase(unittest.TestCase):
def test(self):
plugin = Uptime()
self.assertEqual('%d days %H:%M:%S up', plugin.full_format)
self.assertEqual('%dd %H:%M up', plugin.short_format)
@mock.patch('pi3bar.plugins.uptime.get_uptime_seconds')
def test_cycle(self, mock_get_uptime_seconds):
plugin = Uptime()
mock_get_uptime_seconds.return_value = 49020
plugin.cycle()
self.assertEqual('0 days 13:37:00 up', plugin.full_text)
self.assertEqual('0d 13:37 up', plugin.short_text)
|
mattoufoutu/ToolBoxAssistant
|
ToolBoxAssistant/__init__.py
|
Python
|
gpl-3.0
| 5,458
| 0.001649
|
# -*- coding: utf-8 -*-
import os
import re
try:
import simplejson as json
except ImportError:
import json
from ToolBoxAssistant.app import AppFactory
from ToolBoxAssistant.helpers import get_svn_url, readfile, find_versionned_folders, yes_no, Color
from ToolBoxAssistant.log import logger
VERSION = '0.1'
class ToolBoxAssistant(object):
"""
The main class
"""
config_basedir = os.path.join(os.path.expanduser('~'), '.tba')
tba_required_fields = ['path', 'apps']
app_required_fields = ['type', 'url', 'path']
vcs_repo_finders = {
'git': (
'.git/config',
re.compile(r'\[remote "origin"\]\s+url = (.*)$', re.M),
lambda regex, cfg: regex.search(readfile(cfg)).group(1)
),
'hg': (
'.hg/hgrc',
re.compile(r'default = (.*)$'),
lambda regex, cfg: regex.search(readfile(cfg)).group(1)
),
'svn': (
'',
re.compile(r'Repository Root: (.*)$', re.M),
get_svn_url
)
}
def __init__(self):
self.config_dir = None
def setup_config_dir(self, path):
self.config_dir = os.path.join(
self.config_basedir,
path.replace(os.path.sep, '_').strip('_')
)
if not os.path.exists(self.config_dir):
logger.debug('creating configuration folder: %s' % Color.GREEN+self.config_dir+Color.END)
os.makedirs(self.config_dir)
def load_specs(self, fpath):
"""
Loads a specifications file and checks for missing fields.
"""
with open(fpath) as ifile:
logger.debug('loading specfile: %s' % Color.GREEN+fpath+Color.END)
data = json.load(ifile)
for field in self.tba_required_fields:
if field not in data:
logger.error('missing top-level field in specs: %s' % Color.GREEN+field+Color.END)
return None
for app_name in data['apps']:
app_specs = data['apps'][app_name]
for app_field in self.app_required_fields:
if app_field not in app_specs:
logger.error('missing app field in specs: %s' % Color.GREEN+app_field+Color.END)
return None
return data
def do_sync(self, args):
"""
Synchronizes installed application with the specfile.
"""
if (not os.path.exists(args.file)) or (not os.path.isfile(args.file)):
logger.error('file not found: %s' % Color.GREEN+args.file+Color.END)
return
specs = self.load_specs(args.file)
if specs is None:
return
self.setup_config_dir(specs['path'])
rootpath = specs['path']
for app_name in specs['apps']:
app_specs = specs['apps'][app_name]
if not app_specs['path'].startswith(os.path.sep):
app_specs['path'] = os.path.join(rootpath, app_specs['path'])
app = AppFactory.load(self, app_name, app_specs)
app.sync()
if app.is_updated:
app.build()
if args.unlisted:
for _, folder in find_versionned_folders(rootpath):
folder, app_name = os.path.split(folder)
logger.warn('found unlisted application in %s: %s' % (
folder, Color.GREEN+app_name+Color.END
))
def do_genspec(self, args):
"""
Scans current folder for versionned applications and
creates a specfile accordingly.
"""
self.setup_config_dir(args.path)
new_specs = {
'path': args.path,
'apps': {}
}
if args.merge is not None:
new_specs = self.load_specs(args.merge)
apps_specs = new_specs['apps']
new_apps_found = False
for vcs_type, app_folder in find_versionned_folders(args.path):
app_path = app_folder[len(args.path)+1:]
if app_path not in [apps_specs[a]['path'] for a in apps_specs]:
new_apps_found = True
folder, app_name = os.path.split(app_folder)
logger.info('found%s application in %s: %s (%s)' % (
' new' if args.merge is not None else '',
folder, Color.GREEN+app_name+Color.END, vcs_type
))
cfg_file, regex, handler = self.vcs_repo_finders[vcs_type]
cfg_path = os.path.join(app_folder, cfg_file)
app_specs = {
'type': vcs_type,
'url': handler(regex, cfg_path),
'path': app_path,
}
apps_specs[app_name] = app_specs
if new_apps_found:
outfile = args.merge or args.file
if os.path.exists(outfile):
logger.warning('file already exists: %s' % Color.GREEN+outfile+Color.END)
if not yes_no('Overwrite ?'):
logger.error('operation aborted by user')
return
with open(outfile, 'w') as ofile:
json.dump(new_specs, ofile, sort_keys=True, indent=2, separators=(',', ': '))
logger.info('specfile written to %s' % Color.GREEN+outfile+Color.END)
logger.info('you may now add build information to the new specfile')
else:
logger.info('no new application found')
|
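Putting the required fields checked by load_specs together, a minimal specfile has the shape below (the paths and URL are illustrative):

example_specs = {
    "path": "/home/user/toolbox",
    "apps": {
        "mytool": {
            "type": "git",
            "url": "https://example.com/mytool.git",
            "path": "mytool"
        }
    }
}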
fizz-ml/pytorch-aux-reward-rl
|
replay_buffer.py
|
Python
|
mit
| 4,985
| 0.004814
|
import numpy as np
import random
class ReplayBuffer:
""" Buffer for storing values over timesteps.
"""
def __init__(self):
""" Initializes the buffer.
"""
pass
def batch_sample(self, batch_size):
""" Randomly sample a batch of values from the buffer.
"""
raise NotImplementedError
def put(self, *value):
""" Put values into the replay buffer.
"""
raise NotImplementedError
class ExperienceReplay(ReplayBuffer):
"""
Experience Replay stores action, state, reward and terminal signal
for each time step.
"""
def __init__(self, state_size, action_size, capacity):
""" Creates an Experience Replay of certain capacity.
Acts like a circular buffer.
Args:
state_size: The size of the state to be stored.
action_size: The size of the action to be stored.
capacity: The capacity of the experience replay buffer.
"""
self.state_size = state_size
self.action_size = action_size
self.length = 0
self.capacity = capacity
self.actions = np.empty((self.capacity, self.action_size), dtype = np.float16)
self.states = np.empty((self.capacity, self.state_size), dtype = np.float16)
self.rewards = np.empty(self.capacity, dtype = np.float16)
self.dones = np.empty(self.capacity, dtype = np.bool)
self.current_index = 0
self.staged = False
def batch_sample(self, batch_size):
""" Sample a batch of experiences from the replay.
Args:
batch_size: The number of batches to select
Returns:
s_t
a_t
r_t
s_t1
done
"""
if batch_size > self.length-3:
# we might not have enough experience
raise IOError('batch_size out of range')
idxs = []
while len(idxs) < batch_size:
while True:
# keep trying random indices
idx = random.randint(1, self.length - 1)
# don't want to grab current index since it wraps
if not (idx == self.current_index or idx == self.current_index - 1):
idxs.append(idx)
break
s_t = self.states[idxs]
s_t1 = self.states[[(x+1) for x in idxs]]
a_t = self.actions[idxs]
r_t = np.expand_dims(self.rewards[idxs], axis = 1)
done = self.dones[idxs]
'''
j = 0
print(s_t[j], s_t1[j], a_t[j], r_t[j], done[j])
j = 1
print(s_t[j], s_t1[j], a_t[j], r_t[j], done[j])
raw_input("Press Enter to continue...")
'''
return s_t, a_t, r_t, s_t1, done
def _put(self, s_t, a_t, reward, done):
self.actions[self.current_index] = a_t
self.states[self.current_index] = s_t
self.rewards[self.current_index] = reward
self.dones[self.current_index] = done
self._increment_index()
def put_act(self, s_t, a_t):
""" Puts the current state and the action taking into Experience Replay.
Args:
s_t: Current state.
a_t: Action taken at this state.
Raises:
IOError: If trying to overwrite previously staged action and state.
"""
if not self.staged:
self.actions[self.current_index] = a_t
self.states[self.current_index] = s_t
# stage to prevent double staging
self.staged = True
else:
# already staged an action and state
raise IOError('Trying to override previously staged action and state.')
def put_rew(self, reward, done):
""" Completes a staged insertion by adding reward and
terminal signal to Experience Replay
Args:
reward: Reward received in this step.
done: Bool signalling terminal step.
Raises:
IOError: If trying to complete insertion without having staged first.
"""
if(self.staged):
self.rewards[self.current_index] = reward
self.dones[self.current_index] = done
# unstage and increment index
self.staged = False
self._increment_index()
else:
# not yet staged state and action
raise IOError( 'Trying to complete unstaged insertion. Must insert action and state first.')
def unstage(self):
""" Unstages any currently staged insertion
"""
if(self.staged):
# stage to prevent double staging
self.staged = False
self.actions[self.current_index] = None
self.states[self.current_index] = None
def _increment_index(self):
self.current_index = (self.current_index + 1) % self.capacity
self.length = min(self.capacity-1, self.length + 1)
|
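The staged put_act/put_rew protocol above splits one transition across two calls. A hypothetical episode loop using it (state/action sizes and rewards are illustrative):

import numpy as np

er = ExperienceReplay(state_size=4, action_size=2, capacity=1000)
for step in range(200):
    s_t = np.random.rand(4)
    a_t = np.random.rand(2)
    er.put_act(s_t, a_t)                # stage the state and action
    er.put_rew(reward=1.0, done=False)  # complete the staged insertion
s, a, r, s1, done = er.batch_sample(32)  # sample once enough steps are stored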
Fale/ansible
|
lib/ansible/module_utils/urls.py
|
Python
|
gpl-3.0
| 77,490
| 0.00231
|
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Michael DeHaan <[email protected]>, 2012-2013
# Copyright (c), Toshio Kuratomi <[email protected]>, 2015
#
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
#
# The match_hostname function and supporting code is under the terms and
# conditions of the Python Software Foundation License. They were taken from
# the Python3 standard library and adapted for use in Python2. See comments in the
# source for which code precisely is under this License.
#
# PSF License (see licenses/PSF-license.txt or https://opensource.org/licenses/Python-2.0)
'''
The **urls** utils module offers a replacement for the urllib2 python library.
urllib2 is the python stdlib way to retrieve files from the Internet but it
lacks some security features (around verifying SSL certificates) that users
should care about in most situations. Using the functions in this module corrects
deficiencies in the urllib2 module wherever possible.
There are also third-party libraries (for instance, requests) which can be used
to replace urllib2 with a more secure library. However, all third party libraries
require that the library be installed on the managed machine. That is an extra step
for users making use of a module. If possible, avoid third party libraries by using
this code instead.
'''
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import atexit
import base64
import email.mime.multipart
import email.mime.nonmultipart
import email.mime.application
import email.parser
import email.utils
import functools
import mimetypes
import netrc
import os
import platform
import re
import socket
import sys
import tempfile
import traceback
from contextlib import contextmanager
try:
import email.policy
except ImportError:
# Py2
import email.generator
try:
import httplib
except ImportError:
# Python 3
import http.client as httplib
import ansible.module_utils.six.moves.http_cookiejar as cookiejar
import ansible.module_utils.six.moves.urllib.request as urllib_request
import ansible.module_utils.six.moves.urllib.error as urllib_error
from ansible.module_utils.common.collections import Mapping
from ansible.module_utils.six import PY3, string_types
from ansible.module_utils.six.moves import cStringIO
from ansible.module_utils.basic import get_distribution, missing_required_lib
from ansible.module_utils._text import to_bytes, to_native, to_text
try:
# python3
import urllib.request as urllib_request
from urllib.request import AbstractHTTPHandler, BaseHandler
except ImportError:
# python2
import urllib2 as urllib_request
from urllib2 import AbstractHTTPHandler, BaseHandler
urllib_request.HTTPRedirectHandler.http_error_308 = urllib_request.HTTPRedirectHandler.http_error_307
try:
from ansible.module_utils.six.moves.urllib.parse import urlparse, urlunparse
HAS_URLPARSE = True
except Exception:
HAS_URLPARSE = False
try:
import ssl
HAS_SSL = True
except Exception:
HAS_SSL = False
try:
# SNI Handling needs python2.7.9's SSLContext
from ssl import create_default_context, SSLContext
HAS_SSLCONTEXT = True
except ImportError:
HAS_SSLCONTEXT = False
# SNI Handling for python < 2.7.9 with urllib3 support
try:
# urllib3>=1.15
HAS_URLLIB3_SSL_WRAP_SOCKET = False
try:
from urllib3.contrib.pyopenssl import PyOpenSSLContext
except ImportError:
from requests.packages.urllib3.contrib.pyopenssl import PyOpenSSLContext
HAS_URLLIB3_PYOPENSSLCONTEXT = True
except ImportError:
# urllib3<1.15,>=1.6
HAS_URLLIB3_PYOPENSSLCONTEXT = False
try:
try:
from urllib3.contrib.pyopenssl import ssl_wrap_socket
except ImportError:
from requests.packages.urllib3.contrib.pyopenssl import ssl_wrap_socket
HAS_URLLIB3_SSL_WRAP_SOCKET = True
except ImportError:
pass
# Select a protocol that includes all secure tls protocols
# Exclude insecure ssl protocols if possible
if HAS_SSL:
# If we can't find extra tls methods, ssl.PROTOCOL_TLSv1 is sufficient
PROTOCOL = ssl.PROTOCOL_TLSv1
if not HAS_SSLCONTEXT and HAS_SSL:
try:
import ctypes
import ctypes.util
except ImportError:
# python 2.4 (likely rhel5 which doesn't have tls1.1 support in its openssl)
pass
else:
libssl_name = ctypes.util.find_library('ssl')
libssl = ctypes.CDLL(libssl_name)
for method in ('TLSv1_1_method', 'TLSv1_2_method'):
try:
libssl[method]
# Found something - we'll let openssl autonegotiate and hope
# the server has disabled sslv2 and 3. best we can do.
PROTOCOL = ssl.PROTOCOL_SSLv23
break
except AttributeError:
pass
del libssl
# The following makes it easier for us to script updates of the bundled backports.ssl_match_hostname
# The bundled backports.ssl_match_hostname should really be moved into its own file for processing
_BUNDLED_METADATA = {"pypi_name": "backports.ssl_match_hostname", "version": "3.7.0.1"}
LOADED_VERIFY_LOCATIONS = set()
HAS_MATCH_HOSTNAME = True
try:
from ssl import match_hostname, CertificateError
except ImportError:
try:
from backports.ssl_match_hostname import match_hostname, CertificateError
except ImportError:
HAS_MATCH_HOSTNAME = False
HAS_CRYPTOGRAPHY = True
try:
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.exceptions import UnsupportedAlgorithm
except ImportError:
HAS_CRYPTOGRAPHY = False
# Old import for GSSAPI authentication, this is not used in urls.py but kept for backwards compatibility.
try:
import urllib_gssapi
HAS_GSSAPI = True
except ImportError:
HAS_GSSAPI = False
GSSAPI_IMP_ERR = None
try:
import gssapi
class HTTPGSSAPIAuthHandler(BaseHandler):
""" Handles Negotiate/Kerberos support through the gssapi library. """
AUTH_HEADER_PATTERN = re.compile(r'(?:.*)\s*(Negotiate|Kerberos)\s*([^,]*),?', re.I)
handler_order = 480 # Handle before Digest authentication
def __init__(self, username=None, password=None):
self.username = username
self.password = password
self._context = None
def get_auth_value(self, headers):
auth_match = self.AUTH_HEADER_PATTERN.search(headers.get('www-authenticate', ''))
if auth_match:
return auth_match.group(1), base64.b64decode(auth_match.group(2))
def http_error_401(self, req, fp, code, msg, headers):
# If we've already attempted the auth and we've reached this again then there was a failure.
if self._context:
return
parsed = generic_urlparse(urlparse(req.get_full_url()))
auth_header = self.get_auth_value(headers)
if not auth_header:
return
auth_protocol, in_token = auth_header
username = None
if self.username:
username = gssapi.Name(self.username, name_type=gssapi.NameType.user)
if username and self.password:
if not hasattr(gssapi.raw, 'acquire_cred_with_password'):
raise NotImplementedError("Platform GSSAPI library does not support "
"gss_acquire_cred_with_password, cannot acquire GSSAPI credential with "
"explicit username and password.")
b_password = to_bytes(self.password, errors='surrogate_or_strict')
cred = gssapi.raw.acqu
|
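For module code, the usual entry points into this utility are open_url (or fetch_url together with an AnsibleModule). A hypothetical call, assuming the open_url helper defined later in this module (beyond this excerpt):

from ansible.module_utils.urls import open_url

resp = open_url('https://example.com/api', method='GET', validate_certs=True, timeout=10)
body = resp.read()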
Scriptopathe/simso-exp
|
simsoexp/migrations/0006_auto_20150721_1432.py
|
Python
|
bsd-2-clause
| 2,084
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('simsoexp', '0005_schedulingpolicy_class_name'),
]
operations = [
migrations.RemoveField(
model_name='results',
name='metrics',
),
migrations.AddField(
model_name='results',
name='aborted_jobs',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='results',
name='jobs',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='results',
name='migrations',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='results',
name='norm_laxity',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='results',
name='on_schedule',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='results',
name='preemptions',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='results',
name='sys_preempt',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='results',
name='task_migrations',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='results',
name='timers',
field=models.IntegerField(default=0),
preserve_default=False,
),
]
|
tbabej/astropy
|
astropy/table/column.py
|
Python
|
bsd-3-clause
| 41,990
| 0.001143
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from ..extern import six
from ..extern.six.moves import zip
import warnings
import weakref
from copy import deepcopy
import numpy as np
from numpy import ma
from ..units import Unit, Quantity
from ..utils.compat import NUMPY_LT_1_8
from ..utils.console import color_print
from ..utils.metadata import MetaData
from ..utils.data_info import BaseColumnInfo, dtype_info_name
from ..extern.six.moves import range
from . import groups
from . import pprint
from .np_utils import fix_column_name
# These "shims" provide __getitem__ implementations for Column and MaskedColumn
from ._column_mixins import _ColumnGetitemShim, _MaskedColumnGetitemShim
# Create a generic TableFormatter object for use by bare columns with no
# parent table.
FORMATTER = pprint.TableFormatter()
INTEGER_TYPES = (int, long, np.integer) if six.PY2 else (int, np.integer)
class StringTruncateWarning(UserWarning):
"""
    Warning class for when a string column is assigned a value
that gets truncated because the base (numpy) string length
is too short.
    This does not inherit from AstropyWarning because we want to use
stacklevel=2 to show the user where the issue occurred in their code.
"""
pass
def _auto_names(n_cols):
from . import conf
return [str(conf.auto_colname).format(i) for i in range(n_cols)]
# list of one and two-dimensional comparison functions, which sometimes return
# a Column class and sometimes a plain array. Used in __array_wrap__ to ensure
# they only return plain (masked) arrays (see #1446 and #1685)
_comparison_functions = set(
[np.greater, np.greater_equal, np.less, np.less_equal,
np.not_equal, np.equal,
np.isfinite, np.isinf, np.isnan, np.sign, np.signbit])
def col_copy(col, copy_indices=True):
"""
This is a mixin-safe version of Column.copy() (with copy_data=True).
"""
if isinstance(col, BaseColumn):
return col.copy()
# The new column should have None for the parent_table ref. If the
# original parent_table weakref there at the point of copying then it
# generates an infinite recursion. Instead temporarily remove the weakref
# on the original column and restore after the copy in an exception-safe
# manner.
parent_table = col.info.parent_table
indices = col.info.indices
col.info.parent_table = None
col.info.indices = []
try:
newcol = col.copy() if hasattr(col, 'copy') else deepcopy(col)
newcol.info = col.info
newcol.info.indices = deepcopy(indices or []) if copy_indices else []
for index in newcol.info.indices:
index.replace_col(col, newcol)
finally:
col.info.parent_table = parent_table
col.info.indices = indices
return newcol
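# Illustrative sketch (not part of the astropy source): col_copy is typically
# called on a bare Column; the info attributes survive the copy, while the
# parent_table weakref handling above avoids the infinite recursion, e.g.:
#     from astropy.table import Column
#     c = Column([1, 2, 3], name='a', unit='m')
#     c2 = col_copy(c)
#     assert c2.info.name == 'a'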
class FalseArray(np.ndarray):
def __new__(cls, shape):
obj = np.zeros(shape, dtype=np.bool).view(cls)
return obj
def __setitem__(self, item, val):
val = np.asarray(val)
if np.any(val):
raise ValueError('Cannot set any element of {0} class to True'
.format(self.__class__.__name__))
if six.PY2: # avoid falling back to ndarray.__setslice__
def __setslice__(self, start, stop, val):
self.__setitem__(slice(start, stop), val)
class ColumnInfo(BaseColumnInfo):
attrs_from_parent = BaseColumnInfo.attr_names
_supports_indexing = True
class BaseColumn(_ColumnGetitemShim, np.ndarray):
meta = MetaData()
def __new__(cls, data=None, name=None,
dtype=None, shape=(), length=0,
description=None, unit=None, format=None, meta=None,
copy=False, copy_indices=True):
if data is None:
dtype = (np.dtype(dtype).str, shape)
self_data = np.zeros(length, dtype=dtype)
elif isinstance(data, BaseColumn) and hasattr(data, '_name'):
# When unpickling a MaskedColumn, ``data`` will be a bare
# BaseColumn with none of the expected attributes. In this case
# do NOT execute this block which initializes from ``data``
# attributes.
self_data = np.array(data.data, dtype=dtype, copy=copy)
if description is None:
description = data.description
if unit is None:
unit = unit or data.unit
if format is None:
format = data.format
if meta is None:
meta = deepcopy(data.meta)
if name is None:
name = data.name
elif isinstance(data, Quantity):
if unit is None:
self_data = np.array(data, dtype=dtype, copy=copy)
unit = data.unit
else:
self_data = np.array(data.to(unit), dtype=dtype, copy=copy)
if description is None:
description = data.info.description
if format is None:
format = data.info.format
if meta is None:
meta = deepcopy(data.info.meta)
else:
self_data = np.array(data, dtype=dtype, copy=copy)
self = self_data.view(cls)
self._name = fix_column_name(name)
self.unit = unit
self.format = format
self.description = description
self.meta = meta
self._parent_table = None
self.indices = deepcopy(getattr(data, 'indices', [])) if \
copy_indices else []
for index in self.indices:
index.replace_col(data, self)
return self
@property
def data(self):
return self.view(np.ndarray)
@property
def parent_table(self):
if self._parent_table is None:
return None
else:
return self._parent_table()
@parent_table.setter
def parent_table(self, table):
if table is None:
self._parent_table = None
else:
self._parent_table = weakref.ref(table)
info = ColumnInfo()
def copy(self, order='C', data=None, copy_data=True):
"""
Return a copy of the current instance.
If ``data`` is supplied then a view (reference) of ``data`` is used,
and ``copy_data`` is ignored.
Parameters
----------
order : {'C', 'F', 'A', 'K'}, optional
Controls the memory layout of the copy. 'C' means C-order,
'F' means F-order, 'A' means 'F' if ``a`` is Fortran contiguous,
'C' otherwise. 'K' means match the layout of ``a`` as closely
as possible. (Note that this function and :func:numpy.copy are very
similar, but have different default values for their order=
arguments.) Default is 'C'.
data : array, optional
If supplied then use a view of ``data`` instead of the instance
data. This allows copying the instance attributes and meta.
copy_data : bool, optional
Make a copy of the internal numpy array instead of using a
reference. Default is True.
Returns
-------
col : Column or MaskedColumn
Copy of the current column (same type as original)
"""
if data is None:
data = self.data
if copy_data:
data = data.copy(order)
out = data.view(self.__class__)
out.__array_finalize__(self)
# for MaskedColumn, MaskedArray.__array_finalize__ also copies mask
# from self, which is not the idea here, so undo
if isinstance(self, MaskedColumn):
out._mask = data._mask
self._copy_groups(out)
return out
def __setstate__(self, state):
"""
Restore the internal state of the Column/MaskedColumn for pickling
purposes. This requires that the last element of ``state`` is a
5-tuple that has Column-specific state values.
"""
# Get the Column attributes
names = ('_name
|
daniellawrence/pyspeccheck
|
speccheck/port.py
|
Python
|
mit
| 2,928
| 0
|
#!/usr/bin/env python
from .util import Spec
class Port(Spec):
STATES = [
"listening", "closed", "open",
"bound_to",
"tcp", "tcp6", "udp"
]
def __init__(self, portnumber):
self.portnumber = portnumber
        self.state = {
'state': 'closed',
'bound': False,
'uid': None,
'inode': None,
'proto': None,
}
self.get_state()
#
self.WIN = "Port %s is %%s" % self.portnumber
def get_state(self):
import os
for line in os.popen("netstat -tnle").readlines():
line = line.strip().split()
if len(line) != 8:
continue
(proto, _, _, local, foreign, state, uid, inode) = line
if proto == 'tcp':
(bound, port) = local.split(':')
            if proto == 'tcp6':
                bound = local.rsplit(':', 1)[0]
                port = local.split(':::')[-1]
port = int(port)
if port == self.portnumber:
self.state = {
'state': 'listening',
'bound': bound,
'uid': uid,
'inode': inode,
'proto': proto,
}
def _make_sure(self, x, y):
if x == y:
return True
else:
return False
def sb_listening(self, *args):
if self._make_sure(self.state['state'], "listening"):
return True, "Port %s is listening" % self.portnumber
return False, "Port %s is current %s not listening" % (
            self.portnumber,
self.state['state']
)
def sb_closed(self, *args):
if self._make_sure(self.state['state'], "closed"):
return True, "Port %s is closed" % self.portnumber
return False, "Port %s is current %s not closed" % (
self.portnumber, self.state['state']
)
def sb_tcp(self, *args):
|
if self._make_sure(self.state['proto'], "tcp"):
return True
return "Port %s is using protocol %s not TCP" % (
self.portnumber, self.state['proto']
)
def sb_udp(self, *args):
if self._make_sure(self.state['proto'], "udp"):
return True
return "Port %s is using protocol %s not udp" % (
self.portnumber, self.state['proto']
)
def sb_tcp6(self, *args):
if self._make_sure(self.state['proto'], "tcp6"):
return True
return "Port %s is using protocol %s not TCP6" % (
self.portnumber, self.state['proto']
)
def sb_bound_to(self, bound_ip):
if self._make_sure(self.state['bound'], bound_ip):
return True, "Port %s is bound to %s" % (self.portnumber, bound_ip)
return False, "The port currently bound to %s not %s" % (
self.state['bound'], bound_ip
)
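# Illustrative usage sketch (not part of the original pyspeccheck source): the
# check methods can be exercised directly; each sb_* method reports on the
# netstat state observed for the given port.
if __name__ == '__main__':
    port_spec = Port(22)
    ok, message = port_spec.sb_listening()
    print(ok, message)
    print(port_spec.sb_bound_to('0.0.0.0'))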
|
cosven/FeelUOwn
|
feeluown/config.py
|
Python
|
gpl-3.0
| 2,065
| 0.000504
|
import logging
import warnings
from collections import namedtuple
logger = logging.getLogger(__name__)
Field = namedtuple('Field', ('name', 'type_', 'default', 'desc', 'warn'))
class Config:
"""配置模块
用户可以在 rc 文件中配置各个选项的值
"""
def __init__(self):
object.__setattr__(self, '_fields', {})
def __getattr__(self, name):
        # tip: we cannot use getattr to fetch the value here, otherwise it recurses forever
if name == '_fields':
return object.__getattribute__(self, '_fields')
if name in self._fields:
try:
object.__getattribute__(self, name)
            except AttributeError:
return self._fields[name].default
return object.__getattribute__(self, name)
def __setattr__(self, name, value):
if name in self._fields:
field = self._fields[name]
if field.warn is not None:
warnings.warn('Config field({}): {}'.format(name, field.warn),
stacklevel=2)
            # TODO: validate the value type
object.__setattr__(self, name, value)
else:
logger.warning('Assign to an undeclared config key.')
def deffield(self, name, type_=None, default=None, desc='', warn=None):
"""Define a configuration field
        :param str name: the field name. It SHOULD be capitalized unless the field
            refers to a sub-config.
        :param type_: field type.
        :param default: default value for the field.
        :param desc: description for the field.
        :param warn: if the field is deprecated, set a warning message.
"""
if name not in self._fields:
self._fields[name] = Field(name=name,
type_=type_,
default=default,
desc=desc,
warn=warn)
else:
raise ValueError('Field({}) is already defined.'.format(name))
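# Illustrative usage sketch (not part of the FeelUOwn source; the THEME field
# below is hypothetical): declare a field with deffield, read the default back,
# then override it.
if __name__ == '__main__':
    config = Config()
    config.deffield('THEME', type_=str, default='dark', desc='color theme')
    assert config.THEME == 'dark'   # unset value falls back to the field default
    config.THEME = 'light'          # declared field, so the assignment is accepted
    assert config.THEME == 'light'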
|
antoinecarme/sklearn2sql_heroku
|
tests/regression/RandomReg_500/ws_RandomReg_500_XGBRegressor_db2_code_gen.py
|
Python
|
bsd-3-clause
| 130
| 0.015385
|
from sklearn2sql_heroku.tests.regression import generic as reg_gen
reg_gen.test_model("XGBRegressor", "RandomReg_500", "db2")
|
young-geng/leet_code
|
problems/20_valid-parentheses/main.py
|
Python
|
mit
| 920
| 0.003261
|
# https://leetcode.com/problems/valid-parentheses/
class Solution(object):
def isValid(self, s):
"""
:type s: str
:rtype: bool
"""
if not s:
return True
stack = []
for i in xrange(len(s)):
            # if it's an opening bracket, the nesting gets deeper, so push it onto the stack
if s[i] in "([{":
|
stack.append(s[i])
            # otherwise it must be a closing bracket; in that case, if the stack
            # is empty the string is invalid, and if not, pop and check whether
            # the popped element matches the current closing character
else:
if len(stack) == 0:
return False
last = stack.pop()
if s[i] == ")" and last != "(": return False
if s[i] == "]" and last != "[": return False
if s[i] == "}" and last != "{": return False
return len(stack) == 0
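# Illustrative sanity checks for the solution above (hypothetical driver, not
# part of the original file).
if __name__ == '__main__':
    solution = Solution()
    assert solution.isValid("([]{})")
    assert not solution.isValid("(]")
    assert solution.isValid("")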
|
sdispater/cleo
|
tests/io/inputs/test_option.py
|
Python
|
mit
| 2,858
| 0
|
import pytest
from cleo.exceptions import LogicException
from cleo.exceptions import ValueException
from cleo.io.inputs.option import Option
def test_create():
opt = Option("option")
assert "option" == opt.name
assert opt.shortcut is None
assert opt.is_flag()
assert not opt.accepts_value()
assert not opt.requires_value()
assert not opt.is_list()
assert not opt.default
def test_dashed_name():
opt = Option("--option")
assert "option" == opt.name
def test_fail_if_name_is_empty():
with pytest.raises(ValueException):
Option("")
def test_fail_if_default_value_provided_for_flag():
with pytest.raises(LogicException):
Option("option", flag=True, default="default")
def test_fail_if_wrong_default_value_for_list_option():
with pytest.raises(LogicException):
Option("option", flag=False, is_list=True, default="default")
def test_shortcut():
opt = Option("option", "o")
assert "o" == opt.shortcut
def test_dashed_shortcut():
opt = Option("option", "-o")
assert "o" == opt.shortcut
def test_multiple_shortcuts():
opt = Option("option", "-o|oo|-ooo")
assert "o|oo|ooo" == opt.shortcut
def test_fail_if_shortcut_is_empty():
with pytest.raises(ValueException):
Option("option", "")
def test_optional_value():
opt = Option("option", flag=False, requires_value=False)
assert not opt.is_flag()
assert opt.accepts_value()
assert not opt.requires_value()
assert not opt.is_list()
assert opt.default is None
|
def test_optional_value_with_default():
    opt = Option("option", flag=False, requires_value=False, default="Default")
assert not opt.is_flag()
assert opt.accepts_value()
assert not opt.requires_value()
assert not opt.is_list()
assert opt.default == "Default"
def test_required_value():
opt = Option("option", flag=False, requires_value=True)
assert not opt.is_flag()
assert opt.accepts_value()
assert opt.requires_value()
assert not opt.is_list()
assert opt.default is None
def test_required_value_with_default():
opt = Option("option", flag=False, requires_value=True, default="Default")
assert not opt.is_flag()
assert opt.accepts_value()
assert opt.requires_value()
assert not opt.is_list()
assert "Default" == opt.default
def test_list():
opt = Option("option", flag=False, is_list=True)
assert not opt.is_flag()
assert opt.accepts_value()
assert opt.requires_value()
assert opt.is_list()
assert [] == opt.default
def test_multi_valued_with_default():
opt = Option("option", flag=False, is_list=True, default=["foo", "bar"])
assert not opt.is_flag()
assert opt.accepts_value()
assert opt.requires_value()
assert opt.is_list()
assert ["foo", "bar"] == opt.default
|
mattseymour/django
|
django/contrib/gis/db/backends/spatialite/base.py
|
Python
|
bsd-3-clause
| 3,105
| 0.001932
|
from ctypes.util import find_library
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.sqlite3.base import (
DatabaseWrapper as SQLiteDatabaseWrapper, SQLiteCursorWrapper,
)
from .client import SpatiaLiteClient
from .features import DatabaseFeatures
from .introspection import SpatiaLiteIntrospection
from .operations import SpatiaLiteOperations
from .schema import SpatialiteSchemaEditor
class DatabaseWrapper(SQLiteDatabaseWrapper):
SchemaEditorClass = SpatialiteSchemaEditor
# Classes instantiated in __init__().
client_class = SpatiaLiteClient
features_class = DatabaseFeatures
introspection_class = SpatiaLiteIntrospection
ops_class = SpatiaLiteOperations
def __init__(self, *args, **kwargs):
# Trying to find the location of the SpatiaLite library.
# Here we are figuring out the path to the SpatiaLite library
# (`libspatialite`). If it's not in the system library path (e.g., it
# cannot be found by `ctypes.util.find_library`), then it may be set
# manually in the settings via the `SPATIALITE_LIBRARY_PATH` setting.
        self.spatialite_lib = getattr(settings, 'SPATIALITE_LIBRARY_PATH',
find_library('spatialite'))
if not self.spatialite_lib:
raise ImproperlyConfigured('Unable to locate the SpatiaLite library. '
'Make sure it is in your library path, or set '
                                       'SPATIALITE_LIBRARY_PATH in your settings.'
)
super(DatabaseWrapper, self).__init__(*args, **kwargs)
def get_new_connection(self, conn_params):
conn = super(DatabaseWrapper, self).get_new_connection(conn_params)
# Enabling extension loading on the SQLite connection.
try:
conn.enable_load_extension(True)
except AttributeError:
raise ImproperlyConfigured(
'SpatiaLite requires SQLite to be configured to allow '
'extension loading.'
)
# Loading the SpatiaLite library extension on the connection, and returning
# the created cursor.
cur = conn.cursor(factory=SQLiteCursorWrapper)
try:
cur.execute("SELECT load_extension(%s)", (self.spatialite_lib,))
except Exception as exc:
raise ImproperlyConfigured(
'Unable to load the SpatiaLite library extension "%s"' % self.spatialite_lib
) from exc
cur.close()
return conn
def prepare_database(self):
super(DatabaseWrapper, self).prepare_database()
# Check if spatial metadata have been initialized in the database
with self.cursor() as cursor:
cursor.execute("PRAGMA table_info(geometry_columns);")
if cursor.fetchall() == []:
arg = "1" if self.features.supports_initspatialmetadata_in_one_transaction else ""
cursor.execute("SELECT InitSpatialMetaData(%s)" % arg)
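# Illustrative settings.py snippet (the path below is hypothetical): when
# ctypes cannot locate libspatialite automatically, the backend falls back to
# the SPATIALITE_LIBRARY_PATH setting, e.g.:
#     SPATIALITE_LIBRARY_PATH = '/usr/local/lib/mod_spatialite.so'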
|
python-xlib/python-xlib
|
Xlib/__init__.py
|
Python
|
lgpl-2.1
| 1,184
| 0
|
# Xlib.__init__ -- glue for Xlib package
#
# Copyright (C) 2000-2002 Peter Liljenberg <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
__version__ = (0, 31)
__version_extra__ = ''
__version_string__ = '.'.join(map(str, __version__)) + __version_extra__
__all__ = [
'X',
'XK',
'Xatom',
'Xcursorfont',
'Xutil',
'display',
'error',
'rdb',
# Explicitly exclude threaded, so that it isn't imported by
# from Xlib import *
]
|
grimoirelab/arthur
|
arthur/worker.py
|
Python
|
gpl-3.0
| 1,980
| 0
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Authors:
# Santiago Dueñas <[email protected]>
# Alvaro del Castillo San Felix <[email protected]>
#
import logging
import pickle
import rq
from .common import CH_PUBSUB
logger = logging.getLogger(__name__)
|
class ArthurWorker(rq.Worker):
"""Worker class for Arthur"""
def __init__(self, queues, **kwargs):
super().__init__(queues, **kwargs)
self.__pubsub_channel = CH_PUBSUB
@property
def pubsub_channel(self):
return self.__pubsub_channel
@pubsub_channel.setter
def pubsub_channel(self, value):
self.__pubsub_channel = value
def perform_job(self, job, queue):
"""Custom method to execute a job and notify of its result
:param job: Job object
:param queue: the queue containing the object
"""
result = super().perform_job(job, queue)
job_status = job.get_status()
job_result = job.return_value if job_status == 'finished' else None
data = {
'job_id': job.id,
'status': job_status,
'result': job_result
}
msg = pickle.dumps(data)
self.connection.publish(self.pubsub_channel, msg)
return result
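# Illustrative consumer sketch (not part of the Arthur source; assumes the
# redis-py client API): subscribe to the worker's pub/sub channel and unpickle
# the notifications published by perform_job above.
def listen_for_results(connection, channel=CH_PUBSUB):
    """Log every job notification published on the given channel."""
    pubsub = connection.pubsub()
    pubsub.subscribe(channel)
    for msg in pubsub.listen():
        if msg['type'] == 'message':
            data = pickle.loads(msg['data'])
            logger.info("job %s finished with status %s",
                        data['job_id'], data['status'])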
|
shafiquejamal/socialassistanceregistry
|
nr/nr/formulas.py
|
Python
|
bsd-3-clause
| 137
| 0.021898
|
from django.conf import settings
def mask_toggle(number_to_mask_or_unmask):
    return int(number_to_mask_or_unmask) ^ settings.MASKING_KEY
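# Illustrative round trip (hypothetical key; the real key comes from
# settings.MASKING_KEY): XOR-ing twice with the same key restores the original
# number, which is why a single function serves for both masking and unmasking.
#     example_key = 0x5A5A5A
#     masked = 123456 ^ example_key
#     assert masked ^ example_key == 123456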
|
mhbu50/erpnext
|
erpnext/hr/doctype/vehicle_log/test_vehicle_log.py
|
Python
|
gpl-3.0
| 3,526
| 0.025241
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
import unittest
import frappe
from frappe.utils import cstr, flt, nowdate, random_string
from erpnext.hr.doctype.employee.test_employee import make_employee
from erpnext.hr.doctype.vehicle_log.vehicle_log import make_expense_claim
class TestVehicleLog(unittest.TestCase):
def setUp(self):
employee_id = frappe.db.sql("""select name from `tabEmployee` where name='[email protected]'""")
        self.employee_id = employee_id[0][0] if employee_id else None
if not self.employee_id:
self.employee_id = make_employee("[email protected]", company="_Test Company")
self.license_plate = get_vehicle(self.employee_id)
def tearDown(self):
frappe.delete_doc("Vehicle", self.license_plate, force=1)
        frappe.delete_doc("Employee", self.employee_id, force=1)
def test_make_vehicle_log_and_syncing_of_odometer_value(self):
vehicle_log = make_vehicle_log(self.license_plate, self.employee_id)
        # checking the vehicle odometer value on submit.
vehicle = frappe.get_doc("Vehicle", self.license_plate)
self.assertEqual(vehicle.last_odometer, vehicle_log.odometer)
        # checking the vehicle odometer value on vehicle log cancellation.
last_odometer = vehicle_log.last_odometer
current_odometer = vehicle_log.odometer
distance_travelled = current_odometer - last_odometer
vehicle_log.cancel()
vehicle.reload()
self.assertEqual(vehicle.last_odometer, current_odometer - distance_travelled)
vehicle_log.delete()
def test_vehicle_log_fuel_expense(self):
vehicle_log = make_vehicle_log(self.license_plate, self.employee_id)
expense_claim = make_expense_claim(vehicle_log.name)
fuel_expense = expense_claim.expenses[0].amount
self.assertEqual(fuel_expense, 50*500)
vehicle_log.cancel()
frappe.delete_doc("Expense Claim", expense_claim.name)
frappe.delete_doc("Vehicle Log", vehicle_log.name)
def test_vehicle_log_with_service_expenses(self):
vehicle_log = make_vehicle_log(self.license_plate, self.employee_id, with_services=True)
expense_claim = make_expense_claim(vehicle_log.name)
expenses = expense_claim.expenses[0].amount
self.assertEqual(expenses, 27000)
vehicle_log.cancel()
frappe.delete_doc("Expense Claim", expense_claim.name)
frappe.delete_doc("Vehicle Log", vehicle_log.name)
def get_vehicle(employee_id):
license_plate=random_string(10).upper()
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": cstr(license_plate),
"make": "Maruti",
"model": "PCM",
"employee": employee_id,
"last_odometer": 5000,
"acquisition_date": nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": flt(500000)
})
try:
vehicle.insert()
except frappe.DuplicateEntryError:
pass
return license_plate
def make_vehicle_log(license_plate, employee_id, with_services=False):
vehicle_log = frappe.get_doc({
"doctype": "Vehicle Log",
"license_plate": cstr(license_plate),
"employee": employee_id,
"date": nowdate(),
"odometer": 5010,
"fuel_qty": flt(50),
"price": flt(500)
})
if with_services:
vehicle_log.append("service_detail", {
"service_item": "Oil Change",
"type": "Inspection",
"frequency": "Mileage",
"expense_amount": flt(500)
})
vehicle_log.append("service_detail", {
"service_item": "Wheels",
"type": "Change",
"frequency": "Half Yearly",
"expense_amount": flt(1500)
})
vehicle_log.save()
vehicle_log.submit()
return vehicle_log
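# Worked arithmetic behind the expected amounts in the tests above
# (illustrative): the fuel expense is fuel_qty * price = 50 * 500 = 25000, and
# with the two service rows added the claim totals 25000 + 500 + 1500 = 27000.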
|