| repo_name (stringlengths 5–100) | path (stringlengths 4–231) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (stringlengths 0–8.16k) | middle (stringlengths 3–512) | suffix (stringlengths 0–8.17k) |
|---|---|---|---|---|---|---|---|---|
TribeMedia/synapse | synapse/server.py | Python | apache-2.0 | 11,005 | 0.000091 |
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file provides some classes for setting up (partially-populated)
# homeservers; either as a full homeserver for a real application, or a small
# partial one for unit test mocking.
# Imports required for the default HomeServer() implementation
import logging
from twisted.enterprise import adbapi
from twisted.web.client import BrowserLikePolicyForHTTPS
from synapse.api.auth import Auth
from synapse.api.filtering import Filtering
from synapse.api.ratelimiting import Ratelimiter
from synapse.appservice.api import ApplicationServiceApi
from synapse.appservice.scheduler import ApplicationServiceScheduler
from synapse.crypto.keyring import Keyring
from synapse.events.builder import EventBuilderFactory
from synapse.federation import initialize_http_replication
from synapse.federation.send_queue import FederationRemoteSendQueue
from synapse.federation.transport.client import TransportLayerClient
from synapse.federation.transaction_queue import TransactionQueue
from synapse.handlers import Handlers
from synapse.handlers.appservice import ApplicationServicesHandler
from synapse.handlers.auth import AuthHandler, MacaroonGeneartor
from synapse.handlers.devicemessage import DeviceMessageHandler
from synapse.handlers.device import DeviceHandler
from synapse.handlers.e2e_keys import E2eKeysHandler
from synapse.handlers.presence import PresenceHandler
from synapse.handlers.room_list import RoomListHandler
from synapse.handlers.sync import SyncHandler
from synapse.handlers.typing import TypingHandler
from synapse.handlers.events import EventHandler, EventStreamHandler
from synapse.handlers.initial_sync import InitialSyncHandler
from synapse.handlers.receipts import ReceiptsHandler
from synapse.http.client import SimpleHttpClient, InsecureInterceptableContextFactory
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
from synapse.notifier import Notifier
from synapse.push.pusherpool import PusherPool
from synapse.rest.media.v1.media_repository import MediaRepository
from synapse.state import StateHandler
from synapse.storage import DataStore
from synapse.streams.events import EventSources
from synapse.util import Clock
from synapse.util.distributor import Distributor
logger = logging.getLogger(__name__)
class HomeServer(object):
"""A basic homeserver object without lazy component builders.
This will need all of the components it requires to either be passed as
constructor arguments, or the relevant methods overridden to create them.
Typically this would only be used for unit tests.
For every dependency in the DEPENDENCIES list below, this class creates one
method,
def get_DEPENDENCY(self)
which returns the value of that dependency. If no value has yet been set
nor was provided to the constructor, it will attempt to call a lazy builder
method called
def build_DEPENDENCY(self)
which must be implemented by the subclass. This code may call any of the
required "get" methods on the instance to obtain the sub-dependencies that
one requires.
"""
DEPENDENCIES = [
'config',
'clock',
'http_client',
'db_pool',
'persistence_service',
'replication_layer',
'datastore',
'handlers',
'v1auth',
'auth',
'rest_servlet_factory',
'state_handler',
'presence_handler',
'sync_handler',
'typing_handler',
'room_list_handler',
'auth_handler',
'device_handler',
'e2e_keys_handler',
'event_handler',
'event_stream_handler',
'initial_sync_handler',
'application_service_api',
'application_service_scheduler',
'application_service_handler',
'device_message_handler',
'notifier',
'distributor',
'client_resource',
'resource_for_federation',
'resource_for_static_content',
'resource_for_web_client',
'resource_for_content_repo',
'resource_for_server_key',
'resource_for_server_key_v2',
'resource_for_media_repository',
'resource_for_metrics',
'event_sources',
'ratelimiter',
'keyring',
'pusherpool',
'event_builder_factory',
'filtering',
'http_client_context_factory',
'simple_http_client',
'media_repository',
'federation_transport_client',
'federation_sender',
'receipts_handler',
'macaroon_generator',
]
def __init__(self, hostname, **kwargs):
"""
Args:
hostname : The hostname for the server.
"""
self.hostname = hostname
self._building = {}
self.clock = Clock()
self.distributor = Distributor()
self.ratelimiter = Ratelimiter()
# Other kwargs are explicit dependencies
for depname in kwargs:
setattr(self, depname, kwargs[depname])
def setup(self):
logger.info("Setting up.")
self.datastore = DataStore(self.get_db_conn(), self)
logger.info("Finished setting up.")
def get_ip_from_request(self, request):
# X-Forwarded-For is handled by our custom request type.
return request.getClientIP()
def is_mine(self, domain_specific_string):
return domain_specific_string.domain == self.hostname
def is_mine_id(self, string):
return string.split(":", 1)[1] == self.hostname
def build_replication_layer(self):
return initialize_http_replication(self)
def build_handlers(self):
return Handlers(self)
def build_notifier(self):
return Notifier(self)
def build_auth(self):
return Auth(self)
def build_http_client_context_factory(self):
return (
InsecureInterceptableContextFactory()
if self.config.use_insecure_ssl_client_just_for_testing_do_not_use
else BrowserLikePolicyForHTTPS()
)
def build_simple_http_client(self):
return SimpleHttpClient(self)
def build_v1auth(self):
orf = Auth(self)
# Matrix spec makes no reference to what HTTP status code is returned,
# but the V1 API uses 403 where it means 401, and the webclient
# relies on this behaviour, so V1 gets its own copy of the auth
# with backwards compat behaviour.
orf.TOKEN_NOT_FOUND_HTTP_STATUS = 403
return orf
def build_state_handler(self):
return StateHandler(self)
def build_presence_handler(self):
return PresenceHandler(self)
def build_typing_handler(self):
return TypingHandler(self)
def build_sync_handler(self):
return SyncHandler(self)
def build_room_list_handler(self):
return RoomListHandler(self)
def build_auth_handler(self):
return AuthHandler(self)
def build_macaroon_generator(self):
return MacaroonGeneartor(self)
def build_device_handler(self):
return DeviceHandler(self)
def build_device_message_handler(self):
return DeviceMessageHandler(self)
def build_e2e_keys_handler(self):
return E2eKeysHandler(self)
def build_application_service_api(self):
return ApplicationServiceApi(self)
def build_application_service_scheduler(self):
return ApplicationServiceScheduler(self)
def build_application_service_handler(self):
return ApplicationServicesHandler(self)
def build_event_handler(self):
return EventHandler(self)
def buil
|
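The HomeServer docstring above describes a get_DEPENDENCY / build_DEPENDENCY lazy-builder pattern. The snippet below is a minimal standalone sketch of that pattern; DemoServer, its dependency names, and the caching strategy are illustrative and are not synapse code.

```python
# Minimal sketch of the get_X/build_X lazy-builder pattern described in the
# HomeServer docstring above. DemoServer and its dependencies are illustrative.
class DemoServer(object):
    DEPENDENCIES = ["clock", "datastore"]

    def __init__(self, **kwargs):
        # Explicitly supplied dependencies bypass the lazy builders.
        self._cache = dict(kwargs)

    def _get(self, depname):
        # Return a cached dependency, or build and cache it on first use.
        if depname not in self._cache:
            self._cache[depname] = getattr(self, "build_%s" % depname)()
        return self._cache[depname]

# Generate one get_<dep>() accessor per declared dependency.
for _dep in DemoServer.DEPENDENCIES:
    setattr(DemoServer, "get_%s" % _dep,
            (lambda dep: lambda self: self._get(dep))(_dep))

class UnitTestServer(DemoServer):
    def build_clock(self):
        return "fake clock"

assert UnitTestServer().get_clock() == "fake clock"
assert UnitTestServer(datastore="fake store").get_datastore() == "fake store"
```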
miloszz/DIRAC | DataManagementSystem/Client/FTSRequest.py | Python | gpl-3.0 | 37,261 | 0.02818 |
#############################################################################
# $HeadURL$
#############################################################################
""" ..mod: FTSRequest
=================
Helper class to perform FTS job submission and monitoring.
"""
# # imports
import sys
import re
import time
# # from DIRAC
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.Core.Utilities.File import checkGuid
from DIRAC.Core.Utilities.Adler import compareAdler, intAdlerToHex, hexAdlerToInt
from DIRAC.Core.Utilities.SiteSEMapping import getSitesForSE
from DIRAC.Core.Utilities.Time import dateTime
from DIRAC.Resources.Storage.StorageElement import StorageElement
from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
from DIRAC.Core.Utilities.ReturnValues import returnSingleResult
from DIRAC.AccountingSystem.Client.Types.DataOperation import DataOperation
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import Resources
from DIRAC.Core.Security.ProxyInfo import getProxyInfo
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC.DataManagementSystem.Client.FTSJob import FTSJob
from DIRAC.DataManagementSystem.Client.FTSFile import FTSFile
# # RCSID
__RCSID__ = "$Id$"
class FTSRequest( object ):
"""
.. class:: FTSRequest
Helper class for FTS job submission and monitoring.
"""
# # default checksum type
__defaultCksmType = "ADLER32"
# # flag to disable/enable checksum test, default: disabled
__cksmTest = False
def __init__( self ):
"""c'tor
:param self: self reference
"""
self.log = gLogger.getSubLogger( self.__class__.__name__, True )
# # final states tuple
self.finalStates = ( 'Canceled', 'Failed', 'Hold',
'Finished', 'FinishedDirty' )
# # failed states tuple
self.failedStates = ( 'Canceled', 'Failed',
'Hold', 'FinishedDirty' )
# # successful states tuple
self.successfulStates = ( 'Finished', 'Done' )
# # all file states tuple
self.fileStates = ( 'Done', 'Active', 'Pending', 'Ready', 'Canceled', 'Failed',
'Finishing', 'Finished', 'Submitted', 'Hold', 'Waiting' )
self.statusSummary = {}
# # request status
self.requestStatus = 'Unknown'
# # dict for FTS job files
self.fileDict = {}
# # dict for replicas information
self.catalogReplicas = {}
# # dict for metadata information
self.catalogMetadata = {}
# # dict for files that failed to register
self.failedRegistrations = {}
# # placeholder for FileCatalog reference
self.oCatalog = None
# # submit timestamp
self.submitTime = ''
# # placeholder FTS job GUID
self.ftsGUID = ''
# # placeholder for FTS server URL
self.ftsServer = ''
# # flag marking FTS job completeness
self.isTerminal = False
# # completeness percentage
self.percentageComplete = 0.0
# # source SE name
self.sourceSE = ''
# # flag marking source SE validity
self.sourceValid = False
# # source space token
self.sourceToken = ''
# # target SE name
self.targetSE = ''
# # flag marking target SE validity
self.targetValid = False
# # target space token
self.targetToken = ''
# # placeholder for target StorageElement
self.oTargetSE = None
# # placeholder for source StorageElement
self.oSourceSE = None
# # checksum type, set it to default
self.__cksmType = self.__defaultCksmType
# # disable checksum test by default
self.__cksmTest = False
# # statuses that prevent submitting to FTS
self.noSubmitStatus = ( 'Failed', 'Done', 'Staging' )
# # were sources resolved?
self.sourceResolved = False
# # Number of file transfers actually submitted
self.submittedFiles = 0
self.transferTime = 0
self.submitCommand = Operations().getValue( 'DataManagement/FTSPlacement/FTS2/SubmitCommand', 'glite-transfer-submit' )
self.monitorCommand = Operations().getValue( 'DataManagement/FTSPlacement/FTS2/MonitorCommand', 'glite-transfer-status' )
self.ftsJob = None
self.ftsFiles = []
####################################################################
#
# Methods for setting/getting/checking the SEs
#
def setSourceSE( self, se ):
""" set SE for source
:param self: self reference
:param str se: source SE name
"""
if se == self.targetSE:
return S_ERROR( "SourceSE is TargetSE" )
self.sourceSE = se
self.oSourceSE = StorageElement( self.sourceSE )
return self.__checkSourceSE()
def __checkSourceSE( self ):
""" check source SE availability
:param self: self reference
"""
if not self.sourceSE:
return S_ERROR( "SourceSE not set" )
res = self.oSourceSE.isValid( 'Read' )
if not res['OK']:
return S_ERROR( "SourceSE not available for reading" )
res = self.__getSESpaceToken( self.oSourceSE )
if not res['OK']:
self.log.error( "FTSRequest failed to get SRM Space Token for SourceSE", res['Message'] )
return S_ERROR( "SourceSE does not support FTS transfers" )
if self.__cksmTest:
res = self.oSourceSE.getChecksumType()
if not res["OK"]:
self.log.error( "Unable to get checksum type for SourceSE",
"%s: %s" % ( self.sourceSE, res["Message"] ) )
cksmType = res["Value"]
if cksmType in ( "NONE", "NULL" ):
self.log.warn( "Checksum type set to %s at SourceSE %s, disabling checksum test" % ( cksmType,
self.sourceSE ) )
self.__cksmTest = False
elif cksmType != self.__cksmType:
self.log.warn( "Checksum type mismatch, disabling checksum test" )
self.__cksmTest = False
self.sourceToken = res['Value']
self.sourceValid = True
return S_OK()
def setTargetSE( self, se ):
""" set target SE
:param self: self reference
:param str se: target SE name
"""
if se == self.sourceSE:
return S_ERROR( "TargetSE is SourceSE" )
self.targetSE = se
self.oTargetSE = StorageElement( self.targetSE )
return self.__checkTargetSE()
def setTargetToken( self, token ):
""" target space token setter
:param self: self reference
:param str token: target space token
"""
self.targetToken = token
return S_OK()
def __checkTargetSE( self ):
""" check target SE availability
:param self: self reference
"""
if not self.targetSE:
return S_ERROR( "TargetSE not set" )
res = self.oTargetSE.isValid( 'Write' )
if not res['OK']:
return S_ERROR( "TargetSE not available for writing" )
res = self.__getSESpaceToken( self.oTargetSE )
if not res['OK']:
self.log.error( "FTSRequest failed to get SRM Space Token for TargetSE", res['Message'] )
return S_ERROR( "TargetSE does not support FTS transfers" )
# # check checksum types
if self.__cksmTest:
res = self.oTargetSE.getChecksumType()
if not res["OK"]:
self.log.error( "Unable to get checksum type for TargetSE",
"%s: %s" % ( self.targetSE, res["Message"] ) )
cksmType = res["Value"]
if cksmType in ( "NONE", "NULL" ):
self.log.warn( "Checksum type set to %s at TargetSE %s, disabling checksum test" % ( cksmType,
self.targetSE ) )
self.__cksmTest = False
elif cksmType != self.__cksmType:
self.log.warn( "Checksum type mismatch, disabling checksum test" )
self.__cksmTest = False
self.targetToken = res['Value']
self.targetValid = True
return S_OK()
@staticmethod
def __getSESpaceToken( oSE ):
""" get space token from StorageElement instance
:param self: self reference
:param StorageElement oSE: StorageElement instance
"""
res = oSE.getStorageParameters( "SRM2" )
if not res['OK']:
return res
return S_OK( res['Value'].get( 'SpaceToken' ) )
########################
|
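Throughout the FTSRequest snippet above, methods follow DIRAC's S_OK/S_ERROR convention: every call returns a dict with an 'OK' flag and either a 'Value' or a 'Message'. The fragment below is a hedged usage sketch of the SE setters, assuming a DIRAC installation is available; the storage element names are placeholders.

```python
# Hedged usage sketch (not DIRAC code): exercising the S_OK/S_ERROR return
# convention used by FTSRequest.setSourceSE()/setTargetSE() above.
# "SOURCE-disk" and "TARGET-disk" are placeholder storage element names.
request = FTSRequest()
for setter, name in ((request.setSourceSE, "SOURCE-disk"),
                     (request.setTargetSE, "TARGET-disk")):
    res = setter(name)
    if not res['OK']:
        # S_ERROR results carry a human-readable 'Message'.
        print("SE setup failed: %s" % res['Message'])
        break
else:
    # Both setters returned S_OK, i.e. {'OK': True, ...}.
    print("source and target SEs validated")
```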
XiaosongWei/blink-crosswalk | Tools/Scripts/webkitpy/layout_tests/port/driver.py | Python | bsd-3-clause | 23,508 | 0.003148 |
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import base64
import copy
import logging
import re
import shlex
import sys
import time
import os
from webkitpy.common.system import path
from webkitpy.common.system.profiler import ProfilerFactory
_log = logging.getLogger(__name__)
DRIVER_START_TIMEOUT_SECS = 30
class DriverInput(object):
def __init__(self, test_name, timeout, image_hash, should_run_pixel_test, args):
self.test_name = test_name
self.timeout = timeout # in ms
self.image_hash = image_hash
self.should_run_pixel_test = should_run_pixel_test
self.args = args
class DriverOutput(object):
"""Groups information about an output from the driver for easy passing
and post-processing of data."""
def __init__(self, text, image, image_hash, audio, crash=False,
test_time=0, measurements=None, timeout=False, error='', crashed_process_name='??',
crashed_pid=None, crash_log=None, leak=False, leak_log=None, pid=None):
# FIXME: Args could be renamed to better clarify what they do.
self.text = text
self.image = image # May be empty-string if the test crashes.
self.image_hash = image_hash
self.image_diff = None # image_diff gets filled in after construction.
self.audio = audio # Binary format is port-dependent.
self.crash = crash
self.crashed_process_name = crashed_process_name
self.crashed_pid = crashed_pid
self.crash_log = crash_log
self.leak = leak
self.leak_log = leak_log
self.test_time = test_time
self.measurements = measurements
self.timeout = timeout
self.error = error # stderr output
self.pid = pid
def has_stderr(self):
return bool(self.error)
class DeviceFailure(Exception):
pass
class Driver(object):
"""object for running test(s) using content_shell or other driver."""
def __init__(self, port, worker_number, pixel_tests, no_timeout=False):
"""Initialize a Driver to subsequently run tests.
Typically this routine will spawn content_shell in a config
ready for subsequent input.
port - reference back to the port object.
worker_number - identifier for a particular worker/driver instance
"""
self._port = port
self._worker_number = worker_number
self._no_timeout = no_timeout
self._driver_tempdir = None
# content_shell can report back subprocess crashes by printing
# "#CRASHED - PROCESSNAME". Since those can happen at any time
# and ServerProcess won't be aware of them (since the actual tool
# didn't crash, just a subprocess) we record the crashed subprocess name here.
self._crashed_process_name = None
self._crashed_pid = None
# content_shell can report back subprocesses that became unresponsive
# This could mean they crashed.
self._subprocess_was_unresponsive = False
# content_shell can report back subprocess DOM-object leaks by printing
# "#LEAK". This leak detection is enabled only when the flag
# --enable-leak-detection is passed to content_shell.
self._leaked = False
# stderr reading is scoped on a per-test (not per-block) basis, so we store the accumulated
# stderr output, as well as if we've seen #EOF on this driver instance.
# FIXME: We should probably remove _read_first_block and _read_optional_image_block and
# instead scope these locally in run_test.
self.error_from_test = str()
self.err_seen_eof = False
self._server_process = None
self._current_cmd_line = None
self._measurements = {}
if self._port.get_option("profile"):
profiler_name = self._port.get_option("profiler")
self._profiler = ProfilerFactory.create_profiler(self._port.host,
self._port._path_to_driver(), self._port.results_directory(), profiler_name)
else:
self._profiler = None
def __del__(self):
self.stop()
def run_test(self, driver_input, stop_when_done):
"""Run a single test and return the results.
Note that it is okay if a test times out or crashes and leaves
the driver in an indeterminate state. The upper layers of the program
are responsible for cleaning up and ensuring things are okay.
Returns a DriverOutput object.
"""
start_time = time.time()
stdin_deadline = start_time + int(driver_input.timeout) / 2000.0
self.start(driver_input.should_run_pixel_test, driver_input.args, stdin_deadline)
test_begin_time = time.time()
self.error_from_test = str()
self.err_seen_eof = False
command = self._command_from_driver_input(driver_input)
deadline = test_begin_time + int(driver_input.timeout) / 1000.0
self._server_process.write(command)
text, audio = self._read_first_block(deadline) # First block is either text or audio
image, actual_image_hash = self._read_optional_image_block(deadline) # The second (optional) block is image data.
crashed = self.has_crashed()
timed_out = self._server_process.timed_out
pid = self._server_process.pid()
leaked = self._leaked
if not crashed:
sanitizer = self._port.output_contains_sanitizer_messages(self.error_from_test)
if sanitizer:
self.error_from_test = 'OUTPUT CONTAINS "' + sanitizer + '", so we are treating this test as if it crashed, even though it did not.\n\n' + self.error_from_test
crashed = True
|
self._crashed_process_name = "unknown process name"
self._crashed_pid = 0
if stop_when_done or crashed or timed_out or leaked:
# We call stop() even if we crashed or timed out in order to get any remaining stdout/stderr output.
# In the timeout case, we kill the hung process as well.
out, err = self._server_process.stop(self._port.driver_stop_timeout() if stop_when_done else 0.0)
if out:
text += out
if err:
self.error_from_test += err
self._server_process = None
crash_log = None
if crashed:
self.error_from_test, crash_log = self._get_crash_log(text, self.error_from_test, newer_than=start_time)
# If we don't find a crash log use a placeholder error message instead.
if not crash_log:
pid_str = str(self._crashed_pid) if self._crashed_pid else "unknown pi
|
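The comments in the Driver constructor above explain that content_shell reports subprocess crashes by printing "#CRASHED - PROCESSNAME" and DOM-object leaks by printing "#LEAK". The sketch below is an illustrative parser for those markers, not webkitpy code; the optional "(pid N)" suffix is an assumption.

```python
# Illustrative parser (not webkitpy code) for the stderr markers described in
# the Driver comments above. The "(pid N)" suffix is an assumption.
import re

def parse_driver_marker(line):
    crash = re.match(r"#CRASHED - (\S+)(?: \(pid (\d+)\))?", line)
    if crash:
        return {"crashed_process_name": crash.group(1),
                "crashed_pid": int(crash.group(2)) if crash.group(2) else None}
    if line.startswith("#LEAK"):
        return {"leaked": True}
    return None

assert parse_driver_marker("#CRASHED - renderer (pid 1234)")["crashed_pid"] == 1234
assert parse_driver_marker("#LEAK") == {"leaked": True}
assert parse_driver_marker("ordinary stderr line") is None
```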
peoplepower/botlab | com.ppc.Microservices/intelligence/dashboard/tools/set_status.py | Python | apache-2.0 | 7,607 | 0.00723 |
#!/usr/bin/env python
# encoding: utf-8
'''
Created on September 24, 2019
@author: David Moss
'''
import time
# Time conversions to ms
ONE_SECOND_MS = 1000
ONE_MINUTE_MS = 60 * ONE_SECOND_MS
ONE_HOUR_MS = ONE_MINUTE_MS * 60
ONE_DAY_MS = ONE_HOUR_MS * 24
ONE_WEEK_MS = ONE_DAY_MS * 7
ONE_MONTH_MS = ONE_DAY_MS * 30
ONE_YEAR_MS = ONE_DAY_MS * 365
# Timestamped commands
COMMAND_DELETE = -2
COMMAND_SET_STATUS_HIDDEN = -1
COMMAND_SET_STATUS_GOOD = 0
COMMAND_SET_STATUS_WARNING = 1
COMMAND_SET_STATUS_CRITICAL = 2
# Data Stream Address
DATASTREAM_ADDRESS = "update_dashboard_content"
# # Data Stream Content
# DATASTREAM_CONTENT = {
# "type": 0,
# "title": "NOW",
# "weight": 0,
# "content": {
# "status": 0,
# "comment": "Left the house once today.",
# "weight": 25,
# "id": "leave",
# "icon": "house-leave",
# "icon_font": "far",
# "alarms": {
# int(time.time() * 1000) + (ONE_SECOND_MS * 600): COMMAND_DELETE,
# }
# }
# }
|
# # Data Stream Content
# DATASTREAM_CONTENT = {
# "type": 0,
# "title": "NOW",
# "weight": 0,
# "content": {
# "status": 0,
# "comment": "81% sleep score.",
# "weight": 20,
# "id": "sleep",
# "icon": "snooze",
# "icon_font": "far",
# "alarms": {
# int(time.time() * 1000) + (ONE_SECOND_MS * 600): COMMAND_DELETE,
# }
# }
# }
# Data Stream Content
DATASTREAM_CONTENT = {
"type": 0,
"title": "NOW",
"weight": 0,
"content": {
"status": 0,
"comment": "Judy Bessee reached out today.",
"weight": 30,
"id": "temporary",
"icon": "comment-smile",
"icon_font": "far",
"alarms": {
int(time.time() * 1000) + (ONE_SECOND_MS * 600): COMMAND_DELETE,
}
}
}
# input function behaves differently in Python 2.x and 3.x. And there is no raw_input in 3.x.
if hasattr(__builtins__, 'raw_input'):
input=raw_input
import requests
import sys
import json
import logging
from argparse import ArgumentParser
from argparse import RawDescriptionHelpFormatter
def main(argv=None):
if argv is None:
argv = sys.argv
else:
sys.argv.extend(argv)
parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("-u", "--username", dest="username", help="Username")
parser.add_argument("-p", "--password", dest="password", help="Password")
parser.add_argument("-s", "--server", dest="server", help="Base server URL (app.presencepro.com)")
parser.add_argument("-l", "--location", dest="location_id", help="Location ID")
parser.add_argument("-a", "--api_key", dest="apikey", help="User's API key instead of a username/password")
parser.add_argument("--httpdebug", dest="httpdebug", action="store_true", help="HTTP debug logger output");
# Process arguments
args, unknown = parser.parse_known_args()
# Extract the arguments
username = args.username
password = args.password
server = args.server
httpdebug = args.httpdebug
app_key = args.apikey
location_id = args.location_id
if location_id is not None:
location_id = int(location_id)
print(Color.BOLD + "Location ID: {}".format(location_id) + Color.END)
# Define the bot server
if not server:
server = "https://app.presencepro.com"
if "http" not in server:
server = "https://" + server
# HTTP Debugging
if httpdebug:
try:
import http.client as http_client
except ImportError:
# Python 2
import httplib as http_client
http_client.HTTPConnection.debuglevel = 1
# You must initialize logging, otherwise you'll not see debug output.
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True
# Login to your user account
if app_key is None:
app_key, user_info = _login(server, username, password)
send_datastream_message(server, app_key, location_id, DATASTREAM_ADDRESS, DATASTREAM_CONTENT)
print("Done!")
def send_datastream_message(server, app_key, location_id, address, content):
http_headers = {"API_KEY": app_key, "Content-Type": "application/json"}
params = {
"address": address,
"scope": 1,
"locationId": location_id
}
body = {
"feed": content
}
print("Body: " + json.dumps(body, indent=2, sort_keys=True))
print("Server: " + server)
r = requests.post(server + "/cloud/appstore/stream/", params=params, data=json.dumps(body), headers=http_headers)
j = json.loads(r.text)
_check_for_errors(j)
print(str(r.text))
def _login(server, username, password):
"""Get a Bot API key and user info by logging in with a username and password"""
if not username:
username = input('Email address: ')
if not password:
import getpass
password = getpass.getpass('Password: ')
try:
import requests
# login by username and password
http_headers = {"PASSWORD": password, "Content-Type": "application/json"}
r = requests.get(server + "/cloud/json/login", params={"username":username}, headers=http_headers)
j = json.loads(r.text)
_check_for_errors(j)
app_key = j['key']
# get user info
http_headers = {"API_KEY": app_key, "Content-Type": "application/json"}
r = requests.get(server + "/cloud/json/user", headers=http_headers)
j = json.loads(r.text)
_check_for_errors(j)
return app_key, j
except BotError as e:
sys.stderr.write("Error: " + e.msg)
sys.stderr.write("\nCreate an account on " + server + " and use it to sign in")
sys.stderr.write("\n\n")
raise e
def _check_for_errors(json_response):
"""Check some JSON response for BotEngine errors"""
if not json_response:
raise BotError("No response from the server!", -1)
if json_response['resultCode'] > 0:
msg = "Unknown error!"
if 'resultCodeMessage' in json_response.keys():
msg = json_response['resultCodeMessage']
elif 'resultCodeDesc' in json_response.keys():
msg = json_response['resultCodeDesc']
raise BotError(msg, json_response['resultCode'])
del(json_response['resultCode'])
class BotError(Exception):
"""BotEngine exception to raise and log errors."""
def __init__(self, msg, code):
super(BotError, self).__init__(msg)
self.msg = msg
self.code = code
def __str__(self):
return self.msg
def __unicode__(self):
return self.msg
#===============================================================================
# Color Class for CLI
#===============================================================================
class Color:
"""Color your command line output text with Color.WHATEVER and Color.END"""
PURPLE = '\033[95m'
CYAN = '\033[96m'
DARKCYAN = '\033[36m'
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
END = '\033[0m'
if __name__ == "__main__":
sys.exit(main())
|
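For the set_status.py tool above, the call below is a hedged sketch of invoking send_datastream_message() directly; the server URL, API key, and location ID are placeholders, and the API key would normally come from _login().

```python
# Hedged usage sketch for send_datastream_message() defined above.
# The server, API key and location ID are placeholders.
send_datastream_message(
    server="https://app.presencepro.com",
    app_key="YOUR_API_KEY",        # normally returned by _login()
    location_id=12345,             # hypothetical location
    address=DATASTREAM_ADDRESS,
    content=DATASTREAM_CONTENT)
```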
ktan2020/legacy-automation | win/Lib/test/test_shutil.py | Python | mit | 30,473 | 0.001313 |
# Copyright (C) 2003 Python Software Foundation
import unittest
import shutil
import tempfile
import sys
import stat
import os
import os.path
from os.path import splitdrive
from distutils.spawn import find_executable, spawn
from shutil import (_make_tarball, _make_zipfile, make_archive,
register_archive_format, unregister_archive_format,
get_archive_formats)
import tarfile
import warnings
from test import test_support
from test.test_support import TESTFN, check_warnings, captured_stdout
TESTFN2 = TESTFN + "2"
try:
import grp
import pwd
UID_GID_SUPPORT = True
except ImportError:
UID_GID_SUPPORT = False
try:
import zlib
except ImportError:
zlib = None
try:
import zipfile
ZIP_SUPPORT = True
except ImportError:
ZIP_SUPPORT = find_executable('zip')
class TestShutil(unittest.TestCase):
def setUp(self):
super(TestShutil, self).setUp()
self.tempdirs = []
def tearDown(self):
super(TestShutil, self).tearDown()
while self.tempdirs:
d = self.tempdirs.pop()
shutil.rmtree(d, os.name in ('nt', 'cygwin'))
def write_file(self, path, content='xxx'):
"""Writes a file in the given path.
path can be a string or a sequence.
"""
if isinstance(path, (list, tuple)):
path = os.path.join(*path)
f = open(path, 'w')
try:
f.write(content)
finally:
f.close()
def mkdtemp(self):
"""Create a temporary directory that will be cleaned up.
Returns the path of the directory.
"""
d = tempfile.mkdtemp()
self.tempdirs.append(d)
return d
def test_rmtree_errors(self):
# filename is guaranteed not to exist
filename = tempfile.mktemp()
self.assertRaises(OSError, shutil.rmtree, filename)
# See bug #1071513 for why we don't run this on cygwin
# and bug #1076467 for why we don't run this as root.
if (hasattr(os, 'chmod') and sys.platform[:6] != 'cygwin'
and not (hasattr(os, 'geteuid') and os.geteuid() == 0)):
def test_on_error(self):
self.errorState = 0
os.mkdir(TESTFN)
self.childpath = os.path.join(TESTFN, 'a')
f = open(self.childpath, 'w')
f.close()
old_dir_mode = os.stat(TESTFN).st_mode
old_child_mode = os.stat(self.childpath).st_mode
# Make unwritable.
os.chmod(self.childpath, stat.S_IREAD)
os.chmod(TESTFN, stat.S_IREAD)
shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror)
# Test whether onerror has actually been called.
self.assertEqual(self.errorState, 2,
"Expected call to onerror function did not happen.")
# Make writable again.
os.chmod(TESTFN, old_dir_mode)
os.chmod(self.childpath, old_child_mode)
# Clean up.
shutil.rmtree(TESTFN)
def check_args_to_onerror(self, func, arg, exc):
# test_rmtree_errors deliberately runs rmtree
# on a directory that is chmod 400, which will fail.
# This function is run when shutil.rmtree fails.
# 99.9% of the time it initially fails to remove
# a file in the directory, so the first time through
# func is os.remove.
# However, some Linux machines running ZFS on
# FUSE experienced a failure earlier in the process
# at os.listdir. The first failure may legally
# be either.
if self.errorState == 0:
if func is os.remove:
self.assertEqual(arg, self.childpath)
else:
self.assertIs(func, os.listdir,
"func must be either os.remove or os.listdir")
self.assertEqual(arg, TESTFN)
self.assertTrue(issubclass(exc[0], OSError))
self.errorState = 1
else:
self.assertEqual(func, os.rmdir)
self.assertEqual(arg, TESTFN)
self.assertTrue(issubclass(exc[0], OSError))
self.errorState = 2
def test_rmtree_dont_delete_file(self):
# When called on a file instead of a directory, don't delete it.
handle, path = tempfile.mkstemp()
os.fdopen(handle).close()
self.assertRaises(OSError, shutil.rmtree, path)
os.remove(path)
def test_copytree_simple(self):
def write_data(path, data):
f = open(path, "w")
f.write(data)
f.close()
def read_data(path):
f = open(path)
data = f.read()
f.close()
return data
src_dir = tempfile.mkdtemp()
dst_dir = os.path.join(tempfile.mkdtemp(), 'destination')
write_data(os.path.join(src_dir, 'test.txt'), '123')
os.mkdir(os.path.join(src_dir, 'test_dir'))
write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
try:
shutil.copytree(src_dir, dst_dir)
self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt')))
self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir')))
self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir',
'test.txt')))
actual = read_data(os.path.join(dst_dir, 'test.txt'))
self.assertEqual(actual, '123')
actual = read_data(os.path.join(dst_dir, 'test_dir', 'test.txt'))
self.assertEqual(actual, '456')
finally:
for path in (
os.path.join(src_dir, 'test.txt'),
os.path.join(dst_dir, 'test.txt'),
os.path.join(src_dir, 'test_dir', 'test.txt'),
os.path.join(dst_dir, 'test_dir', 'test.txt'),
):
if os.path.exists(path):
os.remove(path)
for path in (src_dir,
os.path.dirname(dst_dir)
):
if os.path.exists(path):
shutil.rmtree(path)
def test_copytree_with_exclude(self):
def write_data(path, data):
f = open(path, "w")
f.write(data)
f.close()
def read_data(path):
f = open(path)
data = f.read()
f.close()
return data
# creating data
join = os.path.join
exists = os.path.exists
src_dir = tempfile.mkdtemp()
try:
dst_dir = join(tempfile.mkdtemp(), 'destination')
write_data(join(src_dir, 'test.txt'), '123')
write_data(join(src_dir, 'test.tmp'), '123')
os.mkdir(join(src_dir, 'test_dir'))
write_data(join(src_dir, 'test_dir', 'test.txt'), '456')
os.mkdir(join(src_dir, 'test_dir2'))
write_data(join(src_dir, 'test_dir2', 'test.txt'), '456')
os.mkdir(join(src_dir, 'test_dir2', 'subdir'))
os.mkdir(join(src_dir, 'test_dir2', 'subdir2'))
write_data(join(src_dir, 'test_dir2', 'subdir', 'test.txt'), '456')
write_data(join(src_dir, 'test_dir2', 'subdir2', 'test.py'), '456')
# testing glob-like patterns
try:
patterns = shutil.ignore_patterns('*.tmp', 'test_dir2')
shutil.copytree(src_dir, dst_dir, ignore=patterns)
# checking the result: some elements should not be copied
self.assertTrue(exists(join(dst_dir, 'test.txt')))
self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
self.assertTrue(not exists(join(dst_dir, 'test_dir2')))
finally:
if os.path.exists(dst_dir):
shutil.rmtree(dst_dir)
try:
pat
|
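check_args_to_onerror above documents the shutil.rmtree onerror protocol: the handler receives the failing function, the path it was called on, and the sys.exc_info() triple. The snippet below is a small standalone sketch of that protocol; the read-only retry is a common idiom and not part of the test suite.

```python
# Standalone sketch (not part of test_shutil.py) of the onerror protocol
# described above: onerror(func, path, exc_info) is called for each failure.
import os
import shutil
import stat
import tempfile

def force_remove_readonly(func, path, exc_info):
    # Common idiom: clear the read-only bit and retry the failing call.
    os.chmod(path, stat.S_IWRITE)
    func(path)

# Build a directory containing a read-only file, then remove the tree.
d = tempfile.mkdtemp()
child = os.path.join(d, "readonly.txt")
open(child, "w").close()
os.chmod(child, stat.S_IREAD)
shutil.rmtree(d, onerror=force_remove_readonly)
assert not os.path.exists(d)
```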
vbelakov/h2o | py/testdir_single_jvm/test_parse_mnist_fvec.py | Python | apache-2.0 | 2,023 | 0.006426 |
import unittest
import random, sys, time, re
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_cmd, h2o_browse as h2b, h2o_import as h2i, h2o_glm, h2o_util, h2o_rf
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
# assume we're at 0xdata with its hdfs namenode
h2o.init(java_heap_GB=14)
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_parse_mnist_A_training(self):
importFolderPath = "mnist"
csvFilelist = [
("mnist_training.csv.gz", 600),
("mnist_training.csv.gz", 600),
]
trial = 0
allDelta = []
for (csvFilename, timeoutSecs) in csvFilelist:
testKey2 = csvFilename + "_" + str(trial) + ".hex"
start = time.time()
parseResult = h2i.import_parse(bucket='home-0xdiag-datasets', path=importFolderPath+"/"+csvFilename,
hex_key=testKey2, timeoutSecs=timeoutSecs)
elapsed = time.time() - start
print "parse end on ", csvFilename, 'took', elapsed, 'seconds',\
"%d pct. of timeout" % ((elapsed*100)/timeoutSecs)
def test_parse_mnist_B_testing(self):
importFolderPath = "mnist"
csvFilelist = [
("mnist_testing.csv.gz", 600),
("mnist_testing.csv.gz", 600),
]
trial = 0
allDelta = []
for (csvFilename, timeoutSecs) in csvFilelist:
testKey2 = csvFilename + "_" + str(trial) + ".hex"
start = time.time()
parseResult = h2i.import_parse(bucket='home-0xdiag-datasets', path=importFolderPath+"/"+csvFilename,
hex_key=testKey2, timeoutSecs=timeoutSecs)
elapsed = time.time() - start
print "parse end on ", csvFilename, 'took', elapsed, 'seconds',\
"%d pct. of timeout" % ((elapsed*100)/timeoutSecs)
if __name__ == '__main__':
h2o.unit_main()
|
dtamayo/rebound | rebound/widget.py | Python | gpl-3.0 | 25,951 | 0.006127 |
shader_code = """
<script id="orbit_shader-vs" type="x-shader/x-vertex">
uniform vec3 focus;
uniform vec3 aef;
uniform vec3 omegaOmegainc;
attribute float lintwopi;
varying float lin;
uniform mat4 mvp;
const float M_PI = 3.14159265359;
void main() {
float a = aef.x;
float e = aef.y;
float f = aef.z+lintwopi;
lin = lintwopi/(M_PI*2.);
if (e>1.){
float theta_max = acos(-1./e);
f = 0.0001-theta_max+1.9998*lin*theta_max;
lin = sqrt(min(0.5,lin));
}
float omega = omegaOmegainc.x;
float Omega = omegaOmegainc.y;
float inc = omegaOmegainc.z;
float r = a*(1.-e*e)/(1. + e*cos(f));
float cO = cos(Omega);
float sO = sin(Omega);
float co = cos(omega);
float so = sin(omega);
float cf = cos(f);
float sf = sin(f);
float ci = cos(inc);
float si = sin(inc);
vec3 pos = vec3(r*(cO*(co*cf-so*sf) - sO*(so*cf+co*sf)*ci),r*(sO*(co*cf-so*sf) + cO*(so*cf+co*sf)*ci),+ r*(so*cf+co*sf)*si);
gl_Position = mvp*(vec4(focus+pos, 1.0));
}
</script>
<script id="orbit_shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying float lin;
void main() {
float fog = max(max(0.,-1.+2.*gl_FragCoord.z),max(0.,1.-2.*gl_FragCoord.z));
gl_FragColor = vec4(1.,1.,1.,sqrt(lin)*(1.-fog));
}
</script>
<script id="point_shader-vs" type="x-shader/x-vertex">
attribute vec3 vp;
uniform mat4 mvp;
//uniform vec4 vc;
//varying vec4 color;
void main() {
gl_PointSize = 15.0;
gl_Position = mvp*vec4(vp, 1.0);
//color = vc;
}
</script>
<script id="point_shader-fs" type="x-shader/x-fragment">
precision mediump float;
//varying vec4 color;
void main() {
vec2 rel = gl_PointCoord.st;
rel.s -=0.5;
rel.t -=0.5;
if (length(rel)>0.25){
gl_FragColor = vec4(0.,0.,0.,0.);
}else{
vec4 cmod = vec4(1.,1.,1.,1.);
float fog = max(max(0.,-1.+2.*gl_FragCoord.z),max(0.,1.-2.*gl_FragCoord.z));
cmod.a*= (1.-fog)*min(1.,1.-4.*(length(rel)/0.25-0.75));
gl_FragColor = cmod;
}
}
</script>
"""
js_code = """
<script>
function compileShader(glr, shaderSource, shaderType) {
// Create the shader object
var shader = glr.createShader(shaderType);
// Set the shader source code.
glr.shaderSource(shader, shaderSource);
// Compile the shader
glr.compileShader(shader);
// Check if it compiled
var success = glr.getShaderParameter(shader, glr.COMPILE_STATUS);
if (!success) {
// Something went wrong during compilation; get the error
throw "could not compile shader:" + glr.getShaderInfoLog(shader);
}
return shader;
}
function createShaderFromScript(glr, scriptId, opt_shaderType) {
// look up the script tag by id.
var shaderScript = document.getElementById(scriptId);
if (!shaderScript) {
throw("*** Error: unknown script element" + scriptId);
}
// extract the contents of the script tag.
var shaderSource = shaderScript.text;
// If we didn't pass in a type, use the 'type' from
// the script tag.
if (!opt_shaderType) {
if (shaderScript.type == "x-shader/x-vertex") {
opt_shaderType = glr.VERTEX_SHADER;
} else if (shaderScript.type == "x-shader/x-fragment") {
opt_shaderType = glr.FRAGMENT_SHADER;
} else if (!opt_shaderType) {
throw("*** Error: shader type not set");
}
}
return compileShader(glr, shaderSource, opt_shaderType);
};
function createProgramFromScripts( glr, vertexShaderId, fragmentShaderId) {
var vertexShader = createShaderFromScript(glr, vertexShaderId, glr.VERTEX_SHADER);
var fragmentShader = createShaderFromScript(glr, fragmentShaderId, glr.FRAGMENT_SHADER);
var program = glr.createProgram();
// attach the shaders.
glr.attachShader(program, vertexShader);
glr.attachShader(program, fragmentShader);
// link the program.
glr.linkProgram(program);
// Check if it linked.
var success = glr.getProgramParameter(program, glr.LINK_STATUS);
if (!success) {
// something went wrong with the link
throw ("program failed to link:" + glr.getProgramInfoLog(program));
}
return program;
}
function quat2mat(A,mat){
var xx = A.x*A.x; var xy = A.x*A.y; var xz = A.x*A.z;
var xw = A.x*A.w; var yy = A.y*A.y; var yz = A.y*A.z;
var yw = A.y*A.w; var zz = A.z*A.z; var zw = A.z*A.w;
mat[0] = 1.-2.*(yy+zz);
mat[1] = 2.*(xy-zw);
mat[2] = 2.*(xz+yw);
mat[4] = 2.*(xy+zw);
mat[5] = 1.-2.*(xx+zz);
mat[6] = 2.*(yz-xw);
mat[8] = 2.*(xz-yw);
mat[9] = 2.*(yz+xw);
mat[10]= 1.-2.*(xx+yy);
mat[3] = mat[7] = mat[11] = mat[12] = mat[13] = mat[14] = 0.; mat[15]= 1.;
}
function multvec(A, B, vecr){
var mat = [0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.];
quat2mat(A,mat);
vecr[0] = mat[0]*B[0] + mat[1]*B[1] + mat[2]*B[2];
vecr[1] = mat[4]*B[0] + mat[5]*B[1] + mat[6]*B[2];
vecr[2] = mat[8]*B[0] + mat[9]*B[1] + mat[10]*B[2];
}
function mattransp(mat){
var matt = [
mat[0], mat[4], mat[8], mat[12],
mat[1], mat[5], mat[9], mat[13],
mat[2], mat[6], mat[10], mat[14],
mat[3], mat[7], mat[11], mat[15]];
return matt;
}
function conjugate(quat){
var cquat = {x:-quat.x, y:-quat.y, z:-quat.z, w:quat.w};
return cquat;
}
function mult(A, B){
var mquat = { x: A.w*B.x + A.x*B.w + A.y*B.z - A.z*B.y,
y: A.w*B.y - A.x*B.z + A.y*B.w + A.z*B.x,
z: A.w*B.z + A.x*B.y - A.y*B.x + A.z*B.w,
w: A.w*B.w - A.x*B.x - A.y*B.y - A.z*B.z};
return mquat;
}
function normalize(quat){
var L = Math.sqrt(quat.x*quat.x + quat.y*quat.y + quat.z*quat.z + quat.w*quat.w);
var nquat = {x:quat.x/L, y:quat.y/L, z:quat.z/L, w:quat.w/L};
return nquat;
}
function matortho(mat, l, r, b, t, n, f){
mat[0] = 2./(r-l); mat[1] = 0.; mat[2] = 0.; mat[3] = -(r+l)/(r-l);
mat[4] = 0.; mat[5] = 2./(t-b); mat[6] = 0.; mat[7] = -(t+b)/(t-b);
mat[8] = 0.; mat[9] = 0.; mat[10] = -2./(f-n); mat[11] = -(f+n)/(f-n);
mat[12] = 0.; mat[13] = 0.; mat[14] = 0.; mat[15] = 1.;
}
function matmult(A,B,C){
for(i=0;i<4;i++){
for(j=0;j<4;j++){
C[i+4*j] = 0.;
for(k=0;k<4;k++){
C[i+4*j] += A[k+4*j]*B[i+4*k];
}}}
}
function startGL(reboundView) {
var canvas = document.getElementById("reboundcanvas-"+reboundView.cid);
if (!canvas){
reboundView.startCount = reboundView.startCount+1;
if (reboundView.startCount>1000){
console.log("Cannot find element.");
}else{
setTimeout(function(){ startGL(reboundView); }, 10);
}
return;
}
var rect = canvas.getBoundingClientRect()
reboundView.ratio = rect.width/rect.height;
reboundView.view = normalize({x:reboundView.orientation[0], y:reboundView.orientation[1], z:reboundView.orientation[2], w:reboundView.orientation[3]});
canvas.addEventListener('mousedown', function() {
reboundView.mouseDown=1;
}, false);
canvas.addEventListener('mouseup', function() {
reboundView.mouseDown=0;
}, false);
canvas.addEventListener('mouseleave', function() {
reboundView.mouseDown=0;
}, false);
canvas.addEventListener('mousemove', function(evt) {
var rect = canvas.getBoundingClientRect()
if (reboundView.mouseDown==1){
reboundView.mouseDown = 2;
reboundView.mouse_x = evt.clientX-rect.left;
reboundView.mouse_y = evt.clientY-rect.top;
return;
}else if (reboundView.mouseDown==2){
var width = rect.width;
var height = rect.height;
var dx = 3.*(evt.clientX-rect.left-reboundView.mouse_x)/width;
var dy = 3.*(evt.clientY-rect.top-reboundView.mouse_y)/height;
reboundView.mouse_x = evt.clientX-rect.left;
reboundView.mouse_y = evt.clientY-rect.top;
if (evt.shiftKey){
reboundView.scale *= (1.+dx+dy);
}else{
|
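The JavaScript quat2mat() above converts a unit quaternion to a rotation matrix. The NumPy restatement below is purely illustrative (it is not rebound code) and mirrors the same matrix entries, assuming the quaternion is given as (x, y, z, w).

```python
# NumPy restatement (illustrative, not rebound code) of the quat2mat()
# conversion in the JavaScript above: a unit quaternion (x, y, z, w) mapped to
# a 3x3 rotation matrix.
import numpy as np

def quat_to_matrix(x, y, z, w):
    xx, yy, zz = x * x, y * y, z * z
    xy, xz, yz = x * y, x * z, y * z
    xw, yw, zw = x * w, y * w, z * w
    return np.array([
        [1. - 2. * (yy + zz), 2. * (xy - zw),       2. * (xz + yw)],
        [2. * (xy + zw),      1. - 2. * (xx + zz),  2. * (yz - xw)],
        [2. * (xz - yw),      2. * (yz + xw),       1. - 2. * (xx + yy)],
    ])

# The identity quaternion maps to the identity matrix.
assert np.allclose(quat_to_matrix(0., 0., 0., 1.), np.eye(3))
```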
fmartingr/iosfu | setup.py | Python | mit | 1,441 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import re
import os
import sys
def get_version(package):
"""
Return package version as listed in `__version__` in `__init__.py`.
"""
init_py = open(os.path.join(package, '__init__.py')).read()
return re.search(
"^__version__ = ['\"]([^'\"]+)['\"]",
init_py, re.MULTILINE).group(1)
package = 'iosfu'
version = get_version(package)
if sys.argv[-1] == 'publish':
os.system("python setup.py sdist upload")
args = {'version': version}
print("You probably want to also tag the version now:")
print(" git tag -a %(version)s -m 'version %(version)s'" % args)
print(" git push --tags")
sys.exit()
setup(
name='iosfu',
version=version,
url='http://github.com/fmartingr/iosfu',
license='MIT',
description='iOS Forensics Utility',
author='Felipe Martin',
author_email='[email protected]',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=open('requirements.txt').read().split('\n'),
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Security',
]
)
|
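get_version() in the setup.py above reads the version string out of the package's `__init__.py` with a regex instead of importing the package. The snippet below illustrates that regex on a stand-in string; the hypothetical `iosfu/__init__.py` contents are an assumption.

```python
# Illustrative use (not iosfu code) of the regex from get_version() above.
# A hypothetical iosfu/__init__.py might contain just:  __version__ = '0.1.0'
import re

init_py = "__version__ = '0.1.0'\n"   # stands in for open('iosfu/__init__.py').read()
version = re.search(r"^__version__ = ['\"]([^'\"]+)['\"]",
                    init_py, re.MULTILINE).group(1)
assert version == '0.1.0'
```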
tristan-hunt/UVaProblems | DataStructures/ac.py | Python | gpl-3.0 | 6,305 | 0.00571 |
# Least common ancestor Problem
#http://www.ics.uci.edu/~eppstein/261/BenFar-LCA-00.pdf
# http://code.activestate.com/recipes/498243-finding-eulerian-path-in-undirected-graph/
# http://codereview.stackexchange.com/questions/104074/eulerian-tour-in-python
# Store Arrays: Parents: P[i] is the parent of i
# Weights: W[i] is the length of tunnel i
# Linearize the tree:
# 1. Store nodes visited on an Eulerian tour of the tree: E[i] O(n)
# 2. Node level: distance from the root. Compute L[i] O(n)
# 3. Representative of a node: first visit in the Eulerian tour. Compute R[i]. O(n)
from collections import defaultdict
import sys
class Graph:
def __init__(self,vertices):
self.V = vertices #No. of vertices
self.weight = [0] * self.V # An array. weight[i] = length of tunnel from node i to its parent
self.parent = [0] * self.V # Another array. Parent[i] = parent hill of hill i.
self.children = defaultdict(list)
self.level = [-1]*self.V # Distance from the node to the root
self.E = list() # nodes visited on the Eulerian tour of the tree
self.L = list()
self.R = list() # First visit of i on the Eulerian tour
self.RMQ = dict()
self.depth = 0
self.graph = defaultdict(list)
def addEdge(self, u, v):
self.graph[u].append(v)
self.graph[v].append(u)
def eulerTour(self, current):
queue = [current]
self.E = [current]
current = self.graph[current].pop()
while(queue):
if self.graph[current]:
queue.append(current)
current = self.graph[current].pop()
else:
current = queue.pop()
self.E.append(current)
#print(self.E)
def findDepth(self, curr, level):
self.level[curr] = level
for v in self.children[curr]:
if self.level[v] == -1 :
self.findDepth(v, level+1)
self.level
def makeR(self):
"""
Create array R
"""
for v in range(0, self.V):
self.R.append(self.E.index(v))
def rmq(self, L, j, k):
"""
Return the index of the minimum value of self.L between j and k.
First try with DP?
"""
if (j, k) in self.RMQ:
return self.RMQ[(j, k)]
if (j+1 == k):
if self.L[j] < self.L[k]:
self.RMQ[(j, k)] = j
return j
else:
self.RMQ[(j, k)] = k
return k
for i in range(j+1, k):
left = self.rmq(L, j, i)
right = self.rmq(L, i, k)
if (L[left] < L[right]):
self.RMQ[(j, k)] = left
return left
else:
self.RMQ[(j, k)] = right
return right
def lca(self, u, v):
"""
The nodes in the Euler tour between the first visits to u and v are E[R[u], .... E[R[v]]
The shallowest node in this subtour is at index RMQ(R[u], R[v]) (since L records the level)
The node at this position is E[RMQ(R[u], R[v])]
"""
j = self.R[u]
k = self.R[v]
if j > k:
return(self.lca(v, u))
i = self.rmq(self.L, j, k)
return i
def WStr(self):
string = "W:"
for i in range(0, len(self.weight)):
string += " {}".format(self.weight[i])
return string
def RStr(self):
string = "R:"
for i in range(0, len(self.R)):
string += " {}".format(self.R[i])
return string
def LevelStr(self):
string = "L:"
for i in range(0, len(self.L)):
string += " {}".format(self.L[i])
return string
def EulerStr(self):
string = "E:"
for i in range(0, len(self.E)):
string += " {}".format(self.E[i])
return string
def parentsStr(self):
string = "parents: \n"
for v in range(0, self.V):
string += "{}: {}, w:{}\n".format(v, self.parent[v], self.weight[v])
return string
def childrenStr(self):
string = "children: \n"
for v in range(0, self.V):
string += "{}:".format(v)
for c in range(0, len(self.children[v])):
string += " {}".format(self.children[v][c])
string += "\n"
return string
def load():
V = int(next(sys.stdin))
#sys.stdout.write("V: {}\n".format(V))
i = 1
while(V != 0):
g = Graph(V)
while(i<V):
line = next(sys.stdin).split()
parent = int(line[0])
weight = int(line[1])
g.addEdge(i, parent)
g.parent[i] = parent
g.children[parent].append(i)
g.weight[i] = weight
i = i + 1
Q = int(next(sys.stdin))
queries = list()
i = 0
while(i < Q):
line = next(sys.stdin).split()
q1 = int(line[0])
q2 = int(line[1])
queries.append((q1, q2))
i = i + 1
yield(g, queries)
V = int(next(sys.stdin))
i = 1
for (g, q) in load():
g.eulerTour(0)
try:
g.findDepth(0, 0)
except Exception:
quit()
for e in g.E:
g.L.append(g.level[e])
g.makeR()
for i in range(0, g.V-1):
for j in range(1, g.V):
g.lca(j, i)
first = 0
for i in range(0, len(q)):
v = q[i][0]
w = q[i][1]
i = g.lca(v, w)
ancestor = g.E[i]
path_length = 0
curr = v
while(curr != ancestor):
child = curr
parent = g.parent[curr]
parent_level = g.L[g.R[parent]]
path_length = path_length + g.weight[curr]
curr = parent
curr = w
while(curr != ancestor):
child = curr
parent = g.parent[curr]
parent_level = g.L[g.R[parent]]
path_length = path_length + g.weight[curr]
curr = parent
if first == 0:
sys.stdout.write("{}".format(path_length))
first = 1
else:
sys.stdout.write(" {}".format(path_length))
sys.stdout.write("\n")
|
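The comments at the top of ac.py above describe linearizing the tree into an Euler tour E, a depth array L, and a first-occurrence array R, after which LCA(u, v) is found by a range-minimum query on L. The fragment below is a small standalone illustration of those arrays for a three-node tree rooted at 0 with children 1 and 2; it is not part of ac.py.

```python
# Standalone illustration (not part of ac.py) of the E/L/R linearization
# described above, for the tree 0 -> {1, 2} rooted at node 0.
E = [0, 1, 0, 2, 0]                    # nodes visited on an Euler tour
L = [0, 1, 0, 1, 0]                    # depth of each visited node
R = [E.index(v) for v in range(3)]     # first occurrence of each node: [0, 1, 3]

def lca(u, v):
    # LCA(u, v) is the shallowest node between the first visits to u and v.
    j, k = sorted((R[u], R[v]))
    i = min(range(j, k + 1), key=lambda idx: L[idx])
    return E[i]

assert lca(1, 2) == 0
assert lca(0, 2) == 0
```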
max-posedon/telepathy-python | examples/tubeconn.py | Python | lgpl-2.1 | 3,866 | 0.001035 |
# This should eventually land in telepathy-python, so has the same license:
# Copyright (C) 2007 Collabora Ltd. <http://www.collabora.co.uk/>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
__all__ = ('TubeConnection',)
__docformat__ = 'reStructuredText'
import logging
from dbus.connection import Connection
from dbus import PROPERTIES_IFACE
from telepathy.interfaces import CHANNEL_TYPE_DBUS_TUBE
logger = logging.getLogger('telepathy.tubeconn')
class TubeConnection(Connection):
def __new__(cls, conn, tube, address, group_iface=None, mainloop=None):
self = super(TubeConnection, cls).__new__(cls, address,
mainloop=mainloop)
self._tube = tube
self.participants = {}
self.bus_name_to_handle = {}
self._mapping_watches = []
if group_iface is None:
method = conn.GetSelfHandle
else:
method = group_iface.GetSelfHandle
method(reply_handler=self._on_get_self_handle_reply,
error_handler=self._on_get_self_handle_error)
return self
def _on_get_self_handle_reply(self, handle):
self.self_handle = handle
match = self._tube[CHANNEL_TYPE_DBUS_TUBE].connect_to_signal('DBusNamesChanged',
self._on_dbus_names_changed)
self._tube[PROPERTIES_IFACE].Get(CHANNEL_TYPE_DBUS_TUBE, 'DBusNames',
reply_handler=self._on_get_dbus_names_reply,
error_handler=self._on_get_dbus_names_error)
self._dbus_names_changed_match = match
def _on_get_self_handle_error(self, e):
logging.basicConfig()
logger.error('GetSelfHandle failed: %s', e)
def close(self):
self._dbus_names_changed_match.remove()
self._on_dbus_names_changed((), self.participants.keys())
super(TubeConnection, self).close()
def _on_get_dbus_names_reply(self, names):
self._on_dbus_names_changed(names, ())
def _on_get_dbus_names_error(self, e):
logging.basicConfig()
logger.error('Get DBusNames property failed: %s', e)
def _on_dbus_names_changed(self, added, removed):
for handle, bus_name in added.items():
if handle == self.self_handle:
# I've just joined - set my unique name
self.set_unique_name(bus_name)
self.participants[handle] = bus_name
self.bus_name_to_handle[bus_name] = handle
# call the callback while the removed people are still in
# participants, so their bus names are available
for callback in self._mapping_watches:
callback(added, removed)
for handle in removed:
bus_name = self.participants.pop(handle, None)
self.bus_name_to_handle.pop(bus_name, None)
def watch_participants(self, callback):
self._mapping_watches.append(callback)
if self.participants:
# GetDBusNames already returned: fake a participant add event
# immediately
added = []
for k, v in self.participants.iteritems():
added.append((k, v))
callback(added, [])
|
ClimbsRocks/scikit-learn | sklearn/model_selection/_search.py | Python | bsd-3-clause | 44,853 | 0.000067 |
"""
The :mod:`sklearn.model_selection._search` includes utilities to fine-tune the
parameters of an estimator.
"""
from __future__ import print_function
from __future__ import division
# Author: Alexandre Gramfort <[email protected]>,
# Gael Varoquaux <[email protected]>
# Andreas Mueller <[email protected]>
# Olivier Grisel <[email protected]>
# License: BSD 3 clause
from abc import ABCMeta, abstractmethod
from collections import Mapping, namedtuple, Sized, defaultdict
from functools import partial, reduce
from itertools import product
import operator
import warnings
import numpy as np
from ..base import BaseEstimator, is_classifier, clone
from ..base import MetaEstimatorMixin
from ._split import check_cv
from ._validation import _fit_and_score
from ..exceptions import NotFittedError
from ..externals.joblib import Parallel, delayed
from ..externals import six
from ..utils import check_random_state
from ..utils.fixes import sp_version
from ..utils.fixes import rankdata
from ..utils.random import sample_without_replacement
from ..utils.validation import indexable, check_is_fitted
from ..utils.metaestimators import if_delegate_has_method
from ..metrics.scorer import check_scoring
__all__ = ['GridSearchCV', 'ParameterGrid', 'fit_grid_point',
'ParameterSampler', 'RandomizedSearchCV']
class ParameterGrid(object):
"""Grid of parameters with a discrete number of values for each.
Can be used to iterate over parameter value combinations with the
Python built-in function iter.
Read more in the :ref:`User Guide <search>`.
Parameters
----------
param_grid : dict of string to sequence, or sequence of such
The parameter grid to explore, as a dictionary mapping estimator
parameters to sequences of allowed values.
An empty dict signifies default parameters.
A sequence of dicts signifies a sequence of grids to search, and is
useful to avoid exploring parameter combinations that make no sense
or have no effect. See the examples below.
Examples
--------
>>> from sklearn.model_selection import ParameterGrid
>>> param_grid = {'a': [1, 2], 'b': [True, False]}
>>> list(ParameterGrid(param_grid)) == (
... [{'a': 1, 'b': True}, {'a': 1, 'b': False},
... {'a': 2, 'b': True}, {'a': 2, 'b': False}])
True
>>> grid = [{'kernel': ['linear']}, {'kernel': ['rbf'], 'gamma': [1, 10]}]
>>> list(ParameterGrid(grid)) == [{'kernel': 'linear'},
... {'kernel': 'rbf', 'gamma': 1},
... {'kernel': 'rbf', 'gamma': 10}]
True
>>> ParameterGrid(grid)[1] == {'kernel': 'rbf', 'gamma': 1}
True
See also
--------
:class:`GridSearchCV`:
Uses :class:`ParameterGrid` to perform a full parallelized parameter
search.
"""
def __init__(self, param_grid):
if isinstance(param_grid, Mapping):
# wrap dictionary in a singleton list to support either dict
# or list of dicts
param_grid = [param_grid]
self.param_grid = param_grid
def __iter__(self):
"""Iterate over the points in the grid.
Returns
-------
params : iterator over dict of string to any
Yields dictionaries mapping each estimator parameter to one of its
allowed values.
"""
for p in self.param_grid:
# Always sort the keys of a dictionary, for reproducibility
items = sorted(p.items())
if not items:
yield {}
else:
keys, values = zip(*items)
for v in product(*values):
params = dict(zip(keys, v))
yield params
def __len__(self):
"""Number of points on the grid."""
# Product function that can handle iterables (np.product can't).
product = partial(reduce, operator.mul)
return sum(product(len(v) for v in p.values()) if p else 1
for p in self.param_grid)
def __getitem__(self, ind):
"""Get the parameters that would be ``ind``th in iteration
Parameters
----------
ind : int
The iteration index
Returns
-------
params : dict of string to any
Equal to list(self)[ind]
"""
# This is used to make discrete sampling without replacement memory
# efficient.
for sub_grid in self.param_grid:
# XXX: could memoize information used here
if not sub_grid:
if ind == 0:
return {}
else:
ind -= 1
continue
# Reverse so most frequent cycling parameter comes first
keys, values_lists = zip(*sorted(sub_grid.items())[::-1])
sizes = [len(v_list) for v_list in values_lists]
total = np.product(sizes)
if ind >= total:
# Try the next grid
ind -= total
else:
out = {}
for key, v_list, n in zip(keys, values_lists, sizes):
ind, offset = divmod(ind, n)
out[key] = v_list[offset]
return out
raise IndexError('ParameterGrid index out of range')
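# Illustrative sketch, not from the original module: __len__ and __getitem__ above let
# ParameterGrid behave like a lazily materialised list, so indexing agrees with the
# iteration order without expanding the whole grid in memory.
#   grid = ParameterGrid({'a': [1, 2], 'b': [True, False]})
#   assert len(grid) == 4
#   assert all(grid[i] == list(grid)[i] for i in range(len(grid)))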
class ParameterSampler(object):
"""Generator on parameters sampled from given distributions.
Non-deterministic iterable over random candidate combinations for hyper-
parameter search. If all parameters are presented as a list,
sampling without replacement is performed. If at least one parameter
is given as a distribution, sampling with replacement is used.
It is highly recommended to use continuous distributions for continuous
parameters.
Note that before SciPy 0.16, the ``scipy.stats.distributions`` do not
accept a custom RNG instance and always use the singleton RNG from
``numpy.random``. Hence setting ``random_state`` will not guarantee a
deterministic iteration whenever ``scipy.stats`` distributions are used to
define the parameter search space. Deterministic behavior is however
guaranteed from SciPy 0.16 onwards.
Read more in the :ref:`User Guide <search>`.
Parameters
----------
param_distributions : dict
Dictionary where the keys are parameters and values
are distributions from which a parameter is to be sampled.
Distributions either have to provide a ``rvs`` function
to sample from them, or can be given as a list of values,
where a uniform distribution is assumed.
n_iter : integer
Number of parameter settings that are produced.
random_state : int or RandomState
Pseudo random number generator state used for random uniform sampling
from lists of possible values instead of scipy.stats distributions.
Returns
-------
params : dict of string to any
        **Yields** dictionaries mapping each estimator parameter to
        a sampled value.
Examples
--------
>>> from sklearn.model_selection import ParameterSampler
>>> from scipy.stats.distributions import expon
>>> import numpy as np
>>> np.random.seed(0)
>>> param_grid = {'a':[1, 2], 'b': expon()}
>>> param_list = list(ParameterSampler(param_grid, n_iter=4))
>>> rounded_list = [dict((k, round(v, 6)) for (k, v) in d.items())
... for d in param_list]
>>> rounded_list == [{'b': 0.89856, 'a': 1},
... {'b': 0.923223, 'a': 1},
... {'b': 1.878964, 'a': 2},
... {'b': 1.038159, 'a': 2}]
True
"""
def __init__(self, param_distributions, n_iter, random_state=None):
self.param_distributions = param_distributions
self.n_iter = n_iter
self.random_state = random_state
def __iter__(self):
# check if all distributions are given as lists
# in this case we want to sample without replacement
        all_lists = all(not hasattr(v, "rvs")
                        for v in self.param_distributions.values())  # list-valued entries have no rvs() sampler
BenDoan/code_court | code_court/courthouse/views/admin/admin.py | Python | mit | 497 | 0
from flask import current_app, Blueprint, render_template
import util
from database import db_uri
admin = Blueprint("admin", __name__, template_folder="templates")
@admin.route("/", methods=["GET"])
@util.login_required("operator")
def index():
"""
The index page for the admin interface, contains
a list of links to other admin pages
"""
info = {"Databas
|
e URI"
|
: db_uri, "Run Mode": current_app.config["RUNMODE"]}
return render_template("admin_index.html", info=info)
hetajen/vnpy161 | vn.trader/dataRecorder/drEngine.py | Python | mit | 11,808 | 0.008268
# encoding: UTF-8
'''
本文件中实现了行情数据记录引擎,用于汇总TICK数据,并生成K线插入数据库。
使用DR_setting.json来配置需要收集的合约,以及主力合约代码。
History
<id> <author> <description>
2017050300 hetajen Bat[Auto-CTP连接][Auto-Symbol订阅][Auto-DB写入][Auto-CTA加载]
2017050301 hetajen DB[CtaTemplate增加日线bar数据获取接口][Mongo不保存Tick数据][新增数据来源Sina]
2017051500 hetajen 夜盘tick|bar数据增加tradingDay字段,用于指明夜盘tick|bar数据的真实交易日
2017052500 hetajen DB[增加:5分钟Bar数据的记录、存储和获取]
'''
import json
import os
import copy
from collections import OrderedDict
from datetime import datetime, timedelta
from Queue import Queue
from threading import Thread
from eventEngine import *
from vtGateway import VtSubscribeReq, VtLogData
from drBase import *
from vtFunction import todayDate
from language import text
'''2017050300 Add by hetajen begin'''
'''2017052500 Add by hetajen begin'''
from ctaHistoryData import XH_HistoryDataEngine, HistoryDataEngine
'''2017052500 Add by hetajen end'''
'''2017050300 Add by hetajen end'''
########################################################################
class DrEngine(object):
"""数据记录引擎"""
settingFileName = 'DR_setting.json'
path = os.path.abspath(os.path.dirname(__file__))
settingFileName = os.path.join(path, settingFileName)
#----------------------------------------------------------------------
def __init__(self, mainEngine, eventEngine):
"""Constructor"""
self.mainEngine = mainEngine
self.eventEngine = eventEngine
# 当前日期
self.today = todayDate()
# 主力合约代码映射字典,key为具体的合约代码(如IF1604),value为主力合约代码(如IF0000)
self.activeSymbolDict = {}
# Tick对象字典
self.tickDict = {}
# K线对象字典
self.barDict = {}
# 负责执行数据库插入的单独线程相关
self.active = False # 工作状态
self.queue = Queue() # 队列
self.thread = Thread(target=self.run) # 线程
# 载入设置,订阅行情
self.loadSetting()
#----------------------------------------------------------------------
def loadSetting(self):
"""载入设置"""
with open(self.settingFileName) as f:
drSetting = json.load(f)
# 如果working设为False则不启动行情记录功能
working = drSetting['working']
if not working:
return
if 'tick' in drSetting:
l = drSetting['tick']
for setting in l:
symbol = setting[0]
vtSymbol = symbol
req = VtSubscribeReq()
req.symbol = setting[0]
# 针对LTS和IB接口,订阅行情需要交易所代码
if len(setting)>=3:
req.exchange = setting[2]
vtSymbol = '.'.join([symbol, req.exchange])
# 针对IB接口,订阅行情需要货币和产品类型
if len(setting)>=5:
req.currency = setting[3]
req.productClass = setting[4]
self.mainEngine.subscribe(req, setting[1])
drTick = DrTickData() # 该tick实例可以用于缓存部分数据(目前未使用)
self.tickDict[vtSymbol] = drTick
if 'bar' in drSetting:
l = drSetting['bar']
for setting in l:
symbol = setting[0]
vtSymbol = symbol
req = VtSubscribeReq()
req.symbol = symbol
if len(setting)>=3:
req.exchange = setting[2]
vtSymbol = '.'.join([symbol, req.exchange])
if len(setting)>=5:
req.currency = setting[3]
req.productClass = setting[4]
self.mainEngine.subscribe(req, setting[1])
bar = DrBarData()
self.barDict[vtSymbol] = bar
if 'active' in drSetting:
d = drSetting['active']
# 注意这里的vtSymbol对于IB和LTS接口,应该后缀.交易所
for activeSymbol, vtSymbol in d.items():
self.activeSymbolDict[vtSymbol] = activeSymbol
# 启动数据插入线程
self.start()
# 注册事件监听
self.registerEvent()
#----------------------------------------------------------------------
def procecssTickEvent(self, event):
"""处理行情推送"""
tick = event.dict_['data']
vtSymbol = tick.vtSymbol
# 转化Tick格式
drTick = DrTickData()
d = drTick.__dict__
for key in d.keys():
if key != 'datetime':
d[key] = tick.__getattribute__(key)
drTick.datetime = datetime.strptime(' '.join([tick.date, tick.time]), '%Y%m%d %H:%M:%S.%f')
# 更新Tick数据
if vtSymbol in self.tickDict:
'''2017050301 Delete by hetajen begin'''
# self.insertData(TICK_DB_NAME, vtSymbol, drTick)
#
# if vtSymbol in self.activeSymbolDict:
# activeSymbol = self.activeSymbolDict[vtSymbol]
# self.insertData(TICK_DB_NAME, activeSymbol, drTick)
'''2017050301 Delete by hetajen end'''
# 发出日志
self.writeDrLog(text.TICK_LOGGING_MESSAGE.format(symbol=drTick.vtSymbol,
time=drTick.time,
last=drTick.lastPrice,
bid=drTick.bidPrice1,
ask=drTick.askPrice1))
# 更新分钟线数据
if vtSymbol in self.barDict:
bar = self.barDict[vtSymbol]
# 如果第一个TICK或者新的一分钟
if not bar.datetime or bar.datetime.minute != drTick.datetime.minute:
if bar.vtSymbol:
'''2017050301 Delete by hetajen begin'''
# newBar = copy.copy(bar)
# self.insertData(MINUTE_DB_NAME, vtSymbol, newBar)
# if vtSymbol in self.activeSymbolDict:
# activeSymbol = self.activeSymbolDict[vtSymbol]
# self.insertData(MINUTE_DB_NAME, activeSymbol, newBar)
'''2017050301 Delete by hetajen end'''
self.writeDrLog(text.BAR_LOGGING_MESSAGE.format(symbol=bar.vtSymbol,
time=bar.time,
open=bar.open,
high=bar.high,
low=bar.low,
                                                                    close=bar.close))
bar.vtSymbol = drTick.vtSymbol
bar.symbol = drTick.symbol
bar.exchange = drTick.exchange
bar.open = drTick.lastPrice
bar.high = drTick.lastPrice
bar.low = drTick.lastPrice
            bar.close = drTick.lastPrice
'''2017051500 Add by hetajen begin'''
bar.tradingDay = drTick.tradingDay
bar.actionDay = drTick.actionDay
'''2017051500 Add by hetajen end'''
bar.date = drTick.date
bar.time = drTick.time
bar.datetime = drTick.datetime
bar.volume = drTick.volume
bar.openInterest = drTick.openInterest
# 否则继续累加新的K线
else:
ghoshabhi/Multi-User-Blog | utility/filters.py | Python | mit | 230 | 0.008696
from models import Likes
def filterKey(key):
return key.id()
def showCount(post_key):
like_obj = Likes.query(Likes.post == post_key).get()
if like_obj:
return like_obj.like_count
else:
return "0"
Yukinoshita47/Yuki-Chan-The-Auto-Pentest | Module/Spaghetti/modules/discovery/AdminInterfaces.py | Python | mit | 1,102 | 0.040835
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Spaghetti: Web Server Security Scanner
#
# @url: https://github.com/m4ll0k/Spaghetti
# @author: Momo Outaadi (M4ll0k)
# @license: See the file 'doc/LICENSE'
from lib.net import http
from lib.utils import printer
from lib.net import utils
class AdminInterfaces():
def __init__(self,url,agent,proxy,redirect):
self.url = url
self.printer = printer.Printer()
self.http = http.Http(agent=agent,proxy=proxy,redirect=redirect)
self.check = utils.Checker()
def Run(self):
info = {
'name':'Common administration interfaces',
'author':'Momo Outaadi (M4ll0k)',
'description':'Access to administration interfaces panel'
}
dbadmin = open('data/AdminPanels.txt','rb')
dbfiles = list([x.split('\n') for x in dbadmin])
for x in dbfiles:
try:
resp = self.http.Send(self.check.Path(self.url,x[0]))
if resp._content and resp.status_code == 200:
if resp.url == self.check.Path(self.url,x[0]):
self.printer.plus('Admin interface: %s'%(resp.url))
break
else:
pass
except Exception as ERROR:
pass
xiligey/xiligey.github.io | code/2.py | Python | apache-2.0 | 1,442 | 0.00208
"""This example follows the simple text document Pipeline illustrated in the figures
above.
"""
from pyspark.ml import Pipeline
from pyspark.ml.classification import LogisticRegression
from pyspark.ml.feature import HashingTF, Tokenizer
from pyspark.sql import SparkSession
# The original snippet assumes an already-created `spark` session; one is created here
# (an assumption) so the example runs standalone.
spark = SparkSession.builder.appName("simple-text-pipeline").getOrCreate()
# Prepare training documents from a list of (id, text, label) tuples.
training = spark.createDataFrame([
(0, "a b c d e spark", 1.0),
(1, "b d", 0.0),
(2, "spark f g h", 1.0),
(3, "hadoop mapreduce", 0.0)
], ["id", "text", "label"])
# Configure an ML pipeline, which consists of three stages: tokenizer, hashingTF, and lr.
tokenizer = Tokenizer(inputCol="text", outputCol="words")
hashingTF = HashingTF(inputCol=tokenizer.getOutputCol(), outputCol="features")
lr = LogisticRegression(maxIter=10, regParam=0.001)
pipeline = Pipeline(stages=[tokenizer, hashingTF, lr])
# Fit the pipeline to training documents.
model = pipeline.fit(training)
# Prepare test documents, which are unlabeled (id, text) tuples.
test = spark.createDataFrame([
(4, "spark i j k"),
(5, "l m n"),
(6, "spark hadoop spark"),
(7, "apache hadoop")
], ["id", "text"])
# Make predictions on test documents and print columns of interest.
prediction = model.transform(test)
selected = prediction.select("id", "text", "probability", "prediction")
for row in selected.collect():
rid, text, prob, prediction = row
print("(%d, %s) --> prob=%s, prediction=%f" % (rid, text, str(prob), prediction))
NetApp/manila | manila/share/utils.py | Python | apache-2.0 | 3,138 | 0
# Copyright (c) 2012 OpenStack Foundation
# Copyright (c) 2015 Rushil Chugh
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Share-related Utilities and helpers."""
from manila.common import constants
DEFAULT_POOL_NAME = '_pool0'
def extract_host(host, level='backend', use_default_pool_name=False):
"""Extract Host, Backend or Pool information from host string.
:param host: String for host, which could include host@backend#pool info
:param level: Indicate which level of information should be extracted
from host string. Level can be 'host', 'backend', 'pool',
or 'backend_name', default value is 'backend'
:param use_default_pool_name: This flag specifies what to do
if level == 'pool' and there is no 'pool' info
encoded in host string. default_pool_name=True
will return DEFAULT_POOL_NAME, otherwise it will
return None. Default value of this parameter
is False.
:return: expected level of information
For example:
        host = 'HostA@BackendB#PoolC'
ret = extract_host(host, 'host')
# ret is 'HostA'
ret = extract_host(host, 'backend')
# ret is 'HostA@BackendB'
ret = extract_host(host, 'pool')
# ret is 'PoolC'
ret = extract_host(host, 'backend_name')
# ret is 'BackendB'
host = 'HostX@BackendY'
ret = extract_host(host, 'pool')
# ret is None
ret = extract_host(host, 'pool', True)
# ret is '_pool0'
"""
if level == 'host':
# Make sure pool is not included
hst = host.split('#')[0]
return hst.split('@')[0]
if level == 'backend_name':
hst = host.split('#')[0]
return hst.split('@')[1]
elif level == 'backend':
return host.split('#')[0]
elif level == 'pool':
lst = host.split('#')
if len(lst) == 2:
return lst[1]
elif use_default_pool_name is True:
return DEFAULT_POOL_NAME
else:
return None
def append_host(host, pool):
"""Encode pool into host info."""
if not host or not pool:
return host
new_host = "#".join([host, pool])
return new_host
def get_active_replica(replica_list):
"""Returns the first 'active' replica in the list of replicas provided."""
for replica in replica_list:
if replica['replica_state'] == constants.REPLICA_STATE_ACTIVE:
return replica
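# Illustrative sketch, not from the original module: append_host and extract_host
# round-trip a pool name through the host-string encoding described above.
#   full = append_host('HostA@BackendB', 'PoolC')  # -> 'HostA@BackendB#PoolC'
#   extract_host(full, 'pool')                     # -> 'PoolC'
#   extract_host(full, 'backend')                  # -> 'HostA@BackendB'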
Stvad/anki | aqt/sync.py | Python | agpl-3.0 | 18,494 | 0.00173
# Copyright: Damien Elmes <[email protected]>
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import socket
import time
import traceback
import gc
from aqt.qt import *
import aqt
from anki import Collection
from anki.sync import Syncer, RemoteServer, FullSyncer, MediaSyncer, \
RemoteMediaServer
from anki.hooks import addHook, remHook
from aqt.utils import tooltip, askUserDialog, showWarning, showText, showInfo
# Sync manager
######################################################################
class SyncManager(QObject):
def __init__(self, mw, pm):
QObject.__init__(self, mw)
self.mw = mw
self.pm = pm
def sync(self):
if not self.pm.profile['syncKey']:
auth = self._getUserPass()
if not auth:
return
self.pm.profile['syncUser'] = auth[0]
self._sync(auth)
else:
self._sync()
def _sync(self, auth=None):
# to avoid gui widgets being garbage collected in the worker thread,
# run gc in advance
self._didFullUp = False
self._didError = False
gc.collect()
# create the thread, setup signals and start running
t = self.thread = SyncThread(
self.pm.collectionPath(), self.pm.profile['syncKey'],
            auth=auth, media=self.pm.profile['syncMedia'])
t.event.connect(self.onEvent)
self.label = _("Connectin
|
g...")
self.mw.progress.start(immediate=True, label=self.label)
self.sentBytes = self.recvBytes = 0
self._updateLabel()
self.thread.start()
while not self.thread.isFinished():
self.mw.app.processEvents()
self.thread.wait(100)
self.mw.progress.finish()
if self.thread.syncMsg:
showText(self.thread.syncMsg)
if self.thread.uname:
self.pm.profile['syncUser'] = self.thread.uname
def delayedInfo():
if self._didFullUp and not self._didError:
showInfo(_("""\
Your collection was successfully uploaded to AnkiWeb.
If you use any other devices, please sync them now, and choose \
to download the collection you have just uploaded from this computer. \
After doing so, future reviews and added cards will be merged \
automatically."""))
self.mw.progress.timer(1000, delayedInfo, False)
def _updateLabel(self):
self.mw.progress.update(label="%s\n%s" % (
self.label,
_("%(a)dkB up, %(b)dkB down") % dict(
a=self.sentBytes // 1024,
b=self.recvBytes // 1024)))
def onEvent(self, evt, *args):
pu = self.mw.progress.update
if evt == "badAuth":
tooltip(
_("AnkiWeb ID or password was incorrect; please try again."),
parent=self.mw)
# blank the key so we prompt user again
self.pm.profile['syncKey'] = None
self.pm.save()
elif evt == "corrupt":
pass
elif evt == "newKey":
self.pm.profile['syncKey'] = args[0]
self.pm.save()
elif evt == "offline":
tooltip(_("Syncing failed; internet offline."))
elif evt == "upbad":
self._didFullUp = False
self._checkFailed()
elif evt == "sync":
m = None; t = args[0]
if t == "login":
m = _("Syncing...")
elif t == "upload":
self._didFullUp = True
m = _("Uploading to AnkiWeb...")
elif t == "download":
m = _("Downloading from AnkiWeb...")
elif t == "sanity":
m = _("Checking...")
elif t == "findMedia":
m = _("Syncing Media...")
elif t == "upgradeRequired":
showText(_("""\
Please visit AnkiWeb, upgrade your deck, then try again."""))
if m:
self.label = m
self._updateLabel()
elif evt == "syncMsg":
self.label = args[0]
self._updateLabel()
elif evt == "error":
self._didError = True
showText(_("Syncing failed:\n%s")%
self._rewriteError(args[0]))
elif evt == "clockOff":
self._clockOff()
elif evt == "checkFailed":
self._checkFailed()
elif evt == "mediaSanity":
showWarning(_("""\
A problem occurred while syncing media. Please use Tools>Check Media, then \
sync again to correct the issue."""))
elif evt == "noChanges":
pass
elif evt == "fullSync":
self._confirmFullSync()
elif evt == "send":
# posted events not guaranteed to arrive in order
self.sentBytes = max(self.sentBytes, int(args[0]))
self._updateLabel()
elif evt == "recv":
self.recvBytes = max(self.recvBytes, int(args[0]))
self._updateLabel()
def _rewriteError(self, err):
if "Errno 61" in err:
return _("""\
Couldn't connect to AnkiWeb. Please check your network connection \
and try again.""")
elif "timed out" in err or "10060" in err:
return _("""\
The connection to AnkiWeb timed out. Please check your network \
connection and try again.""")
elif "code: 500" in err:
return _("""\
AnkiWeb encountered an error. Please try again in a few minutes, and if \
the problem persists, please file a bug report.""")
elif "code: 501" in err:
return _("""\
Please upgrade to the latest version of Anki.""")
# 502 is technically due to the server restarting, but we reuse the
# error message
elif "code: 502" in err:
return _("AnkiWeb is under maintenance. Please try again in a few minutes.")
elif "code: 503" in err:
return _("""\
AnkiWeb is too busy at the moment. Please try again in a few minutes.""")
elif "code: 504" in err:
return _("504 gateway timeout error received. Please try temporarily disabling your antivirus.")
elif "code: 409" in err:
return _("Only one client can access AnkiWeb at a time. If a previous sync failed, please try again in a few minutes.")
elif "10061" in err or "10013" in err or "10053" in err:
return _(
"Antivirus or firewall software is preventing Anki from connecting to the internet.")
elif "10054" in err or "Broken pipe" in err:
return _("Connection timed out. Either your internet connection is experiencing problems, or you have a very large file in your media folder.")
elif "Unable to find the server" in err:
return _(
"Server not found. Either your connection is down, or antivirus/firewall "
"software is blocking Anki from connecting to the internet.")
elif "code: 407" in err:
return _("Proxy authentication required.")
elif "code: 413" in err:
return _("Your collection or a media file is too large to sync.")
elif "EOF occurred in violation of protocol" in err:
return _("Error establishing a secure connection. This is usually caused by antivirus, firewall or VPN software, or problems with your ISP.")
elif "certificate verify failed" in err:
return _("Error establishing a secure connection. This is usually caused by antivirus, firewall or VPN software, or problems with your ISP.")
return err
def _getUserPass(self):
d = QDialog(self.mw)
d.setWindowTitle("Anki")
d.setWindowModality(Qt.WindowModal)
vbox = QVBoxLayout()
l = QLabel(_("""\
<h1>Account Required</h1>
A free account is required to keep your collection synchronized. Please \
<a href="%s">sign up</a> for an account, then \
enter your details below.""") %
"https://ankiweb.net/account/login")
l.setOpenExternalLinks(True)
l.setWordWrap(True)
vbox.addWidget(l)
vbox.addSpacing(20)
g = QGridLayout()
osantana/quickstartup | tests/base.py | Python | mit | 2,174 | 0
from pathlib import Path
from django.test import SimpleTestCase
TEST_ROOT_DIR = Path(__file__).parent
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'DIRS': (
str(TEST_ROOT_DIR / "templates"),
),
'OPTIONS': {
'debug': True,
'context_processors': (
"django.contrib.auth.context_processors.auth",
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.media",
"django.template.context_processors.static",
"django.template.context_processors.request",
"django.template.context_processors.tz",
"django.contrib.messages.context_processors.messages",
"quickstartup.context_processors.project_infos",
"quickstartup.context_processors.project_settings",
),
},
},
]
def get_mail_payloads(message):
text = ""
html = ""
for payload in message.message().get_payload():
if payload.get_content_type() == "text/plain":
text = payload.as_string()
if payload.get_content_type() == "text/html":
html = payload.as_string()
return text, html
def check_form_error(response, form_name, field, errors, msg_prefix=''):
test_case = SimpleTestCase()
test_case.assertFormError(response, form_name, field, errors, msg_prefix)
return True
def check_redirects(response, expected_url):
test_case = SimpleTestCase()
test_case.assertRedirects(response, expected_url=expected_url)
return True
def check_template_used(response, template_name):
test_case = SimpleTestCase()
test_case.assertTemplateUsed(response, template_name=template_name)
return True
def check_contains(response, text):
test_case = SimpleTestCase()
test_case.assertContains(response, text=text)
return True
def check_in_html(needle, haystack):
test_case = SimpleTestCase()
test_case.assertInHTML(needle, haystack)
return True
GoogleCloudPlatform/training-data-analyst | quests/data-science-on-gcp-edition1_tf2/07_sparkml_and_bqml/experiment.py | Python | apache-2.0 | 5,377 | 0.015622
from __future__ import print_function
from pyspark.mllib.classification import LogisticRegressionWithLBFGS
from pyspark.mllib.regression import LabeledPoint
from pyspark.sql import SparkSession
from pyspark import SparkContext
import numpy as np
sc = SparkContext('local', 'logistic')
spark = SparkSession \
.builder \
.appName("Logistic regression w/ Spark ML") \
.getOrCreate()
BUCKET='BUCKET_NAME'
# read dataset
traindays = spark.read \
    .option("header", "true") \
.csv('gs://{}/flights/trainday.csv'.format(BUCKET))
traindays.createOrReplaceTempView('traindays')
from pyspark.sql.types import StringType, FloatType, StructType, StructField
header = 'FL_DATE,UNIQUE_CARRIER,AIRLINE_ID,CARRIER,FL_NUM,ORIGIN_AIRPORT_ID,ORIGIN_AIRPORT_SEQ_ID,ORIGIN_CITY_MARKET_ID,ORIGIN,DEST_AIRPORT_ID,DEST_AIRPORT_SEQ_ID,DEST_CITY_MARKET_ID,DEST,CRS_DEP_TIME,DEP_TIME,DEP_DELAY,TAXI_OUT,WHEELS_OFF,WHEELS_ON,TAXI_IN,CRS_ARR_TIME,ARR_TIME,ARR_DELAY,CANCELLED,CANCELLATION_CODE,DIVERTED,DISTANCE,DEP_AIRPORT_LAT,DEP_AIRPORT_LON,DEP_AIRPORT_TZOFFSET,ARR_AIRPORT_LAT,ARR_AIRPORT_LON,ARR_AIRPORT_TZOFFSET,EVENT,NOTIFY_TIME'
def get_structfield(colname):
if colname in ['ARR_DELAY', 'DEP_DELAY', 'DISTANCE', 'TAXI_OUT', 'DEP_AIRPORT_TZOFFSET']:
return StructField(colname, FloatType(), True)
else:
return StructField(colname, StringType(), True)
schema = StructType([get_structfield(colname) for colname in header.split(',')])
#inputs = 'gs://{}/flights/tzcorr/all_flights-00000-*'.format(BUCKET) # 1/30th
inputs = 'gs://{}/flights/tzcorr/all_flights-*'.format(BUCKET) # FULL
flights = spark.read\
.schema(schema)\
.csv(inputs)
flights.createOrReplaceTempView('flights')
# separate training and validation data
from pyspark.sql.functions import rand
SEED=13
traindays = traindays.withColumn("holdout", rand(SEED) > 0.8) # 80% of data is for training
traindays.createOrReplaceTempView('traindays')
# logistic regression
trainquery = """
SELECT
DEP_DELAY, TAXI_OUT, ARR_DELAY, DISTANCE, DEP_TIME, DEP_AIRPORT_TZOFFSET
FROM flights f
JOIN traindays t
ON f.FL_DATE == t.FL_DATE
WHERE
t.is_train_day == 'True' AND
t.holdout == False AND
f.CANCELLED == '0.00' AND
f.DIVERTED == '0.00'
"""
traindata = spark.sql(trainquery).repartition(1000)
def get_category(hour):
if hour < 6 or hour > 20:
return [1, 0, 0] # night
if hour < 10:
return [0, 1, 0] # morning
if hour < 17:
return [0, 0, 1] # mid-day
else:
return [0, 0, 0] # evening
def get_local_hour(timestamp, correction):
import datetime
TIME_FORMAT = '%Y-%m-%d %H:%M:%S'
timestamp = timestamp.replace('T', ' ') # incase different
t = datetime.datetime.strptime(timestamp, TIME_FORMAT)
d = datetime.timedelta(seconds=correction)
t = t + d
#return t.hour # raw
    #theta = np.radians(360 * t.hour / 24.0) # von Mises
#return [np.sin(theta), np.cos(theta)]
return get_category(t.hour) # bucketize
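# Illustrative check, not from the original job: a departure stamped '2015-01-04T18:00:00'
# with a -21600 second (UTC-6) offset is 12:00 local time, so get_category() above
# places it in the mid-day bucket:
#   get_local_hour('2015-01-04T18:00:00', -21600)  # -> [0, 0, 1]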
def to_example(fields):
features = [ \
fields['DEP_DELAY'], \
fields['DISTANCE'], \
fields['TAXI_OUT'], \
]
features.extend(get_local_hour(fields['DEP_TIME'],
fields['DEP_AIRPORT_TZOFFSET']))
#features.extend(fields['origin_onehot'])
return LabeledPoint(\
float(fields['ARR_DELAY'] < 15), #ontime \
features)
index_model = 0
def add_categorical(df, train=False):
from pyspark.ml.feature import OneHotEncoder, StringIndexer
if train:
indexer = StringIndexer(inputCol='ORIGIN',
outputCol='origin_index')
index_model = indexer.fit(df)
indexed = index_model.transform(df)
encoder = OneHotEncoder(inputCol='origin_index',
outputCol='origin_onehot')
return encoder.transform(indexed)
#traindata = add_categorical(traindata, train=True)
examples = traindata.rdd.map(to_example)
lrmodel = LogisticRegressionWithLBFGS.train(examples, intercept=True)
lrmodel.setThreshold(0.7)
# save model
MODEL_FILE='gs://' + BUCKET + '/flights/sparkmloutput/model'
lrmodel.save(sc, MODEL_FILE)
# evaluate model on the heldout data
evalquery = trainquery.replace("t.holdout == False","t.holdout == True")
evaldata = spark.sql(evalquery).repartition(1000)
#evaldata = add_categorical(evaldata)
examples = evaldata.rdd.map(to_example)
labelpred = examples.map(lambda p: (p.label, lrmodel.predict(p.features)))
def eval(labelpred):
'''
data = (label, pred)
data[0] = label
data[1] = pred
'''
cancel = labelpred.filter(lambda data: data[1] < 0.7)
nocancel = labelpred.filter(lambda data: data[1] >= 0.7)
corr_cancel = cancel.filter(lambda data: data[0] == int(data[1] >= 0.7)).count()
corr_nocancel = nocancel.filter(lambda data: data[0] == int(data[1] >= 0.7)).count()
cancel_denom = cancel.count()
nocancel_denom = nocancel.count()
if cancel_denom == 0:
cancel_denom = 1
if nocancel_denom == 0:
nocancel_denom = 1
return {'total_cancel': cancel.count(), \
'correct_cancel': float(corr_cancel)/cancel_denom, \
'total_noncancel': nocancel.count(), \
'correct_noncancel': float(corr_nocancel)/nocancel_denom \
}
print(eval(labelpred))
playpauseandstop/aiohttp | tests/test_signals.py | Python | apache-2.0 | 3,612 | 0
import asyncio
from unittest import mock
import pytest
from multidict import CIMultiDict
from aiohttp.signals import Signal
from aiohttp.test_utils import make_mocked_request
from aiohttp.web import Application, Response
@pytest.fixture
def app():
return Application()
@pytest.fixture
def debug_app():
return Application(debug=True)
def make_request(app, method, path, headers=CIMultiDict()):
return make_mocked_request(method, path, headers, app=app)
async def test_add_signal_handler_not_a_callable(app):
callback = True
app.on_response_prepare.append(callback)
with pytest.raises(TypeError):
await app.on_response_prepare(None, None)
async def test_function_signal_dispatch(app):
signal = Signal(app)
kwargs = {'foo': 1, 'bar': 2}
callback_mock = mock.Mock()
@asyncio.coroutine
def callback(**kwargs):
callback_mock(**kwargs)
signal.append(callback)
await signal.send(**kwargs)
callback_mock.assert_called_once_with(**kwargs)
async def test_function_signal_dispatch2(app):
signal = Signal(app)
args = {'a', 'b'}
kwargs = {'foo': 1, 'bar': 2}
callback_mock = mock.Mock()
@asyncio.coroutine
def callback(*args, **kwargs):
callback_mock(*args, **kwargs)
signal.append(callback)
await signal.send(*args, **kwargs)
callback_mock.assert_called_once_with(*args, **kwargs)
async def test_response_prepare(app):
callback = mock.Mock()
@asyncio.coroutine
def cb(*args, **kwargs):
callback(*args, **kwargs)
app.on_response_prepare.append(cb)
request = make_request(app, 'GET', '/')
response = Response(body=b'')
await response.prepare(request)
callback.assert_called_once_with(request, response)
async def test_non_coroutine(app):
signal = Signal(app)
kwargs = {'foo': 1, 'bar': 2}
callback = mock.Mock()
signal.append(callback)
await signal.send(**kwargs)
callback.assert_called_once_with(**kwargs)
async def test_debug_signal(debug_app):
assert debug_app.debug, "Should be True"
signal = Signal(debug_app)
callback = mock.Mock()
pre = mock.Mock()
post = mock.Mock()
signal.append(callback)
debug_app.on_pre_signal.append(pre)
debug_app.on_post_signal.append(post)
await signal.send(1, a=2)
    callback.assert_called_once_with(1, a=2)
pre.assert_called_once_with(1, 'aiohttp.signals:Signal', 1, a=2)
post.assert_called_once_with(1, 'aiohttp.signals:Signal', 1, a=2)
def test_setitem(app):
signal = Signal(app)
m1 = mock.Mock()
signal.append(m1)
assert signal[0] is m1
m2 = mock.Mock()
signal[0] = m2
assert signal[0] is m2
def test_delitem(app):
signal = Signal(app)
m1 = mock.Mock()
signal.append(m1)
assert len(signal) == 1
del signal[0]
assert len(signal) == 0
def test_cannot_append_to_frozen_signal(app):
signal = Signal(app)
m1 = mock.Mock()
m2 = mock.Mock()
signal.append(m1)
signal.freeze()
with pytest.raises(RuntimeError):
signal.append(m2)
assert list(signal) == [m1]
def test_cannot_setitem_in_frozen_signal(app):
signal = Signal(app)
m1 = mock.Mock()
m2 = mock.Mock()
signal.append(m1)
signal.freeze()
with pytest.raises(RuntimeError):
signal[0] = m2
assert list(signal) == [m1]
def test_cannot_delitem_in_frozen_signal(app):
signal = Signal(app)
m1 = mock.Mock()
signal.append(m1)
signal.freeze()
with pytest.raises(RuntimeError):
del signal[0]
assert list(signal) == [m1]
ooici/coi-services | ion/util/stored_values.py | Python | bsd-2-clause | 1,331 | 0.004508
#!/usr/bin/env python
'''
@author Luke C
@date Mon Mar 25 09:57:59 EDT 2013
@file ion/util/stored_values.py
'''
from pyon.core.exception import NotFound
import gevent
class StoredValueManager(object):
def __init__(self, container):
self.store = container.object_store
def stored_value_cas(self, doc_key, document_updates):
'''
Performs a check and set for a lookup_table in the object store for the given key
'''
try:
doc = self.store.read_doc(doc_key)
except NotFound:
doc_id, rev = self.store.create_doc(document_updates, object_id=doc_key)
return doc_id, rev
except KeyError as e:
if 'http' in e.message:
doc_id, rev = self.store.create_doc(document_updates, object_id=doc_key)
return doc_id, rev
for k,v in document_updates.iteritems():
doc[k] = v
doc_id, rev = self.store.update_doc(doc)
return doc_id, rev
def read_value(self, doc_key):
doc = self.store.read_doc(doc_key)
return doc
def read_value_mult(self, doc_keys, strict=False):
doc_list = self.store.read_doc_mult(doc_keys, strict=strict)
return doc_list
def delete_stored_value(self, doc_key):
self.store.delete_doc(doc_key)
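# Illustrative sketch, not from the original module: the check-and-set call above creates
# the document on first use and merges later updates; `container` is assumed to expose an
# `object_store` as in the constructor.
#   svm = StoredValueManager(container)
#   svm.stored_value_cas('lookup.pressure', {'offset': 0.42})  # created on first call
#   svm.stored_value_cas('lookup.pressure', {'scale': 1.01})   # merged into the same doc
#   svm.read_value('lookup.pressure')                          # contains both keys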
wtsi-hgi/bam2cram-check | checks/stats_checks.py | Python | gpl-3.0 | 11,324 | 0.003621
import os
import subprocess
import logging
import re
from checks import utils
import sys
class RunSamtoolsCommands:
@classmethod
def _run_subprocess(cls, args_list):
proc = subprocess.run(args_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
utils.log_error(args_list, proc.stderr, proc.returncode)
if proc.stderr or proc.returncode != 0:
raise RuntimeError("ERROR running process: %s, error = %s and exit code = %s" % (args_list, proc.stderr, proc.returncode))
return proc.stdout
@classmethod
def run_samtools_quickcheck(cls, fpath):
return cls._run_subprocess(['samtools', 'quickcheck', '-v', fpath])
@classmethod
def get_samtools_flagstat_output(cls, fpath):
return cls._run_subprocess(['samtools', 'flagstat', fpath])
@classmethod
def get_samtools_stats_output(cls, fpath):
return cls._run_subprocess(['samtools', 'stats', fpath])
@classmethod
def get_samtools_version_output(cls):
return cls._run_subprocess(['samtools', '--version'])
class HandleSamtoolsStats:
@classmethod
def _get_stats(cls, stats_fpath):
if stats_fpath and os.path.isfile(stats_fpath):
return utils.read_from_file(stats_fpath)
return None
@classmethod
def _generate_stats(cls, data_fpath):
if not data_fpath or not os.path.isfile(data_fpath):
raise ValueError("Can't generate stats from a non-existing file: %s" % str(data_fpath))
return RunSamtoolsCommands.get_samtools_stats_output(data_fpath)
@classmethod
def _is_stats_file_older_than_data(cls, data_fpath, stats_fpath):
if utils.compare_mtimestamp(data_fpath, stats_fpath) >= 0:
return True
return False
@classmethod
def fetch_stats(cls, fpath, stats_fpath):
if not fpath or not os.path.isfile(fpath):
raise ValueError("You need to give a valid file path if you want the stats")
if os.path.isfile(stats_fpath) and not cls._is_stats_file_older_than_data(fpath, stats_fpath) and \
utils.can_read_file(stats_fpath):
stats = HandleSamtoolsStats._get_stats(stats_fpath)
logging.info("Reading stats from file %s" % stats_fpath)
else:
stats = HandleSamtoolsStats._generate_stats(fpath)
logging.info("Generating stats for file %s" % fpath)
if os.path.isfile(stats_fpath) and cls._is_stats_file_older_than_data(fpath, stats_fpath):
logging.warning("The stats file is older than the actual file, you need to remove/update it. "
"Regenerating the stats, but without saving.")
return stats
@classmethod
def persist_stats(cls, stats, stats_fpath):
if not stats or not stats_fpath:
raise ValueError("You must provide both stats and stats file path for saving the stats to a file."
" Received stats = %s and stats fpath = %s" % (str(stats), str(stats_fpath)))
if not os.path.isfile(stats_fpath):
logging.info("Persisting the stats to disk")
return utils.write_to_file(stats_fpath, stats)
else:
logging.info("Skipping persist_stats to disk, apparently there is a valid stats file there already.")
return False
@classmethod
def extract_seq_checksum_from_stats(cls, stats: str) -> str:
for line in stats.split('\n'):
if re.search('^CHK', line):
return line
return None
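# For orientation, not from the original module: `samtools stats` output contains a single
# summary line beginning with "CHK" (checksums over read names, sequences and qualities),
# so comparing that one line between a BAM and its CRAM is enough to confirm the sequence
# payload survived conversion. A sketch of the intended use (paths are placeholders):
#   stats = HandleSamtoolsStats.fetch_stats('/data/sample.bam', '/data/sample.bam.stats')
#   chk = HandleSamtoolsStats.extract_seq_checksum_from_stats(stats)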
class HandleSamtoolsVersion:
@classmethod
def _get_version_nr_from_samtools_output(cls, output):
version_line = output.splitlines()[0]
tokens = version_line.split()
if len(tokens) < 2:
raise ValueError("samtools --version output looks different than expected. Can't parse it.")
return tokens[1]
@classmethod
def _extract_major_version_nr(cls, version):
return version.split('.', 1)[0]
@classmethod
def _extract_minor_version_nr(cls, version):
        vers_tokens = re.split(r'[.-]', version, 1)
if len(vers_tokens) < 2:
raise ValueError("samtools version output looks different than expected.Can't parse it.")
min_vs = re.split(r'[.-]', vers_tokens[1], 1)[0]
return min_vs
@classmethod
def _check_major_version_nr(cls, major_vs_nr):
if not major_vs_nr.isdigit():
raise ValueError("samtools version output looks different than expected. Can't parse it.")
if int(major_vs_nr) < 1:
raise ValueError("You need to use at least samtools version 1.3.")
@classmethod
def _check_minor_version_nr(cls, minor_vs_nr):
if not minor_vs_nr.isdigit():
raise ValueError("samtools version output looks different than expected.Can't parse it.")
minor_nr_1 = minor_vs_nr.split('.', 1)[0]
if not minor_nr_1.isdigit():
raise ValueError("Can't parse samtools version string.")
if int(minor_nr_1[0]) < 3:
raise ValueError("You need to use at least samtools version 1.3.")
@classmethod
def check_samtools_version(cls, version_output):
if not version_output:
raise ValueError("samtools --version output is empty. You need to use at least samtools version 1.3.")
version = cls._get_version_nr_from_samtools_output(version_output)
major_nr = cls._extract_major_version_nr(version)
minor_nr = cls._extract_minor_version_nr(version)
cls._check_major_version_nr(major_nr)
cls._check_minor_version_nr(minor_nr)
class CompareStatsForFiles:
@classmethod
def compare_flagstats(cls, flagstat_b, flagstat_c):
errors = []
if not flagstat_c or not flagstat_b:
errors.append("At least one of the flagstats is missing")
return errors
if flagstat_b != flagstat_c:
logging.error("FLAGSTAT DIFFERENT:\n %s then:\n %s " % (flagstat_b, flagstat_c))
errors.append("FLAGSTAT DIFFERENT:\n %s then:\n %s " % (flagstat_b, flagstat_c))
else:
logging.info("Flagstats are equal.")
return errors
@classmethod
def compare_stats_by_sequence_checksum(cls, stats_b, stats_c):
errors = []
if not stats_b or not stats_c:
errors.append("You need to provide both BAM and CRAM stats for cmparison")
return errors
chk_b = HandleSamtoolsStats.extract_seq_checksum_from_stats(stats_b)
chk_c = HandleSamtoolsStats.extract_seq_checksum_from_stats(stats_c)
if not chk_b:
errors.append("For some reason there is no CHK line in the samtools stats")
logging.error("For some reason there is no CHK line in the samtools stats")
if not chk_c:
errors.append("For some reason there is no CHK line in the samtools stats")
logging.error("For some reason there is no CHK line in the samtools stats")
if chk_b != chk_c:
errors.append("STATS SEQUENCE CHECKSUM DIFFERENT: %s and %s" % (chk_b, chk_c))
logging.error("STATS SEQUENCE CHECKSUM DIFFERENT: %s and %s" % (chk_b, chk_c))
return errors
@classmethod
def compare_bam_and_cram_by_statistics(cls, bam_path, cram_path):
errors = []
# Check that it's a valid file path
if not bam_path or (not utils.is_irods_path(bam_path) and not os.path.isfile(bam_path)):
errors.append("The BAM file path: %s is not valid" % bam_path)
if not cram_path or (not utils.is_irods_path(cram_path) and not os.path.isfile(cram_path)):
errors.append("The CRAM file path:%s is not valid" % cram_path)
if errors:
logging.error("There are errors with the file paths you provided: %s" % errors)
return errors
# Check that the files are readable by me
if not utils.is_irods_path(bam_path) and not utils.can_read_file(bam_path):
errors.append("Can't read file %s" % bam_path)
        if not utils.is_irods_path(cram_path) and not utils.can_read_file(cram_path):
            errors.append("Can't read file %s" % cram_path)
datsfosure/ansible | lib/ansible/plugins/strategies/__init__.py | Python | gpl-3.0 | 19,457 | 0.003443
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from six.moves import queue as Queue
import time
from ansible.errors import *
from ansible.executor.task_result import TaskResult
from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible.playbook.handler import Handler
from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.role import hash_params
from ansible.plugins import _basedirs, filter_loader, lookup_loader, module_loader
from ansible.template import Templar
from ansible.utils.debug import debug
__all__ = ['StrategyBase']
# FIXME: this should probably be in the plugins/__init__.py, with
# a smarter mechanism to set all of the attributes based on
# the loaders created there
class SharedPluginLoaderObj:
'''
    A simple object to make passing the various plugin loaders to
    the forked processes over the queue easier
'''
def __init__(self):
self.basedirs = _basedirs[:]
self.filter_loader = filter_loader
self.lookup_loader = lookup_loader
self.module_loader = module_loader
class StrategyBase:
'''
    This is the base class for strategy plugins, which contains some common
code useful to all strategies like running handlers, cleanup actions, etc.
'''
def __init__(self, tqm):
self._tqm = tqm
self._inventory = tqm.get_inventory()
self._workers = tqm.get_workers()
self._notified_handlers = tqm.get_notified_handlers()
self._variable_manager = tqm.get_variable_manager()
self._loader = tqm.get_loader()
self._final_q = tqm._final_q
# internal counters
self._pending_results = 0
self._cur_worker = 0
# this dictionary is used to keep track of hosts that have
# outstanding tasks still in queue
self._blocked_hosts = dict()
def run(self, iterator, connection_info, result=True):
# save the failed/unreachable hosts, as the run_handlers()
# method will clear that information during its execution
failed_hosts = self._tqm._failed_hosts.keys()
unreachable_hosts = self._tqm._unreachable_hosts.keys()
debug("running handlers")
result &= self.run_handlers(iterator, connection_info)
# now update with the hosts (if any) that failed or were
# unreachable during the handler execution phase
failed_hosts = set(failed_hosts).union(self._tqm._failed_hosts.keys())
unreachable_hosts = set(unreachable_hosts).union(self._tqm._unreachable_hosts.keys())
# send the stats callback
self._tqm.send_callback('v2_playbook_on_stats', self._tqm._stats)
if len(unreachable_hosts) > 0:
return 3
elif len(failed_hosts) > 0:
return 2
elif not result:
return 1
else:
return 0
def get_hosts_remaining(self, play):
return [host for host in self._inventory.get_hosts(play.hosts) if host.name not in self._tqm._failed_hosts and host.name not in self._tqm._unreachable_hosts]
def get_failed_hosts(self, play):
return [host for host in self._inventory.get_hosts(play.hosts) if host.name in self._tqm._failed_hosts]
def add_tqm_variables(self, vars, play):
'''
Base class method to add extra variables/information to the list of task
vars sent through the executor engine regarding the task queue manager state.
'''
new_vars = vars.copy()
new_vars['ansible_current_hosts'] = self.get_hosts_remaining(play)
new_vars['ansible_failed_hosts'] = self.get_failed_hosts(play)
return new_vars
def _queue_task(self, host, task, task_vars, connection_info):
''' handles queueing the task up to be sent to a worker '''
debug("entering _queue_task() for %s/%s" % (host, task))
# and then queue the new task
debug("%s - putting task (%s) in queue" % (host, task))
try:
debug("worker is %d (out of %d available)" % (self._cur_worker+1, len(self._workers)))
(worker_prc, main_q, rslt_q) = self._workers[self._cur_worker]
self._cur_worker += 1
if self._cur_worker >= len(self._workers):
self._cur_worker = 0
# create a dummy object with plugin loaders set as an easier
# way to share them with the forked processes
shared_loader_obj = SharedPluginLoaderObj()
main_q.put((host, task, self._loader.get_basedir(), task_vars, connection_info, shared_loader_obj), block=False)
self._pending_results += 1
except (EOFError, IOError, AssertionError) as e:
# most likely an abort
debug("got an error while queuing: %s" % e)
return
debug("exiting _queue_task() for %s/%s" % (host, task))
def _process_pending_results(self, iterator):
'''
Reads results off the final queue and takes appropriate action
based on the result (executing callbacks, updating state, etc.).
'''
ret_results = []
while not self._final_q.empty() and not self._tqm._terminated:
try:
result = self._final_q.get(block=False)
debug("got result from result worker: %s" % (result,))
# all host status messages contain 2 entries: (msg, task_result)
if result[0] in ('host_task_ok', 'host_task_failed', 'host_task_skipped', 'host_unreachable'):
task_result = result[1]
host = task_result._host
task = task_result._task
if result[0] == 'host_task_failed' or task_result.is_failed():
if not task.ignore_errors:
debug("marking %s as failed" % host.name)
iterator.mark_host_failed(host)
self._tqm._failed_hosts[host.name] = True
self._tqm._stats.increment('failures', host.name)
else:
self._tqm._stats.increment('ok', host.name)
self._tqm.send_callback('v2_runner_on_failed', task_result, ignore_errors=task.ignore_errors)
elif result[0] == 'host_unreachable':
self._tqm._unreachable_hosts[host.name] = True
self._tqm._stats.increment('dark', host.name)
self._tqm.send_callback('v2_runner_on_unreachable', task_result)
elif result[0] == 'host_task_skipped':
self._tqm._stats.increment('skipped', host.name)
self._tqm.send_callback('v2_runner_on_skipped', task_result)
elif result[0] == 'host_task_ok':
self._tqm._stats.increment('ok', host.name)
if 'changed' in task_result._result and task_result._result['changed']:
self._tqm._stats.increment('changed', host.name)
self._tqm.send_callback('v2_runner_on_ok', task_result)
self._pending_results -= 1
if host.name in self._blocked_hosts:
quchunguang/test | testpy3/testdoctest.py | Python | mit | 305 | 0.003279
"""
Created on 2013-1-19
@author: Administrator
"""
import doctest
def average(values):
"""Computes the arithmetic mean of a list of numbers.
    >>> print(average([20, 30, 70]))
40.0
"""
return sum(values) / len(values)
doctest.testmod()  # automatically validate the embedded tests
oddbird/gurtel | tests/test_session.py | Python | bsd-3-clause | 1,572 | 0
import datetime
from mock import patch
from pretend import stub
from gurtel import session
def test_annotates_request():
"""Annotates request with ``session`` proper
|
ty."""
request = stub(
cookies={},
app=stub(secret_key='secret', is_ssl=True, config={}),
)
session.session_middleware(request, lambda req: None)
assert request.session.secret_key == 'secret'
@patch.object(session.JSONSecureCookie, 'save_cookie')
def test_sets_cookie_on_response(mock_save_cookie):
"""Calls ``save_cookie`` on response."""
request = stub(
cookies={},
        app=stub(secret_key='secret', is_ssl=True, config={}),
)
response = stub()
session.session_middleware(request, lambda req: response)
mock_save_cookie.assert_called_once_with(
response, httponly=True, secure=True)
@patch.object(session.JSONSecureCookie, 'save_cookie')
@patch.object(session.timezone, 'now')
def test_can_set_expiry(mock_now, mock_save_cookie):
"""Calls ``save_cookie`` on response with expiry date, if configured."""
request = stub(
cookies={},
app=stub(
secret_key='secret',
is_ssl=True,
config={'session.expiry_minutes': '1440'},
),
)
response = stub()
mock_now.return_value = datetime.datetime(2013, 11, 22)
session.session_middleware(request, lambda req: response)
mock_save_cookie.assert_called_once_with(
response,
httponly=True,
secure=True,
expires=datetime.datetime(2013, 11, 23),
)
awsdocs/aws-doc-sdk-examples | python/example_code/transcribe/transcribe_basics.py | Python | apache-2.0 | 17,119 | 0.002045
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""
Purpose
Shows how to use the AWS SDK for Python (Boto3) with the Amazon Transcribe API to
transcribe an audio file to a text file. Also shows how to define a custom vocabulary
to improve the accuracy of the transcription.
This example uses a public domain audio file downloaded from Wikipedia and converted
from .ogg to .mp3 format. The file contains a reading of the poem Jabberwocky by
Lewis Carroll. The original audio source file can be found here:
https://en.wikisource.org/wiki/File:Jabberwocky.ogg
"""
import logging
import sys
import time
import boto3
from botocore.exceptions import ClientError
import requests
# Add relative path to include demo_tools in this code example without need for setup.
sys.path.append('../..')
from demo_tools.custom_waiter import CustomWaiter, WaitState
logger = logging.getLogger(__name__)
class TranscribeCompleteWaiter(CustomWaiter):
"""
Waits for the transcription to complete.
"""
def __init__(self, client):
super().__init__(
'TranscribeComplete', 'GetTranscriptionJob',
'TranscriptionJob.TranscriptionJobStatus',
{'COMPLETED': WaitState.SUCCESS, 'FAILED': WaitState.FAILURE},
client)
def wait(self, job_name):
self._wait(TranscriptionJobName=job_name)
class VocabularyReadyWaiter(CustomWaiter):
"""
Waits for the custom vocabulary to be ready for use.
"""
def __init__(self, client):
super().__init__(
'VocabularyReady', 'GetVocabulary', 'VocabularyState',
{'READY': WaitState.SUCCESS}, client)
def wait(self, vocabulary_name):
self._wait(VocabularyName=vocabulary_name)
# snippet-start:[python.example_code.transcribe.StartTranscriptionJob]
def start_job(
job_name, media_uri, media_format, language_code, transcribe_client,
vocabulary_name=None):
"""
Starts a transcription job. This function returns as soon as the job is started.
To get the current status of the job, call get_transcription_job. The job is
successfully completed when the job status is 'COMPLETED'.
:param job_name: The name of the transcription job. This must be unique for
your AWS account.
:param media_uri: The URI where the audio file is stored. This is typically
in an Amazon S3 bucket.
:param media_format: The format of the audio file. For example, mp3 or wav.
:param language_code: The language code of the audio file.
For example, en-US or ja-JP
    :param transcribe_client: The Boto3 Transcribe client.
:param vocabulary_name: The name of a custom vocabulary to use when transcribing
the audio file.
:return: Data about the job.
"""
try:
job_args = {
'TranscriptionJobName': job_name,
'Media': {'MediaFileUri': media_uri},
'MediaFormat': media_format,
            'LanguageCode': language_code}
if vocabulary_name is not None:
job_args['Settings'] = {'VocabularyName': vocabulary_name}
response = transcribe_client.start_transcription_job(**job_args)
job = response['TranscriptionJob']
logger.info("Started transcription job %s.", job_name)
except ClientError:
logger.exception("Couldn't start transcription job %s.", job_name)
raise
else:
return job
# snippet-end:[python.example_code.transcribe.StartTranscriptionJob]
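# Illustrative sketch, not part of the original example: one way start_job above might be
# called; the job name, bucket and object key are placeholders.
#   transcribe_client = boto3.client('transcribe')
#   job = start_job('demo-jabberwocky-job', 's3://DOC-EXAMPLE-BUCKET/Jabberwocky.mp3',
#                   'mp3', 'en-US', transcribe_client)
#   TranscribeCompleteWaiter(transcribe_client).wait('demo-jabberwocky-job')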
# snippet-start:[python.example_code.transcribe.ListTranscriptionJobs]
def list_jobs(job_filter, transcribe_client):
"""
Lists summaries of the transcription jobs for the current AWS account.
:param job_filter: The list of returned jobs must contain this string in their
names.
:param transcribe_client: The Boto3 Transcribe client.
:return: The list of retrieved transcription job summaries.
"""
try:
response = transcribe_client.list_transcription_jobs(
JobNameContains=job_filter)
jobs = response['TranscriptionJobSummaries']
next_token = response.get('NextToken')
while next_token is not None:
response = transcribe_client.list_transcription_jobs(
JobNameContains=job_filter, NextToken=next_token)
jobs += response['TranscriptionJobSummaries']
next_token = response.get('NextToken')
logger.info("Got %s jobs with filter %s.", len(jobs), job_filter)
except ClientError:
logger.exception("Couldn't get jobs with filter %s.", job_filter)
raise
else:
return jobs
# snippet-end:[python.example_code.transcribe.ListTranscriptionJobs]
# snippet-start:[python.example_code.transcribe.GetTranscriptionJob]
def get_job(job_name, transcribe_client):
"""
Gets details about a transcription job.
:param job_name: The name of the job to retrieve.
:param transcribe_client: The Boto3 Transcribe client.
:return: The retrieved transcription job.
"""
try:
response = transcribe_client.get_transcription_job(
TranscriptionJobName=job_name)
job = response['TranscriptionJob']
logger.info("Got job %s.", job['TranscriptionJobName'])
except ClientError:
logger.exception("Couldn't get job %s.", job_name)
raise
else:
return job
# snippet-end:[python.example_code.transcribe.GetTranscriptionJob]
# snippet-start:[python.example_code.transcribe.DeleteTranscriptionJob]
def delete_job(job_name, transcribe_client):
"""
Deletes a transcription job. This also deletes the transcript associated with
the job.
:param job_name: The name of the job to delete.
:param transcribe_client: The Boto3 Transcribe client.
"""
try:
transcribe_client.delete_transcription_job(
TranscriptionJobName=job_name)
logger.info("Deleted job %s.", job_name)
except ClientError:
logger.exception("Couldn't delete job %s.", job_name)
raise
# snippet-end:[python.example_code.transcribe.DeleteTranscriptionJob]
# snippet-start:[python.example_code.transcribe.CreateVocabulary]
def create_vocabulary(
vocabulary_name, language_code, transcribe_client,
phrases=None, table_uri=None):
"""
Creates a custom vocabulary that can be used to improve the accuracy of
transcription jobs. This function returns as soon as the vocabulary processing
is started. Call get_vocabulary to get the current status of the vocabulary.
The vocabulary is ready to use when its status is 'READY'.
:param vocabulary_name: The name of the custom vocabulary.
:param language_code: The language code of the vocabulary.
For example, en-US or nl-NL.
:param transcribe_client: The Boto3 Transcribe client.
:param phrases: A list of comma-separated phrases to include in the vocabulary.
:param table_uri: A table of phrases and pronunciation hints to include in the
vocabulary.
:return: Information about the newly created vocabulary.
"""
try:
vocab_args = {'VocabularyName': vocabulary_name, 'LanguageCode': language_code}
if phrases is not None:
vocab_args['Phrases'] = phrases
elif table_uri is not None:
vocab_args['VocabularyFileUri'] = table_uri
response = transcribe_client.create_vocabulary(**vocab_args)
logger.info("Created custom vocabulary %s.", response['VocabularyName'])
except ClientError:
logger.exception("Couldn't create custom vocabulary %s.", vocabulary_name)
raise
else:
return response
# snippet-end:[python.example_code.transcribe.CreateVocabulary]
# snippet-start:[python.example_code.transcribe.ListVocabularies]
def list_vocabularies(vocabulary_filter, transcribe_client):
"""
Lists the custom vocabularies created for this AWS account.
:param vocabulary_filter: The returned vocabularies must contain this string in
carlosb1/examples-python | ideas/mango-example/mango.py | Python | gpl-2.0 | 444 | 0.009009
import dryscrape
dryscrape.start_xvfb()
sess = dryscrape.Session(base_url='http://shop.mango.com')
sess.set_attribute('auto_load_images',False)
sess.visit('/ES/m/hombre/prendas/todas/?m=coleccion')
print sess.at_xpath("//*").children()
print "--------------------------"
print sess.at_xpath("//*[contains(@class,\"searchResultPrice\")]/text()")
#for price in sess.at_xpath("//*[contains(@class,\"searchResultPrice\")]"):
# print price
sfu-rcg/ampush | amlib/file_map.py | Python | mit | 6,519 | 0.00046
import os
import re
from amlib import conf, utils, log
'''
Functions for parsing AD automount maps into a common dict format.
Part of ampush. https://github.com/sfu-rcg/ampush
Copyright (C) 2016 Research Computing Group, Simon Fraser University.
'''
# ff = flat file automount map
def get_names():
'''
Return a list of files in ${conf/flat_file_map_dir} with the master map and
(optional) direct map first.
'''
l_names, fs_map_names = [], []
for root, dirs, filenames in os.walk(conf.c['flat_file_map_dir']):
for map_name in filenames:
fs_map_names.append(map_name)
# ensure the master map and direct map (if one exists) are processed first
l_names.append(conf.c['master_map_name'])
try:
fs_map_names.remove(conf.c['master_map_name'])
except ValueError:
log_msg = '{0} does not exist on the filesystem. Terminating.'
log_msg = log_msg.format(conf.c['master_map_name'])
log.m.critical(log_msg)
print(log_msg)
exit(6)
if conf.c['direct_map_name'] in fs_map_names:
l_names.append(conf.c['direct_map_name'])
fs_map_names.remove(conf.c['direct_map_name'])
fs_map_names.sort()
for map_name in fs_map_names:
if re.match(r'^auto\.', map_name):
l_names.append(map_name)
return l_names
def detect_orphans():
'''
Return a list of maps that exist on the filesystem but are not mentioned
in auto.master.
'''
master_entries = parse(conf.c['master_map_name'])
master_mapnames = []
l_orphans = []
for k, v in master_entries.items():
master_mapnames.append(v['map'])
for ff_mapname in get_names():
# auto.master should not be listed in auto.master
if (ff_mapname not in master_mapnames and
ff_mapname != 'auto.master'):
l_orphans.append(ff_mapname)
if len(l_orphans) > 0:
l_orphans.sort()
        log_msg = 'Found maps on the filesystem that are not listed in {0}: {1}'
log_msg = log_msg.format(conf.c['master_map_name'],
' '.join(l_orphans))
log.m.warning(log_msg)
print(log_msg)
    return l_orphans
def parse_master(map_lines=None, map_name=None):
'''
Ingest master map as a list of strings. Return a nice dict like this:
{'/-': {'map': 'auto.direct', 'options': '-rw,intr,soft,bg'},
'/foo': {'map': 'auto.foo', 'options': '-rw,intr,soft,bg'},
'/bar': {'map': 'auto.bar', 'options': '-rw,intr,soft,bg'},
'/baz': {'map': 'auto.baz',
'options': '-ro,int,soft,bg,fstype=nfs4,port=2049'},}
'''
d_map = {}
for l in map_lines:
chunks = l.split()
am_key = chunks[0]
joined = ' '.join(chunks)
d_map[am_key] = {}
'''
As with submaps the mount options field is optional.
2 fields == automount entry without mount options.
'''
if len(chunks) == 2:
d_map[am_key] = {'map': chunks[1]}
log_msg = 'No mount options for {0} in {1}'
log_msg = log_msg.format(am_key, conf.c['master_map_name'])
log.m.info(log_msg)
# 3 fields? automount directory + mapname + mount options
elif len(chunks) == 3:
d_map[am_key] = {'map': chunks[1],
'options': chunks[2]}
else:
log_msg = (
'Terminating. Bad flat file master map format: '
'unexpected number of fields in ' + joined
)
log.m.critical(log_msg)
print(log_msg)
exit(11)
return d_map
def parse_submap(map_name=None, map_lines=None):
'''
Ingest a list of automount map entries. Return a nice dict like this:
{'yuv': {'options': '-intr,bg,tcp,vers=4',
'server_dir': '/yuv',
'server_hostname': 'nfssrv01.example.com'},
'luma': {'options': '-nosuid,tcp,intr,bg,vers=3,rw',
'server_dir': '/exports/luma',
'server_hostname': 'nfssrv02.example.com'}, ...}
'''
d_map = {}
log_msg = 'Reading {0}/{1}'.format(conf.c['flat_file_map_dir'],
map_name)
log.m.debug(log_msg)
for l in map_lines:
chunks = l.split()
am_key = chunks[0] # automount key
utils.validate_nis_map_entry(in_list=chunks[1:],
map_name=map_name,
am_key=am_key,
map_type='flat file')
d_map[am_key] = {}
'''
Consider these two valid automount entries:
apps -tcp,vers=3 nfs-server1.example.com:/exports/apps
data nfs-server2.example.com:/srv/data
If a third field exists, use it as the NFS path.
Otherwise use the second field as the NFS path.
'''
try: # server:path pair with options
server_hostname = chunks[2].split(':')[0]
server_dir = chunks[2].split(':')[1]
options = chunks[1]
utils.validate_mount_options(opt_str=options,
map_name=map_name,
am_key=am_key)
d_map[am_key] = {'server_hostname': server_hostname,
'server_dir': server_dir,
'options': options}
except IndexError: # without options
server_hostname = chunks[1].split(':')[0]
            server_dir = chunks[1].split(':')[1]
d_map[am_key] = {'server_hostname': server_hostname,
'server_dir': server_dir,
'options': None}
return d_map
def parse(map_name=None):
'''
Read flat file automount maps ${ampush.conf/flat_file_map_dir} and
pass map names to parser_master_map or parse_submap.
'''
map_pathname = conf.c['flat_file_map_dir'] + '/' + map_name
map_lines = utils.ff_map_to_list(map_pathname)
map_type = 'flat file'
# different map types (master, direct, plain) == different sanity checks
if map_name == conf.c['master_map_name']:
d_map = parse_master(map_name=map_name,
map_lines=map_lines)
utils.master_map_sanity_checks(map_dict=d_map,
map_type=map_type)
else:
d_map = parse_submap(map_name=map_name,
map_lines=map_lines)
utils.submap_sanity_checks(map_dict=d_map,
map_type=map_type)
return d_map
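# A minimal usage sketch tying the parsers above together, assuming ampush.conf
# points flat_file_map_dir at a directory of automount maps and names the master
# map via master_map_name; the printed counts are illustrative only.
def _example_dump_maps():
    for name in get_names():      # master map first, then direct map, then auto.*
        entries = parse(map_name=name)
        print('{0}: {1} entries'.format(name, len(entries)))
    detect_orphans()              # warn about maps not referenced by the master map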
|
kopchik/qtile
|
libqtile/widget/bitcoin_ticker.py
|
Python
|
mit
| 2,607
| 0
|
# Copyright (c) 2013 Jendrik Poloczek
# Copyright (c) 2013 Tao Sauvage
# Copyright (c) 2014 Aborilov Pavel
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014-2015 Tycho Andersen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# -*- coding: utf-8 -*-
from . import base
from .generic_poll_text import GenPollUrl
import locale
class BitcoinTicker(GenPollUrl):
    '''
    A bitcoin ticker widget, data provided by the btc-e.com API. Defaults
    to displaying currency in whatever the current locale is.
    '''
QUERY_URL = "https://btc-e.com/api/2/btc_%s/ticker"
orientations = base.ORIENTATION_HORIZONTAL
defaults = [
('currency', locale.localeconv()['int_curr_symbol'].strip(),
'The currency the value of bitcoin is displayed in'),
('format', 'BTC Buy: {buy}, Sell: {sell}',
'Display format, allows buy, sell, high, low, avg, '
'vol, vol_cur, last, variables.'),
]
def __init__(self, **config):
GenPollUrl.__init__(self, **config)
self.add_defaults(BitcoinTicker.defaults)
@property
def url(self):
return self.QUERY_URL % self.currency.lower()
def parse(self, body):
formatted = {}
if 'error' in body and body['error'] == "invalid pair":
locale.setlocale(locale.LC_MONETARY, "en_US.UTF-8")
self.currency = locale.localeconv()['int_curr_symbol'].strip()
body = self.fetch(self.url)
for k, v in body['ticker'].items():
formatted[k] = locale.currency(v)
return self.format.format(**formatted)
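# A minimal, hypothetical placement of this widget in a qtile bar; the currency
# and format string are illustrative, and the usual libqtile config structure
# (screens holding bars) is assumed.
def _example_bar():
    from libqtile import bar, widget
    return bar.Bar(
        [widget.BitcoinTicker(currency='USD', format='BTC Buy: {buy}, Sell: {sell}')],
        24,  # bar height in pixels
    )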
|
eubr-bigsea/tahiti
|
tahiti/workflow_permission_api.py
|
Python
|
apache-2.0
| 5,484
| 0
|
# -*- coding: utf-8 -*-}
import logging
import os
import uuid
import requests
from flask import request, current_app, g
from flask_babel import gettext
from flask_restful import Resource
from sqlalchemy import or_
from sqlalchemy.orm import joinedload
from sqlalchemy.sql.elements import and_
from marshmallow.exceptions import ValidationError
from tahiti.app_auth import requires_auth
from tahiti.schema import *
from tahiti.workflow_api import filter_by_permissions
log = logging.getLogger(__name__)
class WorkflowPermissionApi(Resource):
""" REST API for sharing a Workflow """
@staticmethod
@requires_auth
def post(workflow_id, user_id):
result, result_code = dict(
status="ERROR",
message=gettext('Missing json in the request body')), 400
if request.json is not None:
form = request.json
to_validate = ['permission', 'user_name', 'user_login']
error = False
for check in to_validate:
if check not in form or form.get(check, '').strip() == '':
result, result_code = dict(
status="ERROR", message=gettext('Validation error'),
errors={'Missing': check}), 400
error = True
break
if check == 'permission' and form.get(
'permission') not in list(PermissionType.values()):
result, result_code = dict(
status="ERROR", message=gettext('Validation error'),
errors={'Invalid': check}), 400
error = True
break
if not error:
try:
filtered = filter_by_permissions(
Workflow.query, [PermissionType.WRITE])
workflow = filtered.filter(
Workflow.id == workflow_id).first()
if workflow is not None:
conditions = [WorkflowPermission.workflow_id ==
workflow_id,
WorkflowPermission.user_id == user_id]
permission = WorkflowPermission.query.filter(
*conditions).first()
action_performed = 'Added'
if permission is not None:
action_performed = 'Updated'
permission.permission = form['permission']
else:
permission = WorkflowPermission(
workflow=workflow, user_id=user_id,
user_name=form['user_name'],
user_login=form['user_login'],
                                permission=form['permission'])
db.session.add(permission)
db.session.commit()
result, result_code = {'message': action_performed,
'status': 'OK'}, 200
else:
result, result_code = dict(
status="ERROR",
message=gettext("%(type)s not found.",
type=gettext('Data source'))), 404
except Exception as e:
log.exception('Error in POST')
result, result_code = dict(status="ERROR",
message=gettext(
"Internal error")), 500
if current_app.debug:
result['debug_detail'] = str(e)
db.session.rollback()
return result, result_code
@staticmethod
@requires_auth
def delete(workflow_id, user_id):
result, result_code = dict(status="ERROR",
message=gettext("%(type)s not found.",
type=gettext(
'Data source'))), 404
filtered = filter_by_permissions(Workflow.query,
[PermissionType.WRITE])
workflow = filtered.filter(Workflow.id == workflow_id).first()
if workflow is not None:
permission = WorkflowPermission.query.filter(
WorkflowPermission.workflow_id == workflow_id,
WorkflowPermission.user_id == user_id).first()
if permission is not None:
try:
db.session.delete(permission)
db.session.commit()
result, result_code = dict(
status="OK",
message=gettext("%(what)s was successively deleted",
what=gettext('Workflow'))), 200
except Exception as e:
log.exception('Error in DELETE')
result, result_code = dict(status="ERROR",
message=gettext(
"Internal error")), 500
if current_app.debug:
result['debug_detail'] = str(e)
db.session.rollback()
return result, result_code
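# An illustrative request body for the POST handler above: it requires at least
# these three keys, and the permission value must be one of
# PermissionType.values() ('WRITE' is assumed here purely for illustration);
# the route itself is registered elsewhere in the application.
EXAMPLE_SHARE_PAYLOAD = {
    'permission': 'WRITE',
    'user_name': 'Alice Example',
    'user_login': 'alice',
}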
|
rohitranjan1991/home-assistant
|
homeassistant/components/emoncms/sensor.py
|
Python
|
mit
| 8,487
| 0.000471
|
"""Support for monitoring emoncms feeds."""
from __future__ import annotations
from datetime import timedelta
from http import HTTPStatus
import logging
import requests
import voluptuous as vol
from homeassistant.components.sensor import (
PLATFORM_SCHEMA,
SensorDeviceClass,
SensorEntity,
SensorStateClass,
)
from homeassistant.const import (
CONF_API_KEY,
CONF_ID,
CONF_SCAN_INTERVAL,
CONF_UNIT_OF_MEASUREMENT,
CONF_URL,
CONF_VALUE_TEMPLATE,
POWER_WATT,
STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import template
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTR_FEEDID = "FeedId"
ATTR_FEEDNAME = "FeedName"
ATTR_LASTUPDATETIME = "LastUpdated"
ATTR_LASTUPDATETIMESTR = "LastUpdatedStr"
ATTR_SIZE = "Size"
ATTR_TAG = "Tag"
ATTR_USERID = "UserId"
CONF_EXCLUDE_FEEDID = "exclude_feed_id"
CONF_ONLY_INCLUDE_FEEDID = "include_only_feed_id"
CONF_SENSOR_NAMES = "sensor_names"
DECIMALS = 2
DEFAULT_UNIT = POWER_WATT
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=5)
ONLY_INCL_EXCL_NONE = "only_include_exclude_or_none"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_URL): cv.string,
vol.Required(CONF_ID): cv.positive_int,
vol.Exclusive(CONF_ONLY_INCLUDE_FEEDID, ONLY_INCL_EXCL_NONE): vol.All(
cv.ensure_list, [cv.positive_int]
),
vol.Exclusive(CONF_EXCLUDE_FEEDID, ONLY_INCL_EXCL_NONE): vol.All(
cv.ensure_list, [cv.positive_int]
),
vol.Optional(CONF_SENSOR_NAMES): vol.All(
{cv.positive_int: vol.All(cv.string, vol.Length(min=1))}
),
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=DEFAULT_UNIT): cv.string,
}
)
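# An illustrative set of sensor-platform options matching the schema above,
# written as the equivalent Python dict (in YAML these would live under an
# emoncms platform entry); URL, API key, feed ids, and names are placeholders.
EXAMPLE_CONFIG = {
    CONF_API_KEY: "abcdef0123456789",
    CONF_URL: "http://emoncms.local",
    CONF_ID: 1,
    CONF_ONLY_INCLUDE_FEEDID: [1, 2],
    CONF_SENSOR_NAMES: {1: "Grid power", 2: "Solar power"},
    CONF_UNIT_OF_MEASUREMENT: "W",
}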
def get_id(sensorid, feedtag, feedname, feedid, feeduserid):
"""Return unique identifier for feed / sensor."""
return f"emoncms{sensorid}_{feedtag}_{feedname}_{feedid}_{feeduserid}"
def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Emoncms sensor."""
apikey = config.get(CONF_API_KEY)
url = config.get(CONF_URL)
sensorid = config.get(CONF_ID)
value_template = config.get(CONF_VALUE_TEMPLATE)
config_unit = config.get(CONF_UNIT_OF_MEASUREMENT)
exclude_feeds = config.get(CONF_EXCLUDE_FEEDID)
include_only_feeds = config.get(CONF_ONLY_INCLUDE_FEEDID)
sensor_names = config.get(CONF_SENSOR_NAMES)
interval = config.get(CONF_SCAN_INTERVAL)
if value_template is not None:
value_template.hass = hass
data = EmonCmsData(hass, url, apikey, interval)
data.update()
if data.data is None:
return
sensors = []
for elem in data.data:
if exclude_feeds is not None and int(elem["id"]) in exclude_feeds:
continue
if include_only_feeds is not None and int(elem["id"]) not in include_only_feeds:
continue
name = None
if sensor_names is not None:
name = sensor_names.get(int(elem["id"]), None)
if unit := elem.get("unit"):
unit_of_measurement = unit
else:
unit_of_measurement = config_unit
sensors.append(
EmonCmsSensor(
hass,
data,
name,
value_template,
unit_of_measurement,
str(sensorid),
elem,
)
)
add_entities(sensors)
class EmonCmsSensor(SensorEntity):
"""Implementation of an Emoncms sensor."""
def __init__(
self, hass, data, name, value_template, unit_of_measurement, sensorid, elem
):
"""Initialize the sensor."""
if name is None:
# Suppress ID in sensor name if it's 1, since most people won't
# have more than one EmonCMS source and it's redundant to show the
# ID if there's only one.
id_for_name = "" if str(sensorid) == "1" else sensorid
# Use the feed name assigned in EmonCMS or fall back to the feed ID
feed_name = elem.get("name") or f"Feed {elem['id']}"
self._name = f"EmonCMS{id_for_name} {feed_name}"
else:
self._name = name
self._identifier = get_id(
sensorid, elem["tag"], elem["name"], elem["id"], elem["userid"]
)
self._hass = hass
self._data = data
self._value_template = value_template
self._unit_of_measurement = unit_of_measurement
self._sensorid = sensorid
self._elem = elem
if unit_of_measurement == "kWh":
self._attr_device_class = SensorDeviceClass.ENERGY
self._attr_state_class = SensorStateClass.TOTAL_INCREASING
elif unit_of_measurement == "W":
self._attr_device_class = SensorDeviceClass.POWER
self._attr_state_class = SensorStateClass.MEASUREMENT
if self._value_template is not None:
self._state = self._value_template.render_with_possible_json_value(
elem["value"], STATE_UNKNOWN
)
else:
self._state = round(float(elem["value"]), DECIMALS)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def native_unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def native_value(self):
"""Return the state of the device."""
return self._state
@property
def extra_state_attributes(self):
"""Return the attributes of the sensor."""
return {
ATTR_FEEDID: self._elem["id"],
ATTR_TAG: self._elem["tag"],
ATTR_FEEDNAME: self._elem["name"],
ATTR_SIZE: self._elem["size"],
ATTR_USERID: self._elem["userid"],
ATTR_LASTUPDATETIME: self._elem["time"],
ATTR_LASTUPDATETIMESTR: template.timestamp_local(float(self._elem["time"])),
}
def update(self):
"""Get the latest data and updates the state."""
self._data.update()
if self._data.data is None:
return
elem = next(
(
elem
for elem in self._data.data
if get_id(
self._sensorid,
elem["tag"],
elem["name"],
elem["id"],
elem["userid"],
)
== self._identifier
),
None,
)
if elem is None:
return
self._elem = elem
if self._value_template is not None:
self._state = self._value_template.render_with_possible_json_value(
elem["value"], STATE_UNKNOWN
)
else:
            self._state = round(float(elem["value"]), DECIMALS)
class EmonCmsData:
"""The class for handling the data retrieval."""
def __init__(self, hass, url, apikey, interval):
"""Initialize the data object."""
self._apikey = apikey
self._url = f"{url}/feed/list.json"
self._interval = interval
self._hass = hass
self.data = None
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from Emoncms."""
try:
parameters = {"apikey": self._apikey}
req = requests.get(
self._url, params=parameters, allow_redirects=True, timeout=5
)
except requests.exceptions.RequestException as exception:
_LOGGER.error(exception)
return
else:
if req.status_code == HTTPStatus.OK:
|
kingvuplus/nn-gui
|
mytest.py
|
Python
|
gpl-2.0
| 17,117
| 0.026757
|
import eConsoleImpl
import eBaseImpl
import enigma
enigma.eTimer = eBaseImpl.eTimer
enigma.eSocketNotifier = eBaseImpl.eSocketNotifier
enigma.eConsoleAppContainer = eConsoleImpl.eConsoleAppContainer
from Tools.Profile import profile, profile_final
profile("PYTHON_START")
from enigma import runMainloop, eDVBDB, eTimer, quitMainloop, \
getDesktop, ePythonConfigQuery, eAVSwitch, eServiceEvent, \
eEPGCache
from tools import *
# Nemesis Patch
from enigma import nemTool
t = nemTool()
print "Restart EMU/CS"
t.sendCmd("/etc/init.d/restartEmu.sh &")
#End
profile("LANGUAGE")
from Components.Language import language
def setEPGLanguage():
print "language set to", language.getLanguage()
eServiceEvent.setEPGLanguage(language.getLanguage())
language.addCallback(setEPGLanguage)
from traceback import print_exc
profile("LOAD:InfoBar")
import Screens.InfoBar
from Screens.SimpleSummary import SimpleSummary
from sys import stdout, exc_info
profile("Bouquets")
eDVBDB.getInstance().reloadBouquets()
profile("ParentalControl")
from Components.ParentalControl import InitParentalControl
InitParentalControl()
profile("LOAD:Navigation")
from Navigation import Navigation
profile("LOAD:skin")
from skin import readSkin
profile("LOAD:Tools")
from Tools.Directories import InitFallbackFiles, resolveFilename, SCOPE_PLUGINS, SCOPE_CURRENT_SKIN
from Components.config import config, configfile, ConfigText, ConfigYesNo, ConfigInteger, NoSave, ConfigSelection
InitFallbackFiles()
profile("config.misc")
config.misc.radiopic = ConfigText(default = resolveFilename(SCOPE_CURRENT_SKIN, "radio.mvi"))
config.misc.isNextRecordTimerAfterEventActionAuto = ConfigYesNo(default=False)
config.misc.useTransponderTime = ConfigYesNo(default=True)
config.misc.startCounter = ConfigInteger(default=0) # number of e2 starts...
config.misc.standbyCounter = NoSave(ConfigInteger(default=0)) # number of standby
config.misc.epgcache_filename = ConfigSelection(default = "/media/usb", choices = ["/media/usb", "/media/cf", "/media/hdd"])
def setEPGCachePath(configElement):
eEPGCache.getInstance().setCacheFile("%s/epg.dat" % configElement.value)
#demo code for use of standby enter leave callbacks
#def leaveStandby():
# print "!!!!!!!!!!!!!!!!!leave standby"
#def standbyCountChanged(configElement):
# print "!!!!!!!!!!!!!!!!!enter standby num", configElement.value
# from Screens.Standby import inStandby
# inStandby.onClose.append(leaveStandby)
#config.misc.standbyCounter.addNotifier(standbyCountChanged, initial_call = False)
####################################################
def useTransponderTimeChanged(configElement):
enigma.eDVBLocalTimeHandler.getInstance().setUseDVBTime(configElement.value)
config.misc.useTransponderTime.addNotifier(useTransponderTimeChanged)
profile("Twisted")
try:
import twisted.python.runtime
twisted.python.runtime.platform.supportsThreads = lambda: False
import e2reactor
e2reactor.install()
from twisted.internet import reactor
def runReactor():
reactor.run(installSignalHandlers=False)
except ImportError:
print "twisted not available"
def runReactor():
runMainloop()
profile("LOAD:Plugin")
# initialize autorun plugins and plugin menu entries
from Components.PluginComponent import plugins
profile("LOAD:Wizard")
from Screens.Wizard import wizardManager
from Screens.DefaultWizard import *
from Screens.StartWizard import *
from Screens.TutorialWizard import *
import Screens.Rc
from Tools.BoundFunction import boundFunction
from Plugins.Plugin import PluginDescriptor
profile("misc")
had = dict()
def dump(dir, p = ""):
if isinstance(dir, dict):
for (entry, val) in dir.items():
dump(val, p + "(dict)/" + entry)
if hasattr(dir, "__dict__"):
for name, value in dir.__dict__.items():
if not had.has_key(str(value)):
had[str(value)] = 1
dump(value, p + "/" + str(name))
else:
print p + "/" + str(name) + ":" + str(dir.__class__) + "(cycle)"
else:
print p + ":" + str(dir)
# + ":" + str(dir.__class__)
# display
profile("LOAD:ScreenGlobals")
from Screens.Globals import Globals
from Screens.SessionGlobals import SessionGlobals
from Screens.Screen import Screen
profile("Screen")
Screen.global_screen = Globals()
# Session.open:
# * push current active dialog ('current_dialog') onto stack
# * call execEnd for this dialog
# * clear in_exec flag
# * hide screen
# * instantiate new dialog into 'current_dialog'
# * create screens, components
# * read, apply skin
# * create GUI for screen
# * call execBegin for new dialog
# * set in_exec
# * show gui screen
# * call components' / screen's onExecBegin
# ... screen is active, until it calls 'close'...
# Session.close:
# * assert in_exec
# * save return value
# * start deferred close handler ('onClose')
# * execEnd
# * clear in_exec
# * hide screen
# .. a moment later:
# Session.doClose:
# * destroy screen
class Session:
def __init__(self, desktop = None, summary_desktop = None, navigation = None):
self.desktop = desktop
self.summary_desktop = summary_desktop
self.nav = navigation
self.delay_timer = eTimer()
self.delay_timer.callback.append(self.processDelay)
self.current_dialog = None
self.dialog_stack = [ ]
self.summary_stack = [ ]
self.summary = None
self.in_exec = False
self.screen = SessionGlobals(self)
for p in plugins.getPlugins(PluginDescriptor.WHERE_SESSIONSTART):
p(reason=0, session=self)
def processDelay(self):
callback = self.current_dialog.callback
retval = self.current_dialog.returnValue
if self.current_dialog.isTmp:
self.current_dialog.doClose()
# dump(self.current_dialog)
del self.current_dialog
else:
del self.current_dialog.callback
self.popCurrent()
if callback is not None:
callback(*retval)
def execBegin(self, first=True, do_show = True):
assert not self.in_exec
self.in_exec = True
c = self.current_dialog
# when this is an execbegin after a execend of a "higher" dialog,
# popSummary already did the right thing.
if first:
self.pushSummary()
summary = c.createSummary() or SimpleSummary
self.summary = self.instantiateSummaryDialog(summary, c)
self.summary.show()
c.addSummary(self.summary)
c.saveKeyboardMode()
c.execBegin()
# when execBegin opened a new dialog, don't bother showing the old one.
if c == self.current_dialog and do_show:
c.show()
def execEnd(self, last=True):
assert self.in_exec
self.in_exec = False
self.current_dialog.execEnd()
self.current_dialog.restoreKeyboardMode()
self.current_dialog.hide()
if last:
self.current_dialog.removeSummary(self.summary)
self.popSummary()
def create(self, screen, arguments, **kwargs):
# creates an instance of 'screen' (which is a class)
try:
return screen(self, *arguments, **kwargs)
except:
errstr = "Screen %s(%s, %s): %s" % (str(screen), str(arguments), str(kwargs), exc_info()[0])
print errstr
print_exc(file=stdout)
quitMainloop(5)
def instantiateDialog(self, screen, *arguments, **kwargs):
return self.doInstantiateDialog(screen, arguments, kwargs, self.desktop)
def deleteDialog(self, screen):
screen.hide()
screen.doClose()
def instantiateSummaryDialog(self, screen, *arguments, **kwargs):
return self.doInstantiateDialog(screen, arguments, kwargs, self.summary_desktop)
def doInstantiateDialog(self, screen, arguments, kwargs, desktop):
# create dialog
try:
dlg = self.create(screen, arguments, **kwargs)
except:
print 'EXCEPTION IN DIALOG INIT CODE, ABORTING:'
print '-'*60
print_exc(file=stdout)
quitMainloop(5)
print '-'*60
if dlg is None:
return
# read skin data
readSkin(dlg, None, dlg.skinName, desktop)
# create GUI view of this dialog
assert desktop is not None
dlg.setDesktop(desktop)
dlg.applySkin()
return dlg
def pushCurrent(self):
if self.current_dialog is not None:
self.dialog_stack.append((self.current_dialog, self.current_dialog.shown))
self.execEnd(last=False)
def popCurrent(self):
if self.dialog_stack:
(self.current_dialog, do_show) = self.dialog_stack.pop()
self.execBegin(first=False, do_show=do_show)
else:
self.current_dialog = None
def exec
|
GreenLightGo/django-mailer-2
|
django_mailer/management/commands/retry_deferred.py
|
Python
|
mit
| 1,161
| 0.001723
|
from django.core.management.base import NoArgsCommand
from django_mailer import models
from django_mailer.management.commands import create_handler
from optparse import make_option
import logging
class Command(NoArgsCommand):
help = 'Place deferred messages back in the queue.'
option_list = NoArgsCommand.option_list + (
make_option('-m', '--max-retries', type='int',
help="Don't reset deferred messages with more than this many "
"retries."),
)
def handle_noargs(self, verbosity, max_retries=None, **options):
# Send logged messages to the console.
logger = logging.getLogger('django_mailer')
handler = create_handler(verbosity)
logger.addHandler(handler)
        count = models.QueuedMessage.objects.retry_deferred(
max_retries=max_retries)
if count:
logger = logging.getLogger('django_mailer.commands.retry_deferred')
            logger.warning("%s deferred message%s placed back in the queue" %
(count, count != 1 and 's' or ''))
logger.removeHandler(handler)
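# A typical invocation of this management command, assuming django_mailer is
# installed in the project; the retry limit shown is illustrative:
#   python manage.py retry_deferred --max-retries=3
# Messages deferred more times than --max-retries are left untouched.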
|
KonradBreitsprecher/espresso
|
src/python/espressomd/comfixed.py
|
Python
|
gpl-3.0
| 699
| 0.001431
|
from __future__ import print_function, absolute_import
from .script_interface import ScriptInterfaceHelper, script_interface_register
@script_interface_register
class ComFixed(ScriptInterfaceHelper):
"""Fix the center of mass of specific types.
Subtracts mass-weighted fraction of the total
force action on all particles of the type from
    the particles after each force calculation. This
keeps the center of mass of the type fixed iff
the total momentum of the type is zero.
Parameters
----------
    types : array_like
List of types of which the center of mass
should be fixed.
"""
_so_name = "ComFixed"
_so_creation_policy = "GLOBAL"
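# A minimal, hypothetical construction sketch: the particle type list is a
# placeholder, and attaching the object to a running System (commonly via
# system.comfixed) is assumed to follow the usual espressomd script-interface
# pattern.
def _example_com_fixed():
    # Keep the centre of mass of all particles of type 0 fixed.
    return ComFixed(types=[0])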
|
tensorflow/tensorflow
|
tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/multi_arguments_results_v1.py
|
Python
|
apache-2.0
| 3,570
| 0.005882
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# RUN: %p/multi_arguments_results_v1 | FileCheck %s
# pylint: disable=missing-docstring,line-too-long
import tensorflow.compat.v1 as tf
from tensorflow.compiler.mlir.tensorflow.tests.tf_saved_model import common_v1
from tensorflow.python.ops import array_ops
# Tests multiple inputs and outputs with index paths.
# CHECK-LABEL: func @key(
# CHECK-SAME: %[[ARG0:.*]]: tensor<3x5xf32> {tf_saved_model.index_path = ["y"]}
# CHECK-SAME: %[[ARG1:.*]]: tensor<5x3xf32> {tf_saved_model.index_path = ["x"]}
# CHECK-SAME: tensor<3x3xf32> {tf_saved_model.index_path = ["t"]}
# CHECK-SAME: tensor<5x5xf32> {tf_saved_model.index_path = ["s"]}
# CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["key"]
# CHECK-DAG: %[[MUL0:.*]] = "tf.MatMul"(%[[ARG1]], %[[ARG0]])
# CHECK-DAG: %[[MUL1:.*]] = "tf.MatMul"(%[[ARG0]], %[[ARG1]])
# CHECK: %[[IDENTITY:.*]]:2 = "tf.IdentityN"(%[[MUL1]], %[[MUL0]])
# CHECK: return %[[IDENTITY]]#0, %[[IDENTITY]]#1
# CHECK-LABEL: func @key2(
# CHECK-SAME: %[[ARG1:.*]]: tensor<5x3xf32> {tf_saved_model.index_path = ["b"]}
# CHECK-SAME: %[[ARG0:.*]]: tensor<3x5xf32> {tf_saved_model.index_path = ["a"]}
# CHECK-SAME: tensor<5x5xf32> {tf_saved_model.index_path = ["d"]}
# CHECK-SAME: tensor<3x3xf32> {tf_saved_model.index_path = ["c"]}
# CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["key2"]
# CHECK-DAG: %[[MUL1:.*]] = "tf.MatMul"(%[[ARG0]], %[[ARG1]])
# CHECK-DAG: %[[MUL2:.*]] = "tf.MatMul"(%[[ARG1]], %[[ARG0]])
# CHECK: %[[IDENTITY:.*]]:2 = "tf.IdentityN"(%[[MUL1]], %[[MUL2]])
# CHECK: return %[[IDENTITY]]#1, %[[IDENTITY]]#0
def Test():
x = tf.constant(1.0, shape=(5, 3))
y = tf.constant(1.0, shape=(3, 5))
s = tf.matmul(x, y)
t = tf.matmul(y, x)
[t, s] = array_ops.identity_n([t, s])
tensor_info_x = tf.compat.v1.saved_model.utils.build_tensor_info(x)
tensor_info_y = tf.compat.v1.saved_model.utils.build_tensor_info(y)
tensor_info_s = tf.compat.v1.saved_model.utils.build_tensor_info(s)
tensor_info_t = tf.compat.v1.saved_model.utils.build_tensor_info(t)
return {
'key': (tf.compat.v1.saved_model.signature_def_utils.build_signature_def(
inputs={
'x': tensor_info_x,
'y': tensor_info_y
},
outputs={
's': tensor_info_s,
't': tensor_info_t
},
method_name='some_function')),
'key2': (tf.compat.v1.saved_model.signature_def_utils.build_signature_def(
inputs={
'a': tensor_info_y,
'b': tensor_info_x,
},
outputs={
'c': tensor_info_t,
'd': tensor_info_s,
},
method_name='reverse_arguments'))
}, None, None
if __name__ == '__main__':
common_v1.set_tf_options()
common_v1.do_test(Test)
|
openwebinars-django/newspaper
|
newspaper/newspaper/news/admin.py
|
Python
|
apache-2.0
| 353
| 0.002833
|
from django.contrib import admin
from newspaper.news.models import News, Event
class NewsAdmin(admin.ModelAdmin):
list_display = ('title', 'publish_date')
list_filter = ('publish_date',)
search_fields = ('title',)
class EventAdmin(admin.ModelAdmin):
pass
admin.site.register(News, NewsAdmin)
admin.site.register(Event, EventAdmin)
|
slyphon/pants
|
src/python/pants/engine/exp/graph.py
|
Python
|
apache-2.0
| 7,197
| 0.009032
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import collections
import six
from pants.build_graph.address import Address
from pants.engine.exp.addressable import AddressableDescriptor, TypeConstraintError
from pants.engine.exp.mapper import MappingError
from pants.engine.exp.objects import Resolvable, Serializable, SerializableFactory, Validatable
class ResolveError(Exception):
"""Indicates an error resolving an address to an object."""
class CycleError(ResolveError):
"""Indicates a cycle was detected during object resolution."""
class ResolvedTypeMismatchError(ResolveError):
"""Indicates a resolved object was not of the expected type."""
class Resolver(Resolvable):
"""Lazily resolves addressables using a graph."""
def __init__(self, graph, address):
self._graph = graph
self._address = address
def address(self):
return self._address.spec
def resolve(self):
return self._graph.resolve(self._address)
def __hash__(self):
return hash((self._graph, self._address))
def __eq__(self, other):
return (isinstance(other, Resolver) and
(self._graph, self._address) == (other._graph, other._address))
def __ne__(self, other):
return not (self == other)
def __repr__(self):
return 'Graph.Resolver(graph={}, address={!r})'.format(self._graph, self._address)
class Graph(object):
"""A lazy, directed acyclic graph of objects. Not necessarily connected."""
def __init__(self, address_mapper, inline=False):
"""Creates a build graph composed of addresses resolvable by an address mapper.
:param address_mapper: An address mapper that can resolve the objects addresses point to.
:type address_mapper: :class:`pants.engine.exp.mapper.AddressMapper`.
:param bool inline: If `True`, resolved addressables are inlined in the containing object;
otherwise a resolvable pointer is used that dynamically traverses to the
addressable on every access.
"""
self._address_mapper = address_mapper
# TODO(John Sirois): This will need to be eliminated in favor of just using the AddressMapper
# caching or else also expose an invalidation interface based on address.spec_path - aka
# AddressMapper.namespace.
#
# Our resolution cache.
self._resolved_by_address = {}
self._inline = inline
def resolve(self, address):
"""Resolves the object pointed at by the given `address`.
The object will be hydrated from the BUILD graph along with any objects it points to.
The following lifecycle for resolved objects is observed:
1. The object's containing BUILD file family is parsed if not already parsed. This is a 'thin'
parse that just hydrates immediate fields of objects defined in the BUILD file family.
2. The object's addressed values are all first resolved completely if not already resolved.
3. The object is reconstructed using the fully resolved values from step 2.
4. If the reconstructed object is a :class:`pants.engine.exp.objects.SerializableFactory`, its
`create` method is called to allow for a replacement object to be supplied.
5. The reconstructed object from step 3 (or replacement object from step 4) is validated if
it's an instance of :class:`pants.engine.exp.objects.Validatable`.
6. The fully resolved and validated object is cached and returned.
:param address: The BUILD graph address to resolve.
:type address: :class:`pants.build_graph.address.Address`
:returns: The object pointed at by the given `address`.
:raises: :class:`ResolveError` if no object was found at the given `address`.
:raises: :class:`pants.engine.exp.objects.ValidationError` if the object was resolvable but
invalid.
"""
try:
return self._resolve_recursively(address)
except MappingError as e:
raise ResolveError('Failed to resolve {}: {}'.format(address, e))
def _resolve_recursively(self, address, resolve_path=None):
resolved = self._resolved_by_address.get(address)
if resolved:
return resolved
resolve_path = resolve_path or []
if address in resolve_path:
raise CycleError('Cycle detected along path:\n\t{}'
.format('\n\t'.join('* {}'.format(a) if a == address else str(a)
for a in resolve_path + [address])))
    resolve_path.append(address)
obj = self._address_mapper.resolve(address)
def parse_addr(a):
return Address.parse(a, relative_to=address.spec_path)
def resolve_item(item, addr=None):
if Serializable.is_serializable(item):
hydrated_args = {'address': addr} if addr else {}
# Recurse on the Serializable's values and hydrates any addressables found. This unwinds
        # from the leaves thus hydrating item's closure in the inline case.
for key, value in item._asdict().items():
is_addressable = AddressableDescriptor.is_addressable(item, key)
def maybe_addr(x):
return parse_addr(x) if is_addressable and isinstance(x, six.string_types) else x
if isinstance(value, collections.MutableMapping):
container_type = type(value)
container = container_type()
container.update((k, resolve_item(maybe_addr(v))) for k, v in value.items())
hydrated_args[key] = container
elif isinstance(value, collections.MutableSequence):
container_type = type(value)
hydrated_args[key] = container_type(resolve_item(maybe_addr(v)) for v in value)
else:
hydrated_args[key] = resolve_item(maybe_addr(value))
# Re-build the thin Serializable with either fully hydrated objects or Resolvables
# substituted for all Address values; ie: Only ever expose fully resolved or resolvable
# closures for requested addresses.
return self._hydrate(type(item), **hydrated_args)
elif isinstance(item, Address):
if self._inline:
return self._resolve_recursively(item, resolve_path)
else:
# TODO(John Sirois): Implement lazy cycle checks across Resolver chains.
return Resolver(self, address=item)
else:
return item
resolved = resolve_item(obj, addr=address)
resolve_path.pop(-1)
self._resolved_by_address[address] = resolved
return resolved
@staticmethod
def _hydrate(item_type, **kwargs):
try:
item = item_type(**kwargs)
except TypeConstraintError as e:
raise ResolvedTypeMismatchError(e)
# Let factories replace the hydrated object.
if isinstance(item, SerializableFactory):
item = item.create()
# Finally make sure objects that can self-validate get a chance to do so before we cache
# them as the pointee of `hydrated_item.address`.
if isinstance(item, Validatable):
item.validate()
return item
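# A minimal usage sketch of the resolve() lifecycle documented above, assuming
# an AddressMapper has already been constructed for the repo; the target spec
# is a placeholder.
def _example_resolve(address_mapper):
  graph = Graph(address_mapper, inline=True)
  # Parses, hydrates, validates, and caches the object behind the address.
  return graph.resolve(Address.parse('3rdparty/python:six'))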
|
bringsvor/sponsor
|
wizards/add_sponsorship.py
|
Python
|
agpl-3.0
| 1,922
| 0.011967
|
__author__ = 'tbri'
from openerp import models, fields, api, _
class add_sponsorship_wizard(models.TransientModel):
_name = 'add_sponsorship_wizard'
    def _get_all_children(self):
c = []
children = self.env['res.partner'].search([('sponsored_child', '=', 'True')])
for n in children:
child_ref = '%s %s' % (n.child_ident, n.name)
c.append( (n.id, child_ref) )
return c
#sponsor_id = fields.Many2one('sponsor')
# see partner.py...........
## child_id = fields.Many2one('sponsored_child', domain=[('active','=',True)])
child_id = fields.Selection( _get_all_children , string=_('Child'))
    sub_sponsor = fields.Many2one('res.partner', _('Sub Sponsor'), domain=[('sub_sponsor','=',True)])
start_date = fields.Date(_('Start date'))
end_date = fields.Date(_('End date'))
@api.one
def data_save(self):
print "DATA_SAVE 1", self._context
"""
DATA_SAVAE! {'lang': 'en_US', 'search_disable_custom_filters': True, 'tz': False, 'uid': 1, 'active_model': 'sponsor', 'active_ids': [1], 'active_id': 1}
"""
model = self._context['active_model']
active_id = self._context['active_id']
assert model == 'res.partner'
sponsor = self.env['res.partner'].browse(active_id)
assert sponsor.sponsor
print "DATA_SAVE 2", sponsor
print "DATA_SAVE 3", self.child_id
sponsorship = {'sponsor_id' : active_id,
'sponsored_child' : int(self.child_id),
'start_date' : self.start_date,
'end_date' : self.end_date,
'sub_sponsor' : self.sub_sponsor}
print "CREATING SPONSORSHP"
self.env['sponsorship'].create( sponsorship)
return {'type': 'ir.actions.act_window_close'}
|
arpitprogressive/arpittest
|
apps/analytics/models.py
|
Python
|
bsd-3-clause
| 14,238
| 0.000702
|
# -*- coding: utf-8 -*-
"""
analytics.models
Models for Demand and Supply data
:copyright: (c) 2013 by Openlabs Technologies & Consulting (P) Limited
:license: see LICENSE for more details.
"""
import operator
from django.db import models
import django.contrib.admin
from admin.models import Occupation, Institution, Company, SubSector
__all__ = ['DEGREE_CHOICES', 'REGION_CHOICES', 'State', 'City', 'SupplyBase',
'DemandData', 'CompanyYearData', 'DiversityRatioLevel',
'DiversityRatioSubsector', 'GenderDiversity', 'ITSpend',
'RevenueSubsector', 'RevenueOccupation', 'RevenueTotal',
'TalentSaturation']
DEGREE_CHOICES = (
('UG', 'Undergraduate Degree'),
('PG', 'Postgraduate Degree'),
('DOC', 'Ph.D/M.Phil'),
('PSD', 'Post School Diploma'),
('PGD', 'Post Graduate Diploma'),
('UNK', 'Unknown'),
)
REGION_CHOICES = (
('NORTH', 'North'),
('SOUTH', 'South'),
('EAST', 'East'),
('WEST', 'West'),
('CENTRAL', 'Central'),
)
class State(models.Model):
"""
States
"""
name = models.CharField(max_length=50, default=None, unique=True)
region = models.CharField(max_length=12, choices=REGION_CHOICES)
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('name', 'region',)
def __unicode__(self):
"""
Returns object display name
"""
return self.name
class City(models.Model):
"""
Cities
"""
name = models.CharField(max_length=50, default=None)
state = models.ForeignKey('State')
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('name', 'state',)
verbose_name_plural = 'Cities'
def __unicode__(self):
"""
Returns object display name
"""
return "%s,%s" % (self.name, self.state)
class SupplyBase(models.Model):
"""
Demand supply data
"""
year = models.IntegerField()
city = models.ForeignKey('City')
occupation = models.ForeignKey(Occupation)
institution = models.ForeignKey(Institution)
degree = models.CharField(max_length=3, choices=DEGREE_CHOICES,
default=None)
supply = models.IntegerField()
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'city', 'occupation', 'institution',
'degree',)
verbose_name_plural = 'SupplyBase'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s,%s" % (self.year, self.city, self.occupation,)
class DemandData(models.Model):
"""
Demand data
"""
year = models.IntegerField()
city = models.ForeignKey('City')
occupation = models.ForeignKey(Occupation)
company = models.ForeignKey(Company)
demand = models.IntegerField()
headcount = models.IntegerField()
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'city', 'occupation', 'company',)
verbose_name_plural = 'DemandBase'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s,%s" % (self.year, self.city, self.occupation,)
class CompanyYearData(models.Model):
"""
Revenue, Headcount data for companies annually
"""
year = models.IntegerField()
company = models.ForeignKey(Company)
revenue = models.IntegerField()
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'company', )
verbose_name_plural = 'Company Annual Data'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s" % (self.year, self.company, )
class DiversityRatioLevel(models.Model):
"""
Diversity ratio for levels
"""
year = models.IntegerField(unique=True)
male_leadership = models.IntegerField(
verbose_name='Percent Male in Leadership roles'
)
male_entry = models.IntegerField(
        verbose_name='Percent Male in Entry Level roles'
)
male_middle = models.IntegerField(
verbose_name='Percent Male in Middle Level roles'
)
create_date = models.DateTimeField(auto_now_add=True)
    write_date = models.DateTimeField(auto_now=True)
@property
def female_leadership(self):
"Percent Females in leadership level roles"
return 100 - self.male_leadership
@property
def female_entry(self):
"Percent Females in entry level roles"
return 100 - self.male_entry
@property
def female_middle(self):
"Percent Females in middle level roles"
return 100 - self.male_middle
class Meta:
verbose_name_plural = 'Diversity Ratio for Experience Levels'
def __unicode__(self):
"""
Returns object display name
"""
return "%d" % (self.year, )
class DiversityRatioSubsector(models.Model):
"""
Diversity ratio for subsector
"""
year = models.IntegerField()
subsector = models.ForeignKey(SubSector, verbose_name='Sub-sector')
male = models.IntegerField(verbose_name='Percent males in subsector')
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
@property
def female(self):
"Percent Females in subsector"
return 100 - self.male
class Meta:
unique_together = ('year', 'subsector', )
verbose_name_plural = 'Diversity Ratio for Subsector'
def __unicode__(self):
"""
Returns object display name
"""
return "%d, %s" % (self.year, self.subsector, )
class GenderDiversity(models.Model):
"""
Gender diversity as per course
"""
year = models.IntegerField()
category = models.CharField(max_length=60)
male = models.IntegerField()
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'category', )
verbose_name_plural = 'Gender Diversity'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s" % (self.year, self.category, )
class ITSpend(models.Model):
"""
IT Spend data
"""
year = models.IntegerField()
sub_sector = models.ForeignKey(SubSector, verbose_name='Sub-sector')
world_spend = models.IntegerField(verbose_name='World IT Spend')
india_revenue = models.IntegerField(verbose_name='Indian IT Revenue')
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'sub_sector', )
verbose_name_plural = 'IT Spend'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s" % (self.year, self.sub_sector, )
class RevenueSubsector(models.Model):
"""
Revenue per subsector
"""
year = models.IntegerField()
sub_sector = models.ForeignKey(SubSector)
revenue = models.IntegerField()
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'sub_sector', )
verbose_name_plural = 'Revenue by Subsector'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s" % (self.year, self.sub_sector, )
class RevenueOccupation(models.Model):
"""
Revenue by occupation
"""
year = models.IntegerField()
occupation = models.ForeignKey(Occupation)
revenue = models.IntegerField()
cagr_next_7_years = models.IntegerField(
verbose_name='CAGR % for next 7 years'
)
create_date = models.DateTimeField(auto_now_add=T
|
cclauss/In-Harms-Way
|
server/run_django.py
|
Python
|
apache-2.0
| 727
| 0.016506
|
#!/usr/bin/env python
import os, sys, webbrowser
try: from subprocess import getstatusoutput # Python3
except: from commands import getstatusoutput # Python2
def shell_command(command): # do the command and print the output
cmdResults = getstatusoutput(command)
if True: # not cmdResults[0]:
for theLine in cmdResults[1].splitlines():
print(theLine.partition('==')[0])
if __name__ == '__main__':
port = os.getenv('VCAP_APP_PORT', None)
if port: # running on Bluemix
        shell_command('python manage.py runserver --noreload 0.0.0.0:' + port)
else: # running locally
webbrowser.open('http://127.0.0.1:8000')
shell_command('python3 manage.py runserver')
|
yosukesuzuki/url-shortner
|
admin.py
|
Python
|
bsd-2-clause
| 1,232
| 0
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START app]
from flask import Flask, request
from google.appengine.api import users
from models import User
from tasks import create_dataset, create_click_log_data
app = Flask(__name__)
@app.route('/_admin/createbq', methods=['GET'])
def create_bq():
result = create_dataset()
return result, 200
@app.route('/_admin/createtestdata', methods=['GET'])
def create_test_data():
team_id = request.cookies.get('team', False)
user_key_name = "{}_{}".format(team_id, users.get_current_user().user_id())
user_entity = User.get_by_id(user_key_name)
result = create_click_log_data(user_entity.team)
return result, 200
# [END app]
|
pioneers/topgear
|
ipython-in-depth/examples/Embedding/internal_ipkernel.py
|
Python
|
apache-2.0
| 2,018
| 0.004955
|
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import sys
from IPython.lib.kernel import connect_qtconsole
from IPython.kernel.zmq.kernelapp import IPKernelApp
#-----------------------------------------------------------------------------
# Functions and classes
#-----------------------------------------------------------------------------
def mpl_kernel(gui):
"""Launch and return an IPython kernel with matplotlib support for the desired gui
"""
kernel = IPKernelApp.instance()
kernel.initialize(['python', '--matplotlib=%s' % gui,
#'--log-level=10'
])
return kernel
class InternalIPKernel(object):
def init_ipkernel(self, backend):
# Start IPython kernel with GUI event loop and mpl support
self.ipkernel = mpl_kernel(backend)
# To create and track active qt consoles
self.consoles = []
# This application will also act on the shell user namespace
self.namespace = self.ipkernel.shell.user_ns
# Example: a variable that will be seen by the user in the shell, and
# that the GUI modifies (the 'Counter++' button increments it):
self.namespace['app_counter'] = 0
#self.namespace['ipkernel'] = self.ipkernel # dbg
def print_namespace(self, evt=None):
print("\n***Variables in User namespace***")
for k, v in self.namespace.items():
if not k.startswith('_'):
print('%s -> %r' % (k, v))
sys.stdout.flush()
def new_qt_console(self, evt=None):
"""start a new qtconsole connected to our kernel"""
return connect_qtconsole(self.ipkernel.connection_file, profile=self.ipkernel.profile)
def count(self, evt=None):
self.namespace['app_counter'] += 1
def cleanup_consoles(self, evt=None):
for c in self.consoles:
c.kill()
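# A minimal sketch of how a host GUI application would typically drive the
# mixin above: create the kernel, open one console, then hand control to the
# kernel's blocking loop. 'qt' is an illustrative matplotlib GUI backend choice.
def _example_embed():
    app = InternalIPKernel()
    app.init_ipkernel('qt')
    app.consoles.append(app.new_qt_console())
    app.ipkernel.start()  # blocks, servicing kernel requests and the GUI loop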
|
wright-group/WrightTools
|
WrightTools/artists/_interact.py
|
Python
|
mit
| 20,114
| 0.00179
|
"""Interactive (widget based) artists."""
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider, RadioButtons
from types import SimpleNamespace
from ._helpers import create_figure, plot_colorbar, add_sideplot
from ._base import _order_for_imshow
from ._colors import colormaps
from ..exceptions import DimensionalityError
from .. import kit as wt_kit
from .. import data as wt_data
__all__ = ["interact2D"]
class Focus:
def __init__(self, axes, linewidth=2):
self.axes = axes
self.linewidth = linewidth
ax = axes[0]
for side in ["top", "bottom", "left", "right"]:
ax.spines[side].set_linewidth(self.linewidth)
self.focus_axis = ax
def __call__(self, ax):
if type(ax) == str:
            ind = self.axes.index(self.focus_axis)
if ax == "next":
ind -= 1
elif ax == "previous":
ind += 1
ax = self.axes[ind % len(self.axes)]
if self.focus_axis == ax or ax not in self.axes:
return
else: # set new focus
            for spine in ["top", "bottom", "left", "right"]:
self.focus_axis.spines[spine].set_linewidth(1)
ax.spines[spine].set_linewidth(self.linewidth)
self.focus_axis = ax
def _at_dict(data, sliders, xaxis, yaxis):
return {
a.natural_name: (a[:].flat[int(sliders[a.natural_name].val)], a.units)
for a in data.axes
if a not in [xaxis, yaxis]
}
def get_axes(data, axes):
xaxis, yaxis = axes
if type(xaxis) in [int, str]:
xaxis = wt_kit.get_index(data.axis_names, xaxis)
xaxis = data.axes[xaxis]
elif type(xaxis) != wt_data.Axis:
raise TypeError("invalid xaxis type {0}".format(type(xaxis)))
if type(yaxis) in [int, str]:
yaxis = wt_kit.get_index(data.axis_names, yaxis)
yaxis = data.axes[yaxis]
elif type(yaxis) != wt_data.Axis:
raise TypeError("invalid xaxis type {0}".format(type(yaxis)))
return xaxis, yaxis
def get_channel(data, channel):
if isinstance(channel, int):
channel = data.channels[channel]
elif isinstance(channel, str):
channel = [ch for ch in data.channels if ch.natural_name == channel][0]
elif type(channel) != wt_data.Channel:
raise TypeError("invalid channel type {0}".format(type(channel)))
return channel
def get_colormap(channel):
if channel.signed:
cmap = "signed"
else:
cmap = "default"
cmap = colormaps[cmap]
cmap.set_bad([0.75] * 3, 1.0)
cmap.set_under([0.75] * 3, 1.0)
return cmap
def get_clim(channel, current_state):
if current_state.local:
arr = current_state.dat[channel.natural_name][:]
if channel.signed:
mag = np.nanmax(np.abs(arr))
clim = [-mag, mag]
else:
clim = [0, np.nanmax(arr)]
else:
if channel.signed:
clim = [-channel.mag(), channel.mag()]
else:
clim = [0, channel.max()]
return clim
def gen_ticklabels(points, signed=None):
step = np.nanmin(np.diff(points))
if step == 0: # zeros everywhere
ticklabels = ["" for i in range(11)]
if signed:
ticklabels[5] = "0"
else:
ticklabels[0] = "0"
return ticklabels
ordinal = np.log10(np.abs(step))
ndigits = -int(np.floor(ordinal))
if ndigits < 0:
ndigits += 1
fmt = "{0:0.0f}"
else:
fmt = "{" + "0:.{0}f".format(ndigits) + "}"
ticklabels = [fmt.format(round(point, ndigits)) for point in points]
return ticklabels
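# gen_ticklabels derives a decimal precision from the smallest spacing between
# points; with illustrative input:
#   gen_ticklabels([0.0, 0.5, 1.0, 1.5, 2.0]) -> ['0.0', '0.5', '1.0', '1.5', '2.0']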
def norm(arr, signed, ignore_zero=True):
if signed:
norm = np.nanmax(np.abs(arr))
else:
norm = np.nanmax(arr)
if norm != 0 and ignore_zero:
arr /= norm
return arr
def interact2D(
data: wt_data.Data, xaxis=0, yaxis=1, channel=0, local=False, use_imshow=False, verbose=True
):
"""Interactive 2D plot of the dataset.
Side plots show x and y projections of the slice (shaded gray).
Left clicks on the main axes draw 1D slices on side plots at the coordinates selected.
Right clicks remove the 1D slices.
For 3+ dimensional data, sliders below the main axes are used to change which slice is viewed.
Parameters
----------
data : WrightTools.Data object
Data to plot.
xaxis : string, integer, or data.Axis object (optional)
Expression or index of x axis. Default is 0.
yaxis : string, integer, or data.Axis object (optional)
Expression or index of y axis. Default is 1.
channel : string, integer, or data.Channel object (optional)
Name or index of channel to plot. Default is 0.
local : boolean (optional)
Toggle plotting locally. Default is False.
use_imshow : boolean (optional)
If true, matplotlib imshow is used to render the 2D slice.
Can give better performance, but is only accurate for
uniform grids. Default is False.
verbose : boolean (optional)
Toggle talkback. Default is True.
"""
# avoid changing passed data object
data = data.copy()
# unpack
data.prune(keep_channels=channel)
channel = get_channel(data, channel)
xaxis, yaxis = get_axes(data, [xaxis, yaxis])
cmap = get_colormap(channel)
current_state = SimpleNamespace()
# create figure
nsliders = data.ndim - 2
if nsliders < 0:
raise DimensionalityError(">= 2", data.ndim)
# TODO: implement aspect; doesn't work currently because of our incorporation of colorbar
fig, gs = create_figure(width="single", nrows=7 + nsliders, cols=[1, 1, 1, 1, 1, "cbar"])
# create axes
ax0 = plt.subplot(gs[1:6, 0:5])
ax0.patch.set_facecolor("w")
cax = plt.subplot(gs[1:6, -1])
sp_x = add_sideplot(ax0, "x", pad=0.1)
sp_y = add_sideplot(ax0, "y", pad=0.1)
ax_local = plt.subplot(gs[0, 0], aspect="equal", frameon=False)
ax_title = plt.subplot(gs[0, 3], frameon=False)
ax_title.text(
0.5,
0.5,
data.natural_name,
fontsize=18,
horizontalalignment="center",
verticalalignment="center",
transform=ax_title.transAxes,
)
ax_title.set_axis_off()
# NOTE: there are more axes here for more buttons / widgets in future plans
# create lines
x_color = "#00BFBF" # cyan with increased saturation
y_color = "coral"
line_sp_x = sp_x.plot([None], [None], visible=False, color=x_color, linewidth=2)[0]
line_sp_y = sp_y.plot([None], [None], visible=False, color=y_color, linewidth=2)[0]
crosshair_hline = ax0.plot([None], [None], visible=False, color=x_color, linewidth=2)[0]
crosshair_vline = ax0.plot([None], [None], visible=False, color=y_color, linewidth=2)[0]
current_state.xarg = xaxis.points.flatten().size // 2
current_state.yarg = yaxis.points.flatten().size // 2
xdir = 1 if xaxis.points.flatten()[-1] - xaxis.points.flatten()[0] > 0 else -1
ydir = 1 if yaxis.points.flatten()[-1] - yaxis.points.flatten()[0] > 0 else -1
current_state.bin_vs_x = True
current_state.bin_vs_y = True
# create buttons
current_state.local = local
radio = RadioButtons(ax_local, (" global", " local"))
if local:
radio.set_active(1)
else:
radio.set_active(0)
for circle in radio.circles:
circle.set_radius(0.14)
# create sliders
sliders = {}
for axis in data.axes:
if axis not in [xaxis, yaxis]:
if axis.size > np.prod(axis.shape):
raise NotImplementedError("Cannot use multivariable axis as a slider")
slider_axes = plt.subplot(gs[~len(sliders), :]).axes
slider = Slider(slider_axes, axis.label, 0, axis.points.size - 1, valinit=0, valstep=1)
sliders[axis.natural_name] = slider
slider.ax.vlines(
range(axis.points.size - 1),
*slider.ax.get_ylim(),
colors="k",
linestyle=":",
alpha=0.5
)
slider.valtext.set_text(gen_ticklabe
|
pythonchelle/opencomparison
|
apps/core/tests/test_ga.py
|
Python
|
mit
| 1,060
| 0.00566
|
# -*- coding: utf-8 -*-
from core.test_utils.context_managers import SettingsOverride
from django import template
from django.test.testcases import TestCase
class PackaginatorTagsTests(TestCase):
def test_fixed_ga(self):
tpl = template.Template("""
{% load packaginator_tags %}
{% fixed_ga %}
""")
context = template.Context()
with SettingsOverride(URCHIN_ID='testid', DEBUG=False):
output = tpl.render(context)
            self.assertTrue('var pageTracker = _gat._getTracker("testid");' in output)
with SettingsOverride(URCHIN_ID='testid', DEBUG=True):
output = tpl.render(context)
self.assertEqual(output.strip(), "")
with SettingsOverride(URCHIN_ID=None, DEBUG=True):
output = tpl.render(context)
self.assertEqual(output.strip(), "")
with SettingsOverride(URCHIN_ID=None, DEBUG=False):
output = tpl.render(context)
self.assertEqual(output.strip(), "")
|
libracore/erpnext
|
erpnext/stock/doctype/delivery_note/delivery_note.py
|
Python
|
gpl-3.0
| 23,654
| 0.003551
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import frappe.defaults
from erpnext.controllers.selling_controller import SellingController
from erpnext.stock.doctype.batch.batch import set_batch_nos
from erpnext.stock.doctype.serial_no.serial_no import get_delivery_note_serial_no
from frappe import _
from frappe.contacts.doctype.address.address import get_company_address
from frappe.desk.notifications import clear_doctype_notifications
from frappe.model.mapper import get_mapped_doc
from frappe.model.utils import get_fetch_values
from frappe.utils import cint, flt
form_grid_templates = {
"items": "templates/form_grid/item_grid.html"
}
class DeliveryNote(SellingController):
def __init__(self, *args, **kwargs):
super(DeliveryNote, self).__init__(*args, **kwargs)
self.status_updater = [{
'source_dt': 'Delivery Note Item',
'target_dt': 'Sales Order Item',
'join_field': 'so_detail',
'target_field': 'delivered_qty',
'target_parent_dt': 'Sales Order',
'target_parent_field': 'per_delivered',
'target_ref_field': 'qty',
'source_field': 'qty',
'percent_join_field': 'against_sales_order',
'status_field': 'delivery_status',
'keyword': 'Delivered',
'second_source_dt': 'Sales Invoice Item',
'second_source_field': 'qty',
'second_join_field': 'so_detail',
'overflow_type': 'delivery',
'second_source_extra_cond': """ and exists(select name from `tabSales Invoice`
where name=`tabSales Invoice Item`.parent and update_stock = 1)"""
},
{
'source_dt': 'Delivery Note Item',
'target_dt': 'Sales Invoice Item',
'join_field': 'si_detail',
'target_field': 'delivered_qty',
'target_parent_dt': 'Sales Invoice',
'target_ref_field': 'qty',
'source_field': 'qty',
'percent_join_field': 'against_sales_invoice',
'overflow_type': 'delivery',
'no_allowance': 1
}]
if cint(self.is_return):
self.status_updater.append({
'source_dt': 'Delivery Note Item',
'target_dt': 'Sales Order Item',
'join_field': 'so_detail',
'target_field': 'returned_qty',
'target_parent_dt': 'Sales Order',
'source_field': '-1 * qty',
'second_source_dt': 'Sales Invoice Item',
'second_source_field': '-1 * qty',
'second_join_field': 'so_detail',
'extra_cond': """ and exists (select name from `tabDelivery Note`
where name=`tabDelivery Note Item`.parent and is_return=1)""",
'second_source_extra_cond': """ and exists (select name from `tabSales Invoice`
where name=`tabSales Invoice Item`.parent and is_return=1 and update_stock=1)"""
})
def before_print(self):
def toggle_print_hide(meta, fieldname):
df = meta.get_field(fieldname)
if self.get("print_without_amount"):
df.set("__print_hide", 1)
else:
df.delete_key("__print_hide")
item_meta = frappe.get_meta("Delivery Note Item")
print_hide_fields = {
"parent": ["grand_total", "rounded_total", "in_words", "currency", "total", "taxes"],
"items": ["rate", "amount", "discount_amount", "price_list_rate", "discount_percentage"]
}
for key, fieldname in print_hide_fields.items():
for f in fieldname:
toggle_print_hide(self.meta if key == "parent" else item_meta, f)
super(DeliveryNote, self).before_print()
def set_actual_qty(self):
for d in self.get('items'):
if d.item_code and d.warehouse:
actual_qty = frappe.db.sql("""select actual_qty from `tabBin`
where item_code = %s and warehouse = %s""", (d.item_code, d.warehouse))
d.actual_qty = actual_qty and flt(actual_qty[0][0]) or 0
def so_required(self):
"""check in manage account if sales order required or not"""
if frappe.db.get_value("Selling Settings", None, 'so_required') == 'Yes':
for d in self.get('items'):
if not d.against_sales_order:
frappe.throw(_("Sales Order required for Item {0}").format(d.item_code))
def validate(self):
self.validate_posting_time()
super(DeliveryNote, self).validate()
self.set_status()
self.so_required()
self.validate_proj_cust()
self.check_sales_order_on_hold_or_close("against_sales_order")
self.validate_for_items()
self.validate_warehouse()
self.validate_uom_is_integer("stock_uom", "stock_qty")
self.validate_uom_is_integer("uom", "qty")
self.validate_with_previous_doc()
if self._action != 'submit' and not self.is_return:
set_batch_nos(self, 'warehouse', True)
from erpnext.stock.doctype.packed_item.packed_item import make_packing_list
make_packing_list(self)
self.update_current_stock()
if not self.installation_status: self.installation_status = 'Not Installed'
def validate_with_previous_doc(self):
super(DeliveryNote, self).validate_with_previous_doc({
"Sales Order": {
"ref_dn_field": "against_sales_order",
"compare_fields": [["customer", "="], ["company", "="], ["project", "="], ["currency", "="]]
},
"Sales Order Item": {
"ref_dn_field": "so_detail",
"compare_fields": [["item_code", "="], ["uom", "="], ["conversion_factor", "="]],
"is_child_table": True,
"allow_duplicate_prev_row_id": True
},
"Sales Invoice": {
"ref_dn_field": "against_sales_invoice",
"compare_fields": [["customer", "="], ["company", "="], ["proje
|
ct", "="], ["currency", "="]]
},
"Sales Invoice Item
|
": {
"ref_dn_field": "si_detail",
"compare_fields": [["item_code", "="], ["uom", "="], ["conversion_factor", "="]],
"is_child_table": True,
"allow_duplicate_prev_row_id": True
},
})
if cint(frappe.db.get_single_value('Selling Settings', 'maintain_same_sales_rate')) \
and not self.is_return:
self.validate_rate_with_reference_doc([["Sales Order", "against_sales_order", "so_detail"],
["Sales Invoice", "against_sales_invoice", "si_detail"]])
def validate_proj_cust(self):
"""check for does customer belong to same project as entered.."""
if self.project and self.customer:
res = frappe.db.sql("""select name from `tabProject`
where name = %s and (customer = %s or
ifnull(customer,'')='')""", (self.project, self.customer))
if not res:
frappe.throw(_("Customer {0} does not belong to project {1}").format(self.customer, self.project))
def validate_for_items(self):
for d in self.get('items'):
#Customer Provided parts will have zero valuation rate
if frappe.db.get_value('Item', d.item_code, 'is_customer_provided_item'):
d.allow_zero_valuation_rate = 1
def validate_warehouse(self):
super(DeliveryNote, self).validate_warehouse()
for d in self.get_item_list():
if frappe.db.get_value("Item", d['item_code'], "is_stock_item") == 1:
if not d['warehouse']:
frappe.throw(_("Warehouse required for stock Item {0}").format(d["item_code"]))
def update_current_stock(self):
if self.get("_action") and self._action != "update_after_submit":
for d in self.g
|
jeremiahyan/odoo
|
addons/mass_mailing_crm/models/utm.py
|
Python
|
gpl-3.0
| 294
| 0.003401
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full co
|
pyright and licensing details.
from o
|
doo import fields, models
class UtmCampaign(models.Model):
_inherit = 'utm.campaign'
ab_testing_winner_selection = fields.Selection(selection_add=[('crm_lead_count', 'Leads')])
|
katakumpo/nicepy
|
nicepy/assertions/helpers.py
|
Python
|
mit
| 1,696
| 0.004717
|
# -*- coding: utf-8 *-*
from collections import OrderedDict
from nicepy.utils import ljust_all, pretty_rep
|
r
def get_failed_msg(compare_method, values, expected_values, names=None, expected_names=None):
failed_list = []
names = names or map(str,
|
range(len(values)))
expected_names = expected_names or [''] * len(names)
for value, expected_value, name, expected_name in zip(values, expected_values,
names, expected_names):
#print value, expected_value, name, expected_name
if not compare_method(expected_value, value):
failed_list.append((pretty_repr(value), pretty_repr(expected_value),
name, expected_name))
return _get_failed_msg(failed_list)
def _get_failed_msg(failed_list):
if not failed_list:
return None
msg = 'actual values != expected values:'
failed_list = zip(*map(ljust_all, zip(*failed_list)))
for value_repr, expected_value_repr, name, expected_name in sorted(failed_list):
msg += '\n\t%s' % name
if expected_name:
msg += ' != %s' % expected_name
msg += ': %s != %s' % (value_repr, expected_value_repr)
return msg
def get_multi_failed_msg(assert_method, *lists):
failed_msgs = OrderedDict()
for index, args in enumerate(zip(*lists)):
try:
assert_method(*args)
except AssertionError as e:
failed_msgs[index] = e.message
msg = None
if failed_msgs:
msg = 'Multi-assert failed:'
for index, error_msg in sorted(failed_msgs.iteritems()):
msg += '\nIndex %d: %s' % (index, error_msg)
return msg
|
isstiaung/Adimal
|
adimal/twitter_feed/migrations/0002_tweet_links.py
|
Python
|
mit
| 490
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-06-12 23:00
from __future__ import unicode_literals
from django.db import migrations, mode
|
ls
class Migration(migrations.Migration):
dependencies = [
('twitter_feed', '0001_initial'),
]
|
operations = [
migrations.AddField(
model_name='tweet',
name='links',
field=models.CharField(default=' ', max_length=200),
preserve_default=False,
),
]
|
chemelnucfin/tensorflow
|
tensorflow/python/keras/metrics_confusion_matrix_test.py
|
Python
|
apache-2.0
| 51,243
| 0.002381
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Keras metrics functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
from absl.testing import parameterized
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.keras import metrics
from tensorflow.python.keras.utils import metrics_utils
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
@test_util.run_all_in_graph_and_eager_modes
class FalsePositivesTest(test.TestCase):
def test_config(self):
fp_obj = metrics.FalsePositives(name='my_fp', thresholds=[0.4, 0.9])
self.assertEqual(fp_obj.name, 'my_fp')
self.assertEqual(len(fp_obj.variables), 1)
self.assertEqual(fp_obj.thresholds, [0.4, 0.9])
# Check save and restore config
fp_obj2 = metrics.FalsePositives.from_config(fp_obj.get_config())
self.assertEqual(fp_obj2.name, 'my_fp')
self.assertEqual(len(fp_obj2.variables), 1)
self.assertEqual(fp_obj2.thresholds, [0.4, 0.9])
def test_unweighted(self):
fp_obj = metrics.FalsePositives()
self.evaluate(variables.variables_initializer(fp_obj.variables))
y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
(1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
(0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))
update_op = fp_obj.update_state(y_true, y_pred)
self.evaluate(update_op)
result = fp_obj.result()
self.assertAllClose(7., result)
def test_weighted(self):
fp_obj = metrics.FalsePositives()
self.evaluate(variables.variables_initializer(fp_obj.variables))
y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
(1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
y_pred = constant
|
_op.constant(((0, 0,
|
1, 1, 0), (1, 1, 1, 1, 1),
(0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))
sample_weight = constant_op.constant((1., 1.5, 2., 2.5))
result = fp_obj(y_true, y_pred, sample_weight=sample_weight)
self.assertAllClose(14., self.evaluate(result))
def test_unweighted_with_thresholds(self):
fp_obj = metrics.FalsePositives(thresholds=[0.15, 0.5, 0.85])
self.evaluate(variables.variables_initializer(fp_obj.variables))
y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6),
(0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3)))
y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0),
(1, 1, 1, 1)))
update_op = fp_obj.update_state(y_true, y_pred)
self.evaluate(update_op)
result = fp_obj.result()
self.assertAllClose([7., 4., 2.], result)
def test_weighted_with_thresholds(self):
fp_obj = metrics.FalsePositives(thresholds=[0.15, 0.5, 0.85])
self.evaluate(variables.variables_initializer(fp_obj.variables))
y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6),
(0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3)))
y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0),
(1, 1, 1, 1)))
sample_weight = ((1.0, 2.0, 3.0, 5.0), (7.0, 11.0, 13.0, 17.0),
(19.0, 23.0, 29.0, 31.0), (5.0, 15.0, 10.0, 0))
result = fp_obj(y_true, y_pred, sample_weight=sample_weight)
self.assertAllClose([125., 42., 12.], self.evaluate(result))
def test_threshold_limit(self):
with self.assertRaisesRegexp(
ValueError,
r'Threshold values must be in \[0, 1\]. Invalid values: \[-1, 2\]'):
metrics.FalsePositives(thresholds=[-1, 0.5, 2])
with self.assertRaisesRegexp(
ValueError,
r'Threshold values must be in \[0, 1\]. Invalid values: \[None\]'):
metrics.FalsePositives(thresholds=[None])
@test_util.run_all_in_graph_and_eager_modes
class FalseNegativesTest(test.TestCase):
def test_config(self):
fn_obj = metrics.FalseNegatives(name='my_fn', thresholds=[0.4, 0.9])
self.assertEqual(fn_obj.name, 'my_fn')
self.assertEqual(len(fn_obj.variables), 1)
self.assertEqual(fn_obj.thresholds, [0.4, 0.9])
# Check save and restore config
fn_obj2 = metrics.FalseNegatives.from_config(fn_obj.get_config())
self.assertEqual(fn_obj2.name, 'my_fn')
self.assertEqual(len(fn_obj2.variables), 1)
self.assertEqual(fn_obj2.thresholds, [0.4, 0.9])
def test_unweighted(self):
fn_obj = metrics.FalseNegatives()
self.evaluate(variables.variables_initializer(fn_obj.variables))
y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
(1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
(0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))
update_op = fn_obj.update_state(y_true, y_pred)
self.evaluate(update_op)
result = fn_obj.result()
self.assertAllClose(3., result)
def test_weighted(self):
fn_obj = metrics.FalseNegatives()
self.evaluate(variables.variables_initializer(fn_obj.variables))
y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
(1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
(0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))
sample_weight = constant_op.constant((1., 1.5, 2., 2.5))
result = fn_obj(y_true, y_pred, sample_weight=sample_weight)
self.assertAllClose(5., self.evaluate(result))
def test_unweighted_with_thresholds(self):
fn_obj = metrics.FalseNegatives(thresholds=[0.15, 0.5, 0.85])
self.evaluate(variables.variables_initializer(fn_obj.variables))
y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6),
(0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3)))
y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0),
(1, 1, 1, 1)))
update_op = fn_obj.update_state(y_true, y_pred)
self.evaluate(update_op)
result = fn_obj.result()
self.assertAllClose([1., 4., 6.], result)
def test_weighted_with_thresholds(self):
fn_obj = metrics.FalseNegatives(thresholds=[0.15, 0.5, 0.85])
self.evaluate(variables.variables_initializer(fn_obj.variables))
y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6),
(0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3)))
y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0),
(1, 1, 1, 1)))
sample_weight = ((3.0,), (5.0,), (7.0,), (4.0,))
result = fn_obj(y_true, y_pred, sample_weight=sample_weight)
self.assertAllClose([4., 16., 23.], self.evaluate(result))
@test_util.run_all_in_graph_and_eager_modes
class TrueNegativesTest(test.TestCase):
def test_config(self):
tn_obj = metrics.TrueNegatives(name='my_tn', thresholds=[0.4, 0.9])
self.assertEqual(tn_obj.name, 'my_tn')
self.assertEqual(len(tn_obj.variables), 1)
self.assertEqual(tn_obj.thresholds, [0.4, 0.9])
# Check save and restore conf
|
JianfengYao/thefuck
|
thefuck/logs.py
|
Python
|
mit
| 1,661
| 0.000602
|
import sys
from traceback import format_exception
import colorama
def color(color_, settings):
"""Utility for ability to disabling colored output."""
if settings.no_colors:
return ''
else:
return color_
def exception(title, exc_info, settings):
sys.stderr.write(
u'{warn}[WARN] {title}:{reset}\n{trace}'
u'{warn}----------------------------{reset}\n\n'.format(
warn=color(
|
colorama.Back.RED + colorama.Fo
|
re.WHITE
+ colorama.Style.BRIGHT, settings),
reset=color(colorama.Style.RESET_ALL, settings),
title=title,
trace=''.join(format_exception(*exc_info))))
def rule_failed(rule, exc_info, settings):
exception('Rule {}'.format(rule.name), exc_info, settings)
def show_command(new_command, settings):
sys.stderr.write('{bold}{command}{reset}\n'.format(
command=new_command,
bold=color(colorama.Style.BRIGHT, settings),
reset=color(colorama.Style.RESET_ALL, settings)))
def confirm_command(new_command, settings):
sys.stderr.write(
'{bold}{command}{reset} [{green}enter{reset}/{red}ctrl+c{reset}]'.format(
command=new_command,
bold=color(colorama.Style.BRIGHT, settings),
green=color(colorama.Fore.GREEN, settings),
red=color(colorama.Fore.RED, settings),
reset=color(colorama.Style.RESET_ALL, settings)))
sys.stderr.flush()
def failed(msg, settings):
sys.stderr.write('{red}{msg}{reset}\n'.format(
msg=msg,
red=color(colorama.Fore.RED, settings),
reset=color(colorama.Style.RESET_ALL, settings)))
|
sio2project/filetracker
|
filetracker/servers/storage.py
|
Python
|
gpl-3.0
| 17,784
| 0.00045
|
"""This module is responsible for storing files on disk.
The storage strategy is as follows:
- Files themselves are stored in a separate directory called 'blobs'.
- Stored files are named by their SHA256 hashes (in hex).
- Stored files are grouped into directories by their first byte (two hex
characters), referred to as 'prefix'.
- To minimize disk usage, duplicate files are only stored once.
- All blobs are stored compressed (gzip).
 - A directory tree is maintained with symlinks that mirror the logical
file naming and hierarchy.
- Symlinks are created and deleted by the server as needed, and they
have their own modification time ("version") different from the
modification time of the blob.
- Accesses to links and blobs are protected by separate fcntl locks
to avoid concurrent modification.
- Additional metadata about blobs is stored in a BSDDB kv-store.
- The metadata stored ATM is the symlink count and decompressed
("logical") size.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import email.utils
import errno
import fcntl
import gevent
import gzip
import logging
import os
import shutil
import subprocess
import sys
import tempfile
import bsddb3
import six
from filetracker.utils import file_digest
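# Illustrative sketch: the layout described in the module docstring maps a
# file's SHA256 hex digest to blobs/<first two hex characters>/<digest>. The
# helper below is only an example of that mapping (the actual path logic lives
# in FileStorage._blob_path further down) and is not used elsewhere here.
import hashlib
def _example_blob_path(base_dir, raw_bytes):
    """Return the blob path the documented strategy would use for raw_bytes."""
    digest = hashlib.sha256(raw_bytes).hexdigest()
    prefix = digest[:2]  # first byte of the hash, as two hex characters
    return os.path.join(base_dir, 'blobs', prefix, digest)
# e.g. _example_blob_path('/srv/ft', b'hello') -> '/srv/ft/blobs/2c/2cf24dba...'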
|
_LOCK_RETRIES = 20
_LOCK_SLEEP_TIME_S = 1
logger = logging.getLogger(__name__)
class FiletrackerFileNotFoundError(Exception):
pass
class ConcurrentModificationError(Exception):
"""Raised after acquiring lock failed multiple times."""
def __init__(self, lock_name):
message = 'Failed to acquire lock: {}'.format(lock_name)
super(ConcurrentModificationError, self).__init__(self, message)
class FileStorage(object):
"""Manages the whole file
|
storage."""
def __init__(self, base_dir):
self.base_dir = base_dir
self.blobs_dir = os.path.join(base_dir, 'blobs')
self.links_dir = os.path.join(base_dir, 'links')
self.locks_dir = os.path.join(base_dir, 'locks')
self.db_dir = os.path.join(base_dir, 'db')
_makedirs(self.blobs_dir)
_makedirs(self.links_dir)
_makedirs(self.locks_dir)
_makedirs(self.db_dir)
# https://docs.oracle.com/cd/E17076_05/html/programmer_reference/transapp_env_open.html
self.db_env = bsddb3.db.DBEnv()
try:
self.db_env.open(
self.db_dir,
bsddb3.db.DB_CREATE
| bsddb3.db.DB_INIT_LOCK
| bsddb3.db.DB_INIT_LOG
| bsddb3.db.DB_INIT_MPOOL
| bsddb3.db.DB_INIT_TXN
| bsddb3.db.DB_REGISTER,
)
except bsddb3.db.DBRunRecoveryError:
raise RuntimeError(
'DB requires recovery! It should have run in .run.main...'
)
self.db = bsddb3.db.DB(self.db_env)
self.db.open(
'metadata',
dbtype=bsddb3.db.DB_HASH,
flags=bsddb3.db.DB_CREATE | bsddb3.db.DB_AUTO_COMMIT,
)
def __del__(self):
self.db.close()
self.db_env.close()
def store(
self,
name,
data,
version,
size=0,
compressed=False,
digest=None,
logical_size=None,
):
"""Adds a new file to the storage.
If the file with the same name existed before, it's not
guaranteed that the link for the old version will exist until
the operation completes, but it's guaranteed that the link
will never point to an invalid blob.
Args:
name: name of the file being stored.
May contain slashes that are treated as path separators.
data: binary file-like object with file contents.
Files with unknown length are supported for compatibility with
WSGI interface: ``size`` parameter should be passed in these
cases.
version: new file "version"
Link modification time will be set to this timestamp. If
the link exists, and its modification time is higher, the
file is not overwritten.
size: length of ``data`` in bytes
If not 0, this takes priority over internal ``data`` size.
compressed: whether ``data`` is gzip-compressed
                If True, the compression is skipped, and the file is written as-is.
                Note that the current server implementation sends a
                'Content-Encoding' header anyway, mandating the client to
decompress the file.
digest: SHA256 digest of the file before compression
If specified, the digest will not be computed again, saving
resources.
logical_size: if ``data`` is gzip-compressed, this parameter
has to be set to decompressed file size.
"""
with _exclusive_lock(self._lock_path('links', name)):
logger.debug('Acquired lock to link for %s.', name)
link_path = self._link_path(name)
if _path_exists(link_path) and _file_version(link_path) > version:
logger.info(
'Tried to store older version of %s (%d < %d), ignoring.',
name,
version,
_file_version(link_path),
)
return _file_version(link_path)
# data is managed by contents now, and shouldn't be used directly
with _InputStreamWrapper(data, size) as contents:
if digest is None or logical_size is None:
contents.save()
if compressed:
# This shouldn't occur if the request came from a proper
# filetracker client, so we don't care if it's slow.
logger.warning('Storing compressed stream without hints.')
with gzip.open(contents.current_path, 'rb') as decompressed:
digest = file_digest(decompressed)
with gzip.open(contents.current_path, 'rb') as decompressed:
logical_size = _read_stream_for_size(decompressed)
else:
digest = file_digest(contents.current_path)
logical_size = os.stat(contents.current_path).st_size
blob_path = self._blob_path(digest)
with _exclusive_lock(self._lock_path('blobs', digest)):
logger.debug('Acquired lock for blob %s.', digest)
digest_bytes = digest.encode()
with self._db_transaction() as txn:
logger.debug('Started DB transaction (adding link).')
link_count = int(self.db.get(digest_bytes, 0, txn=txn))
new_count = str(link_count + 1).encode()
self.db.put(digest_bytes, new_count, txn=txn)
if link_count == 0:
self.db.put(
'{}:logical_size'.format(digest).encode(),
str(logical_size).encode(),
txn=txn,
)
logger.debug('Commiting DB transaction (adding link).')
logger.debug('Committed DB transaction (adding link).')
# Create a new blob if this isn't a duplicate.
if link_count == 0:
logger.debug('Creating new blob.')
_create_file_dirs(blob_path)
if compressed:
contents.save(blob_path)
else:
contents.save()
with open(contents.current_path, 'rb') as raw, gzip.open(
blob_path, 'wb'
) as blob:
shutil.copyfileobj(raw, blob)
logger.de
|
srottem/indy-sdk
|
wrappers/python/tests/wallet/test_import_wallet.py
|
Python
|
apache-2.0
| 1,307
| 0.00153
|
import pytest
from indy import IndyError
from indy import did
from indy import wallet
from indy.error import ErrorCode
@pytest.mark.asyncio
@pytest.mark.parametrize("wallet_handle_cleanup", [
|
False])
async def test_import_wallet_works(wallet_handle, wallet_config, credentials, export_config):
(_did, _verkey) = await did.create_and_store_my_did(wallet_handle, "{}")
await did.set_did_metadata(wallet_handle, _did, "metadata")
did_with_meta_before = await did.get_my_did_with_meta(wallet_handle, _did
|
)
await wallet.export_wallet(wallet_handle, export_config)
await wallet.close_wallet(wallet_handle)
await wallet.delete_wallet(wallet_config, credentials)
await wallet.import_wallet(wallet_config, credentials, export_config)
wallet_handle = await wallet.open_wallet(wallet_config, credentials)
did_with_meta_after = await did.get_my_did_with_meta(wallet_handle, _did)
assert did_with_meta_before == did_with_meta_after
await wallet.close_wallet(wallet_handle)
@pytest.mark.asyncio
async def test_import_wallet_works_for_not_exit_path(wallet_config, credentials, export_config):
with pytest.raises(IndyError) as e:
await wallet.import_wallet(wallet_config, credentials, export_config)
assert ErrorCode.CommonIOError == e.value.error_code
|
kbase/narrative
|
src/biokbase/service/Client.py
|
Python
|
mit
| 7,649
| 0.000392
|
try:
import json as _json
except ImportError:
import sys
sys.path.append("simplejson-2.3.3")
import simplejson as _json
import requests as _requests
import urllib.parse as _urlparse
import random as _random
import base64 as _base64
from configparser import ConfigParser as _ConfigParser
import os as _os
_CT = "content-type"
_AJ = "application/json"
_URL_SCHEME = frozenset(["http", "https"])
def _get_token(
user_id,
password,
auth_svc="https://nexus.api.globusonline.org/goauth/token?"
+ "grant_type=client_credentials",
):
    # This is a bandaid helper function until we get a full
# KBase python auth client released
auth = _base64.b64encode(user_id + ":" + password)
headers = {"Authorization": "Basic " + auth}
ret = _requests.get(auth_svc, headers=headers, allow_redirects=True)
status = ret.status_code
if status >= 200 and status <= 299:
tok = _json.loads(ret.text)
elif status == 403:
raise Exception(
"Authentication failed: Bad user_id/password "
+ "combination for user %s" % (user_id)
)
else:
raise Exception(ret.text)
return tok["access_token"]
def _read_rcfile(file=_os.environ["HOME"] + "/.authrc"): # @ReservedAssignment
# Another bandaid to read in the ~/.authrc file if one is present
authdata = None
if _os.path.exists(file):
try:
with open(file) as authrc:
rawdata = _json.load(authrc)
# strip down whatever we read to only what is legit
authdata = {
x: rawdata.get(x)
for x in (
"user_id",
"token",
"client_secret",
"keyfile",
"keyfile_passphrase",
"password",
)
}
except Exception as e:
print("Error while reading authrc file %s: %s" % (file, e))
return authdata
def _read_inifile(
file=_os.environ.get( # @ReservedAssignment
"KB_DEPLOYMENT_CONFIG", _os.environ["HOME"] + "/.kbase_config"
)
):
# Another bandai
|
d to read in the
|
~/.kbase_config file if one is present
authdata = None
if _os.path.exists(file):
try:
config = _ConfigParser()
config.read(file)
# strip down whatever we read to only what is legit
authdata = {
x: config.get("authentication", x)
if config.has_option("authentication", x)
else None
for x in (
"user_id",
"token",
"client_secret",
"keyfile",
"keyfile_passphrase",
"password",
)
}
except Exception as e:
print("Error while reading INI file %s: %s" % (file, e))
return authdata
class ServerError(Exception):
def __init__(self, name, code, message, data=None, error=None):
self.name = name
self.code = code
self.message = "" if message is None else message
self.data = data or error or ""
# data = JSON RPC 2.0, error = 1.1
def __str__(self):
return (
self.name + ": " + str(self.code) + ". " + self.message + "\n" + self.data
)
class _JSONObjectEncoder(_json.JSONEncoder):
def default(self, obj):
if isinstance(obj, set):
return list(obj)
if isinstance(obj, frozenset):
return list(obj)
return _json.JSONEncoder.default(self, obj)
class Client(object):
def __init__(
self,
url=None,
timeout=30 * 60,
user_id=None,
password=None,
token=None,
ignore_authrc=False,
trust_all_ssl_certificates=False,
use_url_lookup=True,
):
if url is None:
raise ValueError("A url is required")
scheme, _, _, _, _, _ = _urlparse.urlparse(url)
if scheme not in _URL_SCHEME:
raise ValueError(url + " isn't a valid http url")
self.url = url
self.timeout = int(timeout)
self._headers = dict()
self.trust_all_ssl_certificates = trust_all_ssl_certificates
self.use_url_lookup = use_url_lookup
# token overrides user_id and password
if token is not None:
self._headers["AUTHORIZATION"] = token
elif user_id is not None and password is not None:
self._headers["AUTHORIZATION"] = _get_token(user_id, password)
elif "KB_AUTH_TOKEN" in _os.environ:
self._headers["AUTHORIZATION"] = _os.environ.get("KB_AUTH_TOKEN")
elif not ignore_authrc:
authdata = _read_inifile()
if authdata is None:
authdata = _read_rcfile()
if authdata is not None:
if authdata.get("token") is not None:
self._headers["AUTHORIZATION"] = authdata["token"]
elif (
authdata.get("user_id") is not None
and authdata.get("password") is not None
):
self._headers["AUTHORIZATION"] = _get_token(
authdata["user_id"], authdata["password"]
)
if self.timeout < 1:
raise ValueError("Timeout value must be at least 1 second")
def _call(self, url, method, params, json_rpc_context=None):
arg_hash = {
"method": method,
"params": params,
"version": "1.1",
"id": str(_random.random())[2:],
}
if json_rpc_context:
arg_hash["context"] = json_rpc_context
body = _json.dumps(arg_hash, cls=_JSONObjectEncoder)
ret = _requests.post(
url,
data=body,
headers=self._headers,
timeout=self.timeout,
verify=not self.trust_all_ssl_certificates,
)
if ret.status_code == _requests.codes.server_error:
if _CT in ret.headers:
ret.headers[_CT]
if _CT in ret.headers and ret.headers[_CT] == _AJ:
err = _json.loads(ret.text)
if "error" in err:
raise ServerError(**err["error"])
else:
raise ServerError("Unknown", 0, ret.text)
else:
raise ServerError("Unknown", 0, ret.text)
if ret.status_code != _requests.codes.OK:
ret.raise_for_status()
ret.encoding = "utf-8"
resp = _json.loads(ret.text)
if "result" not in resp:
raise ServerError("Unknown", 0, "An unknown server error occurred")
return resp["result"]
def sync_call(
self, service_method, param_list, service_version=None, json_rpc_context=None
):
if json_rpc_context and not isinstance(json_rpc_context, dict):
raise ValueError(
"Method send_data: argument json_rpc_context is not type dict as required."
)
url = self.url
if self.use_url_lookup:
module_name = service_method.split(".")[0]
service_status_ret = self._call(
self.url,
"ServiceWizard.get_service_status",
[{"module_name": module_name, "version": service_version}],
None,
)[0]
url = service_status_ret["url"]
return self._call(url, service_method, param_list, json_rpc_context)
|
vlegoff/tsunami
|
src/secondaires/navigation/commandes/matelot/__init__.py
|
Python
|
bsd-3-clause
| 3,210
| 0.000312
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and
|
the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of it
|
s contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant la commande 'matelot' et ses sous-commandes.
Dans ce fichier se trouve la commande même.
"""
from primaires.interpreteur.commande.commande import Commande
from .affecter import PrmAffecter
from .creer import PrmCreer
from .editer import PrmEditer
from .info import PrmInfo
from .liste import PrmListe
from .poste import PrmPoste
from .promouvoir import PrmPromouvoir
from .recruter import PrmRecruter
from .renommer import PrmRenommer
from .retirer import PrmRetirer
from .score import PrmScore
class CmdMatelot(Commande):
"""Commande 'matelot'.
"""
def __init__(self):
"""Constructeur de la commande"""
Commande.__init__(self, "matelot", "seaman")
self.nom_categorie = "navire"
self.aide_courte = "manipulation des matelots"
self.aide_longue = \
"Cette commande permet de manipuler les matelots de " \
"votre équipage individuellement. Il existe également " \
"la commande %équipage% qui permet de manipuler l'équipage " \
"d'un coup d'un seul."
def ajouter_parametres(self):
"""Ajout des paramètres"""
self.ajouter_parametre(PrmAffecter())
self.ajouter_parametre(PrmCreer())
self.ajouter_parametre(PrmEditer())
self.ajouter_parametre(PrmInfo())
self.ajouter_parametre(PrmListe())
self.ajouter_parametre(PrmPoste())
self.ajouter_parametre(PrmPromouvoir())
self.ajouter_parametre(PrmRecruter())
self.ajouter_parametre(PrmRenommer())
self.ajouter_parametre(PrmRetirer())
self.ajouter_parametre(PrmScore())
|
wolfram74/numerical_methods_iserles_notes
|
venv/lib/python2.7/site-packages/sympy/integrals/tests/test_transforms.py
|
Python
|
mit
| 31,213
| 0.002531
|
from sympy.integrals.transforms import (mellin_transform,
inverse_mellin_transform, laplace_transform, inverse_laplace_transform,
fourier_transform, inverse_fourier_transform,
sine_transform, inverse_sine_transform,
cosine_transform, inverse_cosine_transform,
hankel_transform, inverse_hankel_transform,
LaplaceTransform, FourierTransform, SineTransform, CosineTransform,
InverseLaplaceTransform, InverseFourierTransform, InverseSineTransform, InverseCosineTransform,
HankelTransform, InverseHankelTransform)
from sympy import (
gamma, exp, oo, Heaviside, symbols, Symbol, re, factorial, pi,
cos, S, And, sin, sqrt, I, log, tan, hyperexpand, meijerg,
EulerGamma, erf, besselj, bessely, besseli, besselk,
exp_polar, polar_lift, unpolarify, Function, expint, expand_mul,
combsimp, trigsimp)
from sympy.utilities.pytest import XFAIL, slow, skip
from sympy.matrices import Matrix, eye
from sympy.abc import x, s, a, b, c, d
nu, beta, rho = symbols('nu beta rho')
def test_undefined_function():
from sympy import Function, MellinTransform
f = Function('f')
assert mellin_transform(f(x), x, s) == MellinTransform(f(x), x, s)
assert mellin_transform(f(x) + exp(-x), x, s) == \
(MellinTransform(f(x), x, s) + gamma(s), (0, oo), True)
assert laplace_transform(2*f(x), x, s) == 2*LaplaceTransform(f(x), x, s)
# TODO test derivative and other rules when implemented
def test_free_symbols():
from sympy import Function
f = Function('f')
assert mellin_transform(f(x), x, s).free_symbols == set([s])
assert mellin_transform(f(x)*a, x, s).free_symbols == set([s, a])
def test_as_integral():
from sympy import Function, Integral
f = Function('f')
assert mellin_transform(f(x), x, s).rewrite('Integral') == \
Integral(x**(s - 1)*f(x), (x, 0, oo))
assert fourier_transform(f(x), x, s).rewrite('Integral') == \
Integral(f(x)*exp(-2*I*pi*s*x), (x, -oo, oo))
assert laplace_transform(f(x), x, s).rewrite('Integral') == \
Integral(f(x)*exp(-s*x), (x, 0, oo))
assert str(inverse_mellin_transform(f(s), s, x, (a, b)).rewrite('Integral')) \
== "Integral(x**(-s)*f(s), (s, _c - oo*I, _c + oo*I))"
assert str(inverse_laplace_transform(f(s), s, x).rewrite('Integral')) == \
"Integral(f(s)*exp(s*x), (s, _c - oo*I, _c + oo*I))"
assert inverse_fourier_transform(f(s), s, x).rewrite('Integral') == \
Integral(f(s)*exp(2*I*pi*s*x), (s, -oo, oo))
# NOTE this is stuck in risch because meijerint cannot handle it
@slow
@XFAIL
def test_mellin_transform_fail():
skip("Risch takes forever.")
from sympy import Max, Min
MT = mellin_transform
bpos = symbols('b', positive=True)
bneg = symbols('b', negative=True)
expr = (sqrt(x + b**2) + b)**a/sqrt(x + b**2)
# TODO does not work with bneg, argument wrong. Needs changes to matching.
assert MT(expr.subs(b, -bpos), x, s) == \
((-1)**(a + 1)*2**(a + 2*s)*bpos**(a + 2*s - 1)*gamma(a + s)
*gamma(1 - a - 2*s)/gamma(1 - s),
(-re(a), -re(a)/2 + S(1)/2), True)
expr = (sqrt(x + b**2) + b)**a
assert MT(expr.subs(b, -bpos), x, s) == \
(
2**(a + 2*s)*a*bpos**(a + 2*s)*gamma(-a - 2*
s)*gamma(a + s)/gamma(-s + 1),
(-re(a), -re(a)/2), True)
# Test exponent 1:
assert MT(expr.subs({b: -bpos, a: 1}), x, s) == \
(-bpos**(2*s + 1)*gamma(s)*gamma(-s - S(1)/2)/(2*sqrt(pi)),
(-1, -S(1)/2), True)
def test_mellin_transform():
from sympy import Max, Min, Ne
MT = mellin_transform
bpos = symbols('b', positive=True)
# 8.4.2
assert MT(x**nu*Heaviside(x - 1), x, s) == \
(-1/(nu + s), (-oo, -re(nu)), True)
assert MT(x**nu*Heaviside(1 - x), x, s) == \
(1/(nu + s), (-re(nu), oo), True)
assert MT((1 - x)**(beta - 1)*Heaviside(1 - x), x, s) == \
(gamma(beta)*gamma(s)/gamma(beta + s), (0, oo), re(-beta) < 0)
assert MT((x - 1)**(beta - 1)*Heaviside(x - 1), x, s) == \
(gamma(beta)*gamma(1 - beta - s)/gamma(1 - s),
(-oo, -re(beta) + 1), re(-beta) < 0)
assert MT((1 + x)**(-rho), x, s) == \
(gamma(s)*gamma(rho - s)/gamma(rho), (0, re(rho)), True)
# TODO also the conditions should be simplified
assert MT(abs(1 - x)**(-rho), x, s) == (
cos(pi*(rho/2 - s))*gamma(s)*gamma(rho - s)/(cos(pi*rho/2)*gamma(rho)),
(0, re(rho)), And(re(rho) - 1 < 0, re(rho) < 1))
mt = MT((1 - x)**(beta - 1)*Heaviside(1 - x)
+ a*(x - 1)**(beta - 1)*Heaviside(x - 1), x, s)
assert mt[1], mt[2] == ((0, -re(beta) + 1), True)
assert MT((x**a - b**a)/(x - b), x, s)[0] == \
pi*b**(a + s - 1)*sin(pi*a)/(sin(pi*s)*sin(pi*(a + s)))
assert MT((x**a - bpos**a)/(x - bpos), x, s) == \
(pi*bpos**(a + s - 1)*sin(pi*a)/(sin(pi*s)*sin(pi*(a + s))),
(Max(-re(a), 0), Min(1 - re(a), 1)), True)
expr = (sqrt(x + b**2) + b)**a
assert MT(expr.subs(b, bpos), x, s) == \
(-a*(2*bpos)**(a + 2*s)*gamma(s)*gamma(-a - 2*s)/gamma(-a - s + 1),
(0, -re(a)/2), True)
expr = (sqrt(x + b**2) + b)**a/sqrt(x + b**2)
assert MT(expr.subs(b, bpos), x, s) == \
(2**(a + 2*s)*bpos**(a + 2*s - 1)*gamma(s)
*gamma(1 - a - 2*s)/gamma(1 - a - s),
(0, -re(a)/2 + S(1)/2), True)
# 8.4.2
assert MT(exp(-x), x, s) == (gamma(s), (0, oo), True)
assert MT(exp(-1/x), x, s) == (gamma(-s), (-oo, 0), True)
# 8.4.5
assert MT(log(x)**4*Heaviside(1 - x), x, s) == (24/s**5, (0, oo), True)
assert MT(log(x)**3*Heaviside(x - 1), x, s) == (6/s**4, (-oo, 0), True)
assert MT(log(x + 1), x, s) == (pi/(s*sin(pi*s)), (-1, 0), True)
assert MT(log(1/x + 1), x, s) == (pi/(s*sin(pi*s)), (0, 1), True)
assert MT(log(abs(1 - x)), x, s) == (pi/(s*tan(pi*s)), (-1, 0), True)
assert MT(log(abs(1 - 1/x)), x, s) == (pi/(s*tan(pi*s)), (0, 1), True)
# TODO we cannot currently do these (needs summation of 3F2(-1))
# this also implies that they cannot be written as a single g-function
# (although this is possible)
mt = MT(log(x)/(x + 1), x, s)
assert mt[1:] == ((0, 1), True)
assert not hyperexpand(mt[0], allow_hyper=True).has(meijerg)
mt = MT(log(x)**2/(x + 1), x, s)
assert mt[1:] == ((0, 1), True)
assert not hyperexpand(mt[0], allow_hyper=True).has(meijerg)
mt = MT(log(x)/(x + 1)**2, x, s)
assert mt[1:] == ((0, 2), True)
assert not hyperexpand(mt[0], allow_hyper=True).has(meijerg)
# 8.4.14
assert MT(erf(sqrt(x)), x, s) == \
(-gamma(s + S(1)/2)/(sqrt(pi)*s), (-S(1)/2, 0), True)
def test_mellin_transform_bessel():
from sympy import Max, Min, hyper, meijerg
MT = mellin_transform
# 8.4.19
assert MT(besselj(a, 2*sqrt(x)), x, s) == \
(gamma(a/2 + s)/gamma(a/2 - s + 1), (-re(a)/2, S(3)/4), True)
assert MT(sin(sqrt(x))*besselj(a, sqrt(x)), x, s) == \
(2**a*gamma(-2*s + S(1)/2)*gamma(a/2 + s + S(1)/2)/(
gamma(-a/2 - s + 1)*gamma(a - 2*s + 1)), (
-re(a)/2 - S(1)/2, S(1)/4), True)
assert MT(cos(sqrt(x))*besselj(
|
a, sqrt(x)), x, s) == \
(2**a
|
*gamma(a/2 + s)*gamma(-2*s + S(1)/2)/(
gamma(-a/2 - s + S(1)/2)*gamma(a - 2*s + 1)), (
-re(a)/2, S(1)/4), True)
assert MT(besselj(a, sqrt(x))**2, x, s) == \
(gamma(a + s)*gamma(S(1)/2 - s)
/ (sqrt(pi)*gamma(1 - s)*gamma(1 + a - s)),
(-re(a), S(1)/2), True)
assert MT(besselj(a, sqrt(x))*besselj(-a, sqrt(x)), x, s) == \
(gamma(s)*gamma(S(1)/2 - s)
/ (sqrt(pi)*gamma(1 - a - s)*gamma(1 + a - s)),
(0, S(1)/2), True)
# NOTE: prudnikov gives the strip below as (1/2 - re(a), 1). As far as
# I can see this is wrong (since besselj(z) ~ 1/sqrt(z) for z large)
assert MT(besselj(a - 1, sqrt(x))*besselj(a, sqrt(x)), x, s) == \
(gamma(1 - s)*gamma(a + s - S(1)/2)
/ (sqrt(pi)*gamma(S(3)/2 - s)*gamma(a - s + S(1)/2)),
(S(1)/2 - re(a), S(1)/2), True)
assert MT(besselj(a, sqrt(x))*besselj(b, sqrt(x)), x,
|
e-koch/VLA_Lband
|
16B/pipeline4.7.1_custom/EVLA_pipe_fluxgains.py
|
Python
|
mit
| 6,065
| 0.019621
|
######################################################################
#
# Copyright (C) 2013
# Associated Universities, Inc. Washington DC, USA,
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Library General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Library General Public
# License for more details.
#
# You should have received a copy of the GNU Library General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 675 Massachusetts Ave, Cambridge, MA 02139, USA.
#
# Correspondence concerning VLA Pipelines should be addressed as follows:
# Please register and submit helpdesk tickets via: https://help.nrao.edu
# Postal address:
# National Radio Astronomy Observatory
# VLA Pipeline Support Office
# PO Box O
# Socorro, NM, USA
#
######################################################################
# MAKE GAIN TABLE FOR FLUX DENSITY BOOTSTRAPPING
# Make a gain table that includes gain and opacity corrections for final
# amp cal, for flux density bootstrapping
logprint ("Starting EVLA_pipe_fluxgains.py", logfileout='logs/fluxgains.log')
time_list=runtiming('fluxgains', 'start')
QA2_fluxgains='Pass'
#logprint ("Making fresh calibrators.ms", logfileout='logs/fluxgains.log')
#
#syscommand='rm -rf calibrators.ms'
#os.system(syscommand)
#
#default('split')
#vis=ms_active
#outputvis='calibrators.ms'
#datacolumn='corrected'
#field=''
#spw=''
#width=int(max(channels))
#antenna=''
#timebin='0s'
#timerange=''
#scan=calibrator_scan_select_string
#intent=''
#array=''
#uvrange=''
#correlation=''
#observation=''
#keepflags=False
#split()
logprint ("Setting models for standard primary calibrators", logfileout='logs/fluxgains.log')
tb.open('ca
|
librators.ms')
positions = []
for ii in range(0,len(field_positions[0][0])):
positions.append([field_positions[0][0][ii], field_position
|
s[1][0][ii]])
standard_source_names = [ '3C48', '3C138', '3C147', '3C286' ]
standard_source_fields = find_standards(positions)
ii=0
for fields in standard_source_fields:
for myfield in fields:
spws = field_spws[myfield]
for myspw in spws:
reference_frequency = center_frequencies[myspw]
EVLA_band = find_EVLA_band(reference_frequency)
logprint ("Center freq for spw "+str(myspw)+" = "+str(reference_frequency)+", observing band = "+EVLA_band, logfileout='logs/fluxgains.log')
model_image = standard_source_names[ii]+'_'+EVLA_band+'.im'
logprint ("Setting model for field "+str(myfield)+" spw "+str(myspw)+" using "+model_image, logfileout='logs/fluxgains.log')
try:
default('setjy')
vis='calibrators.ms'
field=str(myfield)
spw=str(myspw)
selectdata=False
scalebychan=True
standard='Perley-Butler 2013'
model=model_image
listmodels=False
usescratch=scratch
setjy()
except:
logprint('no data found for field ' + str(myfield)+" spw "+str(myspw), logfileout='logs/fluxgains.log')
ii=ii+1
tb.close()
logprint ("Making gain tables for flux density bootstrapping", logfileout='logs/fluxgains.log')
logprint ("Short solint = "+new_gain_solint1, logfileout='logs/fluxgains.log')
logprint ("Long solint = "+gain_solint2, logfileout='logs/fluxgains.log')
print ""
print "Finding a reference antenna"
print ""
refantspw=''
refantfield=calibrator_field_select_string
findrefant=RefAntHeuristics(vis='calibrators.ms',field=refantfield,geometry=True,flagging=True)
RefAntOutput=findrefant.calculate()
refAnt=str(RefAntOutput[0])+','+str(RefAntOutput[1])+','+str(RefAntOutput[2])+','+str(RefAntOutput[3])
logprint ("The pipeline will use antenna(s) "+refAnt+" as the reference", logfileout='logs/fluxgains.log')
# Derive amp gain table. Note that gaincurves and opacity
# corrections have already been applied during applycal and split in
# semiFinalBPdcals/solint.py.
# Need to add check for 3C84 in here, when heuristics have been sorted out
default('gaincal')
vis='calibrators.ms'
caltable='fluxphaseshortgaincal.g'
field=''
spw=''
intent=''
selectdata=False
solint=new_gain_solint1
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=3.0
solnorm=False
gaintype='G'
smodel=[]
calmode='p'
append=False
docallib=False
#gaintable=filter(None, [priorcals,'delay.k','BPcal.b'])
gaintable=['']
gainfield=['']
interp=['']
spwmap=[]
parang=False
gaincal()
default('gaincal')
vis='calibrators.ms'
caltable='fluxgaincal.g'
field=''
spw=''
intent=''
selectdata=False
solint=gain_solint2
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=5.0
solnorm=False
gaintype='G'
smodel=[]
calmode='ap'
append=False
docallib=False
#gaintable=filter(None, [priorcals,'delay.k','BPcal.b','fluxphaseshortgaincal.g'])
gaintable=['fluxphaseshortgaincal.g']
gainfield=['']
interp=['']
spwmap=[]
parang=False
gaincal()
logprint ("Gain table fluxgaincal.g is ready for flagging", logfileout='logs/fluxgains.log')
# Calculate fractions of flagged solutions for final QA2; note, can
# tolerate higher fraction of flagged solutions for this step than in
# other gain tables
flaggedGainSolns=getCalFlaggedSoln('fluxgaincal.g')
if (flaggedGainSolns['all']['total'] == 0):
QA2_fluxgains='Fail'
elif (flaggedGainSolns['antmedian']['fraction'] > 0.2):
QA2_fluxgains='Partial'
logprint ("QA2 score: "+QA2_fluxgains, logfileout='logs/fluxgains.log')
logprint ("Finished EVLA_pipe_fluxgains.py", logfileout='logs/fluxgains.log')
time_list=runtiming('fluxgains', 'end')
pipeline_save()
|
hardworkingcoder/dw_experiments
|
migrations/versions/059e2a9bfb4c_.py
|
Python
|
mit
| 1,851
| 0.010805
|
"""empty message
Revision ID: 059e2a9bfb4c
Revises:
Create Date: 2017-07-10 21:50:44.380938
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '059e2a9bfb4c'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('users',
sa.Column('user_uuid', postgresql.UUID(as_uuid=True), server_default=sa.text(u'uuid_generate_v4()'), nullable=False),
sa.Column('permissions_group', sa.String(), nullable=True),
sa.Column('ddw_access_token', sa.String(), nullable=True),
sa.Column('ddw_token_expires_in', sa.Integer(), nullable=True),
sa.Column('ddw_avatar_url', sa.String(), nullable=True),
sa.Column('ddw_display_name', sa.String(), nullable=True),
sa.Column('ddw_user_id', sa.String(), nullable=True),
sa.Column('ddw_user_created', sa.Date(), nullable=True),
sa.Column('ddw_user_updated', sa.Date(), nullable=True),
sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.PrimaryKeyConstraint('user_uuid')
)
op.create_table('sessions',
sa.Column('session_uuid', postgresql.UUID(as_uuid=True), server_default=sa.text(u'uuid_generate_v4()'), nullable=False),
sa.Column('created', sa.DateTime(), nullable=F
|
alse),
sa.Column('user_uuid', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('is_active', sa
|
.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['user_uuid'], ['users.user_uuid'], ),
sa.PrimaryKeyConstraint('session_uuid')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('sessions')
op.drop_table('users')
# ### end Alembic commands ###
|
fhfuih/MCEdit-Unified
|
pymclevel/setup_leveldb.py
|
Python
|
isc
| 11,800
| 0.003136
|
#!/usr/bin/python2.7
#
# setup_leveldb.py
#
# Compiles and install Minecraft Pocket Edtition binary support.
#
__author__ = "D.C.-G. 2017"
__version__ = "0.3.0"
import sys
import os
import platform
import fnmatch
if sys.platform != "linux2":
print "This script can't run on other platforms than Linux ones..."
sys.exit(1)
bin_deps = ('gcc', 'g++', 'unzip', 'wget|curl')
wget_curl = None
wget_cmd = "wget -q --no-check-certificate -O"
curl_cmd = "curl -LskS -o"
mojang_sources_url = "https://codeload.github.com/Mojang/leveldb-mcpe/zip/"
mojang_commit = "a056ea7c18dfd7b806d6d693726ce79d75543904"
jocopa3_sources_url = "https://codeload.github.com/jocopa3/leveldb-mcpe/zip/"
jocopa3_commit = "56bdd1f38dde7074426d85eab01a5c1c0b5b1cfe"
zlib_sources_url = "https://codeload.github.com/madler/zlib/zip/"
zlib_commit = "4a090adef8c773087ec8916ad3c2236ef560df27"
zlib_ideal_version = "1.2.10"
zlib_minimal_version = "1.2.8"
def check_bins(bins):
print 'Searching for the needed binaries %s...' % repr(bins).replace("'", '')
missing_bin = False
for name in bins:
names = []
if '|' in name:
names = name.split('|')
if names:
found = False
for n in names:
if not os.system('which %s > /dev/null' % n):
found = True
break
else:
print "Could not find %s." % n
if found:
g_keys = globals().keys()
g_name = name.replace('|', '_')
print "g_name", g_name, g_name in g_keys
if g_name in g_keys:
globals()[g_name] = globals()['%s_cmd' % n]
else:
print '*** WARNING: None of these binaries were found on your system: %s.'%', '.join(names)
else:
if os.system('which %s > /dev/null' % name):
print '*** WARNING: %s not found.' % name
missing_bin = True
if missing_bin:
a = raw_input('The binary dependencies are not satisfied. The build may fail.\nContinue [y/N]?')
if a and a in 'yY':
pass
else:
sys.exit()
else:
print 'All the needed binaries were found.'
# Picked from another project to find the lib and adapted to the need
import re
ARCH = {'32bit': '32', '64bit': '64'}[platform.architecture()[0]]
default_paths = ['/lib', '/lib32', '/lib64', '/usr/lib', '/usr/lib32','/usr/lib64',
'/usr/local/lib', os.path.expanduser('~/.local/lib'), '.']
# Gather the libraries paths.
def get_lib_paths(file_name):
paths = []
if os.path.isfile(file_name):
lines = [a.strip() for a in open(file_name).readlines()]
for i, line in enumerate(lines):
if not line.startswith('#') and line.strip():
if line.startswith('include'):
line = line.split(' ', 1)[1]
if '*' in line:
pat = r"%s" % line.split(os.path.sep)[-1].replace('.', '\.').replace('*', '.*')
d = os.path.split(line)[0]
if os.path.isdir(d):
for n in os.listdir(d):
r = re.findall(pat, n)
if r:
paths += [a for a in get_lib_paths(os.path.join(d, n)) if a not in paths]
else:
paths += [a for a in get_lib_paths(line) if not a in paths]
elif not line in paths and os.path.isdir(line):
paths.append(line)
return paths
def find_lib(lib_name, input_file='/etc/ld.so.conf'):
paths = default_paths + get_lib_paths(input_file)
arch_paths = []
other_paths = []
while paths:
path = paths.pop(0)
if ARCH in path:
arch_paths.insert(0, path)
elif path.endswith('/lib'):
arch_paths.append(path)
else:
other_paths.append(path)
paths = arch_paths + other_paths
found = None
r = None
ver = None
name = lib_name
hash_list = name.split('.')
hash_list.reverse()
idx = hash_list.index('so')
i = 0
while i <= idx and not found:
for path in paths:
print "Scanning %s for %s" % (path, name)
if os.path.exists(path):
for path, dirnames, filenames in os.walk(path):
if name in filenames:
found = os.path.join(path, name)
break
if found:
break
i += 1
name = name.rsplit('.', 1)[0]
cur_dir = os.getcwd()
os.chdir(path)
if found:
base_path = os.path.split(found)[0]
while os.path.islink(found):
found = os.readlink(found)
if not found.startswith("/"):
found = os.path.abspath(os.path.join(base_path, found))
# Verify the architecture of the library
inp, outp = os.popen2('file %s | grep "ELF %s"' % (found, ARCH))
r = bool(outp.read())
inp.close()
outp.close()
# If th
|
e architecture could not be checked with library internal data, rely on the folder name.
if os.path.split(found)[0] in arch_paths:
r = True
v = found.rsplit('.so.', 1)
if len(v) == 2:
ver = v[1]
os.chdir(cur_dir)
return found, r
|
, ver
def get_sources(name, url):
print "Downloading sources for %s" % name
print "URL: %s" % url
os.system("%s %s.zip %s" % (wget_curl, name, url))
print "Unpacking %s" % name
os.system("unzip -q %s.zip" % name)
os.system("mv $(ls -d1 */ | egrep '{n}-') {n}".format(n=name))
print "Cleaning archive."
os.remove("%s.zip" % name)
def build_zlib():
print "Building zlib..."
return os.WEXITSTATUS(os.system("./configure; make"))
def build_leveldb(zlib):
print "Building leveldb..."
# Looks like the '-lz' option has to be changed...
if zlib:
data = open('Makefile').read()
data = data.replace("LIBS += $(PLATFORM_LIBS) -lz", "LIBS += $(PLATFORM_LIBS) %s" % zlib)
open("Makefile", "w").write(data)
cpath = os.environ.get("CPATH")
if cpath:
os.environ["CPATH"] = "./zlib:$CPATH"
else:
os.environ["CPATH"] = "./zlib"
return os.WEXITSTATUS(os.system("make"))
def main():
print "=" * 72
print "Building Linux Minecraft Pocket Edition for MCEdit..."
print "-----------------------------------------------------"
global leveldb_commit
global zlib_commit
global zlib_sources_url
force_zlib = False
leveldb_source_url = mojang_sources_url
leveldb_commit = mojang_commit
cur_dir = os.getcwd()
if "--force-zlib" in sys.argv:
force_zlib = True
sys.argv.remove("--force-zlib")
if "--alt-leveldb" in sys.argv:
leveldb_source_url = jocopa3_sources_url
leveldb_commit = jocopa3_commit
for arg, var in (("--leveldb-commit", 'leveldb_commit'), ("--zlib-commit", 'zlib_commit')):
if arg in sys.argv:
globals()[var] = sys.argv[sys.argv.index(arg) + 1]
leveldb_source_url += leveldb_commit
zlib_sources_url += zlib_commit
check_bins(bin_deps)
# Get the sources here.
get_sources("leveldb", leveldb_source_url)
os.chdir("leveldb")
# os.rmdir("zlib")
get_sources("zlib", zlib_sources_url)
os.chdir(cur_dir)
zlib = (None, None, None)
# Check zlib
if not force_zlib:
print "Checking zlib."
zlib = find_lib("libz.so.%s" % zlib_ideal_version)
print zlib
if zlib == (None, None, None):
zlib = None
print "*** WARNING: zlib not found!"
print " It is recommended you install zlib v%s on your system or" % zlib_ideal_version
print " let this script install it only for leveldb."
print " Enter 'b' to build zlib v1.2.10 only for leveldb."
print " Enter 'a' to quit now and install zlib on your yourself."
print "
|
googleapis/python-grafeas
|
grafeas/grafeas_v1/services/grafeas/transports/__init__.py
|
Python
|
apache-2.0
| 1,131
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.
|
org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import GrafeasTransport
from .grpc im
|
port GrafeasGrpcTransport
from .grpc_asyncio import GrafeasGrpcAsyncIOTransport
# Compile a registry of transports.
_transport_registry = OrderedDict() # type: Dict[str, Type[GrafeasTransport]]
_transport_registry["grpc"] = GrafeasGrpcTransport
_transport_registry["grpc_asyncio"] = GrafeasGrpcAsyncIOTransport
__all__ = (
"GrafeasTransport",
"GrafeasGrpcTransport",
"GrafeasGrpcAsyncIOTransport",
)
|
django-bmf/django-bmf
|
djangobmf/contrib/task/permissions.py
|
Python
|
bsd-3-clause
| 2,444
| 0.003682
|
#!/usr/bin/python
# ex:set fileencoding=utf-8:
from __future__ import unicode_literals
from django.db.models import Q
# from djangobmf.permissions import ModulePermission
from djangobmf.utils import FilterQueryset
class GoalFilter(FilterQueryset):
def filter_queryset(self, qs, user):
if user.has_perm('%s.can_manage' % qs.model._meta.app_label, qs.model):
return qs
qs_filter = Q(referee=user.djangobmf.employee or -1)
qs_filter |= Q(employees=user.djangobmf.employee or -1)
qs_filter |= Q(team__in=user.djangobmf.team)
if hasattr(qs.model, "project"): # pragma: no branch
project = qs.model._meta.get_field_by_name("project")[0].model
if user.has_perm('%s.can_manage' % project._meta.app_label, project):
qs_filter |= Q(project__isnull=False)
else:
qs_filter |= Q(project__isnull=False, project__employees=user.djangobmf.employee or -1)
                qs_filter |= Q(project__isnull=False, project__team__in=user.djangobmf.team)
return qs.filter(qs_filter)
class TaskFilter(FilterQueryset):
def filter_queryset(self, qs, user):
qs_filter = Q(project__isnull=True, goal__isnull=True)
qs_filter |= Q(employee=user.djangobmf.employee or -1)
qs_filter |= Q(in_charge=user.djangobmf.employee)
if hasattr(qs.model, "goal"): # pragma: no branch
goal = qs.model._meta.get_field_by_name("goal")[0].model
if user.has_perm('%s.can_manage' % goal._meta.app_label, goal):
qs_filter |= Q(goal__isnull=False)
else:
qs_filter |= Q(goal__isnull=False, goal__referee=user.djangobmf.employee or -1)
qs_filter |= Q(goal__isnull=False, goal__employees=user.djangobmf.employee or -1)
qs_filter |= Q(goal__isnull=False, goal__team__in=user.djangobmf.team)
if hasattr(qs.model, "project"): # pragma: no branch
project = qs.model._meta.get_field_by_name("project")[0].model
if user.has_perm('%s.can_manage' % project._meta.app_label, project):
qs_filter |= Q(project__isnull=False)
else:
qs_filter |= Q(project__isnull=False, project__employees=user.djangobmf.employee or -1)
qs_filter |= Q(project__isnull=False, project__team__in=user.djangobmf.team)
return qs.filter(qs_filter)
|
chrys87/orca-beep
|
src/orca/scripts/apps/soffice/script_utilities.py
|
Python
|
lgpl-2.1
| 22,259
| 0.001483
|
# Orca
#
# Copyright 2010 Joanmarie Diggs.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., Franklin Street, Fifth Floor,
# Boston MA 02110-1301 USA.
"""Commonly-required utility methods needed by -- and potentially
customized by -- application and toolkit scripts. They have
been pulled out from the scripts because certain scripts had
gotten way too large as a result of including these methods."""
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2010 Joanmarie Diggs."
__license__ = "LGPL"
import pyatspi
import orca.debug as debug
import orca.orca_state as orca_state
import orca.script_utilities as script_utilities
#############################################################################
# #
# Utilities #
# #
#############################################################################
class Utilities(script_utilities.Utilities):
def __init__(self, script):
"""Creates an instance of the Utilities class.
Arguments:
- script: the script with which this instance is associated.
"""
script_utilities.Utilities.__init__(self, script)
#########################################################################
# #
# Utilities for finding, identifying, and comparing accessibles #
# #
#########################################################################
def displayedText(self, obj):
"""Returns the text being displayed for an object. Overridden here
because OpenOffice uses symbols (e.g. ">>" for buttons but exposes
more useful information via the accessible's name.
Arguments:
- obj: the object
Returns the text being displayed for an object or None if there isn't
any text being shown.
"""
try:
role = obj.getRole()
except:
return ""
if role == pyatspi.ROLE_PUSH_BUTTON and obj.name:
return obj.name
if role == pyatspi.ROLE_TABLE_CELL:
strings = list(map(self.displayedText, [child for child in obj]))
text = "\n".join(strings)
if text.strip():
return text
try:
text = super().displayedText(obj)
except:
return ""
# TODO - JD: This is needed because the default behavior is to fall
# back on the name, which is bogus. Once that has been fixed, this
# hack can go.
if role == pyatspi.ROLE_TABLE_CELL and text == obj.name \
and (self.isSpreadSheetCell(obj) or self.isTextDocumentCell(obj)):
return ""
return text
def isTextArea(self, obj):
return obj and obj.getRole() == pyatspi.ROLE_TEXT
def isCellBeingEdited(self, obj):
if not obj:
return False
parent = obj.parent
if parent and parent.getRoleName() == 'text frame':
if self.spreadSheetCellName(parent):
return True
return False
    def spreadSheetCellName(self, cell):
        """Returns the spreadsheet-style cell name (e.g. "B7") parsed out of
        the accessible's name, or the empty string if none is found."""
        nameList = cell.name.split()
for name in nameList:
name = name.replace('.', '')
if not name.isalpha() and name.isalnum():
return name
return ''
def getRowColumnAndTable(self, cell):
"""Returns the (row, column, table) tuple for cell."""
if not (cell and cell.getRole() == pyatspi.ROLE_TABLE_CELL):
return -1, -1, None
cellParent = cell.parent
if cellParent and cellParent.getRole() == pyatspi.ROLE_TABLE_CELL:
cell = cellParent
cellParent = cell.parent
table = cellParent
if table and table.getRole() != pyatspi.ROLE_TABLE:
table = table.parent
try:
iTable = table.queryTable()
except:
return -1, -1, None
index = self.cellIndex(cell)
row = iTable.getRowAtIndex(index)
column = iTable.getColumnAtIndex(index)
return row, column, table
def getShowingCellsInRow(self, obj):
row, column, parentTable = self.getRowColumnAndTable(obj)
try:
table = parentTable.queryTable()
except:
return []
startIndex, endIndex = self.getTableRowRange(obj)
cells = []
for i in range(startIndex, endIndex):
cell = table.getAccessibleAt(row, i)
try:
                showing = cell.getState().contains(pyatspi.STATE_SHOWING)
except:
continue
if showing:
cells.append(cell)
return cells
def getTableRowRange(self, obj):
"""If this is spread sheet cell, return the start and end indices
of the spread sheet cells for the table that obj is in. Otherwise
return the complete range (0, parentTable.nColumns).
Arguments:
- obj: a table cell.
Returns the start and end table cell indices.
"""
parent = obj.parent
try:
parentTable = parent.queryTable()
except:
return [-1, -1]
startIndex = 0
endIndex = parentTable.nColumns
if self.isSpreadSheetCell(obj):
extents = parent.queryComponent().getExtents(pyatspi.DESKTOP_COORDS)
y = extents.y
leftX = extents.x + 1
leftCell = \
parent.queryComponent().getAccessibleAtPoint(leftX, y, 0)
if leftCell:
table = leftCell.parent.queryTable()
index = self.cellIndex(leftCell)
startIndex = table.getColumnAtIndex(index)
rightX = extents.x + extents.width - 1
rightCell = \
parent.queryComponent().getAccessibleAtPoint(rightX, y, 0)
if rightCell:
table = rightCell.parent.queryTable()
index = self.cellIndex(rightCell)
endIndex = table.getColumnAtIndex(index) + 1
return [startIndex, endIndex]
def rowHeadersForCell(self, obj):
rowHeader, colHeader = self.getDynamicHeadersForCell(obj)
if rowHeader:
return [rowHeader]
return super().rowHeadersForCell(obj)
def columnHeadersForCell(self, obj):
rowHeader, colHeader = self.getDynamicHeadersForCell(obj)
if colHeader:
return [colHeader]
return super().columnHeadersForCell(obj)
def getDynamicHeadersForCell(self, obj, onlyIfNew=False):
if not (self._script.dynamicRowHeaders or self._script.dynamicColumnHeaders):
return None, None
objRow, objCol, table = self.getRowColumnAndTable(obj)
if not table:
return None, None
headersRow = self._script.dynamicColumnHeaders.get(hash(table))
headersCol = self._script.dynamicRowHeaders.get(hash(table))
if headersRow == objRow or headersCol == objCol:
return None, None
getRowHeader = headersCol != None
getColHeader = headersRow != None
if onlyIfNew:
getRowHeader = \
getRowHeader and objRow != self._script.pointOfReferenc
|
geosoco/reddit_coding
|
api/urls.py
|
Python
|
bsd-3-clause
| 1,309
| 0
|
from django.conf.urls import url, include
from rest_framework import routers
from api import views
router = routers.DefaultRouter(trailing_slash=True)
router.register(
r'sysusers', views.DjangoUserViewSet, base_name="sysusers")
router.register(
    r'sysgroups', views.DjangoGroupViewSet, base_name="sysgroups")
router.register(
r'comment', views.CommentViewSet, base_name="comment")
router.register(
r'submission', views.SubmissionViewSet, base_name="submission")
router.register(
r'codescheme', views.CodeSchemeViewSet, base_name="codescheme")
router.register(
r'code', views.CodeViewSet, base_name="code")
router.register(
r'commentcodeinstance',
views.CommentCodeInstanceViewSet,
base_name="commentcodeinstance")
router.register(
r'assignment', views.AssignmentViewSet, base_name="assignment")
router.register(
r'commentthread', views.CommentThreadViewSet, base_name="commentthread")
router.register(
r'codedcommentthread',
views.CodedCommentThreadViewSet,
base_name="codedcommentthread")
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
url(r'', include(router.urls, namespace='api')),
url(r'', include(
'rest_framework.urls', namespace='rest_framework')),
]
|
bricaud/OCR-classif
|
pdf2txt.py
|
Python
|
apache-2.0
| 3,948
| 0.029889
|
#!/usr/bin/env python3
""" Program that convert a pdf to a text file using Tesseract OCR.
The pdf file is first converted to a png file using ghostscript,
then the png file if processed by Tesseract.
"""
import os
import subprocess
import glob
import platform
import argparse
parser = argparse.ArgumentParser(description='Convert pdf files to txt files in the given folder.')
parser.add_argument('folder',
help='folder where the pdf files are stored')
args = parser.parse_args()
input_dic = vars(args)
print('Selected pdf folder: ',input_dic['folder'])
PDF_PATH = input_dic['folder']
#PDF_PATH = '/media/benjamin/Elements/pdfs/'
def png_to_txt(pngpath,short_name,txtpath,log_file):
""" Extract the text from a set of png files.
The png files associated to a single pdf file are numbered according to the page,
they share the same short_name.
"""
png_in = os.path.join(pngpath,short_name)
# Iterate over the pages of the document (different png files)
for pngfile in glob.glob(png_in+'*'):
path,filename = os.path.split(pngfile)
txtfile = filename[0:-4] #+'.txt'
txt_out = os.path.join(txtpath,txtfile)
try:
cmd_png2txt = 'tesseract '+ pngfile +' '+txt_out+ ' -l fra+eng'
proc_results = subprocess.run(cmd_png2txt.split(), stdout=subprocess.PIPE,timeout=60)
if proc_results.returncode:
print('Error encountered with file: {}\n'.format(filename))
with open(log_file, 'a') as logfile:
logfile.write('Error with file: {}\n'.format(filename)) # report errors
else:
print('Text extracted form file: {}'.format(filename))
except:
print('error extracting text with file {}'.format(filename))
with open(log_file, 'a') as logfile:
logfile.write('Error with file (exception raised): {}\n'.format(filename)) # report errors
def pdf_to_png(pdf_file,short_name,png_path,page_limit=4):
""" Convert the pdf to png, each page of the pdf gives a different png file."""
out_name = short_name+'.%d.png'
out_file = os.path.join(png_path,out_name)
if platform.system() == 'Windows':
        cmd_pdf2png = ('gswin32c -dSAFER -dNOPAUSE -q -r300x300 -sDEVICE=pnggray -dBATCH -dLastPage=' + str(page_limit) +
' -sOutputFile=' + out_file + ' ' + pdf_file)
else:
cmd_pdf2png = ('gs -dSAFER -dNOPAUSE -q -r300x300 -sDEVICE=pnggray -dBATCH -dLastPage=' + str(page_limit) +
' -sOutputFile=' + out_file + ' ' + pdf_file)
proc_results = subprocess.run(cmd_pdf2png.split(), stdout=subprocess.PIPE,timeout=60)
return proc_results
#PDF_PATH = '/media/benjamin/Elements/pdfs/'
LOG_FILE1 = 'logfile_pdf2png.txt'
LOG_FILE2 = 'logfile_png2txt.txt'
# initiate log file to report errors
with open(LOG_FILE1, 'a') as logfile:
logfile.write('Logfile produced by pdf2txt.py\n')
with open(LOG_FILE2, 'a') as logfile:
logfile.write('Logfile produced by pdf2txt.py\n')
# init paths
png_path = os.path.join(PDF_PATH,'png')
txt_path = os.path.join(PDF_PATH,'txt')
if not os.path.exists(png_path):
os.makedirs(png_path)
if not os.path.exists(txt_path):
os.makedirs(txt_path)
# Loop over all the file in the pdf folder
nb_files = len(list(glob.glob(os.path.join(PDF_PATH,'*.pdf'))))
for idx,pdf_file in enumerate(glob.glob(os.path.join(PDF_PATH,'*.pdf'))):
pdf_path,filename = os.path.split(pdf_file)
print('processing {}. File {}/{}.'.format(filename,idx+1,nb_files))
short_name = filename[0:-4]
try:
proc_results = pdf_to_png(pdf_file,short_name,png_path,page_limit=4)
if proc_results.returncode:
print('Error encountered with file: {}\n'.format(filename))
with open(LOG_FILE1, 'a') as logfile:
logfile.write('Error with file: {}\n'.format(filename)) # report errors
else:
png_to_txt(png_path,short_name,txt_path,LOG_FILE2)
except subprocess.TimeoutExpired:
print('!!!!!! Timed out for file {} !!!!!!'.format(filename))
with open(LOG_FILE1, 'a') as logfile:
logfile.write('Timed out with file: {}\n'.format(filename)) # report time out
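# Illustrative usage sketch (assumes ghostscript 'gs'/'gswin32c' and tesseract
# are installed and on the PATH; the folder path below is hypothetical):
#
#     python3 pdf2txt.py /data/scanned_pdfs
#
# Intermediate PNGs are written to <folder>/png, OCR text to <folder>/txt, and
# failures are appended to logfile_pdf2png.txt / logfile_png2txt.txt.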
|
sundream/shell
|
backup_db.py
|
Python
|
gpl-2.0
| 896
| 0.014509
|
#coding=utf-8
import sys
import os
import time
from shutil import *
def backup_db(dirname):
assert os.path.isdir(dirname),"not dirname"
for root,dirs,filenames in os.walk(dirname):
#print root,dirs,filenames
for filename in filenames:
filename = os.path.join(root,filename)
if filename.endswith(".rdb") or filename.endswith("aof"):
now=time.strftime("%Y%m%d%H%M%S",time.localtime())
backup_filename = "%s_%s.bak" % (filename,now)
tmp_filename = backup_filename + ".tmp"
copy2(filename,tmp_filename) # preserve attr
copy2(tmp_filename,backup_filename)
os.remove(tmp_filename)
break
if __name__ == "__main__":
if len(sys.argv) != 2:
print "backup_db arguments != 2"
exit(0)
dirname = sys.argv[1]
backup_db(dirname)
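# Usage sketch (path is hypothetical): back up Redis dump (.rdb) and AOF files
# found directly under a data directory, e.g.
#     python backup_db.py /var/lib/redis
# Each matching file is copied to <name>_<timestamp>.bak; only the top-level
# directory is scanned because of the `break` inside the os.walk() loop.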
|
pymedusa/Medusa
|
ext/deluge_client/client.py
|
Python
|
gpl-3.0
| 12,558
| 0.00223
|
import logging
import socket
import ssl
import struct
import warnings
import zlib
import io
import os
import platform
from functools import wraps
from threading import local as thread_local
from .rencode import dumps, loads
DEFAULT_LINUX_CONFIG_DIR_PATH = '~/.config/deluge'
RPC_RESPONSE = 1
RPC_ERROR = 2
RPC_EVENT = 3
MESSAGE_HEADER_SIZE = 5
READ_SIZE = 10
logger = logging.getLogger(__name__)
class DelugeClientException(Exception):
"""Base exception for all deluge client exceptions"""
class ConnectionLostException(DelugeClientException):
pass
class CallTimeoutException(DelugeClientException):
pass
class InvalidHeaderException(DelugeClientException):
pass
class FailedToReconnectException(DelugeClientException):
pass
class RemoteException(DelugeClientException):
pass
class DelugeRPCClient(object):
timeout = 20
def __init__(self, host, port, username, password, decode_utf8=False, automatic_reconnect=True):
self.host = host
self.port = port
self.username = username
self.password = password
self.deluge_version = None
# This is only applicable if deluge_version is 2
self.deluge_protocol_version = None
self.decode_utf8 = decode_utf8
if not self.decode_utf8:
warnings.warn('Using `decode_utf8=False` is deprecated, please set it to True.'
'The argument will be removed in a future release where it will be always True', DeprecationWarning)
self.automatic_reconnect = automatic_reconnect
self.request_id = 1
self.connected = False
self._create_socket()
def _create_socket(self, ssl_version=None):
if ssl_version is not None:
self._socket = ssl.wrap_socket(socket.socket(socket.AF_INET, socket.SOCK_STREAM), ssl_version=ssl_version)
else:
self._socket = ssl.wrap_socket(socket.socket(socket.AF_INET, socket.SOCK_STREAM))
self._socket.settimeout(self.timeout)
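    # Note: ssl.wrap_socket() (used above) is deprecated since Python 3.7 and
    # was removed in 3.12; newer code would create an ssl.SSLContext and call
    # its wrap_socket() method instead.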
def connect(self):
"""
Connects to the Deluge instance.
"""
self._connect()
logger.debug('Connected to Deluge, detecting daemon version')
self._detect_deluge_version()
logger.debug('Daemon version {} detected, logging in'.format(self.deluge_version))
if self.deluge_version == 2:
result = self.call('daemon.login', self.username, self.password, client_version='deluge-client')
else:
result = self.call('daemon.login', self.username, self.password)
logger.debug('Logged in with value %r' % result)
self.connected = True
def _connect(self):
logger.info('Connecting to %s:%s' % (self.host, self.port))
try:
self._socket.connect((self.host, self.port))
except ssl.SSLError as e:
# Note: have not verified that we actually get errno 258 for this error
if (hasattr(ssl, 'PROTOCOL_SSLv3') and
(getattr(e, 'reason', None) == 'UNSUPPORTED_PROTOCOL' or e.errno == 258)):
logger.warning('Was unable to ssl handshake, trying to force SSLv3 (insecure)')
self._create_socket(ssl_version=ssl.PROTOCOL_SSLv3)
self._socket.connect((self.host, self.port))
else:
raise
def disconnect(self):
"""
Disconnect from deluge
"""
if self.connected:
self._socket.close()
self._socket = None
self.connected = False
def _detect_deluge_version(self):
if self.deluge_version is not None:
return
self._send_call(1, None, 'daemon.info')
self._send_call(2, None, 'daemon.info')
self._send_call(2, 1, 'daemon.info')
result = self._socket.recv(1)
if result[:1] == b'D':
# This is a protocol deluge 2.0 was using before release
self.deluge_version = 2
self.deluge_protocol_version = None
# If we need the specific version of deluge 2, this is it.
daemon_version = self._receive_response(2, None, partial_data=result)
elif ord(result[:1]) == 1:
self.deluge_version = 2
self.deluge_protocol_version = 1
# If we need the specific version of deluge 2, this is it.
daemon_version = self._receive_response(2, 1, partial_data=result)
else:
self.deluge_version = 1
# Deluge 1 doesn't recover well from the bad request. Re-connect the socket.
self._socket.close()
self._create_socket()
self._connect()
def _send_call(self, deluge_version, protocol_version, method, *args, **kwargs):
self.request_id += 1
if method == 'daemon.login':
debug_args = list(args)
if len(debug_args) >= 2:
debug_args[1] = '<password hidden>'
logger.debug('Calling reqid %s method %r with args:%r kwargs:%r' % (self.request_id, method, debug_args, kwargs))
else:
logger.debug('Calling reqid %s method %r with args:%r kwargs:%r' % (self.request_id, method, args, kwargs))
req = ((self.request_id, method, args, kwargs), )
req = zlib.compress(dumps(req))
if deluge_version == 2:
if protocol_version is None:
# This was a protocol for deluge 2 before they introduced protocol version numbers
self._socket.send(b'D' + struct.pack("!i", len(req)))
elif protocol_version == 1:
self._socket.send(struct.pack('!BI', protocol_version, len(req)))
else:
raise Exception('Deluge protocol version {} is not (yet) supported.'.format(protocol_version))
self._socket.send(req)
def _receive_response(self, deluge_version, protocol_version, partial_data=b''):
expected_bytes = None
data = partial_data
while True:
try:
d = self._socket.recv(READ_SIZE)
except ssl.SSLError:
raise CallTimeoutException()
data += d
if deluge_version == 2:
if expected_bytes is None:
if len(data) < 5:
continue
header = data[:MESSAGE_HEADER_SIZE]
data = data[MESSAGE_HEADER_SIZE:]
if protocol_version is None:
if header[0] != b'D'[0]:
raise InvalidHeaderException('Expected D as first byte in reply')
elif ord(header[:1]) != protocol_version:
raise InvalidHeaderException(
'Expected protocol version ({}) as first byte in reply'.format(protocol_version)
)
if protocol_version is None:
expected_bytes = struct.unpack('!i', header[1:])[0]
else:
expected_bytes = struct.unpack('!I', header[1:])[0]
if len(data) >= expected_bytes:
data = zlib.decompress(data)
break
else:
try:
                    data = zlib.decompress(data)
except zlib.error:
if not d:
raise ConnectionLostException()
continue
break
data = list(loads(data, decode_utf8=self.decode_utf8))
msg_type = data.pop(0)
        request_id = data.pop(0)
if msg_type == RPC_ERROR:
if self.deluge_version == 2:
exception_type, exception_msg, _, traceback = data
# On deluge 2, exception arguments are sent as tuple
if self.decode_utf8:
exception_msg = ', '.join(exception_msg)
else:
exception_msg = b', '.join(exception_msg)
else:
exception_type, exception_msg, traceback = data[0]
if self.decode_utf8:
exception = type(str(exception_type), (Remo
|
flavoi/diventi
|
diventi/core/migrations/0002_auto_20190430_1520.py
|
Python
|
apache-2.0
| 446
| 0.002242
|
# Generated by Django 2.1.7 on 2019-04-30 13:20
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='publishablemodel',
name='id',
field=models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False),
),
]
|
|
arunkgupta/gramps
|
gramps/gen/utils/alive.py
|
Python
|
gpl-2.0
| 26,674
| 0.005436
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
# Copyright (C) 2009 Gary Burton
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
"""
A utility to make a best guess if a person is alive. This is used to provide
privacy in reports and exports.
"""
#-------------------------------------------------------------------------
#
# Standard python modules
#
#-------------------------------------------------------------------------
import logging
LOG = logging.getLogger(".gen.utils.alive")
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from ..display.name import displayer as name_displayer
from ..lib.date import Date, Today
from ..errors import DatabaseError
from ..ggettext import sgettext as _
#-------------------------------------------------------------------------
#
# Constants from config .ini keys
#
#-------------------------------------------------------------------------
# cache values; use refresh_constants() if they change
try:
from ..config import config
_MAX_AGE_PROB_ALIVE = config.get('behavior.max-age-prob-alive')
_MAX_SIB_AGE_DIFF = config.get('behavior.max-sib-age-diff')
_AVG_GENERATION_GAP = config.get('behavior.avg-generation-gap')
except ImportError:
# Utils used as module not part of GRAMPS
_MAX_AGE_PROB_ALIVE = 110
_MAX_SIB_AGE_DIFF = 20
_AVG_GENERATION_GAP = 20
#-------------------------------------------------------------------------
#
# ProbablyAlive class
#
#-------------------------------------------------------------------------
class ProbablyAlive(object):
"""
An object to hold the parameters for considering someone alive.
"""
def __init__(self,
db,
max_sib_age_diff=None,
max_age_prob_alive=None,
avg_generation_gap=None):
self.db = db
if max_sib_age_diff is None:
max_sib_age_diff = _MAX_SIB_AGE_DIFF
if max_age_prob_alive is None:
max_age_prob_alive = _MAX_AGE_PROB_ALIVE
if avg_generation_gap is None:
avg_generation_gap = _AVG_GENERATION_GAP
self.MAX_SIB_AGE_DIFF = max_sib_age_diff
self.MAX_AGE_PROB_ALIVE = max_age_prob_alive
self.AVG_GENERATION_GAP = avg_generation_gap
def probably_alive_range(self, person, is_spouse=False):
# FIXME: some of these computed dates need to be a span. For
# example, if a person could be born +/- 20 yrs around
# a date then it should be a span, and yr_offset should
# deal with it as well ("between 1920 and 1930" + 10 =
# "between 1930 and 1940")
if person is None:
return (None, None, "", None)
birth_ref = person.get_birth_ref()
death_ref = person.get_death_ref()
death_date = None
birth_date = None
explain = ""
# If the recorded death year is before current year then
# things are simple.
if death_ref and death_ref.get_role().is_primary():
if death_ref:
death = self.db.get_event_from_handle(death_ref.ref)
if death and death.get_date_object().get_start_date() != Date.EMPTY:
death_date = death.get_date_object()
# Look for Cause Of Death, Burial or Cremation events.
# These are fairly good indications that someone's not alive.
if not death_date:
for ev_ref in person.get_primary_event_ref_list():
if ev_ref:
ev = self.db.get_event_from_handle(ev_ref.ref)
if ev and ev.type.is_death_fallback():
death_date = ev.get_date_object()
explain = _("death-related evidence")
# If they were born within X years before current year then
# assume they are alive (we already know they are not dead).
if not birth_date:
if birth_ref and birth_ref.get_role().is_primary():
birth = self.db.get_event_from_handle(birth_ref.ref)
if birth and birth.get_date_object().get_start_date() != Date.EMPTY:
birth_date = birth.get_date_object()
# Look for Baptism, etc events.
# These are fairly good indications that someone's birth.
if not birth_date:
for ev_ref in person.get_primary_event_ref_list():
ev = self.db.get_event_from_handle(ev_ref.ref)
if ev and ev.type.is_birth_fallback():
birth_date = ev.get_date_object()
explain = _("birth-related evidence")
if not birth_date and death_date:
# person died more than MAX after current year
birth_date = death_date.copy_offset_ymd(year=-self.MAX_AGE_PROB_ALIVE)
explain = _("death date")
if not death_date and birth_date:
# person died more than MAX after current year
death_date = birth_date.copy_offset_ymd(year=self.MAX_AGE_PROB_ALIVE)
explain = _("birth date")
if death_date and birth_date:
return (birth_date, death_date, explain, person) # direct self evidence
# Neither birth nor death events are available. Try looking
# at siblings. If a sibling was born more than X years past,
# or more than Z future, then probably this person is
# not alive. If the sibling died more than X years
# past, or more than X years future, then probably not alive.
family_list = person.get_parent_family_handle_list()
for family_handle in family_list:
family = self.db.get_family_from_handle(family_handle)
if family is None:
continue
for child_ref in family.get_child_ref_list():
child_handle = child_ref.ref
child = self.db.get_person_from_handle(child_handle)
if child is None:
continue
# Go through once looking for direct evidence:
for ev_ref in child.get_primary_event_ref_list():
ev = self.db.get_event_from_handle(ev_ref.ref)
if ev and ev.type.is_birth():
dobj = ev.get_date_object()
if dobj.get_start_date() != Date.EMPTY:
# if sibling birth date too far away, then not alive:
year = dobj.get_year()
if year != 0:
# sibling birth date
return (Date().copy_ymd(year - self.MAX_SIB_AGE_DIFF),
Date().copy_ymd(year - self.MAX_SIB_AGE_DIFF + self.MAX_AGE_PROB_ALIVE),
_("sibling birth date"),
child)
elif ev and ev.type.is_death():
dobj = ev.get_date_object()
if dobj.get_start_date() != Date.EMPTY:
# if sibling death date too far away, then not alive:
year = dobj.get_year()
if year != 0:
# sibl
|
ljx0305/ice
|
python/test/Ice/adapterDeactivation/AllTests.py
|
Python
|
gpl-2.0
| 2,229
| 0.007627
|
# **********************************************************************
#
# Copyright (c) 2003-2017 ZeroC, Inc. All rights reserved.
#
# This copy of Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution.
#
# **********************************************************************
import sys, Ice, Test
def test(b):
if not b:
raise RuntimeError('test assertion failed')
def allTests(communicator):
sys.stdout.write("testing stringToProxy... ")
sys.stdout.flush()
    base = communicator.stringToProxy("test:default -p 12010")
test(base)
print("ok")
sys.stdout.write("testing checked cast... ")
sys.stdout.flush()
obj = Test.TestIntfPrx.checkedCast(base)
test(obj)
test(obj == base)
print("ok")
sys.stdout.write("creating/destroying/recreating object adapter... ")
sys.stdout.flush()
adapter = communicator.createObjectAdapterWithEndpoints("TransientTestAdapter", "default")
try:
communicator.createObjectAdapterWithEndpoints("TransientTestAdapter", "default")
test(False)
except Ice.LocalException:
pass
adapter.destroy()
adapter = communicator.createObjectAdapterWithEndpoints("TransientTestAdapter", "default")
adapter.destroy()
print("ok")
sys.stdout.write("creating/activating/deactivating object adapter in one operation... ")
sys.stdout.flush()
obj.transient()
print("ok")
sys.stdout.write("deactivating object adapter in the server... ")
sys.stdout.flush()
obj.deactivate()
print("ok")
sys.stdout.write("testing connection closure... ");
sys.stdout.flush();
for x in range(10):
initData = Ice.InitializationData();
initData.properties = communicator.getProperties().clone();
comm = Ice.initialize(initData);
comm.stringToProxy("test:default -p 12010").ice_pingAsync();
comm.destroy();
print("ok");
sys.stdout.write("testing whether server is gone... ")
sys.stdout.flush()
try:
obj.ice_timeout(100).ice_ping() # Use timeout to speed up testing on Windows
test(False)
except Ice.LocalException:
print("ok")
return obj
|
PMR2/pmr2.oauth
|
pmr2/oauth/tests/test_form.py
|
Python
|
gpl-2.0
| 4,711
| 0
|
import unittest
import zope.component
from zExceptions import Unauthorized
from Products.PloneTestCase import ptc
from Products.PloneTestCase.ptc import default_user
from pmr2.oauth.interfaces import ITokenManager, IConsumerManager
from pmr2.oauth.interfaces import IScopeManager
from pmr2.oauth.token import Token
from pmr2.oauth.consumer import Consumer
from pmr2.oauth.browser import consumer
from pmr2.oauth.browser import token
from pmr2.oauth.browser import user
from pmr2.oauth.tests.base import TestRequest
class FormTestCase(ptc.PloneTestCase):
"""
Testing functionalities of forms that don't fit well into doctests.
"""
def afterSetUp(self):
request = TestRequest()
self.consumerManager = zope.component.getMultiAdapter(
(self.portal, request), IConsumerManager)
self.consumer = Consumer('consumer.example.com', 'consumer-secret')
self.consumerManager.add(self.consumer)
self.tokenManager = zope.component.getMultiAdapter(
(self.portal, request), ITokenManager)
self.scopeManager = zope.component.getMultiAdapter(
(self.portal, request), IScopeManager)
self.reqtoken = self.tokenManager.generateRequestToken(
self.consumer.key, 'oob')
self.scopeManager.requestScope(self.reqtoken.key, None)
def test_0000_authform_render(self):
request = TestRequest(form={
'oauth_token': self.reqtoken.key,
})
form = token.AuthorizeTokenForm(self.portal, request)
form.update()
result = form.render()
self.assertTrue('_authenticator' in result)
def test_0001_authform_post_authfail(self):
request = TestRequest(form={
'oauth_token': self.reqtoken.key,
'form.buttons.approve': 1,
})
# simulate lack of CSRF
request.form['_authenticator'] = None
        form = token.AuthorizeTokenForm(self.portal, request)
self.assertRaises(Unauthorized, form.update)
def test_0002_authform_post_authgood(self):
request = TestRequest(form={
'oauth_token': self.reqtoken.key,
'form.buttons.approve': 1,
})
form = token.AuthorizeTokenForm(self.portal, request)
form.update()
result = form.render()
self.assertTrue(self.reqtoken.verifier in result)
def test_1000_consumermanageform_fail(self):
request = TestRequest(form={
'form.buttons.remove': 1,
})
request.form['_authenticator'] = None
form = consumer.ConsumerManageForm(self.portal, request)
self.assertRaises(Unauthorized, form.update)
def test_2000_usertokenform_fail(self):
# have to add a token to show the button.
atok = self.tokenManager._generateBaseToken(self.consumer.key)
atok.access = True
atok.user = default_user
self.tokenManager.add(atok)
request = TestRequest(form={
'form.buttons.revoke': 1,
})
request.form['_authenticator'] = None
form = user.UserTokenForm(self.portal, request)
self.assertRaises(Unauthorized, form.update)
def test_2100_usertokenform_revoke(self):
# have to add a token to show the button.
atok = self.tokenManager._generateBaseToken(self.consumer.key)
atok.access = True
atok.user = default_user
self.tokenManager.add(atok)
self.login(default_user)
request = TestRequest()
form = user.UserTokenForm(self.portal, request)
result = form()
self.assertTrue(atok.key in result)
self.assertTrue('Revoke' in result)
request = TestRequest(form={
'form.widgets.key': [atok.key],
'form.buttons.revoke': 1,
})
form = user.UserTokenForm(self.portal, request)
result = form()
self.assertFalse(atok.key in result)
# Ideally this would not be rendered, but it is, due to how the
# button and handler are coupled together. If the button is not
# available the action wouldn't be executed, which would have
# meant that the token wouldn't be revoked...
# This whole issue can probably be sidestepped with a redirect.
# self.assertFalse('Revoke' in result)
def test_2200_usertokenform_no_token_no_button(self):
# have to add a token to show the button.
request = TestRequest()
form = user.UserTokenForm(self.portal, request)
self.assertFalse('Revoke' in form())
def test_suite():
from unittest import TestSuite, makeSuite
suite = TestSuite()
suite.addTest(makeSuite(FormTestCase))
return suite
|
devolio/devolio
|
users/migrations/0002_auto_20170321_2209.py
|
Python
|
gpl-3.0
| 589
| 0.001698
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-21 22:09
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='skill',
name='level',
            field=models.CharField(choices=[('beginner', 'Beginner'), ('intermediate', 'Intermediate'), ('advanced', 'Advanced'), ('expert', 'Expert')], max_length=50, verbose_name="What's your level?"),
),
]
|
kalrey/swift
|
swift/common/constraints.py
|
Python
|
apache-2.0
| 10,266
| 0
|
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import urllib
from urllib import unquote
from ConfigParser import ConfigParser, NoSectionError, NoOptionError
from swift.common import utils, exceptions
from swift.common.swob import HTTPBadRequest, HTTPLengthRequired, \
HTTPRequestEntityTooLarge, HTTPPreconditionFailed
MAX_FILE_SIZE = 5368709122
MAX_META_NAME_LENGTH = 128
MAX_META_VALUE_LENGTH = 256
MAX_META_COUNT = 90
MAX_META_OVERALL_SIZE = 4096
MAX_HEADER_SIZE = 8192
MAX_OBJECT_NAME_LENGTH = 1024
CONTAINER_LISTING_LIMIT = 10000
ACCOUNT_LISTING_LIMIT = 10000
MAX_ACCOUNT_NAME_LENGTH = 256
MAX_CONTAINER_NAME_LENGTH = 256
# If adding an entry to DEFAULT_CONSTRAINTS, note that
# these constraints are automatically published by the
# proxy server in responses to /info requests, with values
# updated by reload_constraints()
DEFAULT_CONSTRAINTS = {
'max_file_size': MAX_FILE_SIZE,
'max_meta_name_length': MAX_META_NAME_LENGTH,
'max_meta_value_length': MAX_META_VALUE_LENGTH,
'max_meta_count': MAX_META_COUNT,
'max_meta_overall_size': MAX_META_OVERALL_SIZE,
'max_header_size': MAX_HEADER_SIZE,
'max_object_name_length': MAX_OBJECT_NAME_LENGTH,
'container_listing_limit': CONTAINER_LISTING_LIMIT,
'account_listing_limit': ACCOUNT_LISTING_LIMIT,
'max_account_name_length': MAX_ACCOUNT_NAME_LENGTH,
'max_container_name_length': MAX_CONTAINER_NAME_LENGTH,
}
SWIFT_CONSTRAINTS_LOADED = False
OVERRIDE_CONSTRAINTS = {} # any constraints overridden by SWIFT_CONF_FILE
EFFECTIVE_CONSTRAINTS = {} # populated by reload_constraints
def reload_constraints():
"""
Parse SWIFT_CONF_FILE and reset module level global contraint attrs,
populating OVERRIDE_CONSTRAINTS AND EFFECTIVE_CONSTRAINTS along the way.
"""
global SWIFT_CONSTRAINTS_LOADED, OVERRIDE_CONSTRAINTS
SWIFT_CONSTRAINTS_LOADED = False
OVERRIDE_CONSTRAINTS = {}
constraints_conf = ConfigParser()
if constraints_conf.read(utils.SWIFT_CONF_FILE):
SWIFT_CONSTRAINTS_LOADED = True
for name in DEFAULT_CONSTRAINTS:
try:
value = int(constraints_conf.get('swift-constraints', name))
except NoOptionError:
pass
except NoSectionError:
# We are never going to find the section for another option
break
else:
OVERRIDE_CONSTRAINTS[name] = value
    for name, default in DEFAULT_CONSTRAINTS.items():
value = OVERRIDE_CONSTRAINTS.get(name, default)
EFFECTIVE_CONSTRAINTS[name] = value
# "globals"
|
in this context is module level globals, always.
globals()[name.upper()] = value
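# Run once at import time so EFFECTIVE_CONSTRAINTS and the module-level
# constants (MAX_FILE_SIZE, etc.) reflect any overrides found in the swift
# conf file (utils.SWIFT_CONF_FILE).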
reload_constraints()
# Maximum slo segments in buffer
MAX_BUFFERED_SLO_SEGMENTS = 10000
#: Query string format= values to their corresponding content-type values
FORMAT2CONTENT_TYPE = {'plain': 'text/plain', 'json': 'application/json',
'xml': 'application/xml'}
def check_metadata(req, target_type):
"""
Check metadata sent in the request headers.
:param req: request object
:param target_type: str: one of: object, container, or account: indicates
which type the target storage for the metadata is
:returns: HTTPBadRequest with bad metadata otherwise None
"""
prefix = 'x-%s-meta-' % target_type.lower()
meta_count = 0
meta_size = 0
for key, value in req.headers.iteritems():
if isinstance(value, basestring) and len(value) > MAX_HEADER_SIZE:
return HTTPBadRequest(body='Header value too long: %s' %
key[:MAX_META_NAME_LENGTH],
request=req, content_type='text/plain')
if not key.lower().startswith(prefix):
continue
key = key[len(prefix):]
if not key:
return HTTPBadRequest(body='Metadata name cannot be empty',
request=req, content_type='text/plain')
meta_count += 1
meta_size += len(key) + len(value)
if len(key) > MAX_META_NAME_LENGTH:
return HTTPBadRequest(
body='Metadata name too long: %s%s' % (prefix, key),
request=req, content_type='text/plain')
elif len(value) > MAX_META_VALUE_LENGTH:
return HTTPBadRequest(
body='Metadata value longer than %d: %s%s' % (
MAX_META_VALUE_LENGTH, prefix, key),
request=req, content_type='text/plain')
elif meta_count > MAX_META_COUNT:
return HTTPBadRequest(
body='Too many metadata items; max %d' % MAX_META_COUNT,
request=req, content_type='text/plain')
elif meta_size > MAX_META_OVERALL_SIZE:
return HTTPBadRequest(
body='Total metadata too large; max %d'
% MAX_META_OVERALL_SIZE,
request=req, content_type='text/plain')
return None
def check_object_creation(req, object_name):
"""
Check to ensure that everything is alright about an object to be created.
:param req: HTTP request object
:param object_name: name of object to be created
:returns HTTPRequestEntityTooLarge: the object is too large
:returns HTTPLengthRequired: missing content-length header and not
a chunked request
:returns HTTPBadRequest: missing or bad content-type header, or
bad metadata
"""
if req.content_length and req.content_length > MAX_FILE_SIZE:
return HTTPRequestEntityTooLarge(body='Your request is too large.',
request=req,
content_type='text/plain')
if req.content_length is None and \
req.headers.get('transfer-encoding') != 'chunked':
return HTTPLengthRequired(request=req)
if 'X-Copy-From' in req.headers and req.content_length:
return HTTPBadRequest(body='Copy requests require a zero byte body',
request=req, content_type='text/plain')
if len(object_name) > MAX_OBJECT_NAME_LENGTH:
return HTTPBadRequest(body='Object name length of %d longer than %d' %
(len(object_name), MAX_OBJECT_NAME_LENGTH),
request=req, content_type='text/plain')
if 'Content-Type' not in req.headers:
return HTTPBadRequest(request=req, content_type='text/plain',
body='No content type')
if not check_utf8(req.headers['Content-Type']):
return HTTPBadRequest(request=req, body='Invalid Content-Type',
content_type='text/plain')
return check_metadata(req, 'object')
def check_mount(root, drive):
"""
Verify that the path to the device is a mount point and mounted. This
allows us to fast fail on drives that have been unmounted because of
issues, and also prevents us for accidentally filling up the root
partition.
:param root: base path where the devices are mounted
:param drive: drive name to be checked
:returns: True if it is a valid mounted device, False otherwise
"""
if not (urllib.quote_plus(drive) == drive):
return False
path = os.path.join(root, drive)
return utils.ismount(path)
def check_float(string):
"""
Helper function for checking if a string can be converted to a float.
:param string: string to be verified as a float
:returns: True if the string can be converted to a float, False oth
|
cts2/pyjxslt
|
pyjxslt-python/tests/testXMLtoJSON.py
|
Python
|
apache-2.0
| 3,904
| 0.003842
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Mayo Clinic
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of the <ORGANIZATION> nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
import pyjxslt
from dict_compare import dict_compare
import json
xml1 = """<?xml version="1.0" encoding="UTF-8"?>
<doc>
<entry id='17'>FOO</entry>
<entry id='42'>BAR</entry>
</doc>"""
expected_json = """{
"doc": {
"entry": [
{
"_content": "FOO",
"id": "17"
},
{
"_content": "BAR",
"id": "42"
}
]
}
}"""
bad_xml = """<?xml version="1.0" encoding="UTF-8"?>
<doc>
<entry id='17'>FOO</entry>
<entry id='42'>BAR</entry>
</dod>"""
xml_with_processing_instruction = """<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="./datadict_v2.xsl"?>
<data_table id="pht003897.v1" study_id="phs000722.v1" participant_set="1">
</data_table>"""
expected_pi = '{ "data_table": { "id": "pht003897.v1", "study_id": "phs000722.v1", "participant_set": "1" } }'
expected_bad = 'ERROR: Transformer exception: org.xml.sax.SAXParseException; lineNumber: 5; columnNumber: 3; ' \
'The element type "doc" must be terminated by the matching end-tag
|
"</doc>".'
class XMLToJsonTestCase(unittest.TestCase):
# Just a quick test as the actual transform is tested elsewhere. Our job is just to make sure
# that we get what we expect through the gateway
gw = pyjxslt.Gateway()
if not gw.gateway_connected(reconnect=False):
print("Gateway must be running on port 25333")
def compare_jsons(self, json1, json2):
json1d = json.loads(json1)
try:
json2d = json.loads(json2)
except json.JSONDecodeError as e:
print(str(e))
return False
success, txt = dict_compare(json1d, json2d)
if not success:
print(txt)
return success
def test1(self):
self.assertTrue(self.compare_jsons(expected_json, self.gw.to_json(xml1)))
self.assertEqual(expected_bad, self.gw.to_json(bad_xml))
self.assertTrue(self.compare_jsons(expected_pi, self.gw.to_json(xml_with_processing_instruction)))
class NoGatewayTestCase(unittest.TestCase):
def test_gw_down(self):
gw = pyjxslt.Gateway(port=23456) # a non-existent port
self.assertIsNone(gw.to_json(xml1))
if __name__ == '__main__':
unittest.main()
|
FederatedAI/FATE
|
python/fate_arch/metastore/db_models.py
|
Python
|
apache-2.0
| 4,898
| 0.001429
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import inspect
import os
import sys
from peewee import CharField, IntegerField, BigIntegerField, TextField, CompositeKey, BooleanField
from fate_arch.federation import FederationEngine
from fate_arch.metastore.base_model import DateTimeField
from fate_arch.common import file_utils, log, EngineType, conf_utils
from fate_arch.common.conf_utils import decrypt_database_config
from fate_arch.metastore.base_model import JSONField, SerializedField, BaseModel
LOGGER = log.getLogger()
DATABASE = decrypt_database_config()
is_standalone = conf_utils.get_base_config("default_engines", {}).get(EngineType.FEDERATION).upper() == \
FederationEngine.STANDALONE
def singleton(cls, *args, **kw):
instances = {}
def _singleton():
key = str(cls) + str(os.getpid())
if key not in instances:
instances[key] = cls(*args, **kw)
return instances[key]
return _singleton
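# Note: the cache key above combines the class name with os.getpid(), so each
# worker process gets its own BaseDataBase instance (and thus its own database
# connection) rather than sharing one across forks.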
@singleton
class BaseDataBase(object):
def __init__(self):
database_config = DATABASE.copy()
db_name = database_config.pop("name")
if is_standalone:
from playhouse.apsw_ext import APSWDatabase
self.database_connection = APSWDatabase(file_utils.get_project_base_directory("fate_sqlite.db"))
else:
from playhouse.pool import PooledMySQLDatabase
self.database_connection = PooledMySQLDatabase(db_name, **database_config)
DB = BaseDataBase().database_connection
def close_connection():
try:
if DB:
DB.close()
except Exception as e:
LOGGER.exception(e)
class DataBaseModel(BaseModel):
class Meta:
database = DB
@DB.connection_context()
def init_database_tables():
members = inspect.getmembers(sys.modules[__name__], inspect.isclass)
table_objs = []
create_failed_list = []
for name, obj in members:
if obj != DataBaseModel and issubclass(obj, DataBaseModel):
table_objs.append(obj)
LOGGER.info(f"start create table {obj.__name__}")
try:
obj.create_table()
LOGGER.info(f"create table success: {obj.__name__}")
except Exception as e:
LOGGER.exception(e)
create_failed_list.append(obj.__name__)
if create_failed_list:
LOGGER.info(f"create tables failed: {create_failed_list}")
raise Exception(f"create tables failed: {create_failed_list}")
class StorageConnectorModel(DataBaseModel):
f_name = CharField(max_length=100, primary_key=True)
f_engine = CharField(max_length=100, index=True) # 'MYSQL'
f_connector_info = JSONField()
class Meta:
db_table = "t_storage_connector"
class StorageTableMetaModel(DataBaseModel):
f_name = CharField(max_length=100, index=True)
f_namespace = CharField(max_length=100, index=True)
f_address = JSONField()
f_engine = CharField(max_length=100) # 'EGGROLL', 'MYSQL'
f_store_type = CharField(max_length=50, null=True) # store type
f_options = JSONField()
f_partitions = IntegerField(null=True)
f_id_delimiter = CharField(null=True)
f_in_serialized = BooleanField(default=True)
f_have_head = BooleanField(default=True)
f_extend_sid = BooleanField(default=False)
f_auto_increasing_sid = BooleanField(default=False)
f_schema = SerializedField()
f_count = BigIntegerField(null=True)
f_part_of_data = SerializedField()
f_description = TextField(default='')
f_read_access_time = BigIntegerField(null=True)
f_read_access_date = DateTimeField(null=True)
f_write_access_time = BigIntegerField(null=True)
f_write_access_date = DateTimeField(null=True)
class Meta:
db_table = "t_storage_table_meta"
primary_key = CompositeKey('f_name', 'f_namespace')
class SessionRecord(DataBaseModel):
f_engine_session_id = CharField(max_length=150, null=False)
f_manager_session_id = CharField(max_length=150, null=False)
f_engine_type = CharField(max_length=10, index=True)
f_engine_name = CharField(max_length=50, index=True)
f_engine_address = JSONField()
class Meta:
db_table = "t_session_record"
primary_key = CompositeKey("f_engine_type", "f_engine_name", "f_engine_session_id")
|
agile-geoscience/agilegeo
|
bruges/rockphysics/test/fluidsub_test.py
|
Python
|
apache-2.0
| 2,698
| 0
|
# -*- coding: utf-8 -*-
"""
Tests.
"""
import unittest
from bruges.rockphysics import fluidsub
# Inputs... GAS case
vp_gas = 2429.0
vs_gas = 1462.4
rho_gas = 2080.
# Expected outputs... BRINE case
vp_brine = 2850.5
vs_brine = 1416.1
rho_brine = 2210.0
phi = 0.275 # Don't know this... reading from fig
rhohc = 250.0 # gas
rhow = 1040.0 # brine
sw = 0.3 # Don't know this... just guessing
swnew = 1.0 # Don't know this... just guessing
khc = 207000000.0 # gas
kw = 2950000000.0 # brine
kclay = 25000000000.0
kqtz = 37000000000.0
vclay = 0.05
kmin = 36266406250.0 # Don't know this... reading from fig
class FluidsubTest(unittest.TestCase):
"""
Tests fluid sub calculations against Smith et al 2003.
    https://dl.dropboxusercontent.com/u/14965965/Smith_etal_2003.pdf
"""
def test_avseth(self):
# Base case: gas
# Subbing with: brine
sub = fluidsub.avseth_fluidsub(vp=vp_gas,
vs=vs_gas,
rho=rho_gas,
phi=phi,
rhof1=rhohc,
rhof2=rhow,
kmin=37000000000,
kf1=khc,
kf2=kw)
self.assertAlmostEqual(sub[0], vp_brine, places=-1) # Cannot match :(
self.assertAlmostEqual(sub[1], vs_brine, places=-1) # Cannot match :(
self.assertAlmostEqual(sub[2], rho_brine, places=-1) # Cannot match :(
def test_smith(self):
# Base case: gas
# Subbing with: brine
sub = fluidsub.smith_fluidsub(vp=vp_gas,
vs=vs_gas,
rho=rho_gas,
phi=phi,
rhohc=rhohc,
rhow=rhow,
sw=sw,
swnew=swnew,
khc=khc,
kw=kw,
kclay=kclay,
kqtz=kqtz,
vclay=vclay)
self.assertAlmostEqual(sub[0], vp_brine, places=-1)
self.assertAlmostEqual(sub[1], vs_brine, places=-1)
self.assertAlmostEqual(sub[2], rho_brine, places=-1) # Cannot match :(
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(FluidsubTest)
unittest.TextTestRunner(verbosity=2).run(suite)
|
saullocastro/compmech
|
compmech/conecyl/modelDB.py
|
Python
|
bsd-3-clause
| 17,932
| 0.001617
|
r"""
Used to configure the main parameters for each implemented model.
.. currentmodule:: compmech.conecyl.modelDB
"""
import numpy as np
from scipy.sparse import coo_matrix
from clpt import *
from fsdt import *
db = {
'clpt_donnell_bc1': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc1,
'linear': clpt_donnell_bc1_linear,
'non-linear': clpt_donnell_bc1_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'clpt_donnell_bc2': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc2,
'linear': clpt_donnell_bc2_linear,
'non-linear': clpt_donnell_bc2_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'iso_clpt_donnell_bc2': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc2,
'linear': iso_clpt_donnell_bc2_linear,
'non-linear': iso_clpt_donnell_bc2_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'clpt_donnell_bc3': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc3,
'linear': clpt_donnell_bc3_linear,
'non-linear': clpt_donnell_bc3_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'iso_clpt_donnell_bc3': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc3,
'linear': iso_clpt_donnell_bc3_linear,
'non-linear': iso_clpt_donnell_bc3_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'clpt_donnell_bc4': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc4,
'linear': clpt_donnell_bc4_linear,
'non-linear': clpt_donnell_bc4_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'clpt_donnell_bcn': {
'linear static': True,
'linear buckling': False,
'non-linear static': None,
'commons': clpt_commons_bcn,
'linear': clpt_donnell_bcn_linear,
'non-linear': None,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
        'num1': 3,
'num2': 8,
},
'clpt_sanders_bc1': {
'linear static': True,
        'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc1,
'linear': clpt_sanders_bc1_linear,
'non-linear': clpt_sanders_bc1_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'clpt_sanders_bc2': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc2,
'linear': clpt_sanders_bc2_linear,
'non-linear': clpt_sanders_bc2_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'clpt_sanders_bc3': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc3,
'linear': clpt_sanders_bc3_linear,
'non-linear': clpt_sanders_bc3_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'clpt_sanders_bc4': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc4,
'linear': clpt_sanders_bc4_linear,
'non-linear': clpt_sanders_bc4_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'clpt_geier1997_bc2': {
'linear static': None,
'linear buckling': True,
'non-linear static': None,
'commons': clpt_geier1997_bc2,
'linear': clpt_geier1997_bc2,
'non-linear': None,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 0,
'num0': 0,
'num1': 0,
'num2': 3,
},
'fsdt_donnell_bcn': {
'linear static': True,
'linear buckling': False,
'non-linear static': True,
'commons': fsdt_commons_bcn,
'linear': fsdt_donnell_bcn_linear,
'non-linear': fsdt_donnell_bcn_nonlinear,
'dofs': 5,
'e_num': 8,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 5,
'num2': 10,
},
'fsdt_donnell_bc1': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': fsdt_commons_bc1,
'linear': fsdt_donnell_bc1_linear,
'non-linear': fsdt_donnell_bc1_nonlinear,
'dofs': 5,
'e_num': 8,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 5,
'num2': 10,
},
'fsdt_donnell_bc2': {
'linear static': True,
|
vtbassmatt/django-expression-fields
|
src/expression_fields/expr.py
|
Python
|
mit
| 668
| 0.01497
|
from __future__ import division
from math import *
def calculate(expr_string):
    math_list = ['math', 'acos', 'asin', 'atan', 'atan2', 'ceil',
'cos', 'cosh', 'degrees', 'e', 'exp', 'fabs', 'floor', 'fmod',
'frexp', 'hypot', 'ldexp', 'log', 'log10', 'modf', 'pi',
'pow', 'radians', 'sin', 'sinh', 'sqrt', 'tan', 'tanh']
builtins_list = [abs]
    local_ctx = dict([ (k, globals().get(k, None)) for k in math_list ])
local_ctx.update(dict([ (b.__name__, b) for b in builtins_list ]))
try:
return eval(expr_string, { "__builtins__": None }, local_ctx)
except (SyntaxError, TypeError, NameError):
return None
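# Illustrative use (values approximate): calculate() evaluates a math
# expression with builtins disabled and returns None when evaluation fails.
#     calculate("sqrt(2) * pi")        # -> ~4.4429
#     calculate("__import__('os')")    # -> None: no builtins are exposed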
|
StanfordBioinformatics/loom
|
server/loomengine_server/api/test/models/test_data_nodes.py
|
Python
|
agpl-3.0
| 10,494
| 0.019726
|
from django.core.exceptions import ValidationError
from django.test import TestCase
from api.test.models import _get_string_data_object
from api.models.data_nodes import *
class TestDataNode(TestCase):
INPUT_DATA=(
([(0,3),(0,1)], 'i'),
([(1,3),(0,2)], 'a'),
([(1,3),(1,2)], 'm'),
([(2,3),(0,5)], 'r'),
([(2,3),(1,5)], 'o'),
([(2,3),(2,5)], 'b'),
([(2,3),(3,5)], 'o'),
([(2,3),(4,5)], 't'),
)
def getTree(self, data):
root = DataNode.objects.create(type='string')
self.addData(root, data)
return root
def addData(self, root, data):
for data_path, value in data:
data_object = _get_string_data_object(value)
root.add_data_object(data_path, data_object)
def testAddDataObject(self):
root = self.getTree(self.INPUT_DATA)
# spot check [['i'],['a','m'],['r','o','b','o','t']]
self.assertEqual(root.get_data_object([(0,3),(0,1)]).substitution_value, 'i')
self.assertEqual(root.get_data_object([(1,3),(0,2)]).substitution_value, 'a')
self.assertEqual(root.get_data_object([(1,3),(1,2)]).substitution_value, 'm')
self.assertEqual(root.get_data_object([(2,3),(4,5)]).substitution_value, 't')
# Verify that we get the same result after saving
self.assertTrue(root.get_children()[0].id is None)
root.save_with_children()
self.assertEqual(root.get_data_object([(0,3),(0,1)]).substitution_value, 'i')
self.assertEqual(root.get_data_object([(1,3),(0,2)]).substitution_value, 'a')
self.assertEqual(root.get_data_object([(1,3),(1,2)]).substitution_value, 'm')
self.assertEqual(root.get_data_object([(2,3),(4,5)]).substitution_value, 't')
self.assertTrue(root.get_children()[0].id is not None)
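    # Note on the path encoding used above (added illustration, not original
    # test code): each step of a data path is an (index, degree) pair, so
    # [(2,3),(4,5)] means "child 4 of 5 under child 2 of 3", i.e. the final
    # 't' of [['i'],['a','m'],['r','o','b','o','t']].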
def testMissingData(self):
input_data=(
([(0,3),(0,1)], 'i'),
#([(1,3),(0,2)], 'a'),
#([(1,3),(1,2)], 'm'),
([(2,3),(0,5)], 'r'),
#([(2,3),(1,5)], 'o'),
([(2,3),(2,5)], 'b'),
([(2,3),(3,5)], 'o'),
([(2,3),(4,5)], 't'),
)
root = self.getTree(input_data)
# spot check [['i'],['a','m'],['r','o','b','o','t']]
self.assertEqual(root.get_data_object([(0,3),(0,1)]).substitution_value, 'i')
with self.assertRaises(MissingBranchError):
root.get_data_object([(1,3),])
with self.assertRaises(MissingBranchError):
root.get_data_object([(2,3),(1,5)])
self.assertEqual(root.get_data_object([(2,3),(4,5)]).substitution_value, 't')
def testAddScalarDataObject(self):
root = DataNode.objects.create(type='string')
text = 'text'
data_object = _get_string_data_object(text)
data_path = []
root.add_data_object(data_path, data_object)
self.assertEqual(root.get_data_object(data_path).substitution_value, text)
def testAddScalarDataObjectTwice(self):
root = DataNode.objects.create(type='string')
text = 'text'
data_object = _get_string_data_object(text)
data_path = []
root.add_data_object(data_path, data_object)
with self.assertRaises(DataAlreadyExistsError):
root.add_data_object(data_path, data_object)
def testAddBranchTwice(self):
root = DataNode.objects.create(degree=2, type='string')
branch1 = root.add_branch(1, 1)
branch2 = root.add_branch(1, 1)
self.assertEqual(branch1.id, branch2.id)
def testAddBranchOverLeaf(self):
root = DataNode.objects.create(degree=2, type='string')
data_object = _get_string_data_object('text')
root.add_leaf(1, data_object)
with self.assertRaises(UnexpectedLeafNodeError):
root.add_branch(1, 1)
def testAddLeafOverBranch(self):
root = DataNode.objects.create(degree=2, type='string')
data_object = _get_string_data_object('text')
root.add_leaf(1, data_object)
with self.assertRaises(UnexpectedLeafNodeError):
root.add_branch(1, 1)
def testAddLeafTwice(self):
root = DataNode.objects.create(degree=1, type='string')
data_object = _get_string_data_object('text')
root.add_leaf(0, data_object)
with self.assertRaises(NodeAlreadyExistsError):
root.add_leaf(0, data_object)
def testIndexOutOfRangeError(self):
degree = 2
data_object = _get_string_data_object('text')
root = DataNode.objects.create(degree=degree, type='string')
        with self.assertRaises(IndexOutOfRangeError):
root.add_leaf(degree, data_object)
with self.assertRaises(IndexOutOfRangeError):
root.add_leaf(-1, data_object)
def testDegreeMismatchError(self):
data_object = _get_string_data_object('text')
root = DataNode.objects.create(degree=2, type='string')
root.add_branch(1, 2)
with self.assertRaises(DegreeMismatchError):
            root.add_branch(1, 3)
def testUnknownDegreeError(self):
data_object = _get_string_data_object('text')
root = DataNode.objects.create(type='string')
with self.assertRaises(UnknownDegreeError):
root.add_leaf(0, data_object)
def testIsReady(self):
some_of_the_data=(
([(0,3),(0,1)], 'i'),
([(1,3),(0,2)], 'a'),
([(2,3),(0,5)], 'r'),
([(2,3),(1,5)], 'o'),
([(2,3),(2,5)], 'b'),
([(2,3),(4,5)], 't'),
)
the_rest_of_the_data = (
([(2,3),(3,5)], 'o'),
([(1,3),(1,2)], 'm'),
)
root = self.getTree(some_of_the_data)
self.assertFalse(root.is_ready([]))
self.assertFalse(root.is_ready([(2,3),]))
self.assertFalse(root.is_ready([(2,3),(3,5)]))
self.assertTrue(root.is_ready([(0,3),]))
self.assertTrue(root.is_ready([(0,3),(0,1)]))
self.addData(root, the_rest_of_the_data)
self.assertTrue(root.is_ready([]))
self.assertTrue(root.is_ready([(2,3),]))
self.assertTrue(root.is_ready([(2,3),(3,5)]))
self.assertTrue(root.is_ready([(0,3),]))
self.assertTrue(root.is_ready([(0,3),(0,1)]))
def testClone(self):
tree1 = self.getTree(self.INPUT_DATA)
child1 = tree1.get_node([(2,3)])
grandchild1 = tree1.get_node([(2,3),(4,5)])
tree2 = tree1.clone()
child2 = tree2.get_node([(2,3)])
grandchild2 = tree2.get_node([(2,3),(4,5)])
self.assertEqual(grandchild1.data_object.uuid, grandchild2.data_object.uuid)
self.assertNotEqual(tree1.uuid, tree2.uuid)
self.assertNotEqual(child1.uuid, child2.uuid)
self.assertNotEqual(grandchild1.uuid, grandchild2.uuid)
def testClone_withSeed(self):
tree1 = self.getTree(self.INPUT_DATA)
child1 = tree1.get_node([(2,3)])
grandchild1 = tree1.get_node([(2,3),(4,5)])
tree2 = DataNode.objects.create(type='string')
tree1.clone(seed=tree2)
child2 = tree2.get_node([(2,3)])
grandchild2 = tree2.get_node([(2,3),(4,5)])
self.assertEqual(grandchild1.data_object.uuid, grandchild2.data_object.uuid)
self.assertNotEqual(tree1.uuid, tree2.uuid)
self.assertNotEqual(child1.uuid, child2.uuid)
self.assertNotEqual(grandchild1.uuid, grandchild2.uuid)
def testClone_leaf(self):
leaf = DataNode.objects.create(type='string')
leaf.add_data_object(
[], _get_string_data_object(
'al ultimo se lo estan comiendo las hormigas'))
clone = leaf.clone()
self.assertNotEqual(leaf.uuid, clone.uuid)
self.assertEqual(leaf.data_object.uuid, clone.data_object.uuid)
def testFlattenedClone(self):
tree1 = self.getTree(self.INPUT_DATA)
penult_grandchild1 = tree1.get_node([(2,3),(3,5)])
last_grandchild1 = tree1.get_node([(2,3),(4,5)])
tree2 = tree1.flattened_clone()
penult_child2 = tree2.get_node([(6,8)])
        last_child2 = tree2.get_node([(7,8)])
|
stlim0730/glide
|
api/views.py
|
Python
|
mit
| 35,037
| 0.013643
|
from rest_framework.decorators import api_view, parser_classes
from rest_framework.parsers import JSONParser, FormParser, MultiPartParser
from rest_framework.response import Response
from .serializers import *
import json
from urllib.parse import urlencode
from urllib.request import urlopen, Request
from urllib.error import HTTPError
from django.contrib.auth.models import User
from workspace.models import Project, Theme
from copy import deepcopy
import markdown
import base64
import mimetypes
import yaml
from jinja2 import Template, Environment, meta
import traceback
import re
import os
from glide import *
from django.conf import settings
import pathlib, shutil, subprocess
def _isBinary(fileName):
  fileType, encoding = mimetypes.guess_type(fileName)
  if fileType is None:
    # guess_type() returns None for unrecognized extensions; treat those as binary.
    return True
  if fileType.startswith('text/')\
    or fileType == 'application/json'\
    or fileType == 'application/x-latex'\
    or fileType == 'application/javascript'\
    or fileType == 'application/yaml'\
    or fileName.endswith('.md'): # Just in case
    return False
  else:
    return True
# @api_view(['GET'])
# def theme(request, slug):
# """
# Responds with a list of all the themes available
# or a theme when specified
# """
# themes = Theme.objects.all()
# if slug:
# themes = themes.filter(slug=slug)
# serializer = ThemeSerializer(themes, many=True)
# return Response(serializer.data)
# @api_view(['GET'])
# def project(request, slug):
# """
# Responds with a project object specified
# """
# projects = Project.objects.all()
# if slug:
# projects = projects.filter(slug=slug)
# serializer = ProjectSerializer(projects, many=True)
# return Response(serializer.data)
@api_view(['GET'])
def repositories(request):
"""
Responds with a list of repositories
that are accessible to the authenticated user
"""
accessToken = request.session['accessToken']
getAllReposUrl = 'https://api.github.com/user/repos?&per_page=100&access_token={}'
getAllReposUrl = getAllReposUrl.format(accessToken)
getAllReposUrl = getAuthUrl(getAllReposUrl)
with urlopen(getAllReposUrl) as allReposRes:
resStr = allReposRes.read().decode('utf-8')
return Response({ 'repositories': resStr })
@api_view(['GET'])
def readme(request, owner, repo):
"""
Responds with HTML-rendered README.md
that are accessible to the authenticated user
"""
accessToken = request.session['accessToken']
getReadmeUrl = 'https://api.github.com/repos/{}/{}/readme?access_token={}'
getReadmeUrl = getReadmeUrl.format(owner, repo, accessToken)
getReadmeUrl = getAuthUrl(getReadmeUrl)
# with urlopen(getReadmeUrl) as readmeRes:
# resStr = readmeRes.read().decode('utf-8')
# return Response({ 'branches': resStr })
req = Request(
url=getReadmeUrl, method='GET',
headers={'Content-Type': 'application/vnd.github.v3.html+json'})
try:
with urlopen(req) as readmeRes:
resStr = readmeRes.read().decode('utf-8')
readmeObj = json.loads(resStr)
mdContent = readmeObj['content']
if readmeObj['encoding'] == 'base64':
mdContent = base64.b64decode(mdContent).decode('utf-8')
res = _mdToHtml(mdContent)
return Response({
'readme': res
})
else:
return Response({
'error': 'decoding'
})
except HTTPError:
return Response({
      'error': 'HTTPError'
})
@api_view(['POST'])
def cdn(request, owner, repo):
"""
Responds with RawGit url for the specified file
"""
res = {}
accessToken = request.session['accessToken']
  file = request.data['file']
# branch = request.data['branch']
commit = _getLatestCommit(accessToken, owner, repo)
cdnUrl = 'https://cdn.rawgit.com/{}/{}/{}/{}'
cdnUrl = cdnUrl.format(owner, repo, commit['sha'], file['path'])
return Response({
'cdnUrl': cdnUrl
})
@api_view(['POST'])
def parse(request):
template = request.data['templateFileContent']
jinjaEnv = Environment()
absSynTree = jinjaEnv.parse(template)
keys = list(meta.find_undeclared_variables(absSynTree))
# TODO: Sort it properly:
# Allow whitespaces after/before the curly braces
keys = sorted(keys, key=lambda x:template.index('{{'+x+'}}'))
return Response({
'keys': keys
})
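# Illustration (not part of the original view): for a template string like
# "Hello {{name}}, today is {{day}}", meta.find_undeclared_variables() yields
# {'day', 'name'} and the sort above orders the keys by first appearance, so
# the response body would be {'keys': ['name', 'day']}.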
@api_view(['GET'])
def branches(request, repositoryFullName):
"""
Responds with a list of branches in the specified project
"""
accessToken = request.session['accessToken']
getBranchesUrl = 'https://api.github.com/repos/{}/branches?&per_page=100&access_token={}'
getBranchesUrl = getBranchesUrl.format(repositoryFullName, accessToken)
getBranchesUrl = getAuthUrl(getBranchesUrl)
with urlopen(getBranchesUrl) as branchesRes:
resStr = branchesRes.read().decode('utf-8')
res = json.loads(resStr)
return Response({ 'branches': res })
@api_view(['GET'])
def commits(request, owner, repo, branch):
"""
Responds with a list of commits on the specified branch
in the specified repository
"""
accessToken = request.session['accessToken']
getCommitsUrl = 'https://api.github.com/repos/{}/{}/commits?sha={}&access_token={}'
getCommitsUrl = getCommitsUrl.format(owner, repo, branch, accessToken)
getCommitsUrl = getAuthUrl(getCommitsUrl)
with urlopen(getCommitsUrl) as commitsRes:
resStr = commitsRes.read().decode('utf-8')
res = json.loads(resStr)
return Response({ 'commits': res })
def _getLatestCommit(accessToken, repoUsername, projectSlug):
"""
Returns the latest commit object of a repository
"""
commitsUrl = 'https://api.github.com/repos/{}/{}/commits?access_token={}'
commitsUrl = commitsUrl.format(repoUsername, projectSlug, accessToken)
commitsUrl = getAuthUrl(commitsUrl)
with urlopen(commitsUrl) as commitsRes:
res = commitsRes.read().decode('utf-8')
commits = json.loads(res)
# commits[0] is guaranteed
# as every Glide repo has been created with the option 'auto_init': True
return commits[0]
def _getLatestSha(accessToken, repoUsername, projectSlug):
"""
Returns the hash value of the latest commit of a repository
"""
latestCommit = _getLatestCommit(accessToken, repoUsername, projectSlug)
return latestCommit['sha']
def _getRepoTree(accessToken, repoUsername, projectSlug, branch='master', commit=None):
"""
Returns the latest tree structure of a repository.
The branch can be specified. Otherwise, it assumes master.
The commit SHA can be specified. Otherwise, it assumes latest commit.
"""
sha = ''
if not commit:
sha = _getLatestSha(accessToken, repoUsername, projectSlug)
else:
sha = commit
  # Query parameters are joined with '&' (the second '?' here was malformed).
  repoTreeUrl = 'https://api.github.com/repos/{}/{}/git/trees/{}?recursive=1&access_token={}'
repoTreeUrl = repoTreeUrl.format(repoUsername, projectSlug, sha, accessToken)
repoTreeUrl = getAuthUrl(repoTreeUrl)
with urlopen(repoTreeUrl) as repoTreeRes:
# TODO: This API request sometimes gives 409 conflicts response. # Why?
res = repoTreeRes.read().decode('utf-8')
repoTree = json.loads(res)
for file in repoTree['tree']:
#
# TODO: File extension?
#
# file['ext'] = file['path'].split('.')[-1]
# if file['path'] == file['ext']:
# # It's a folder
# file['ext'] = None
# # file['downloadUrl'] = None
#
# TODO: Editor type?
#
# file['editor'] = ''
# if file['ext'] in ['glide', 'md', 'yml', 'yaml']:
# file['editor'] = 'data'
# elif file['ext'] in ['html', 'htm']:
# file['editor'] = 'html'
# elif file['ext'] in ['css']:
# file['editor'] = 'css'
#
file['name'] = file['path'].split('/')[-1]
# TODO: Use GitHub Blobs API rather than custom string operations
# downloadUrl = 'https://raw.githubusercontent.com/{}/{}/{}/{}?access_token={}'
# file['downloadUrl'] = downloadUrl.format(repoUsername, projectSlug, branch, file['path'], accessToken)
# repoTree['tree'] = [file for file in repoTree['tree'] if file['type'] != 'tree']
return repoTree
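# Added note (not original code): the structure returned above mirrors GitHub's
# "git trees" API response, i.e. {'sha': ..., 'tree': [{'path': ..., 'type':
# 'blob' or 'tree', ...}, ...]}, with a convenience 'name' key appended to each
# entry by the loop above.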
def _createReference(accessToken, owner, repo, ref, refTo):
#
# In case of creating a new branch,
# ref is the new branch name
  # and refTo is sha of a commit you branch from
|
kernevil/samba
|
python/samba/netcmd/drs.py
|
Python
|
gpl-3.0
| 36,173
| 0.001603
|
# implement samba_tool drs commands
#
# Copyright Andrew Tridgell 2010
# Copyright Andrew Bartlett 2017
#
# based on C implementation by Kamen Mazdrashki <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function
import samba.getopt as options
import ldb
import logging
from . import common
import json
from samba.auth import system_session
from samba.netcmd import (
Command,
CommandError,
Option,
SuperCommand,
)
from samba.samdb import SamDB
from samba import drs_utils, nttime2string, dsdb
from samba.dcerpc import drsuapi, misc
from samba.join import join_clone
from samba import colour
from samba.uptodateness import (
get_partition_maps,
get_utdv_edges,
get_utdv_distances,
get_utdv_summary,
get_kcc_and_dsas,
)
from samba.common import get_string
from samba.samdb import get_default_backend_store
def drsuapi_connect(ctx):
'''make a DRSUAPI connection to the server'''
try:
(ctx.drsuapi, ctx.drsuapi_handle, ctx.bind_supported_extensions) = drs_utils.drsuapi_connect(ctx.server, ctx.lp, ctx.creds)
except Exception as e:
raise CommandError("DRS connection to %s failed" % ctx.server, e)
def samdb_connect(ctx):
'''make a ldap connection to the server'''
try:
ctx.samdb = SamDB(url="ldap://%s" % ctx.server,
session_info=system_session(),
credentials=ctx.creds, lp=ctx.lp)
except Exception as e:
raise CommandError("LDAP connection to %s failed" % ctx.server, e)
def drs_errmsg(werr):
'''return "was successful" or an error string'''
(ecode, estring) = werr
if ecode == 0:
return "was successful"
return "failed, result %u (%s)" % (ecode, estring)
def attr_default(msg, attrname, default):
'''get an attribute from a ldap msg with a default'''
if attrname in msg:
return msg[attrname][0]
return default
def drs_parse_ntds_dn(ntds_dn):
'''parse a NTDS DN returning a site and server'''
a = ntds_dn.split(',')
if a[0] != "CN=NTDS Settings" or a[2] != "CN=Servers" or a[4] != 'CN=Sites':
raise RuntimeError("bad NTDS DN %s" % ntds_dn)
server = a[1].split('=')[1]
site = a[3].split('=')[1]
return (site, server)
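# Illustrative example (not in the original source): for an NTDS DN such as
#   "CN=NTDS Settings,CN=DC1,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=example,DC=com"
# drs_parse_ntds_dn() returns ('Default-First-Site-Name', 'DC1').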
DEFAULT_SHOWREPL_FORMAT = 'classic'
class cmd_drs_showrepl(Command):
"""Show replication status."""
synopsis = "%prog [<DC>] [options]"
takes_optiongroups = {
"sambaopts": options.SambaOptions,
"versionopts": options.VersionOptions,
"credopts": options.CredentialsOptions,
}
takes_options = [
Option("--json", help="replication details in JSON format",
dest='format', action='store_const', const='json'),
Option("--summary", help=("summarize overall DRS health as seen "
"from this server"),
dest='format', action='store_const', const='summary'),
        Option("--pull-summary", help=("Have we successfully replicated "
                                       "from all relevant servers?"),
dest='format', action='store_const', const='pull_summary'),
Option("--notify-summary", action='store_const',
const='notify_summary', dest='format',
               help=("Have we successfully notified all relevant servers of "
"local changes, and did they say they successfully "
"replicated?")),
Option("--classic", help="print local replication details",
dest='format', action='store_const', const='classic',
default=DEFAULT_SHOWREPL_FORMAT),
Option("-v", "--verbose", help="Be verbose", action="store_true"),
Option("--color", help="Use colour output (yes|no|auto)",
default='no'),
]
takes_args = ["DC?"]
def parse_neighbour(self, n):
"""Convert an ldb neighbour object into a python dictionary"""
dsa_objectguid = str(n.source_dsa_obj_guid)
d = {
'NC dn': n.naming_context_dn,
"DSA objectGUID": dsa_objectguid,
"last attempt time": nttime2string(n.last_attempt),
"last attempt message": drs_errmsg(n.result_last_attempt),
"consecutive failures": n.consecutive_sync_failures,
"last success": nttime2string(n.last_success),
"NTDS DN": str(n.source_dsa_obj_dn),
'is deleted': False
}
try:
self.samdb.search(base="<GUID=%s>" % dsa_objectguid,
scope=ldb.SCOPE_BASE,
attrs=[])
except ldb.LdbError as e:
(errno, _) = e.args
if errno == ldb.ERR_NO_SUCH_OBJECT:
d['is deleted'] = True
else:
raise
try:
(site, server) = drs_parse_ntds_dn(n.source_dsa_obj_dn)
d["DSA"] = "%s\\%s" % (site, server)
except RuntimeError:
pass
return d
def print_neighbour(self, d):
'''print one set of neighbour information'''
self.message("%s" % d['NC dn'])
if 'DSA' in d:
self.message("\t%s via RPC" % d['DSA'])
else:
self.message("\tNTDS DN: %s" % d['NTDS DN'])
self.message("\t\tDSA object GUID: %s" % d['DSA objectGUID'])
self.message("\t\tLast attempt @ %s %s" % (d['last attempt time'],
d['last attempt message']))
self.message("\t\t%u consecutive failure(s)." %
d['consecutive failures'])
self.message("\t\tLast success @ %s" % d['last success'])
self.message("")
def get_neighbours(self, info_type):
req1 = drsuapi.DsReplicaGetInfoRequest1()
req1.info_type = info_type
try:
(info_type, info) = self.drsuapi.DsReplicaGetInfo(
self.drsuapi_handle, 1, req1)
except Exception as e:
raise CommandError("DsReplicaGetInfo of type %u failed" % info_type, e)
reps = [self.parse_neighbour(n) for n in info.array]
return reps
def run(self, DC=None, sambaopts=None,
credopts=None, versionopts=None,
format=DEFAULT_SHOWREPL_FORMAT,
verbose=False, color='no'):
self.apply_colour_choice(color)
self.lp = sambaopts.get_loadparm()
if DC is None:
DC = common.netcmd_dnsname(self.lp)
self.server = DC
        self.creds = credopts.get_credentials(self.lp, fallback_machine=True)
self.verbose = verbose
output_function = {
'summary': self.summary_output,
'notify_summary': self.notify_summary_output,
'pull_summary': self.pull_summary_output,
'json': self.json_output,
'classic': self.classic_output,
}.get(format)
if output_function is None:
raise CommandError("unknown showrepl format %s" % format)
return output_function()
def json_output(self):
data = self.get_local_repl_data()
del data['site']
del data['server']
json.dump(data, self.outf, indent=2)
    def summary_output_handler(self, typeof_output):
        """Print a short message if everything seems fine, but print details of any
links that seem broken."""
failing_repsto = []
failing_repsfrom = []
local_data = self.get_local_repl_data()
if typeof_output != "pull_summary":
|
michaelbratsch/bwb
|
populate.py
|
Python
|
gpl-3.0
| 1,775
| 0
|
#!/usr/bin/env python
from datetime import timedelta
import os
import random
from django.utils.dateparse import parse_date
from faker import Faker
test_email = '[email protected]'
fake = Faker('de')
fake.seed(1)
random.seed(1)
def get_random_date():
return parse_date('1983-03-31') + timedelta(days=random.randint(-5000,
1000))
def populate():
for _ in range(100):
candidate = add_candidate(first_name=fake.first_name(),
last_name=fake.last_name(),
date_of_birth=get_random_date())
add_registration(candidate=candidate,
bicycle_kind=random.randint(1, 4),
email=fake.email())
def add_candidate(first_name, last_name, date_of_birth):
    return Candidate.objects.create(first_name=first_name,
last_name=last_name,
date_of_birth=date_of_birth)
def add_registration(candidate, bicycle_kind, email):
return UserRegistration.objects.create(candidate=candidate,
bicycle_kind=bicycle_kind,
email=email)
def add_event(due_date):
return HandoutEvent.objects.create(due_date=due_date)
def add_bicycle():
b = Bicycle.objects.create()
return b
# Start execution here!
if __name__ == '__main__':
print("Starting FIRST_APP population script...")
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'bwb.settings')
import django
django.setup()
from register.models import UserRegistration, Candidate, Bicycle
from register.models import HandoutEvent
populate()
|
allenlavoie/tensorflow
|
tensorflow/contrib/distributions/python/ops/estimator.py
|
Python
|
apache-2.0
| 7,908
| 0.004299
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions to bridge `Distribution`s and `tf.contrib.learn.estimator` APIs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.learn.python.learn.estimators.head import _compute_weighted_loss
from tensorflow.contrib.learn.python.learn.estimators.head import _RegressionHead
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
__all__ = [
"estimator_head_distribution_regression",
]
def estimator_head_distribution_regression(make_distribution_fn,
label_dimension=1,
logits_dimension=None,
label_name=None,
weight_column_name=None,
enable_centered_bias=False,
head_name=None):
"""Creates a `Head` for regression under a generic distribution.
Args:
make_distribution_fn: Python `callable` which returns a `tf.Distribution`
instance created using only logits.
label_dimension: Number of regression labels per example. This is the size
of the last dimension of the labels `Tensor` (typically, this has shape
`[batch_size, label_dimension]`).
logits_dimension: Number of logits per example. This is the size of the last
dimension of the logits `Tensor` (typically, this has shape
`[batch_size, logits_dimension]`).
Default value: `label_dimension`.
label_name: Python `str`, name of the key in label `dict`. Can be `None` if
label is a `Tensor` (single headed models).
weight_column_name: Python `str` defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
enable_centered_bias: Python `bool`. If `True`, estimator will learn a
centered bias variable for each class. Rest of the model structure learns
the residual after centered bias.
head_name: Python `str`, name of the head. Predictions, summary and metrics
keys are suffixed by `"/" + head_name` and the default variable scope is
`head_name`.
Returns:
An instance of `Head` for generic regression.
"""
return _DistributionRegressionHead(
make_distribution_fn=make_distribution_fn,
label_dimension=label_dimension,
logits_dimension=logits_dimension,
label_name=label_name,
weight_column_name=weight_column_name,
enable_centered_bias=enable_centered_bias,
head_name=head_name)
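# Usage sketch (illustrative only; the Normal-distribution wiring below is an
# assumption and is not shown elsewhere in this file):
#
#   def make_normal(logits):
#     loc, scale = array_ops.split(logits, 2, axis=-1)
#     return tf.distributions.Normal(loc=loc, scale=tf.nn.softplus(scale))
#
#   head = estimator_head_distribution_regression(make_normal,
#                                                  label_dimension=1,
#                                                  logits_dimension=2)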
class _DistributionRegressionHead(_RegressionHead):
"""Creates a _RegressionHead instance from an arbitrary `Distribution`."""
def __init__(self,
make_distribution_fn,
label_dimension,
logits_dimension=None,
label_name=None,
weight_column_name=None,
               enable_centered_bias=False,
head_name=None):
"""`Head` for regression.
Args:
make_distribution_fn: Python `callable` which returns a `tf.Distribution`
instance created using only logits.
label_dimension: Number of regression labels per example. This is the
size of the last dimension of the labels `Tensor` (typically, this has
shape `[batch_size, label_dimension]`).
logits_dimension: Number of logits per example. This is the size of the
last dimension of the logits `Tensor` (typically, this has shape
`[batch_size, logits_dimension]`).
Default value: `label_dimension`.
label_name: Python `str`, name of the key in label `dict`. Can be `None`
if label is a tensor (single headed models).
weight_column_name: Python `str` defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
enable_centered_bias: Python `bool`. If `True`, estimator will learn a
centered bias variable for each class. Rest of the model structure
learns the residual after centered bias.
head_name: Python `str`, name of the head. Predictions, summary and
metrics keys are suffixed by `"/" + head_name` and the default variable
scope is `head_name`.
Raises:
TypeError: if `make_distribution_fn` is not `callable`.
"""
if not callable(make_distribution_fn):
raise TypeError("`make_distribution_fn` must be a callable function.")
self._distributions = {}
self._make_distribution_fn = make_distribution_fn
def static_value(x):
"""Returns the static value of a `Tensor` or `None`."""
return tensor_util.constant_value(ops.convert_to_tensor(x))
def concat_vectors(*args):
"""Concatenates input vectors, statically if possible."""
args_ = [static_value(x) for x in args]
if any(vec is None for vec in args_):
return array_ops.concat(args, axis=0)
return [val for vec in args_ for val in vec]
def loss_fn(labels, logits, weights=None):
"""Returns the loss of using `logits` to predict `labels`."""
d = self.distribution(logits)
labels_batch_shape = labels.shape.with_rank_at_least(1)[:-1]
labels_batch_shape = (
labels_batch_shape.as_list() if labels_batch_shape.is_fully_defined()
else array_ops.shape(labels)[:-1])
labels = array_ops.reshape(
labels,
shape=concat_vectors(labels_batch_shape, d.event_shape_tensor()))
return _compute_weighted_loss(
loss_unweighted=-d.log_prob(labels),
weight=weights)
def link_fn(logits):
"""Returns the inverse link function at `logits`."""
# Note: What the API calls a "link function" is really the inverse-link
# function, i.e., the "mean".
d = self.distribution(logits)
return d.mean()
super(_DistributionRegressionHead, self).__init__(
label_dimension=label_dimension,
loss_fn=loss_fn,
link_fn=link_fn,
logits_dimension=logits_dimension,
label_name=label_name,
weight_column_name=weight_column_name,
enable_centered_bias=enable_centered_bias,
head_name=head_name)
@property
def distributions(self):
"""Returns all distributions created by `DistributionRegressionHead`."""
return self._distributions
def distribution(self, logits, name=None):
"""Retrieves a distribution instance, parameterized by `logits`.
Args:
logits: `float`-like `Tensor` representing the parameters of the
underlying distribution.
name: The Python `str` name to given to this op.
Default value: "distribution".
Returns:
distribution: `tf.Distribution` instance parameterized by `logits`.
"""
with ops.name_scope(name, "distribution", [logits]):
d = self._distributions.get(logits, None)
if d is None:
d = self._make_distribution_fn(logits)
self._distributions[logits] = d
return d
|
unho/pootle
|
pootle/apps/pootle_app/management/commands/update_tmserver.py
|
Python
|
gpl-3.0
| 13,500
| 0.000963
|
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import os
from hashlib import md5
# This must be run before importing Django.
os.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'
from elasticsearch import Elasticsearch, helpers
from translate.storage import factory
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.utils import dateparse
from django.utils.encoding import force_bytes
from pootle.core.utils import dateformat
from pootle_store.models import Unit
from pootle_translationproject.models import TranslationProject
BULK_CHUNK_SIZE = 5000
class BaseParser(object):
def __init__(self, *args, **kwargs):
"""Initialize the parser."""
self.stdout = kwargs.pop('stdout')
self.INDEX_NAME = kwargs.pop('index', None)
def get_units(self):
"""Gets the units to import and its total count."""
raise NotImplementedError
def get_unit_data(self, unit):
"""Return dict with data to import for a single unit."""
raise NotImplementedError
class DBParser(BaseParser):
def __init__(self, *args, **kwargs):
super(DBParser, self).__init__(*args, **kwargs)
self.exclude_disabled_projects = not kwargs.pop('disabled_projects')
self.tp_pk = None
def get_units(self):
"""Gets the units to import and its total count."""
units_qs = (
Unit.objects.exclude(target_f__isnull=True)
.exclude(target_f__exact='')
.filter(store__translation_project__pk=self.tp_pk)
.filter(revision__gt=self.last_indexed_revision))
units_qs = units_qs.select_related(
'change__submitted_by',
'store',
'store__translation_project__project',
'store__translation_project__language')
if self.exclude_disabled_projects:
units_qs = units_qs.exclude(
store__translation_project__project__disabled=True
).exclude(store__obsolete=True)
units_qs = units_qs.values(
'id',
'revision',
'source_f',
'target_f',
'change__submitted_on',
'change__submitted_by__username',
'change__submitted_by__full_name',
'change__submitted_by__email',
'store__translation_project__project__fullname',
'store__pootle_path',
'store__translation_project__language__code'
).order_by()
return units_qs.iterator(), units_qs.count()
def get_unit_data(self, unit):
"""Return dict with data to import for a single unit."""
fullname = (unit['change__submitted_by__full_name'] or
unit['change__submitted_by__username'])
email_md5 = None
if unit['change__submitted_by__email']:
email_md5 = md5(
force_bytes(unit['change__submitted_by__email'])).hexdigest()
iso_submitted_on = unit.get('change__submitted_on', None)
display_submitted_on = None
if iso_submitted_on:
display_submitted_on = dateformat.format(
dateparse.parse_datetime(str(iso_submitted_on))
)
return {
'_index': self.INDEX_NAME,
'_type': unit['store__translation_project__language__code'],
'_id': unit['id'],
'revision': int(unit['revision']),
'project': unit['store__translation_project__project__fullname'],
'path': unit['store__pootle_path'],
'username': unit['change__submitted_by__username'],
'fullname': fullname,
'email_md5': email_md5,
'source': unit['source_f'],
'target': unit['target_f'],
'iso_submitted_on': iso_submitted_on,
'display_submitted_on': display_submitted_on,
}
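    # Added note: the dicts built above are shaped for elasticsearch.helpers.bulk()
    # (imported at the top of this module). '_index', '_type' and '_id' act as
    # routing metadata, while the remaining keys presumably become the indexed
    # document body when the bulk call is issued elsewhere in this command.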
class FileParser(BaseParser):
def __init__(self, *args, **kwargs):
super(FileParser, self).__init__(*args, **kwargs)
self.target_language = kwargs.pop('language', None)
self.project = kwargs.pop('project', None)
self.filenames = kwargs.pop('filenames')
def get_units(self):
"""Gets the units to import and its total count."""
units = []
all_filenames = set()
for filename in self.filenames:
if not os.path.exists(filename):
self.stdout.write("File %s doesn't exist. Skipping it." %
filename)
continue
if os.path.isdir(filename):
for dirpath, dirs_, fnames in os.walk(filename):
if (os.path.basename(dirpath) in
["CVS", ".svn", "_darcs", ".git", ".hg", ".bzr"]):
continue
for f in fnames:
all_filenames.add(os.path.join(dirpath, f))
else:
all_filenames.add(filename)
for filename in all_filenames:
store = factory.getobject(filename)
if not store.gettargetlanguage() and not self.target_language:
raise CommandError("Unable to determine target language for "
"'%s'. Try again specifying a fallback "
"target language with --target-language" %
filename)
self.filename = filename
units.extend([unit for unit in store.units if unit.istranslated()])
return units, len(units)
def get_unit_data(self, unit):
"""Return dict with data to import for a single unit."""
target_language = unit.gettargetlanguage()
if target_language is None:
target_language = self.target_language
return {
'_index': self.INDEX_NAME,
'_type': target_language,
'_id': unit.getid(),
'revision': 0,
'project': self.project,
'path': self.filename,
'username': None,
'fullname': None,
'email_md5': None,
'source': unit.source,
'target': unit.target,
'iso_submitted_on': None,
'display_submitted_on': None,
}
class Command(BaseCommand):
help = "Load Translation Memory with translations"
def add_arguments(self, parser):
parser.add_argument(
'--refresh',
action='store_true',
dest='refresh',
default=False,
help='Process all items, not just the new ones, so '
'existing translations are refreshed'
)
parser.add_argument(
'--rebuild',
action='store_true',
dest='rebuild',
default=False,
help='Drop the entire TM on start and update everything '
'from scratch'
)
parser.add_argument(
'--dry-run',
action='store_true',
dest='dry_run',
default=False,
help='Report the number of translations to index and quit'
)
# Local TM specific options.
local = parser.add_argument_group('Local TM', 'Pootle Local '
'Translation Memory')
local.add_argument(
'--include-disabled-projects',
action='store_true',
dest='disabled_projects',
default=False,
help='Add translations from disabled projects'
)
        # External TM specific options.
external = parser.add_argument_group('External TM', 'Pootle External '
'Translation Memory')
external.add_argument(
nargs='*',
dest='files',
help='Translation memory files',
)
e
|
andreaso/ansible
|
lib/ansible/modules/packaging/os/homebrew_tap.py
|
Python
|
gpl-3.0
| 7,344
| 0.000681
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Daniel Jaouen <[email protected]>
# (c) 2016, Indrajit Raychaudhuri <[email protected]>
#
# Based on homebrew (Andrew Dunham <[email protected]>)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: homebrew_tap
author:
    - "Indrajit Raychaudhuri (@indrajitr)"
- "Daniel Jaouen (@danieljaouen)"
short_description: Tap a Homebrew repository.
description:
- Tap external Homebrew repositories.
version_added: "1.6"
options:
name:
description:
- The GitHub user/organization repository to tap.
required: true
aliases: ['tap']
url:
description:
- The optional git URL of the repository to tap. The URL is not
assumed to be on GitHub, and the protocol doesn't have to be HTTP.
Any location and protocol that git can handle is fine.
- I(name) option may not be a list of multiple taps (but a single
tap instead) when this option is provided.
required: false
version_added: "2.2"
state:
description:
- state of the repository.
choices: [ 'present', 'absent' ]
required: false
default: 'present'
requirements: [ homebrew ]
'''
EXAMPLES = '''
- homebrew_tap:
name: homebrew/dupes
- homebrew_tap:
name: homebrew/dupes
state: absent
- homebrew_tap:
name: homebrew/dupes,homebrew/science
state: present
- homebrew_tap:
name: telemachus/brew
url: 'https://bitbucket.org/telemachus/brew'
'''
import re
def a_valid_tap(tap):
'''Returns True if the tap is valid.'''
regex = re.compile(r'^([\w-]+)/(homebrew-)?([\w-]+)$')
return regex.match(tap)
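# Illustrative matches (not part of the module): 'homebrew/dupes' and
# 'telemachus/homebrew-brew' both satisfy the pattern (the optional 'homebrew-'
# prefix is tolerated), while a bare 'dupes' or 'a/b/c' would not.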
def already_tapped(module, brew_path, tap):
'''Returns True if already tapped.'''
rc, out, err = module.run_command([
brew_path,
'tap',
])
taps = [tap_.strip().lower() for tap_ in out.split('\n') if tap_]
tap_name = re.sub('homebrew-', '', tap.lower())
return tap_name in taps
def add_tap(module, brew_path, tap, url=None):
'''Adds a single tap.'''
failed, changed, msg = False, False, ''
if not a_valid_tap(tap):
failed = True
msg = 'not a valid tap: %s' % tap
elif not already_tapped(module, brew_path, tap):
if module.check_mode:
module.exit_json(changed=True)
rc, out, err = module.run_command([
brew_path,
'tap',
tap,
url,
])
if already_tapped(module, brew_path, tap):
changed = True
msg = 'successfully tapped: %s' % tap
else:
failed = True
msg = 'failed to tap: %s' % tap
else:
msg = 'already tapped: %s' % tap
return (failed, changed, msg)
def add_taps(module, brew_path, taps):
'''Adds one or more taps.'''
failed, unchanged, added, msg = False, 0, 0, ''
for tap in taps:
(failed, changed, msg) = add_tap(module, brew_path, tap)
if failed:
break
if changed:
added += 1
else:
unchanged += 1
if failed:
msg = 'added: %d, unchanged: %d, error: ' + msg
msg = msg % (added, unchanged)
elif added:
changed = True
msg = 'added: %d, unchanged: %d' % (added, unchanged)
else:
msg = 'added: %d, unchanged: %d' % (added, unchanged)
return (failed, changed, msg)
def remove_tap(module, brew_path, tap):
'''Removes a single tap.'''
failed, changed, msg = False, False, ''
if not a_valid_tap(tap):
failed = True
msg = 'not a valid tap: %s' % tap
elif already_tapped(module, brew_path, tap):
if module.check_mode:
module.exit_json(changed=True)
rc, out, err = module.run_command([
brew_path,
'untap',
tap,
])
if not already_tapped(module, brew_path, tap):
changed = True
msg = 'successfully untapped: %s' % tap
else:
failed = True
msg = 'failed to untap: %s' % tap
else:
msg = 'already untapped: %s' % tap
return (failed, changed, msg)
def remove_taps(module, brew_path, taps):
'''Removes one or more taps.'''
failed, unchanged, removed, msg = False, 0, 0, ''
for tap in taps:
(failed, changed, msg) = remove_tap(module, brew_path, tap)
if failed:
break
if changed:
removed += 1
else:
unchanged += 1
if failed:
msg = 'removed: %d, unchanged: %d, error: ' + msg
msg = msg % (removed, unchanged)
elif removed:
changed = True
msg = 'removed: %d, unchanged: %d' % (removed, unchanged)
else:
msg = 'removed: %d, unchanged: %d' % (removed, unchanged)
return (failed, changed, msg)
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(aliases=['tap'], type='list', required=True),
url=dict(default=None, required=False),
state=dict(default='present', choices=['present', 'absent']),
),
supports_check_mode=True,
)
brew_path = module.get_bin_path(
'brew',
required=True,
opt_dirs=['/usr/local/bin']
)
taps = module.params['name']
url = module.params['url']
if module.params['state'] == 'present':
if url is None:
# No tap URL provided explicitly, continue with bulk addition
# of all the taps.
failed, changed, msg = add_taps(module, brew_path, taps)
else:
            # When a tap URL is provided explicitly, we allow adding
# *single* tap only. Validate and proceed to add single tap.
if len(taps) > 1:
                msg = "List of multiple taps may not be provided with 'url' option."
module.fail_json(msg=msg)
else:
failed, changed, msg = add_tap(module, brew_path, taps[0], url)
if failed:
module.fail_json(msg=msg)
else:
module.exit_json(changed=changed, msg=msg)
elif module.params['state'] == 'absent':
failed, changed, msg = remove_taps(module, brew_path, taps)
if failed:
module.fail_json(msg=msg)
else:
module.exit_json(changed=changed, msg=msg)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
jackrzhang/zulip
|
zerver/data_import/import_util.py
|
Python
|
apache-2.0
| 21,272
| 0.003197
|
import random
import requests
import shutil
import logging
import os
import traceback
import ujson
from typing import List, Dict, Any, Optional, Set, Callable, Iterable, Tuple, TypeVar
from django.forms.models import model_to_dict
from zerver.models import Realm, RealmEmoji, Subscription, Recipient, \
Attachment, Stream, Message, UserProfile
from zerver.data_import.sequencer import NEXT_ID
from zerver.lib.actions import STREAM_ASSIGNMENT_COLORS as stream_colors
from zerver.lib.avatar_hash import user_avatar_path_from_ids
from zerver.lib.parallel import run_parallel, JobData
# stubs
ZerverFieldsT = Dict[str, Any]
def build_zerver_realm(realm_id: int, realm_subdomain: str, time: float,
other_product: str) -> List[ZerverFieldsT]:
realm = Realm(id=realm_id, date_created=time,
name=realm_subdomain, string_id=realm_subdomain,
description=("Organization imported from %s!" % (other_product)))
auth_methods = [[flag[0], flag[1]] for flag in realm.authentication_methods]
realm_dict = model_to_dict(realm, exclude='authentication_methods')
realm_dict['authentication_methods'] = auth_methods
    return [realm_dict]
def build_user_profile(avatar_source: str,
date_joined: Any,
delivery_email: str,
email: str,
full_name: str,
id: int,
is_active: bool,
is_realm_admin: bool,
is_guest: bool,
is_mirror_dummy: bool,
realm_id: int,
short_name: str,
timezone: Optional[str]) -> ZerverFieldsT:
pointer = -1
obj = UserProfile(
avatar_source=avatar_source,
date_joined=date_joined,
delivery_email=delivery_email,
email=email,
full_name=full_name,
id=id,
is_active=is_active,
is_realm_admin=is_realm_admin,
is_guest=is_guest,
pointer=pointer,
realm_id=realm_id,
short_name=short_name,
timezone=timezone,
)
dct = model_to_dict(obj)
return dct
def build_avatar(zulip_user_id: int, realm_id: int, email: str, avatar_url: str,
timestamp: Any, avatar_list: List[ZerverFieldsT]) -> None:
avatar = dict(
path=avatar_url, # Save original avatar url here, which is downloaded later
realm_id=realm_id,
content_type=None,
user_profile_id=zulip_user_id,
last_modified=timestamp,
user_profile_email=email,
s3_path="",
size="")
avatar_list.append(avatar)
def make_subscriber_map(zerver_subscription: List[ZerverFieldsT]) -> Dict[int, Set[int]]:
'''
This can be convenient for building up UserMessage
rows.
'''
subscriber_map = dict() # type: Dict[int, Set[int]]
for sub in zerver_subscription:
user_id = sub['user_profile']
recipient_id = sub['recipient']
if recipient_id not in subscriber_map:
subscriber_map[recipient_id] = set()
subscriber_map[recipient_id].add(user_id)
return subscriber_map
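# Illustration (not from the original source): given subscription rows
# {'user_profile': 7, 'recipient': 3} and {'user_profile': 9, 'recipient': 3},
# make_subscriber_map() returns {3: {7, 9}}.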
def build_subscription(recipient_id: int, user_id: int,
subscription_id: int) -> ZerverFieldsT:
subscription = Subscription(
color=random.choice(stream_colors),
id=subscription_id)
subscription_dict = model_to_dict(subscription, exclude=['user_profile', 'recipient_id'])
subscription_dict['user_profile'] = user_id
subscription_dict['recipient'] = recipient_id
return subscription_dict
def build_public_stream_subscriptions(
zerver_userprofile: List[ZerverFieldsT],
zerver_recipient: List[ZerverFieldsT],
zerver_stream: List[ZerverFieldsT]) -> List[ZerverFieldsT]:
'''
This function is only used for Hipchat now, but it may apply to
future conversions. We often don't get full subscriber data in
the Hipchat export, so this function just autosubscribes all
users to every public stream. This returns a list of Subscription
dicts.
'''
subscriptions = [] # type: List[ZerverFieldsT]
public_stream_ids = {
stream['id']
for stream in zerver_stream
if not stream['invite_only']
}
public_stream_recipient_ids = {
recipient['id']
for recipient in zerver_recipient
if recipient['type'] == Recipient.STREAM
and recipient['type_id'] in public_stream_ids
}
user_ids = [
user['id']
for user in zerver_userprofile
]
for recipient_id in public_stream_recipient_ids:
for user_id in user_ids:
subscription = build_subscription(
recipient_id=recipient_id,
user_id=user_id,
subscription_id=NEXT_ID('subscription'),
)
subscriptions.append(subscription)
return subscriptions
def build_private_stream_subscriptions(
get_users: Callable[..., Set[int]],
zerver_recipient: List[ZerverFieldsT],
zerver_stream: List[ZerverFieldsT]) -> List[ZerverFieldsT]:
subscriptions = [] # type: List[ZerverFieldsT]
stream_ids = {
stream['id']
for stream in zerver_stream
if stream['invite_only']
}
recipient_map = {
recipient['id']: recipient['type_id'] # recipient_id -> stream_id
for recipient in zerver_recipient
if recipient['type'] == Recipient.STREAM
and recipient['type_id'] in stream_ids
}
for recipient_id, stream_id in recipient_map.items():
user_ids = get_users(stream_id=stream_id)
for user_id in user_ids:
subscription = build_subscription(
recipient_id=recipient_id,
user_id=user_id,
subscription_id=NEXT_ID('subscription'),
)
subscriptions.append(subscription)
return subscriptions
def build_personal_subscriptions(zerver_recipient: List[ZerverFieldsT]) -> List[ZerverFieldsT]:
subscriptions = [] # type: List[ZerverFieldsT]
personal_recipients = [
recipient
for recipient in zerver_recipient
if recipient['type'] == Recipient.PERSONAL
]
for recipient in personal_recipients:
recipient_id = recipient['id']
user_id = recipient['type_id']
subscription = build_subscription(
recipient_id=recipient_id,
user_id=user_id,
subscription_id=NEXT_ID('subscription'),
)
subscriptions.append(subscription)
return subscriptions
def build_recipient(type_id: int, recipient_id: int, type: int) -> ZerverFieldsT:
recipient = Recipient(
type_id=type_id, # stream id
id=recipient_id,
type=type)
recipient_dict = model_to_dict(recipient)
return recipient_dict
def build_recipients(zerver_userprofile: List[ZerverFieldsT],
zerver_stream: List[ZerverFieldsT]) -> List[ZerverFieldsT]:
'''
As of this writing, we only use this in the HipChat
conversion. The Slack and Gitter conversions do it more
tightly integrated with creating other objects.
'''
recipients = []
for user in zerver_userprofile:
type_id = user['id']
type = Recipient.PERSONAL
recipient = Recipient(
type_id=type_id,
id=NEXT_ID('recipient'),
type=type,
)
recipient_dict = model_to_dict(recipient)
recipients.append(recipient_dict)
for stream in zerver_stream:
type_id = stream['id']
type = Recipient.STREAM
recipient = Recipient(
type_id=type_id,
id=NEXT_ID('recipient'),
type=type,
)
recipient_dict = model_to_dict(recipient)
recipients.append(recipient_dict)
return recipients
def build_realm(zerver_realm: List[ZerverFieldsT], realm_id: int,
domain_name: str) -> ZerverFieldsT:
realm = dict(zerver_client=[{"name": "populate_db", "id": 1},
{"na
|
vedujoshi/tempest
|
tempest/api/network/test_floating_ips.py
|
Python
|
apache-2.0
| 10,882
| 0
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.network import base
from tempest.common.utils import net_utils
from tempest import config
from tempest.lib import decorators
from tempest import test
CONF = config.CONF
class FloatingIPTestJSON(base.BaseNetworkTest):
"""Tests the following operations in the Neutron API:
Create a Floating IP
Update a Floating IP
Delete a Floating IP
List all Floating IPs
Show Floating IP details
Associate a Floating IP with a port and then delete that port
Associate a Floating IP with a port and then with a port on another
router
v2.0 of the Neutron API is assumed. It is also assumed that the following
options are defined in the [network] section of etc/tempest.conf:
public_network_id which is the id for the external network present
"""
@classmethod
def skip_checks(cls):
super(FloatingIPTestJSON, cls).skip_checks()
if not test.is_extension_enabled('router', 'network'):
msg = "router extension not enabled."
raise cls.skipException(msg)
if not CONF.network.public_network_id:
msg = "The public_network_id option must be specified."
raise cls.skipException(msg)
if not CONF.network_feature_enabled.floating_ips:
raise cls.skipException("Floating ips are not available")
@classmethod
def resource_setup(cls):
super(FloatingIPTestJSON, cls).resource_setup()
cls.ext_net_id = CONF.network.public_network_id
# Create network, subnet, router and add interface
cls.network = cls.create_network()
cls.subnet = cls.create_subnet(cls.network, enable_dhcp=False)
cls.router = cls.create_router(external_network_id=cls.ext_net_id)
cls.create_router_interface(cls.router['id'], cls.subnet['id'])
# Create two ports one each for Creation and Updating of floatingIP
for i in range(2):
cls.create_port(cls.network)
@decorators.attr(type='smoke')
@decorators.idempotent_id('62595970-ab1c-4b7f-8fcc-fddfe55e8718')
def test_create_list_show_update_delete_floating_ip(self):
# Creates a floating IP
body = self.floating_ips_client.create_floatingip(
floating_network_id=self.ext_net_id,
port_id=self.ports[0]['id'])
created_floating_ip = body['floatingip']
self.addCleanup(self.floating_ips_client.delete_floatingip,
created_floating_ip['id'])
self.assertIsNotNone(created_floating_ip['id'])
self.assertIsNotNone(created_floating_ip['tenant_id'])
self.assertIsNotNone(created_floating_ip['floating_ip_address'])
self.assertEqual(created_floating_ip['port_id'], self.ports[0]['id'])
self.assertEqual(created_floating_ip['floating_network_id'],
self.ext_net_id)
self.assertIn(created_floating_ip['fixed_ip_address'],
[ip['ip_address'] for ip in self.ports[0]['fixed_ips']])
# Verifies the details of a floating_ip
floating_ip = self.floating_ips_client.show_floatingip(
created_floating_ip['id'])
shown_floating_ip = floating_ip['floatingip']
self.assertEqual(shown_floating_ip['id'], created_floating_ip['id'])
self.assertEqual(shown_floating_ip['floating_network_id'],
self.ext_net_id)
self.assertEqual(shown_floating_ip['tenant_id'],
created_floating_ip['tenant_id'])
self.assertEqual(shown_floating_ip['floating_ip_address'],
created_floating_ip['floating_ip_address'])
self.assertEqual(shown_floating_ip['port_id'], self.ports[0]['id'])
# Verify the floating ip exists in the list of all floating_ips
floating_ips = self.floating_ips_client.list_floatingips()
floatingip_id_list = list()
for f in floating_ips['floatingips']:
floatingip_id_list.append(f['id'])
self.assertIn(created_floating_ip['id'], floatingip_id_list)
# Associate floating IP to the other port
floating_ip = self.floating_ips_client.update_floatingip(
created_floating_ip['id'],
port_id=self.ports[1]['id'])
updated_floating_ip = floating_ip['floatingip']
self.assertEqual(updated_floating_ip['port_id'], self.ports[1]['id'])
self.assertEqual(updated_floating_ip['fixed_ip_address'],
self.ports[1]['fixed_ips'][0]['ip_address'])
self.assertEqual(updated_floating_ip['router_id'], self.router['id'])
# Disassociate floating IP from the port
floating_ip = self.floating_ips_client.update_floatingip(
created_floating_ip['id'],
port_id=None)
updated_floating_ip = floating_ip['floatingip']
self.assertIsNone(updated_floating_ip['port_id'])
self.assertIsNone(updated_floating_ip['fixed_ip_address'])
self.assertIsNone(updated_floating_ip['router_id'])
@decorators.idempotent_id('e1f6bffd-442f-4668-b30e-df13f2705e77')
def test_floating_ip_delete_port(self):
# Create a floating IP
        body = self.floating_ips_client.create_floatingip(
floating_network_id=self.ext_net_id)
created_floating_ip = body['floatingip']
self.addCleanup(self.floating_ips_client.delete_floatingip,
created_floating_ip['id'])
# Create a port
port = self.ports_client.create_port(network_id=self.network['id'])
created_port = port['port']
floating_ip = self.floating_ips_client.update_floatingip(
created_floating_ip['id'],
port_id=created_port['id'])
# Delete port
self.ports_client.delete_port(created_port['id'])
# Verifies the details of the floating_ip
floating_ip = self.floating_ips_client.show_floatingip(
created_floating_ip['id'])
shown_floating_ip = floating_ip['floatingip']
# Confirm the fields are back to None
self.assertEqual(shown_floating_ip['id'], created_floating_ip['id'])
self.assertIsNone(shown_floating_ip['port_id'])
self.assertIsNone(shown_floating_ip['fixed_ip_address'])
self.assertIsNone(shown_floating_ip['router_id'])
@decorators.idempotent_id('1bb2f731-fe5a-4b8c-8409-799ade1bed4d')
def test_floating_ip_update_different_router(self):
# Associate a floating IP to a port on a router
body = self.floating_ips_client.create_floatingip(
floating_network_id=self.ext_net_id,
port_id=self.ports[1]['id'])
created_floating_ip = body['floatingip']
self.addCleanup(self.floating_ips_client.delete_floatingip,
created_floating_ip['id'])
self.assertEqual(created_floating_ip['router_id'], self.router['id'])
network2 = self.create_network()
subnet2 = self.create_subnet(network2)
router2 = self.create_router(external_network_id=self.ext_net_id)
self.create_router_interface(router2['id'], subnet2['id'])
port_other_router = self.create_port(network2)
# Associate floating IP to the other port on another router
floating_ip = self.floating_ips_client.update_floatingip(
created_floating_ip['id'],
port_id=port_other_router['id'])
updated_floating_ip = floating_ip['floatingip']
        self.assertEqual(updated_floating_ip['router_id'], router2['id'])
|
missulmer/Pythonstudy
|
coursera_python_specialization/9_4.py
|
Python
|
cc0-1.0
| 1,246
| 0.005618
|
""" 9.4
Write a program to read through the mbox-short.txt and
figure out who has sent the greatest number of mail messages.
The program looks for 'From ' lines and takes the second word of those lines as the person who sent the mail.
The program creates a Python dictionary that maps the sender's mail address to a count of the number of times they appear in the file.
After the dictionary is produced, the program reads through the dictionary using a maximum loop to find the most prolific committer.
Desired output = [email protected] 5
"""
filename = raw_input("enter file name:")
handle = None
try:
handle = open(filename)
except:
print 'File cannot be opened or read.', filename
exit()
counts = {}
for line in handle:
if line.strip().startswith('From:'):
line = line.strip().lower()
words = line.split()
for word in words:
if '@' in word:
counts[word] = counts.get(word, 0) + 1
handle.close()
# always close the file as soon as possible. Freeing resources asap is a best practice.
email = None
email_count = 0
for word,count in counts.items():
if email is None or count > email_count:
email = word
email_count = count
print email, email_count
|
yubinbai/python_practice
|
priorityqueue.py
|
Python
|
apache-2.0
| 1,718
| 0.005239
|
# use priority queue to implement stack and queue
import heapq
class stack:
data = []
highestPriority = 0
lowestPriority = 0
def push(self, e):
self.highestPriority -= 1 # smaller value means priority is higher
heapq.heappush(self.data, (self.highestPriority, e))
def pop(self):
        if not self.isEmpty():  # was 's.isEmpty()', which referenced a module-level name
self.highestPriority += 1
return heapq.heappop(self.data)[1]
else:
return None
def isEmpty(self):
return self.highestPriority >= self.lowestPriority
class queue:
data = []
highestPriority = 0
lowestPriority = 0
def enqueue(self, e):
self.lowestPriority += 1 # increase the lowest priority (lowering)
heapq.heappush(self.data, (self.lowestPriority, e))
def dequeue(self):
        if self.isEmpty():
return None
else:
            # increase the highest priority (lowering it)
self.highestPriority += 1
return heapq.heappop(self.data)[1]
def isEmpty(self):
if self.highestPriority >= self.lowestPriority:
self.highestPriority = 0
self.lowestPriority = 0
return True
else:
return False
def heapsort(iterable):
h = []
for i in iterable:
heapq.heappu
|
sh(h, i)
return [heapq.heappop(h) for x in range(len(iterable))]
if __name__ == '__main__':
import random
data = [random.randint(1, 100) for x in range(15)]
data.sort()
'''
s = stack()
for i in data:
s.push(i)
while not s.isEmpty():
print(s.pop())
'''
q = queue()
for i in data:
q.enqueue(i)
while not q.isEmpty():
print(q.dequeue())
|
eduNEXT/edunext-ecommerce
|
ecommerce/extensions/partner/migrations/0017_auto_20200305_1448.py
|
Python
|
agpl-3.0
| 887
| 0.003382
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-03-05
|
14:48
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('partner', '0016_auto_20191115_2151'),
]
operations = [
migrations.AlterField(
|
model_name='historicalpartner',
name='name',
field=models.CharField(blank=True, db_index=True, max_length=128, verbose_name='Name'),
),
migrations.AlterField(
model_name='partner',
name='name',
field=models.CharField(blank=True, db_index=True, max_length=128, verbose_name='Name'),
),
migrations.AlterField(
model_name='stockalert',
name='date_created',
field=models.DateTimeField(auto_now_add=True, db_index=True, verbose_name='Date Created'),
),
]
|
willycs40/zoopla_pull
|
db.py
|
Python
|
bsd-2-clause
| 795
| 0.015094
|
import MySQLdb
from parameters import Parameters
import logging
def run_sql(sql, db=None):
db = MySQLdb.connect(host=Parameters.DB_HOST, user=Parameters.DB_USER, passwd=Parameters.DB_PASSWORD, db=Parameters.DB_SCH
|
EMA)
cursor = db.cursor()
logging.debug(sql)
try:
cursor.execute(sql)
db.commit()
data = cursor.fetchall()
db.close()
except Exception as e:
logging.error(e)
db.rollback()
try:
return data[0][0]
except:
return True
def run_sql_multi(sql_list):
for sql in sql_lis
|
t:
run_sql(sql)
def initialise_db():
run_sql_multi(Parameters.SQL_INITIALISE)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
initialise_db()
|
atagar/ReviewBoard
|
reviewboard/notifications/email.py
|
Python
|
mit
| 13,727
| 0.000437
|
import logging
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.mail import EmailMultiAlternatives
from django.core.urlresolvers import reverse
from django.template.loader import render_to_string
from django.utils import timezone
from djblets.siteconfig.models import SiteConfiguration
from reviewboard.accounts.signals import user_registered
from reviewboard.reviews.models import ReviewRequest, Review
from reviewboard.reviews.signals import review_request_published, \
review_published, reply_published
from reviewboard.reviews.views import build_diff_comment_fragments
def review_request_published_cb(sender, user, review_request, changedesc,
**kwargs):
"""
Listens to the ``review_request_published`` signal and sends an
email if this type of notification is enabled (through
``mail_send_review_mail`` site configuration).
"""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get("mail_send_review_mail"):
mail_review_request(user, review_request, changedesc)
def review_published_cb(sender, user, review, **kwargs):
"""
Listens to the ``review_published`` signal and sends an email if
this type of notification is enabled (through
``mail_send_review_mail`` site configuration).
"""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get("mail_send_review_mail"):
mail_review(user, review)
def reply_published_cb(sender, user, reply, **kwargs):
"""
Listens to the ``reply_published`` signal and sends an email if
this type of notification is enabled (through
``mail_send_review_mail`` site configuration).
"""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get("mail_send_review_mail"):
mail_reply(user, reply)
def user_registered_cb(user, **kwargs):
"""
Listens for new user registrations and sends a new user registration
e-mail to administrators, if enabled.
"""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get("mail_send_new_user_mail"):
mail_new_user(user)
def connect_signals():
review_request_published.connect(review_request_published_cb,
sender=ReviewRequest)
review_published.connect(review_published_cb, sender=Review)
reply_published.connect(reply_published_cb, sender=Review)
user_registered.connect(user_registered_cb)
def build_email_address(fullname, email):
if not fullname:
return email
else:
return u'"%s" <%s>' % (fullname, email)
def get_email_address_for_user(u):
return build_email_address(u.get_full_name(), u.email)
def get_email_addresses_for_group(g):
if g.mailing_list:
if g.mailing_list.find(",") == -1:
# The mailing list field has only one e-mail address in it,
# so we can just use that and the group's display name.
return [u'"%s" <%s>' % (g.display_name, g.mailing_list)]
else:
# The mailing list field has multiple e-mail addresses in it.
# We don't know which one should have the group's display name
# attached to it, so just return their cust
|
om list as-is.
return g.mailing_list.split(',')
else:
return [get_email_address_for_user(u)
for u in g.users.filter(is_active=True)]
class SpiffyEmailMessage(EmailMultiAlternatives):
"""An EmailMessage subclass with improved header and message ID support.
This also knows about several headers (standard and variations),
including Sender/X-Sender, In-Reply-To/References, and Reply-To.
The generat
|
ed Message-ID header from the e-mail can be accessed
through the :py:attr:`message_id` attribute after the e-mail is sent.
"""
def __init__(self, subject, text_body, html_body, from_email, sender,
to, cc, in_reply_to, headers={}):
headers = headers.copy()
if sender:
headers['Sender'] = sender
headers['X-Sender'] = sender
if in_reply_to:
headers['In-Reply-To'] = in_reply_to
headers['References'] = in_reply_to
headers['Reply-To'] = from_email
# Mark the mail as 'auto-generated' (according to RFC 3834) to
# hopefully avoid auto replies.
headers['Auto-Submitted'] = 'auto-generated'
headers['From'] = from_email
super(SpiffyEmailMessage, self).__init__(subject, text_body,
settings.DEFAULT_FROM_EMAIL,
to, headers=headers)
self.cc = cc or []
self.message_id = None
self.attach_alternative(html_body, "text/html")
def message(self):
msg = super(SpiffyEmailMessage, self).message()
self.message_id = msg['Message-ID']
return msg
def recipients(self):
"""Returns a list of all recipients of the e-mail. """
return self.to + self.bcc + self.cc
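# Hedged usage sketch (not part of the original module): constructing a
# SpiffyEmailMessage directly with hypothetical addresses; send_review_mail()
# below shows how it is assembled in practice.
#   msg = SpiffyEmailMessage('My review', 'plain text body', '<p>html body</p>',
#                            '"Alice" <alice@example.com>', None,
#                            ['bob@example.com'], [], None)
#   msg.send()
#   print msg.message_id  # available after the e-mail has been sent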
def send_review_mail(user, review_request, subject, in_reply_to,
extra_recipients, text_template_name,
html_template_name, context={}):
"""
Formats and sends an e-mail out with the current domain and review request
being added to the template context. Returns the resulting message ID.
"""
current_site = Site.objects.get_current()
from_email = get_email_address_for_user(user)
recipients = set()
to_field = set()
if from_email:
recipients.add(from_email)
if review_request.submitter.is_active:
recipients.add(get_email_address_for_user(review_request.submitter))
for u in review_request.target_people.filter(is_active=True):
recipients.add(get_email_address_for_user(u))
to_field.add(get_email_address_for_user(u))
for group in review_request.target_groups.all():
for address in get_email_addresses_for_group(group):
recipients.add(address)
for profile in review_request.starred_by.all():
if profile.user.is_active:
recipients.add(get_email_address_for_user(profile.user))
if extra_recipients:
for recipient in extra_recipients:
if recipient.is_active:
recipients.add(get_email_address_for_user(recipient))
siteconfig = current_site.config.get()
domain_method = siteconfig.get("site_domain_method")
context['user'] = user
context['domain'] = current_site.domain
context['domain_method'] = domain_method
context['review_request'] = review_request
if review_request.local_site:
context['local_site_name'] = review_request.local_site.name
text_body = render_to_string(text_template_name, context)
html_body = render_to_string(html_template_name, context)
# Set the cc field only when the to field (i.e People) are mentioned,
# so that to field consists of Reviewers and cc consists of all the
# other members of the group
if to_field:
cc_field = recipients.symmetric_difference(to_field)
else:
to_field = recipients
cc_field = set()
base_url = '%s://%s' % (domain_method, current_site.domain)
headers = {
'X-ReviewBoard-URL': base_url,
'X-ReviewRequest-URL': base_url + review_request.get_absolute_url(),
'X-ReviewGroup': ', '.join(group.name for group in \
review_request.target_groups.all())
}
sender = None
if settings.DEFAULT_FROM_EMAIL:
sender = build_email_address(user.get_full_name(),
settings.DEFAULT_FROM_EMAIL)
if sender == from_email:
# RFC 2822 states that we should only include Sender if the
# two are not equal.
sender = None
message = SpiffyEmailMessage(subject.strip(), text_body, html_body,
from_email, sender, list(to_field),
list(cc_field), in_reply_to, headers)
t
|
pacoqueen/bbinn
|
informes/albaran_multipag.py
|
Python
|
gpl-2.0
| 26,260
| 0.01394
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright (C) 2005-2008 Francisco José Rodríguez Bogado #
# ([email protected]) #
# #
# This file is part of GeotexInn. #
# #
# GeotexInn is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 2 of the License, or #
# (at your option) any later version. #
# #
# GeotexInn is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with GeotexInn; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA #
###############################################################################
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle, Image, XPreformatted, Preformatted, PageBreak, KeepTogether, CondPageBreak
from reportlab.platypus.flowables import Flowable
from reportlab.rl_config import defaultPageSize
from reportlab.lib import colors, enums
from reportlab.lib.units import cm
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
import sys, os#, Image
from factura_multipag import Linea, TablaFija
try:
import pclases, utils
except ImportError:
try:
import sys, os
sys.path.insert(0, os.path.join("..", "framework"))
import pclases, utils
except ImportError:
sys.path.insert(0, ".")
import pclases, utils
try:
from geninformes import give_me_the_name_baby, escribe, rectangulo, el_encogedor_de_fuentes_de_doraemon, agregarFila
except ImportError:
import sys
sys.path.append(os.path.join("..", "informes"))
from geninformes import give_me_the_name_baby, escribe, rectangulo, el_encogedor_de_fuentes_de_doraemon, agregarFila
from tempfile import gettempdir
PAGE_HEIGHT = defaultPageSize[1]; PAGE_WIDTH = defaultPageSize[0]
estilos = getSampleStyleSheet()
class lastPageNumberFlowable(Flowable):
def __init__(self, xoffset = 0, yoffset = 0):
Flowable.__init__(self)
self._xoffset = xoffset
self._yoffset = yoffset
def draw(self):
canvas = self.canv
if not canvas.hasForm("lastPageNumber"):
canvas.beginForm("lastPageNumber")
canvas.setFont("Times-Italic", 9)
canvas.drawString(self._xoffset,
self._yoffset,
str(canvas.getPageNumber()))
canvas.endForm()
class LineaHorizontal(Flowable):
def __init__(self, ancho = None, grosor = 1):
self.line_thickness = grosor
if ancho:
self._width = ancho
else:
self._width = None
def wrap(self, availWidth, availHeight):
if self._width is None:
self._width = availWidth
self._height = self.line_thickness
return self._width, self._height
def draw(self):
self.canv.setLineWidth(self.line_thickness)
orig = (PAGE_WIDTH / 2) - (self._width / 2)
        orig -= 2.75 * cm # Margin applied when my draw is called from the build.
self.canv.line(orig,
.5 * self.line_thickness,
self._width + orig,
.5 * self.line_thickness)
def sanitize(d):
"""
Sustituye todo lo que no sea texto:
- Si es float, por su representación con puntos y una coma con 2 decimales.
- Si es entero, por su equivalente en texto.
"""
def tostr(v):
if isinstance(v, float):
v = utils.float2str(v)
elif isinstance(v, int):
v = utils.int2str(v)
elif isinstance(v, (list, tuple)):
            # Recursion, divine treasure...
v = sanitize(v)
return v
if isinstance(d, dict):
for k in d.keys():
d[k] = tostr(d[k])
elif isinstance(d, (list, tuple)):
res = []
for i in d:
res.append(tostr(i))
d = type(d)(res)
return d
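# Hedged usage sketch (not in the original file): sanitize() recurses through
# dicts, lists and tuples, e.g. sanitize({'total': 1234.5, 'unidades': [1, 2]})
# returns the same structure with every number already formatted as text by
# utils.float2str / utils.int2str.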
def cabecera_y_cliente(canvas,
doc,
datos_cliente,
datos_de_la_empresa,
datos_albaran):
"""
Escribe el texto "ALBARÁN" y los datos del cliente.
Los datos del cliente vienen en un diccionario con:
código (de cliente), cif, razón social, dirección, población, provincia.
"""
fuente = "Helvetica"
tamanno = 24
canvas.saveState()
canvas.setFont(fuente, tamanno)
canvas.drawString(PAGE_WIDTH
- canvas.stringWidth(escribe("ALBARÁN"),fuente,tamanno)
- 1.0*cm,
PAGE_HEIGHT - 1.5*cm,
escribe("ALBARÁN"))
canvas.restoreState()
tamanno = 12
altura_linea = 16
xCliente = (PAGE_WIDTH - 1*cm) / 2.5
linea = (PAGE_HEIGHT-1.5*cm) - 0.10*cm - 2*cm
rectangulo(canvas,
(xCliente - 0.2*cm, PAGE_HEIGHT - 1.5*cm + altura_linea - 2*cm),
(PAGE_WIDTH - 1*cm,
(PAGE_HEIGHT- 1.5*cm + altura_linea)
-(altura_linea*5 + 0.5*cm) - 2*cm)
)
canvas.drawString(xCliente,
linea,
escribe(
"Cód. cliente: %s C.I.F.: %s" % (
datos_cliente['código'],
datos_cliente['cif'])))
linea -= altura_linea
el_encogedor_de_fuentes_de_doraemon(canvas,
fuente,
tamanno,
xCliente,
PAGE_WIDTH - 1*cm,
linea,
escribe(datos_cliente['razón social']))
linea -= altura_linea
el_encogedor_de_fuentes_de_doraemon(canvas,
fuente,
tamanno,
xCliente,
PAGE_WIDTH - 1*cm,
linea,
escribe(datos_cliente['dirección']))
linea -= altura_linea
canvas.drawString(xCliente,
linea,
escribe(datos_cliente['población']))
linea -= altura_linea
canvas.drawString(xCliente,
linea,
escribe(datos_cliente['provincia']))
    # Company data
dibujar_datos_empresa(
|
canvas, datos_de_la_empresa)
    # Invoice header
build_tabla_cabecera(canvas, datos_albaran, 22.5*cm)
def dibujar_datos_empresa(canvas, datos_de_la_empresa):
"""
Dibuja los datos de la empresa en la parte superior.
"""
logo, empresa = build_logo_y_
|
empresa_por_separado(datos_de_la_empresa)
logo.drawOn(canvas, 1*cm, PAGE_HEIGHT - 2.8 * cm)
fuente = "Helvetica"
tamanno = 16
for i in range(len(empresa)):
if i == 1:
            tamanno -= 4 # First line (company name) is slightly larger.
linea = PAGE_HEIGHT - 1.5 * cm
el_encogedor_de_fuentes_de_doraemon(canvas,
|
FredrikAugust/server-status
|
statuspagescript.py
|
Python
|
mit
| 2,247
| 0.015576
|
#!/usr/bin/python
"""A script to get inform
|
ation from MrTijn's new server so I (MrMadsenMalmo) can display it
on a website
"""
__author__ = "MrMadsenMalmo - Fredrik A. Madsen-Malmo & Tijndagamer"
import os
import time
import re
import datetime
def main():
dataList = []
dataList.append(os.popen("uptime").read() + "\n")
dataList.append(os.popen("cpuload").read())
dataList.append("CPU temp: \n" + os.popen("getcputemp").read())
dataList.append("Network stats:\n" + os.popen("getdown").read())
dataList.appen
|
d(os.popen("getup").read() + "\n")
dataList.append("Memory stats:\n" + os.popen("free -h").read() + "\n")
dataList.append("Drive stats: TOTAL | USED | FREE\n" + os.popen("df -h | grep '/dev/' && df -h --total | grep 'total'").read())
data = str(dataList)
data = data.replace('[', '')
data = data.replace(']', '')
data = data.replace(',', '')
# os.popen("echo " + data + " > /var/www/html/status")
# for data in dataList:
# print data
with open("/var/www/html/status.txt", "w+") as file:
for data in dataList:
file.write(data)
write()
def get_time():
return re.search("\d\d\:\d\d\:\d\d", os.popen("uptime").read(), re.VERBOSE).group(0) + "\n"
def get_load():
return re.search("CPU\sload\:\s([\d\.]+\%)", os.popen("cpuload").read(), re.VERBOSE).group(1) + "\n"
def get_temp():
return re.search("[\w]+\:\s([\d\.]+)", os.popen("getcputemp").read(), re.VERBOSE).group(1) + "C\n"
def write(time=""):
for type in [["temp", get_temp], ["load", get_load]]:
with open(type[0] + time + ".txt", "a+") as file:
if time == 'day':
                file.write(str(datetime.datetime.today()).split(' ')[0] + type[1]())
else:
file.write(get_time() + type[1]())
prev_time = get_time()
prev_day = datetime.datetime.today().day
while True:
# minute change
if get_time()[3:5] != prev_time[3:5]:
write("min")
# hour change
if get_time()[0:2] != prev_time[0:2]:
write("hr")
# day change
if datetime.datetime.today().day != prev_day:
write("day")
main()
prev_time = get_time()
prev_day = datetime.datetime.today().day
time.sleep(5)
|
osasto-kuikka/KGE
|
tools/sqf_validator.py
|
Python
|
gpl-2.0
| 8,231
| 0.008018
|
#!/usr/bin/env python3
import fnmatch
import os
import re
import ntpath
import sys
import argparse
def validKeyWordAfterCode(content, index):
keyWords = ["for", "do", "count", "each", "forEach", "else", "and", "not", "isEqualTo", "in", "call", "spawn", "execVM", "catch"];
for word in keyWords:
try:
subWord = content.index(word, index, index+len(word))
return True;
except:
pass
return False
def check_sqf_syntax(filepath):
bad_count_file = 0
def pushClosing(t):
closingStack.append(closing.expr)
closing << Literal( closingFor[t[0]] )
def popClosing():
closing << closingStack.pop()
with open(filepath, 'r', encoding='utf-8', errors='ignore') as file:
content = file.read()
# Store all brackets we find in this file, so we can validate everything on the end
brackets_list = []
# To check if we are in a comment block
isInCommentBlock = False
checkIfInComment = False
# Used in case we are in a line comment (//)
ignoreTillEndOfLine = False
# Used in case we are in a comment block (/* */). This is true if we detect a * inside a comment block.
# If the next character is a /, it means we end our comment block.
checkIfNextIsClosingBlock = False
# We ignore everything inside a string
isInString = False
# Used to store the starting type of a string, so we can match that to the end of a string
inStringType = '';
lastIsCurlyBrace = False
checkForSemiColumn = False
# Extra information so we know what line we find errors at
lineNumber = 0
indexOfCharacter = 0
# Parse all characters in the content of this file to search for potential errors
for c in content:
if (lastIsCurlyBrace):
lastIsCurlyBrace = False
checkForSemiColumn = True
if c == '\n': # Keeping track of our line numbers
lineNumber += 1 # so we can print accurate line number information when we detect a possible error
if (isInString): # while we are in a string, we can ignore everything else, except the end of the string
if (c == inStringType):
isInString = False
# if we are not in a comment block, we will check if we are at the start of one or count the () {} and []
elif (isInCommentBlock ==
|
False):
# This means we have encountered a /, so we are now checking if this is an inline comment or a comment block
if (checkIfInComment):
checkIfInComment = False
if c == '*': # if the next character after / is a *, we are at the start of a comment block
|
isInCommentBlock = True
                    elif (c == '/'): # Otherwise, we will check if we are in a line comment
                        ignoreTillEndOfLine = True # a line comment is a / followed by another / (//). We won't care about anything that comes after it
if (isInCommentBlock == False):
if (ignoreTillEndOfLine): # we are in a line comment, just continue going through the characters until we find an end of line
if (c == '\n'):
ignoreTillEndOfLine = False
else: # validate brackets
if (c == '"' or c == "'"):
isInString = True
inStringType = c
elif (c == '#'):
ignoreTillEndOfLine = True
elif (c == '/'):
checkIfInComment = True
elif (c == '('):
brackets_list.append('(')
elif (c == ')'):
if (brackets_list[-1] in ['{', '[']):
print("ERROR: Possible missing round bracket ')' detected at {0} Line number: {1}".format(filepath,lineNumber))
bad_count_file += 1
brackets_list.append(')')
elif (c == '['):
brackets_list.append('[')
elif (c == ']'):
if (brackets_list[-1] in ['{', '(']):
print("ERROR: Possible missing square bracket ']' detected at {0} Line number: {1}".format(filepath,lineNumber))
bad_count_file += 1
brackets_list.append(']')
elif (c == '{'):
brackets_list.append('{')
elif (c == '}'):
lastIsCurlyBrace = True
if (brackets_list[-1] in ['(', '[']):
print("ERROR: Possible missing curly brace '}}' detected at {0} Line number: {1}".format(filepath,lineNumber))
bad_count_file += 1
brackets_list.append('}')
elif (c== '\t'):
print("ERROR: Tab detected at {0} Line number: {1}".format(filepath,lineNumber))
bad_count_file += 1
if (checkForSemiColumn):
if (c not in [' ', '\t', '\n', '/']): # keep reading until no white space or comments
checkForSemiColumn = False
if (c not in [']', ')', '}', ';', ',', '&', '!', '|', '='] and not validKeyWordAfterCode(content, indexOfCharacter)): # , 'f', 'd', 'c', 'e', 'a', 'n', 'i']):
print("ERROR: Possible missing semi-column ';' detected at {0} Line number: {1}".format(filepath,lineNumber))
bad_count_file += 1
else: # Look for the end of our comment block
if (c == '*'):
checkIfNextIsClosingBlock = True;
elif (checkIfNextIsClosingBlock):
if (c == '/'):
isInCommentBlock = False
elif (c != '*'):
checkIfNextIsClosingBlock = False
indexOfCharacter += 1
if brackets_list.count('[') != brackets_list.count(']'):
print("ERROR: A possible missing square bracket [ or ] in file {0} [ = {1} ] = {2}".format(filepath,brackets_list.count('['),brackets_list.count(']')))
bad_count_file += 1
if brackets_list.count('(') != brackets_list.count(')'):
print("ERROR: A possible missing round bracket ( or ) in file {0} ( = {1} ) = {2}".format(filepath,brackets_list.count('('),brackets_list.count(')')))
bad_count_file += 1
if brackets_list.count('{') != brackets_list.count('}'):
print("ERROR: A possible missing curly brace {{ or }} in file {0} {{ = {1} }} = {2}".format(filepath,brackets_list.count('{'),brackets_list.count('}')))
bad_count_file += 1
return bad_count_file
def main():
print("Validating SQF")
sqf_list = []
bad_count = 0
parser = argparse.ArgumentParser()
parser.add_argument('-m','--module', help='only search specified module addon folder', required=False, default="")
args = parser.parse_args()
# Allow running from root directory as well as from inside the tools directory
rootDir = "../addons"
if (os.path.exists("addons")):
rootDir = "addons"
for root, dirnames, filenames in os.walk(rootDir + '/' + args.module):
for filename in fnmatch.filter(filenames, '*.sqf'):
sqf_list.append(os.path.join(root, filename))
for filename in sqf_list:
bad_count = bad_count + check_sqf_syntax(filename)
print("------\nChecked {0} files\nErrors detected: {1}".format(len(sqf_list), bad_count))
if (bad_count == 0):
print("SQF validation PASSED")
else:
print("SQF validation FAILED")
    return bad_count
|
QISKit/qiskit-sdk-py
|
qiskit/result/exceptions.py
|
Python
|
apache-2.0
| 1,290
| 0
|
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Exceptions raised when there is an error in the Result
"""
from qiskit.exceptions import QiskitError
class ResultError(QiskitError):
"""Exceptions raised due to errors in result output.
It may be better for the Qiskit API to raise this exception.
Args:
error (dict): This is the error reco
|
rd as it comes back from
the API. The format is like::
error = {'status': 403,
'message': 'Your credits are not enough.',
'code': 'MAX_CREDITS_EXCEEDED'}
"""
def __init__(self, error):
super().__init__(error['message'])
self.status = error['status']
self.code = error['code']
|
def __str__(self):
return '{}: {}'.format(self.code, self.message)
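# Hedged usage sketch (not part of the original module): raising ResultError
# from an error record in the documented format.
#   error = {'status': 403,
#            'message': 'Your credits are not enough.',
#            'code': 'MAX_CREDITS_EXCEEDED'}
#   raise ResultError(error)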
|
mushtaqak/edx-platform
|
common/djangoapps/third_party_auth/tasks.py
|
Python
|
agpl-3.0
| 6,642
| 0.003917
|
# -*- coding: utf-8 -*-
"""
Code to manage fetching and storing the metadata of IdPs.
"""
#pylint: disable=no-member
from celery.task import task # pylint: disable=import-error,no-name-in-module
import datetime
import dateutil.parser
import logging
from lxml import etree
import requests
from onelogin.saml2.utils import OneLogin_Saml2_Utils
from third_party_auth.models import SAMLConfiguration, SAMLProviderConfig, SAMLProviderData
log = logging.getLogger(__name__)
SAML_XML_NS = 'urn:oasis:names:tc:SAML:2.0:metadata' # The SAML Metadata XML namespace
class MetadataParseError(Exception):
""" An error occurred while parsing the SAML metadata from an IdP """
pass
@task(name='third_party_auth.fetch_saml_metadata')
def fetch_saml_metadata():
"""
Fetch and store/update the metadata of all IdPs
This task should be run on a daily basis.
It's OK to run this whether or not SAML is enabled.
Return value:
tuple(num_changed, num_failed, num_total)
num_changed: Number of providers that are either new or whose metadata has changed
num_failed: Number of providers that could not be updated
num_total: Total number of providers whose metadata was fetched
"""
if not SAMLConfiguration.is_enabled():
return (0, 0, 0) # Nothing to do until SAML is enabled.
num_changed, num_failed = 0, 0
# First make a list of all the metadata XML URLs:
url_map = {}
for idp_slug in SAMLProviderConfig.key_values('idp_slug', flat=True):
config = SAMLProviderConfig.current(idp_slug)
if not config.enabled:
continue
url = config.metadata_source
if url not in url_map:
url_map[url] = []
if config.entity_id not in url_map[url]:
url_map[url].append(config.entity_id)
# Now fetch the metadata:
for url, entity_ids in url_map.items():
try:
log.info("Fetching %s", url)
if not url.lower().startswith('https'):
log.warning("This SAML metadata URL is not secure! It should use HTTPS. (%s)", url)
response = requests.get(url, verify=True) # May raise HTTPError or SSLError or ConnectionError
response.raise_for_status() # May raise an HTTPError
try:
parser = etree.XMLParser(remove_comments=True)
xml = etree.fromstring(response.text, parser)
except etree.XMLSyntaxError:
raise
# TODO: Can use OneLogin_Saml2_Utils to validate signed XML if anyone is using that
for entity_id in entity_ids:
log.info(u"Processing IdP with entityID %s", entity_id)
public_key, sso_url, expires_at = _parse_metadata_xml(xml, entity_id)
changed = _update_data(entity_id, public_key, sso_url, expires_at)
if changed:
log.info(u"→ Created new record for SAMLProviderData")
num_changed += 1
else:
log.info(u"→ Updated existing SAMLProviderData. Nothing has changed.")
|
except Exception as err: # pylint: disable=broad-except
log.exception(err.message)
num_failed += 1
return (num_changed, num_failed, len(url_map))
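# Hedged usage sketch (not part of the original module): calling the task body
# directly and unpacking its documented return value.
#   num_changed, num_failed, num_total = fetch_saml_metadata()
#   log.info("SAML metadata refresh: %d changed, %d failed, %d fetched",
#            num_changed, num_failed, num_total)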
def _parse_metadata_xml(xml, entity_id):
"""
Given an XML do
|
cument containing SAML 2.0 metadata, parse it and return a tuple of
(public_key, sso_url, expires_at) for the specified entityID.
Raises MetadataParseError if anything is wrong.
"""
if xml.tag == etree.QName(SAML_XML_NS, 'EntityDescriptor'):
entity_desc = xml
else:
if xml.tag != etree.QName(SAML_XML_NS, 'EntitiesDescriptor'):
raise MetadataParseError("Expected root element to be <EntitiesDescriptor>, not {}".format(xml.tag))
entity_desc = xml.find(
".//{}[@entityID='{}']".format(etree.QName(SAML_XML_NS, 'EntityDescriptor'), entity_id)
)
if not entity_desc:
raise MetadataParseError("Can't find EntityDescriptor for entityID {}".format(entity_id))
expires_at = None
if "validUntil" in xml.attrib:
expires_at = dateutil.parser.parse(xml.attrib["validUntil"])
if "cacheDuration" in xml.attrib:
cache_expires = OneLogin_Saml2_Utils.parse_duration(xml.attrib["cacheDuration"])
if expires_at is None or cache_expires < expires_at:
expires_at = cache_expires
sso_desc = entity_desc.find(etree.QName(SAML_XML_NS, "IDPSSODescriptor"))
if not sso_desc:
raise MetadataParseError("IDPSSODescriptor missing")
if 'urn:oasis:names:tc:SAML:2.0:protocol' not in sso_desc.get("protocolSupportEnumeration"):
raise MetadataParseError("This IdP does not support SAML 2.0")
# Now we just need to get the public_key and sso_url
public_key = sso_desc.findtext("./{}//{}".format(
etree.QName(SAML_XML_NS, "KeyDescriptor"), "{http://www.w3.org/2000/09/xmldsig#}X509Certificate"
))
if not public_key:
raise MetadataParseError("Public Key missing. Expected an <X509Certificate>")
public_key = public_key.replace(" ", "")
binding_elements = sso_desc.iterfind("./{}".format(etree.QName(SAML_XML_NS, "SingleSignOnService")))
sso_bindings = {element.get('Binding'): element.get('Location') for element in binding_elements}
try:
# The only binding supported by python-saml and python-social-auth is HTTP-Redirect:
sso_url = sso_bindings['urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect']
except KeyError:
raise MetadataParseError("Unable to find SSO URL with HTTP-Redirect binding.")
return public_key, sso_url, expires_at
def _update_data(entity_id, public_key, sso_url, expires_at):
"""
Update/Create the SAMLProviderData for the given entity ID.
Return value:
False if nothing has changed and existing data's "fetched at" timestamp is just updated.
True if a new record was created. (Either this is a new provider or something changed.)
"""
data_obj = SAMLProviderData.current(entity_id)
fetched_at = datetime.datetime.now()
if data_obj and (data_obj.public_key == public_key and data_obj.sso_url == sso_url):
data_obj.expires_at = expires_at
data_obj.fetched_at = fetched_at
data_obj.save()
return False
else:
SAMLProviderData.objects.create(
entity_id=entity_id,
fetched_at=fetched_at,
expires_at=expires_at,
sso_url=sso_url,
public_key=public_key,
)
return True
|
intel/ipmctl
|
BaseTools/Source/Python/AutoGen/GenPcdDb.py
|
Python
|
bsd-3-clause
| 75,819
| 0.007755
|
## @file
# Routines for generating Pcd Database
#
# Copyright (c) 2013 - 2016, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
from StringIO import StringIO
from Common.Misc import *
from Common.String import StringToArray
from struct import pack
from ValidCheckingInfoObject import VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER
from ValidCheckingInfoObject import VAR_CHECK_PCD_VARIABLE_TAB
from ValidCheckingInfoObject import VAR_VALID_OBJECT_FACTORY
from Common.VariableAttributes import VariableAttributes
DATABASE_VERSION = 6
gPcdDatabaseAutoGenC = TemplateString("""
//
// External PCD database debug information
//
#if 0
${PHASE}_PCD_DATABASE_INIT g${PHASE}PcdDbInit = {
/* SkuIdTable */
{ ${BEGIN}${SKUID_VALUE}, ${END} },
${BEGIN} { ${INIT_VALUE_UINT64} }, /* ${INIT_CNAME_DECL_UINT64}_${INIT_GUID_DECL_UINT64}[${INIT_NUMSKUS_DECL_UINT64}] */
${END}
${BEGIN} ${VARDEF_VALUE_UINT64}, /* ${VARDEF_CNAME_UINT64}_${VARDEF_GUID_UINT64}_VariableDefault_${VARDEF_SKUID_UINT64} */
${END}
${BEGIN} { ${INIT_VALUE_UINT32} }, /* ${INIT_CNAME_DECL_UINT32}_${INIT_GUID_DECL_UINT32}[${INIT_NUMSKUS_DECL_UINT32}] */
${END}
${BEGIN} ${VARDEF_VALUE_UINT32}, /* ${VARDEF_CNAME_UINT32}_${VARDEF_GUID_UINT32}_VariableDefault_${VARDEF_SKUID_UINT32} */
${END}
/* VPD */
${BEGIN} { ${VPD_HEAD_VALUE} }, /* ${VPD_HEAD_CNAME_DECL}_${VPD_HEAD_GUID_DECL}[${VPD_HEAD_NUMSKUS_DECL}] */
${END}
/* ExMapTable */
{
${BEGIN} { ${EXMAPPING_TABLE_EXTOKEN}, ${EXMAPPING_TABLE_LOCAL_TOKEN}, ${EXMAPPING_TABLE_GUID_INDEX} },
${END}
},
/* LocalTokenNumberTable */
{
${BEGIN} offsetof(${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.${TOKEN_CNAME}_${TOKEN_GUID}${VARDEF_HEADER}) | ${TOKEN_TYPE},
${END}
},
/* GuidTable */
{
${BEGIN} ${GUID_STRUCTURE},
${END}
},
${BEGIN} { ${STRING_HEAD_VALUE} }, /* ${STRING_HEAD_CNAME_DECL}_${STRING_HEAD_GUID_DECL}[${STRING_HEAD_NUMSKUS_DECL}] */
${END}
${BEGIN} /* ${VARIABLE_HEAD_CNAME_DECL}_${VARIABLE_HEAD_GUID_DECL}_Variable_Header[${VARIABLE_HEAD_NUMSKUS_DECL}] */
{
${VARIABLE_HEAD_VALUE}
},
${END}
/* SkuHead */
{
${BEGIN} offsetof (${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.${TOKEN_CNAME}_${TOKEN_GUID}${VARDEF_HEADER}) | ${TOKEN_TYPE}, /* */
offsetof (${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.SkuHead) /* */
${END}
},
/* StringTable */
${BEGIN} ${STRING_TABLE_VALUE}, /* ${STRING_TABLE_CNAME}_${STRING_TABLE_GUID} */
${END}
/* SizeTable */
{
${BEGIN} ${SIZE_TABLE_MAXIMUM_LENGTH}, ${SIZE_TABLE_CURRENT_LENGTH}, /* ${SIZE_TABLE_CNAME}_${SIZE_TABLE_GUID} */
${END}
},
${BEGIN} { ${INIT_VALUE_UINT16} }, /* ${INIT_CNAME_DECL_UINT16}_${INIT_GUID_DECL_UINT16}[${INIT_NUMSKUS_DECL_UINT16}] */
${END}
${BEGIN} ${VARDEF_VALUE_UINT16}, /* ${VARDEF_CNAME_UINT16}_${VARDEF_GUID_UINT16}_VariableDefault_${VARDEF_SKUID_UINT16} */
${END}
${BEGIN} { ${INIT_VALUE_UINT8} }, /* ${INIT_CNAME_DECL_UINT8}_${INIT_GUID_DECL_UINT8}[${INIT_NUMSKUS_DECL_UINT8}] */
${END}
${BEGIN} ${VARDEF_VALUE_UINT8}, /* ${VARDEF_CNAME_UINT8}_${VARDEF_GUID_UINT8}_VariableDefault_${VARDEF_SKUID_UINT8} */
${END}
${BEGIN} { ${INIT_VALUE_BOOLEAN} }, /* ${INIT_CNAME_DECL_BOOLEAN}_${INIT_GUID_DECL_BOOLEAN}[${INIT_NUMSKUS_DECL_BOOLEAN}] */
${END}
${BEGIN} ${VARDEF_VALUE_BOOLEAN}, /* ${VARDEF_CNAME_BOOLEAN}_${VARDEF_GUID_BOOLEAN}_VariableDefault_${VARDEF_SKUID_BOOLEAN} */
${END}
${SYSTEM_SKU_ID_VALUE}
};
#endif
""")
## Mapping between PCD driver type and EFI phase
gPcdPhaseMap = {
"PEI_PCD_DRIVER" : "PEI",
"DXE_PCD_DRIVER" : "DXE"
}
gPcdDatabaseAutoGenH = TemplateString("""
#define PCD_${PHASE}_SERVICE_DRIVER_VERSION ${SERVICE_DRIVER_VERSION}
//
// External PCD database debug information
//
#if 0
#define ${PHASE}_GUID_TABLE_SIZE ${GUID_TABLE_SIZE}
#define ${PHASE}_STRING_TABLE_SIZE ${STRING_TABLE_SIZE}
#define ${PHASE}_SKUID_TABLE_SIZE ${SKUID_TABLE_SIZE}
#define ${PHASE}_LOCAL_TOKEN_NUMBER_TABLE_SIZE ${LOCAL_TOKEN_NUMBER_TABLE_SIZE}
#define ${PHASE}_LOCAL_TOKEN_NUMBER ${LOCAL_TOKEN_NUMBER}
#define ${PHASE}_EXMAPPING_TABLE_SIZE ${EXMAPPING_TABLE_SIZE}
#define ${PHASE}_EX_TOKEN_NUMBER ${EX_TOKEN_NUMBER}
#define ${PHASE}_SIZE_TABLE_SIZE ${SIZE_TABLE_SIZE}
#define ${PHASE}_SKU_HEAD_SIZE ${SKU_HEAD_SIZE}
#define ${PHASE}_GUID_TABLE_EMPTY ${GUID_TABLE_EMPTY}
#define ${PHASE}_STRING_TABLE_EMPTY ${STRING_TABLE_EMPTY}
#define ${PHASE}_SKUID_TABLE_EMPTY ${SKUID_TABLE_EMPTY}
#define ${PHASE}_DATABASE_EMPTY ${DATABASE_EMPTY}
#define ${PHASE}_EXMAP_TABLE_EMPTY ${EXMAP_TABLE_EMPTY}
typedef struct {
UINT64 SkuIdTable[${PHASE}_SKUID_TABLE_SIZE];
${BEGIN} UINT64 ${INIT_CNAME_DECL_UINT64}_${INIT_GUID_DECL_UINT64}[${INIT_NUMSKUS_DECL_UINT64}];
${END}
${BEGIN} UINT64 ${VARDEF_CNAME_UINT64}_${VARDEF_GUID_UINT64}_VariableDefault_${VARDEF_SKUID_UINT64};
${END}
${BEGIN} UINT32 ${INIT_CNAME_DECL_UINT32}_${INIT_GUID_DECL_UINT32}[${INIT_NUMSKUS_DECL_UINT32}];
${END}
${BEGIN} UINT32 ${VARDEF_CNAME_UINT32}_${VARDEF_GUID_UINT32}_VariableDefault_${VARDEF_SKUID_UINT32};
${END}
${BEGIN}
|
VPD_HEAD ${VPD_HEAD_CNAME_DECL}_${VPD_HEAD_GUID_DECL}[${VPD_HEAD_NUMSKUS_DECL}];
${END}
DYNAMICEX_MAPPING ExMapTable[${PHASE}_EXMAPPING_TABLE_SIZE];
UINT32 LocalTokenNumberTable[${PHASE}_LOCAL_TOKEN_NUMBER_TABLE_SIZE];
GUID GuidTable[${PHASE}_GUID_TABLE_SIZE];
${BEGIN} STRING_HEAD ${STRING_HEAD_CNAME_DECL}_${STRING_HEAD_GUID_DECL}[${STRING_HEAD_NUMSKUS_DECL}];
${END}
${BEGIN} VARIA
|
BLE_HEAD ${VARIABLE_HEAD_CNAME_DECL}_${VARIABLE_HEAD_GUID_DECL}_Variable_Header[${VARIABLE_HEAD_NUMSKUS_DECL}];
${END}
${BEGIN} SKU_HEAD SkuHead[${PHASE}_SKU_HEAD_SIZE];
${END}
${BEGIN} UINT8 StringTable${STRING_TABLE_INDEX}[${STRING_TABLE_LENGTH}]; /* ${STRING_TABLE_CNAME}_${STRING_TABLE_GUID} */
${END}
SIZE_INFO SizeTable[${PHASE}_SIZE_TABLE_SIZE];
${BEGIN} UINT16 ${INIT_CNAME_DECL_UINT16}_${INIT_GUID_DECL_UINT16}[${INIT_NUMSKUS_DECL_UINT16}];
${END}
${BEGIN} UINT16 ${VARDEF_CNAME_UINT16}_${VARDEF_GUID_UINT16}_VariableDefault_${VARDEF_SKUID_UINT16};
${END}
${BEGIN} UINT8 ${INIT_CNAME_DECL_UINT8}_${INIT_GUID_DECL_UINT8}[${INIT_NUMSKUS_DECL_UINT8}];
${END}
${BEGIN} UINT8 ${VARDEF_CNAME_UINT8}_${VARDEF_GUID_UINT8}_VariableDefault_${VARDEF_SKUID_UINT8};
${END}
${BEGIN} BOOLEAN ${INIT_CNAME_DECL_BOOLEAN}_${INIT_GUID_DECL_BOOLEAN}[${INIT_NUMSKUS_DECL_BOOLEAN}];
${END}
${BEGIN} BOOLEAN ${VARDEF_CNAME_BOOLEAN}_${VARDEF_GUID_BOOLEAN}_VariableDefault_${VARDEF_SKUID_BOOLEAN};
${END}
${SYSTEM_SKU_ID}
} ${PHASE}_PCD_DATABASE_INIT;
typedef struct {
${PCD_DATABASE_UNINIT_EMPTY}
${BEGIN} UINT64 ${UNINIT_CNAME_DECL_UINT64}_${UNINIT_GUID_DECL_UINT64}[${UNINIT_NUMSKUS_DECL_UINT64}];
${END}
${BEGIN} UINT32 ${UNINIT_CNAME_DECL_UINT32}_${UNINIT_GUID_DECL_UINT32}[${UNINIT_NUMSKUS_DECL_UINT32}];
${END}
${BEGIN} UINT16 ${UNINIT_CNAME_DECL_UINT16}_${UNINIT_GUID_DECL_UINT16}[${UNINIT_NUMSKUS_DECL_UINT16}];
${END}
${BEGIN} UINT8 ${UNINIT_CNAME_DECL_UINT8}_${UNINIT_GUID_DECL_UINT8}[${UNINIT_NUMSKUS_DECL_UINT8}];
${END}
${BEGIN} BOOLEAN ${UNINIT_CNAME_DECL_BOOLEAN}_${UNINIT_GUID_DECL_BOOLEAN}[${UNINIT_NUMSKUS_DECL_BOOLEAN}];
${END}
} ${PHASE}_PCD_DATABASE_UNINIT;
typedef struct {
//GUID Signature; // PcdDataBaseGuid
//UINT32 BuildVersion;
//UINT32 Leng
|
jlgoldman/writetogov
|
database/populate_rep_status_from_propublica.py
|
Python
|
bsd-3-clause
| 2,149
| 0.005119
|
import requests
from config import constants
from database import db
from database import db_models
from util import fips
R = db_models.Rep
HOUSE_MEMBERS_LEAVING_OFFICE_URL = 'https://api.propublica.org/congress/v1/114/house/members/leaving.json'
SENATE_MEMBERS_LEAVING_OFFICE_URL = 'https://api.propublica.org/congress/v1/114/senate/members/leaving.json'
PP_STATUS_TO_DB_STATUS = {
'Retiring': R.Status.RETIRING,
'Seeking another office': R.Status.SEEKING_OTHER_OFFICE,
'Left Congress': R.Status.LEFT_CONGRESS,
'Defeated in primary election': R.Status.DEFEATED_IN_PRIMARY,
}
def main():
populate_senators()
populate_reps()
def populate_senators():
response = requests.get(SENATE_MEMBERS_LEAVING_OFFICE_URL, headers={'X-API-Key': constants.PROPUBLICA_API_KEY})
for db_rep in R.query.filter(R.chamber == R.Chamber.SENATE):
for member in response.json()['results'][0]['members']:
if db_rep.state_code == member['state'] and db_rep.last_name == member['last_name']:
db_rep.status = PP_STATUS_TO_DB_STATUS[member['status']]
db_rep.status_note = member['note']
|
break
db.session.commit()
def populate_reps():
response = requests.get(HOUSE_MEMBERS_LEAVING_OFFICE_URL, headers={'X-API-Key
|
': constants.PROPUBLICA_API_KEY})
info_by_district_code = {}
for member in response.json()['results'][0]['members']:
if member['state'] in fips.ONE_DISTRICT_STATE_CODES:
district_code = '%s00' % member['state']
else:
district_code = '%s%02d' % (member['state'], int(member['district']))
info_by_district_code[district_code] = {
'status': member['status'],
'note': member['note'],
}
for db_rep in R.query.filter(R.district_code.in_(info_by_district_code.keys())):
info = info_by_district_code[db_rep.district_code]
db_rep.status = PP_STATUS_TO_DB_STATUS[info['status']]
db_rep.status_note = info['note']
db.session.commit()
if __name__ == '__main__':
from tools import db_utils
with db_utils.request_context():
main()
|
alex-eri/aiohttp-1
|
aiohttp/client_proto.py
|
Python
|
apache-2.0
| 6,070
| 0
|
import asyncio
import asyncio.streams
from .client_exceptions import (ClientOSError, ClientPayloadError,
ClientResponseError, ServerDisconnectedError)
from .http import HttpResponseParser, StreamWriter
from .streams import EMPTY_PAYLOAD, DataQueue
class ResponseHandler(DataQueue, asyncio.streams.FlowControlMixin):
"""Helper class to adapt between Protocol and StreamReader."""
def __init__(self, *, loop=None, **kwargs):
asyncio.streams.FlowControlMixin.__init__(self, loop=loop)
DataQueue.__init__(self, loop=loop)
self.paused = False
self.transport = None
self.writer = None
self._should_close = False
self._message = None
self._payload = None
self._payload_parser = None
self._reading_paused = False
self._timer = None
self._skip_status = ()
self._tail = b''
self._upgraded = False
self._parser = None
@property
def upgraded(self):
return self._upgraded
@property
def should_close(self):
if (self._payload is not None and
not self._payload.is_eof() or self._upgraded):
return True
return (self._should_close or self._upgraded or
self.exception() is not None or
self._payload_parser is not None or
len(self) or self._tail)
def close(self):
transport = self.transport
if transport is not None:
transport.close()
self.transport = None
return transport
def is_connected(self):
return self.transport is not None
def connection_made(self, transport):
self.transport = transport
self.writer = StreamWriter(self, transport, self._loop)
def connection_lost(self, exc):
if self._payload_parser is not None:
try:
self._payload_parser.feed_eof()
except Exception:
pass
try:
self._parser.feed_eof()
except Exception as e:
if self._payload is not None:
self._payload.set_exception(
ClientPayloadError('Response payload is not completed'))
if not self.is_eof():
if isinstance(exc, OSError):
exc = ClientOSError(*exc.args)
if exc is None:
exc = ServerDisconnectedError()
DataQueue.set_exception(self, exc)
self.transport = self.writer = None
self._should_close = True
self._parser = None
|
self._message = None
self._payload = None
self._payload_parser = None
self._reading_paused = False
super().connection_lost(exc)
def eof_received(self):
pass
def pause_reading(self):
if not self._re
|
ading_paused:
try:
self.transport.pause_reading()
except (AttributeError, NotImplementedError, RuntimeError):
pass
self._reading_paused = True
def resume_reading(self):
if self._reading_paused:
try:
self.transport.resume_reading()
except (AttributeError, NotImplementedError, RuntimeError):
pass
self._reading_paused = False
def set_exception(self, exc):
self._should_close = True
super().set_exception(exc)
def set_parser(self, parser, payload):
self._payload = payload
self._payload_parser = parser
if self._tail:
data, self._tail = self._tail, None
self.data_received(data)
def set_response_params(self, *, timer=None,
skip_payload=False,
skip_status_codes=(),
read_until_eof=False):
self._skip_payload = skip_payload
self._skip_status_codes = skip_status_codes
self._read_until_eof = read_until_eof
self._parser = HttpResponseParser(
self, self._loop, timer=timer,
payload_exception=ClientPayloadError,
read_until_eof=read_until_eof)
if self._tail:
data, self._tail = self._tail, b''
self.data_received(data)
def data_received(self, data):
if not data:
return
# custom payload parser
if self._payload_parser is not None:
eof, tail = self._payload_parser.feed_data(data)
if eof:
self._payload = None
self._payload_parser = None
if tail:
self.data_received(tail)
return
else:
if self._upgraded or self._parser is None:
# i.e. websocket connection, websocket parser is not set yet
self._tail += data
else:
# parse http messages
try:
messages, upgraded, tail = self._parser.feed_data(data)
except BaseException as exc:
import traceback
traceback.print_exc()
self._should_close = True
self.set_exception(
ClientResponseError(code=400, message=str(exc)))
self.transport.close()
return
self._upgraded = upgraded
for message, payload in messages:
if message.should_close:
self._should_close = True
self._message = message
self._payload = payload
if (self._skip_payload or
message.code in self._skip_status_codes):
self.feed_data((message, EMPTY_PAYLOAD), 0)
else:
self.feed_data((message, payload), 0)
if upgraded:
self.data_received(tail)
else:
self._tail = tail
|
getupcloud/openshift-nginx-python-2.7
|
app.py
|
Python
|
apache-2.0
| 1,287
| 0.018648
|
#!/usr/bin/env python
import imp
import os
import sys
PYCART_DIR = ''.join(['python-', '.'.join(map(str, sys.version_info[:2]))])
try:
zvirtenv = os.path.join(os.environ['OPENSHIFT_HOMEDIR'], PYCART_DIR,
'virtenv', 'bin', 'activate_this.py')
execfile(zvirtenv, dict(__file__ = zvirtenv) )
except IOError:
pass
def run_gevent_server(app, ip, port=8181):
from gevent.pywsgi import WSGIServer
WSGIServer((ip, port), app).serve_forever()
def run_simple_httpd_server(app, ip, port=8181):
from wsgiref.simple_server
|
import make_server
make_server(ip, port, app).serve_forever()
#
# IMPORTANT: Put any additional includes below this line. If placed above this
# line, it's possible required libraries won't be in your searchable path
#
#
# main():
#
if __name__ == '__main__':
ip = os.environ['OPENSHIFT_PYTHON_IP']
port = 8181
zapp = imp.load_source('application', 'wsgi/application')
# Use gevent if we have it, otherwise run a simple httpd server.
print 'Starting WSGIServer on
|
%s:%d ... ' % (ip, port)
try:
run_gevent_server(zapp.application, ip, port)
except:
print 'gevent probably not installed - using default simple server ...'
run_simple_httpd_server(zapp.application, ip, port)
|