repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1
value | license stringclasses 15
values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
MyRobotLab/pyrobotlab | home/brotherbrown831/old_py/HomeAutomation.py | Python | apache-2.0 | 1,156 | 0.016436 | from org.myrobotlab.net import BareBonesBrowserLaunch
def outsideLights(value):
if value = 1
BareBonesBrowserLaunch.openURL("http://ip_address:3480/data_request?id=action&output_format=xml&DeviceNum=6&serviceId=urn:upnp-org:serviceId:SwitchPower1&action=SetTarget&newTargetValue=01")
else
BareBonesBrowser... | 6&serviceId=urn:upnp-org:serviceId:SwitchPower1&action=SetTarget&newTargetValue=0")
def garageLights(value):
if value = 1
BareBonesBrowserLaunch.o | penURL("http://ip_address:3480/data_request?id=action&output_format=xml&DeviceNum=6&serviceId=urn:upnp-org:serviceId:SwitchPower1&action=SetTarget&newTargetValue=01")
else
BareBonesBrowserLaunch.openURL("http://ip_address:3480/data_request?id=action&output_format=xml&DeviceNum=6&serviceId=urn:upnp-org:serviceId:... |
ziirish/burp-ui | burpui/misc/auth/ldap.py | Python | bsd-3-clause | 11,183 | 0.001162 | # -*- coding: utf8 -*-
from flask_login import AnonymousUserMixin
from .interface import BUIhandler, BUIuser, BUIloader
from ...utils import __
import ssl
try:
from ldap3 import (
Server,
Connection,
Tls,
ALL,
RESTARTABLE,
AUTO_BIND_TLS_BEFORE_BIND,
AUTO_BI... | or handler.priority
)
except:
pass
for (opt, key) in mapping.items():
setattr(self, opt, conf.safe_get(key, "force_string", section=self.section))
if self.validate and self.validate.lower() in ["none", "optional", "required"]:
self.validate ... | sl, "CERT_{}".format(self.validate.upper()))
else:
self.validate = None
self.version = ssl.OP_NO_SSLv3
self.users = []
self.tls = None
self.ssl = False
self.auto_bind = AUTO_BIND_NONE
if self.encryption == "ssl":
self.ssl = True
eli... |
ARMmbed/greentea | src/htrun/host_tests_plugins/host_test_plugins.py | Python | apache-2.0 | 12,644 | 0.00174 | #
# Copyright (c) 2021 Arm Limited and Contributors. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
"""Base class for plugins."""
import os
import sys
import platform
from os import access, F_OK
from sys import stdout
from time import sleep
from subprocess import call
from mbed_lstools.main import creat... | dout."""
stdout.write(char)
stdout.flush()
return True
def check_mount_point_ready(
| self,
destination_disk,
init_delay=0.2,
loop_delay=0.25,
target_id=None,
timeout=60,
):
"""Wait until destination_disk is ready and can be accessed.
Args:
destination_disk: Mount point (disk) which will be checked for readiness.
init_d... |
googleapis/python-compute | google/cloud/compute_v1/services/region_commitments/transports/__init__.py | Python | apache-2.0 | 1,127 | 0.000887 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or... | ense.
#
from collections import OrderedDict
from typing import Dict, Type
from .base | import RegionCommitmentsTransport
from .rest import RegionCommitmentsRestTransport
from .rest import RegionCommitmentsRestInterceptor
# Compile a registry of transports.
_transport_registry = OrderedDict() # type: Dict[str, Type[RegionCommitmentsTransport]]
_transport_registry["rest"] = RegionCommitmentsRestTranspor... |
karlind/ewu-v4 | products/migrations/0004_auto_20160319_1100.py | Python | gpl-3.0 | 566 | 0.001767 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-19 03:00
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('products', '0003_auto_20160319_1021'),
... | d=models.DateTimeField(default=datetime.datetime(2016, 3, 19, 3, 0, 38, 97125, tz | info=utc)),
),
]
|
ajbouh/tfi | src/tfi/parse/iterators.py | Python | mit | 7,857 | 0.000255 | # -*- coding: utf-8 -*-
"""
sphinx.ext.napoleon.iterators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A collection of helpful iterators.
:copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import collections
class peek_iter(object):
"""An iterator... | one, modifier=lambda x: x)"""
if 'modifier' in kwargs:
self.modifier = kwargs['modifier']
elif len(args) > 2:
self.modifier = args[2]
args = args[:2]
else:
self.modifier = lambda x: x
if not callable(self.modifier):
raise TypeEr... | t__(*args)
def _fillcache(self, n):
# type: (int) -> None
"""Cache `n` modified items. If `n` is 0 or None, 1 item is cached.
Each item returned by the iterator is passed through the
`modify_iter.modified` function before being cached.
"""
if not n:
n =... |
Phyks/libbmc | libbmc/tests/test_fetcher.py | Python | mit | 483 | 0 | import unittest
from libbmc.fetcher import *
class TestFetcher(unittest.Test | Case):
def test_download(self):
dl, contenttype = download('http://arxiv.org/pdf/1312.4006.pdf')
self.assertIn(contenttype, ['pdf', 'djvu'])
self.assertNotEqual(dl, '')
def test_download_invalid_type(self):
self.assertEqual(download('http://phyks.me/'), (None, None))
def te... | rtEqual(download('a'), (None, None))
|
mitsuhiko/sentry | tests/sentry/models/test_file.py | Python | bsd-3-clause | 1,674 | 0 | from __future__ import absolute_import
from django.core.files.base import ContentFile
from sentry.models import File, FileBlob
f | rom sentry.testutils import TestCase
class FileBlobTest(TestCase):
def test_from_file(self):
fileobj = ContentFile("foo bar")
my_file1 = FileBlob.from_file(fileobj)
assert my_file1.path
my_file2 = FileBlob.from_file(fileobj)
# deep check
assert my_file1.id == my_... | == my_file2.path
class FileTest(TestCase):
def test_file_handling(self):
fileobj = ContentFile("foo bar")
file1 = File.objects.create(
name='baz.js',
type='default',
size=7,
)
results = file1.putfile(fileobj, 3)
assert len(results) == 3
... |
rrooij/youtube-dl | youtube_dl/extractor/asiancrush.py | Python | unlicense | 3,951 | 0.001012 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from .kaltura import KalturaIE
from ..utils import (
extract_attributes,
remove_end,
)
class AsianCrushIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?asiancrush\.com/video/(?:[^/]+/)?0+(?P<id>\d+)v\... | L = r'https?://(?:www\.)?asiancrush\.com/series/0+(?P<id>\d+)s\b'
_TEST = {
'url': 'https://www.asiancrush.com/series/012481s/scholar-walks-night/',
'info_dict': {
'id': '12481',
'title': 'Scholar Who Walks the Night',
| 'description': 'md5:7addd7c5132a09fd4741152d96cce886',
},
'playlist_count': 20,
}
def _real_extract(self, url):
playlist_id = self._match_id(url)
webpage = self._download_webpage(url, playlist_id)
entries = []
for mobj in re.finditer(
... |
ldong/vim_youcompleteme | cpp/ycm/.ycm_extra_conf.py | Python | gpl-3.0 | 6,515 | 0.02287 | # This file is NOT licensed under the GPLv3, which is the license for the rest
# of YouCompleteMe.
#
# Here's the license text for this file:
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either... | lag )
for path_flag in path_flags:
if flag == path_flag:
make_next_absolute = True
break
if flag.startswith( path_flag ):
path = flag[ len( path_flag ): ]
new_flag = path_flag + os.path.join( working_directory, path )
break
if new_flag:
new_flags.appe... | new_flags
def IsHeaderFile( filename ):
extension = os.path.splitext( filename )[ 1 ]
return extension in [ '.h', '.hxx', '.hpp', '.hh' ]
def GetCompilationInfoForFile( filename ):
# The compilation_commands.json file generated by CMake does not have entries
# for header files. So we do our best by asking ... |
dtrip/weevely3 | tests/test_file_grep.py | Python | gpl-3.0 | 6,721 | 0.013837 | from testfixtures import log_capture
from tests.base_test import BaseTest
from tests import config
from core.sessions import SessionURL
from core import modules
import utils
from core import messages
import subprocess
import os
import tempfile
import random
def setUpModule():
subprocess.check_output("""
BASE_FOLDE... | module_file_grep.failed_retrieve_info,
log_captured.records[-1].msg)
# wrong regex generate None and warning print
self.assertEqual(self.run_argv([ '\'', 'tring4' ])[0], None)
self.assertEqual(messages.module_file_grep.failed_retrieve_info,
log_captured... | v([ '-vector', 'grep_sh', 'bogus', 'tring4' ])[0], None)
self.assertEqual(messages.module_file_grep.failed_retrieve_info,
log_captured.records[-1].msg)
# wrong regex generate None and warning print
self.assertEqual(self.run_argv([ '-vector', 'grep_sh', '\'', 'tring4' ])... |
anovak10/plots | DDTmethod/CutCounter.py | Python | mit | 2,167 | 0.031841 | #
import os
import math
from array import array
import optparse
import ROOT
from ROOT import *
import scipy
import Plotting_Header
from Plotting_Header import *
def cutcount(varname):
VAR = [varname, 50, 0, 100 ]
YT = "events / "+str((VAR[3]-VAR[2])/VAR[1])+" GeV"
#YT = "events"
XT = varname+" (GeV)"
H = "Type 1 ... | /andrzejnovak/March/SE.root", treename, Data, VAR[0], Cut, "(1.0)")
quickplot("/home/storage/andrzejnovak/March/SM.root", treename, Data, VAR[0], Cut, "(1.0)")
d = Data.GetEntries()
W = TH1F("W", "", VAR[1], VAR[2], VAR[3])
W.SetLineColor(kGreen-6)
W.SetLineWidth(2)
quickplot("/home/storage/andrzejnovak/March/W... | or w in ["100To200", "200To400", "400To600", "600To800", "800To1200", "1200To2500", "2500ToInf"]:
quickplot("/home/storage/andrzejnovak/March/WJetsToLNu_HT-"+w+".root", treename, W, VAR[0], Cut, "("+lumi+"*weight)")
w = W.GetEntries()
QCD = TH1F("QCD", "", VAR[1], VAR[2], VAR[3])
QCD.SetLineColor(kYellow)
QCD.Se... |
tobiagru/ML | src/lib_IO.py | Python | gpl-3.0 | 3,590 | 0.032033 | import numpy as np
import pandas as pd
import h5py
import sys
import traceback
import logging
logging.basicConfig(stream=sys.stdout,level=logging.DEBUG)
# import/export functions --------------------------------------------------------------------
def load_Y(fname, usecols = [1], asNpArray = False):
if asNpArra... | red, index = X_test.index, columns = ['y'])
| f = open(fname, 'w+')
data.to_csv(f, header=["Id","Prediction"])
f.close()
elif Ids is not 0:
if Y_pred.shape[0] != Ids.shape[0]:
print("error Ids- dimension of y matrix does not match number of expected predictions")
print('y: {0} - expected: {1}'.format(Y_pr... |
johnmgregoire/PythonCompositionPlots | quaternary_binary_lines.py | Python | bsd-3-clause | 2,643 | 0.020431 | import matplotlib.cm as cm
import numpy
import pylab
import operator, copy, os
#os.chdir('C:/Users/Gregoire/Documents/PythonCode/ternaryplot')
from myquaternaryutility import QuaternaryPlot
class binarylines:
def __init__(self, ax, insetax, ellabels=['A', 'B', 'C', 'D'], offset=0.02, numcomppts=21, view_azim=-1... | ls, offset=offset)
comppairs=[]
a=numpy.linspace(0, 1, 21)
count=-1
for i in range(4):
for j in range(i+1, 4):
| count+=1
b=numpy.zeros((numcomppts, 4), dtype='float64')
b[:, i]=a
b[:, j]=1.-a
comppairs+=[(c1, c2) for c1, c2 in zip(b[:-1], b[1:])]
for (c1, c2) in comppairs:
self.stpq.line(c1, c2, fmt='-', c=self.stpq.rgb_comp([(c1+c2)/2.])[0], **... |
citrix/netscaler-ansible-modules | ansible-collections/adc/plugins/modules/citrix_adc_appfw_settings.py | Python | gpl-3.0 | 12,977 | 0.002543 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Citrix Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# ... | or learning-enabled security checks. The application firewall drops any connections
this limit from the list of connections used by the learning engine.
- "Minimum value = C(1)"
- "Maxim | um value = C(1000)"
type: str
sessionlifetime:
description:
- >-
Maximum amount of time (in seconds) that the application firewall allows a user session to remain
regardless of user activity. After this time, the user session is terminated. Before continu... |
JayTeeGeezy/pypunters | pypunters/html_utils.py | Python | mit | 5,608 | 0.018545 | import re
def get_attributes(parent, selector, attribute):
"""Get a list of attribute values for child elements of parent matching the given CSS selector"""
return [child.get(attribute) for child in parent.cssselect(selector)]
def get_attribute(parent, selector, attribute, index=0):
"""Get the attribute value f... | egex match | group with group_index for the text content of the child element at the specified zero-based index; otherwise, return the regex match group with group_index for the text content of the first matching child element.
"""
groups = get_child_match_groups(parent, selector, pattern, child_index)
if groups is not None an... |
zerotk/terraformer | zerotk/fifo.py | Python | mit | 1,072 | 0.00653 | from __future__ import unicode_literals
from collections import OrderedDict
class FIFO(OrderedDict):
"""
This is a First in, First out cache, so, when the maximum size is reached, the first item added
is removed.
"""
def __init__(self, maxsize):
"""
:param int maxsize:
... | (self, key, value):
"""
Sets an item in the cache. Pops items as needed so that the max size is never passed.
:param object key:
Key to be set
:param ob | ject value:
Corresponding value to be set for the given key
"""
l = len(self)
# Note, we must pop items before adding the new one to the cache so that
# the size does not exceed the maximum at any time.
while l >= self._maxsize:
l -= 1
# Pop t... |
CloudHeads/lambda_utils | lambda_utils/response_handlers/__init__.py | Python | mit | 305 | 0 | import logging
class BaseResponseHandler:
def on_execution(self, event):
logging.debug(event)
return event
def on_exception(self, ex):
logging.exception(str(ex) | )
raise
def on_response(self, response):
logging.debug(response)
re | turn response
|
joshua-cogliati-inl/raven | tests/framework/ensembleModelTests/EM_A_dummy.py | Python | apache-2.0 | 681 | 0.002937 | # Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the Li | cense at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable | law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def run(self, Input):
self.sumTemp... |
OrhanOdabasi/PixPack | pixpack/process.py | Python | mit | 3,262 | 0.003372 | #!/usr/bin/env python3
# process.py
# This script consists of all core functions.
# Author: Orhan Odabasi (0rh.odabasi[at]gmail.com)
import locale
import csv
import os
from PIL import Image
import re
from collections import Counter
def scanDir(path):
# scan the path and collect media data for copy process
wh... | f.close()
with open(report_dest_v, "w") as f:
w = csv.writer(f, delimiter="\t")
w.writerows(video_datas)
f.close()
def listphotos(path):
# Listing all files in target directory
pho | tos_dataset = []
videos_dataset = []
for root, dirs, files in os.walk(path):
for name in files:
p_data_list = []
v_data_list = []
# filename name [0]
file_name = name
# file path [1]
file_path = os.path.join(root, file_name)
... |
wuga214/FullyConnectedDeepNeuralNetwork | ANN/src/utils/tools.py | Python | mit | 135 | 0.037037 | '''
Created on Feb 6, 2016
@author: Wuga
'''
import numpy as np
| def add_ones(A):
| return np.hstack(( np.ones((A.shape[0],1)), A )) |
SPP1665DataAnalysisCourse/elephant | elephant/surrogates.py | Python | bsd-3-clause | 15,562 | 0.000771 | #--------------------------------------------------------------------------
# NAME : spiketrains_utils.py:
# DESCRIPTION : routines for generating and binning time series, extracting
# information and generating surrogates
# AUTHOR : Emiliano Torre
# CREATED : September 12, 2012
#----... | ke_dithering(x, dither, n=1, decimals=None, edges='['):
"""
Generates surrogates of a spike train by spike dithering.
The surrogates are obtained by uniformly dithering times around the
| original position. The dithering is performed independently for each
surrogate.
The surrogates retain the t_start and t_stop of the original spike train.
Spikes moved beyond this range are lost or moved to the range's ends,
depending on the parameter edge.
Parameters
----------
x : Spi... |
kaedroho/wagtail | wagtail/admin/tests/pages/test_workflow_history.py | Python | bsd-3-clause | 2,629 | 0.004184 | from django.contrib.auth.models import Permission
from django.test import TestCase
from django.urls import reverse
from wagtail.core.models import Page
from wagtail.tests.utils import WagtailTestUtils
class TestWorkflowHistoryDetail(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
... | ()
self.login(self.user)
self.christmas_event = Page.objects.get(url_path='/home/events/christmas/')
self.christmas_event.save_revision()
workflow = self.christmas_event.get_workflow()
self.workflow_state = workflow.start(self.christmas_event, self.user)
def test_get_index... | nt.get(
reverse('wagtailadmin_pages:workflow_history', args=[self.christmas_event.id])
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, reverse('wagtailadmin_pages:edit', args=[self.christmas_event.id]))
self.assertContains(response, reverse('wagta... |
svox1/e2openplugin-OpenWebif | plugin/controllers/models/info.py | Python | gpl-2.0 | 22,829 | 0.030492 | # -*- coding: utf-8 -*-
##############################################################################
# 2011 E2OpenPlugins #
# #
# This file is open source software; you can redistribute... | :
chipset = "STi7109 @266MHz"
elif model in ("adb2850", "adb2849", "dsi87"):
chipset = "STi7111 @450MHz"
elif model in ("sagemcom88", "esi88"):
chipset = "STi7105 @450MHz"
elif model.startswith("spark"):
| if model == "spark7162":
chipset = "STi7162 @540MHz"
else:
chipset = "STi7111 @450MHz"
if fileExists("/proc/stb/info/chipset"):
f = open("/proc/stb/info/chipset",'r')
chipset = f.readline().strip()
f.close()
info['chipset'] = chipset
memFree = 0
for line in open("/proc/meminfo",'r'):
parts = l... |
StefanoFenu/colorizer | app.py | Python | gpl-3.0 | 2,268 | 0.007937 | from flask import Flask, request, render_template, redirect, url_for, jsonify, send_from_directory
from celery import Celery
from colorize import process_image
app = Flask(__name__)
# Celery configuration
app.config['CELERY_BROKER_URL'] = 'redis://localhost:6379/0'
app.config['CELERY_RESULT_BACKEND'] = 'redis://local... | e('/', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
return redirect(url_for('index'))
@app.route('/color', methods=['POST'])
def color():
image = request.json
task = run_colorizer.s(image).apply_async()
return jsonify({}), 202, {'Lo... | <path:path>')
def style(path):
return send_from_directory('style', path)
@app.route('/status/<task_id>')
def taskstatus(task_id):
task = run_colorizer.AsyncResult(task_id)
if task.state == 'PENDING':
response = {
'state': task.state,
'current': 0,
... |
fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractAbysslibraryWordpressCom.py | Python | bsd-3-clause | 739 | 0.028417 | def extractAbysslibraryWordpressCom(item):
'''
Parser for 'abysslibrary.wordpress.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('Doomsday Carnival', 'Doomsday Carnival', ... | ', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, | chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
kinow-io/kinow-python-sdk | test/test_blog_category_response.py | Python | apache-2.0 | 813 | 0.00246 | # coding: utf-8
"""
Server API
Reference for Server API (REST/Json)
OpenAPI spec version: 2.0.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
| """
from __future__ import absolute_import
import os
import sys
import unittest
import kinow_client
from kinow_client.rest import ApiException
from kinow_client.models.blog_category_response import BlogCategoryResponse
class TestBlogCategoryResponse(unittest.TestCase):
""" BlogCategoryResponse unit test stub | s """
def setUp(self):
pass
def tearDown(self):
pass
def testBlogCategoryResponse(self):
"""
Test BlogCategoryResponse
"""
model = kinow_client.models.blog_category_response.BlogCategoryResponse()
if __name__ == '__main__':
unittest.main()
|
dvl/cookiecutter-django-clean-template | {{ cookiecutter.repo_name }}/{{ cookiecutter.repo_name }}/urls.py | Python | mit | 413 | 0 | from django.conf import settings
from django.contrib import admin
f | rom django.views.generic import TemplateView
from django.urls import include, path
urlpatterns = [
path('', TemplateV | iew.as_view(template_name='base.html'), name='index'),
path('admin/', admin.site.urls),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
path('__debug__/', include(debug_toolbar.urls)),
]
|
datapythonista/pandas | pandas/tests/window/test_dtypes.py | Python | bsd-3-clause | 5,077 | 0.001379 | import numpy as np
import pytest
from pandas import (
DataFrame,
Series,
)
import pandas._testing as tm
from pandas.core.base import DataError
# gh-12373 : rolling functions error on float32 data
# make sure rolling functions works for different dtypes
#
# further note that we are only checking rolling for fu... |
None,
),
("var", np.arange(5), [np.nan, 0.5, 0.5, 0.5, 0.5], True, None),
("var", np.arange(10, 0, -2), [np.nan, 2, 2, 2, 2], True, None),
("var", [0, 1, 2, np.nan, 4], [np.nan, 0.5, 0.5, np.nan, np.nan], False, None),
("median", np.arange(5), [np.nan, 0.5, 1.5, 2.5,... | n",
[0, 1, 2, np.nan, 4],
[np.nan, 0.5, 1.5, np.nan, np.nan],
False,
None,
),
],
)
def test_series_dtypes(method, data, expected_data, coerce_int, dtypes, min_periods):
s = Series(data, dtype=get_dtype(dtypes, coerce_int=coerce_int))
if dtypes in ("m8[... |
graphql-python/graphene | examples/complex_example.py | Python | mit | 1,461 | 0 | import graphene
class GeoInput(graphene.InputObjectType):
lat = graphene.Float(required=True)
lng = graphene.Float(required=True)
@property
def latlng(self):
return f"({self.lat},{self.lng})"
class Address(gra | phene.ObjectType):
latlng = graphene.String()
class Query(graphene.ObjectType):
address = graphene.Field(Address, geo=GeoInput(required=True))
def resolve_address(root, info, geo):
return Address(latlng=geo.latlng)
class CreateAddress(graphene.Mutation):
class Arguments:
geo = GeoIn... |
class Mutation(graphene.ObjectType):
create_address = CreateAddress.Field()
schema = graphene.Schema(query=Query, mutation=Mutation)
query = """
query something{
address(geo: {lat:32.2, lng:12}) {
latlng
}
}
"""
mutation = """
mutation addAddress{
createAddress(geo: {lat:3... |
Scalr/scalr-ctl | scalrctl/commands/farm_role_gv.py | Python | apache-2.0 | 314 | 0.006369 | __author__ = 'Dmitriy Korsakov'
__doc__ = 'Manage global variables for farm roles'
from scalrctl import commands
class UpdateFarmRoleGlobalVariable(commands.Action):
prompt_for = ["roleId", "globalVariableName"]
class | DeleteFarmRoleGlobalVariable(commands | .Action):
delete_target = 'globalVariableName' |
Southpaw-TACTIC/TACTIC | src/tactic/ui/examples/__init__.py | Python | epl-1.0 | 823 | 0.001215 | ###########################################################
#
# Copyright (c) 2005-2009, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology | , and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
from ui_playground_panel_wdg import *
from font_palettes_example_wdg import *
from panning_scroll_example_wdg import *
from menu_examples_wdg import *
from event_examples_wdg import *
from misc_examples_wdg import ... | efficient_table_example_wdg import *
from dev_sandbox_01_wdg import *
from dev_sandbox_02_wdg import *
from dev_sandbox_03_wdg import *
|
benschmaus/catapult | telemetry/telemetry/internal/util/ts_proxy_server.py | Python | bsd-3-clause | 4,604 | 0.008036 | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Start and stop tsproxy."""
import logging
import os
import re
import subprocess
import sys
from telemetry.core import util
from telemetry.internal.util ... | andwidth_kbps, timeout)
def StopServer(self):
"""Stop TsProxy Server."""
if not self._is_running:
logging.debug('Attempting to stop TsProxy server that is not running.')
return
if self._proc:
self._proc.terminate()
self._proc.wait()
err = self._proc.stderr.read()
self._p | roc = None
self._port = None
self._is_running = False
return err
def __enter__(self):
"""Add support for with-statement."""
self.StartServer()
return self
def __exit__(self, unused_exc_type, unused_exc_val, unused_exc_tb):
"""Add support for with-statement."""
self.StopServer()
|
buildinspace/peru | peru.py | Python | mit | 411 | 0 | #! /usr/bin/env python3
# This script is for running peru directly from the repo, mainly fo | r
# development. This isn't what gets installed when you install peru. That would
# be a script generated by setup.py, which calls peru.main.main().
import os
import sys
repo_root = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, repo_roo | t)
import peru.main # noqa: E402
sys.exit(peru.main.main())
|
andremilke/utility | portscan.py | Python | gpl-3.0 | 344 | 0 | import socket
import sys
if (len(sys.argv) < 2):
print("How to use portscan")
print(sys.argv[0], "10.1.1.1")
else:
for Port in range(1, 65535):
mysocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
| i | f (mysocket.connect_ex((sys.argv[1], Port)) == 0):
print("Port ", Port, "[OPENED]")
|
anthonyalmarza/ngen | ngen/tests/test_models.py | Python | mit | 18,264 | 0.000493 | from __future__ import unicode_literals, print_function, absolute_import
import unittest
try:
import mock
except ImportError:
from unittest import mock
from ngen.exceptions import ValidationError
from ngen.models import (
BaseOptions,
BooleanField,
CharField,
Field,
FieldError,
FieldO... | options = ModelOptions()
self.assertFalse(options.abstract)
de | f test_init_with_meta_cls(self):
class Meta:
abstract = True
options = ModelOptions(Meta)
self.assertTrue(options.abstract)
def test_init_with_meta_cls_unknown_options(self):
class Meta:
unknown = 'value'
self.assertRaises(ImproperlyConfigured, Mode... |
Deepak345/al-go-rithms | math/LCM/Python/LCM.py | Python | mit | 469 | 0.025586 | #Program to find the LCM of tw | o numbers
#Function to find GCD
def gcd(num1, num2):
if | num1 == num2:
return num1
if num1 > num2:
return gcd(num1-num2, num2)
return gcd(num1, num2-num1)
#Function to find LCM
def lcm(num1, num2):
return (num1*num2) // gcd(num1, num2)
#Driver function for testing above
def test():
num1, num2 = 12, 4
print('LCM of {} and {} is ... |
dimagi/commcare-hq | corehq/apps/data_dictionary/migrations/0006_caseproperty_group.py | Python | bsd-3-clause | 363 | 0 | from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('data_dictionary', '0005_casetype_fully_generated'),
]
| operations = [
migrations.AddField(
model_name='caseproperty',
name='group',
field=models. | TextField(default='', blank=True),
),
]
|
d1m0/browser_bench | compare.py | Python | mit | 1,909 | 0.014667 | #! /usr/bin/env python
from SimpleHTTPServer | import SimpleHTTPRequestHandler
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from threading i | mport Thread
import os
import json
import subprocess
import sys
import argparse
from benchmark import runOneBenchmark, availableBenchmarks, runBenchmarkSet
from pickle import dump
from pick_safedispatch import pickSDBrowser
from common import error, debug
def compareBrowsers(browsers, benchmark, nruns, port):
result... |
sergey-raevskiy/dcs | src/smart/reed.py | Python | bsd-2-clause | 450 | 0.006667 | import dev, random, time
class reed(dev.dev):
def __init__(self, sendto, name):
self.sendto = sendto
self.name = name
def _loop(self):
open = 1 if random.uniform(0, 100) > 95 else 0
self.send(self.sendto, open)
time.sleep(2)
def main(argv):
src = int... | argv[1])
sendto = int(argv[2])
name = argv[3]
reed(sendto, name).run(src, uplevel)
| |
thiagopena/djangoSIGE | djangosige/apps/estoque/views/movimento.py | Python | mit | 12,100 | 0.001903 | # -*- coding: utf-8 -*-
from django.urls import reverse_lazy
from django.shortcuts import redirect
from itertools import chain
from datetime import datetime
from decimal import Decimal
from djangosige.apps.base.custom_views import CustomDetailView, CustomCreateView, CustomListView
from djangosige.apps.estoque.forms... | sso."
def view_context(self, context):
context['title_complete'] = 'ADICIONAR TRANSFERÊNCIA EM ESTOQUE'
context['return_url'] = reverse_lazy(
'estoque:listatransferenciasestoqueview')
return context
class MovimentoEstoqueBaseListView(CustomListView):
permission_codename = ... | self).get_context_data(**kwargs)
return self.view_context(context)
class MovimentoEstoqueListView(MovimentoEstoqueBaseListView):
template_name = 'estoque/movimento/movimento_estoque_list.html'
context_object_name = 'all_movimentos'
success_url = reverse_lazy('estoque:listamovimentoestoqueview')... |
chrisortman/CIS-121 | k0765042/Midterm/Main.py | Python | mit | 5,343 | 0.003556 | """
Bryant Conquest
This is a Game
"""
from death import Finish
from death import lives
from scene import Scene
import lock
import random
import time
#Weapons
fist = 0
metal_pipe = 0
class Engine(object):
def __init__(self, scene_map):
self.scene_map = scene_map
def play(self):
current_s... | N for an answer"
return "the_beginning"
print ("Which Tunnel do you pick the left or the right")
choice = raw_input(">")
if choice == "left":
return 'left_t | unnel'
if choice == "right":
return 'right_tunnel'
class LeftTunnel(Scene):
def enter(self):
global lives
print "You find a chest and wonder what's inside"
print "The Lock looks to be some mastermind type game"
right = lock.passcode()
print right
... |
hellohaptik/chatbot_ner | external_api/api.py | Python | gpl-3.0 | 12,022 | 0.003244 | from __future__ import absolute_import
import json
import random
from django.http import HttpResponse
from datastore.datastore import DataStore
from datastore.exceptions import (DataStoreSettingsImproperlyConfiguredException, EngineNotImplementedException,
EngineConnectionException, I... | esponse : HttpResponse with appropriate status and error message.
"""
response = {"success": False, "error": "", "result": []}
try:
external_api_data = json.loads(request.POST.get(EXTERNAL_API_DATA))
entity_name = external_api_data.get(ENTITY_NAME)
entity_data = external_api_data.get... | a.get(LANGUAGE_SCRIPT)
datastore_obj = DataStore()
datastore_obj.update_entity_data(entity_name=entity_name,
entity_data=entity_data,
language_script=language_script)
response['success'] = True
except (DataSto... |
ducu/twitter-most-followed | main.py | Python | mit | 3,521 | 0.027833 | """
Twitter Most Followed
Finding out top most followed accounts by a particular
group of Twitter users such as the Hacker News community.
For this exercise we consider @newsyc20 as our *source*,
and @newsyc20 followers as the HNers, our *target group*.
You can easily run the exercise for a different target
group by ... | iends_count'],
user_data['name'],
user_data['screen_name'], user_data['screen_name'])
i += 1
def main():
"""
Starting from a source (e.g. @newsyc20),
consider the target group as the source's followers, and
find out top most followed accounts by the target group.
"""
# Step 1: Identify the source
print... | s
print "\nStep 2: %s" % datetime.now()
followers = load_followers(source_id) # target group
# Step 3: Load friends of target group members
print "\nStep 3: %s" % datetime.now()
for follower_id in followers:
load_friends(user_id=follower_id)
# Step 4: Aggregate friends into top most followed
print "\nStep 4... |
mfwarren/FreeCoding | 2015/05/fc_2015_05_04.py | Python | mit | 412 | 0.004854 | #!/usr/bin/env python3
# imports go here
import sched
import time
#
# Fre | e Coding session for 2015-05-04
# Written by Matt Warren
#
scheduler = sched.scheduler(time.time, time.sleep)
def print_time():
    """Print the current Unix timestamp and report success.

    Returns:
        True, unconditionally (scheduler callback convention).
    """
    now = time.time()
    print(now)
    return True
scheduler.enter(3, 1, print_time)
scheduler.enter(5, 1, print_time)
print(scheduler.queue)
schedul | er.run() # blocking until all scheduled things finish
print("done")
|
openpli-arm/enigma2-arm | lib/python/Tools/Trashcan.py | Python | gpl-2.0 | 4,571 | 0.03916 | import time
import os
try:
import enigma
from Components.config import config
except:
print "Cannot import enigma"
from Directories import resolveFilename, SCOPE_HDD
def getTrashFolder():
	"""Return the hard-disk trash folder path with all symlinks resolved."""
	trashPath = os.path.join(resolveFilename(SCOPE_HDD), ".Trash")
	return os.path.realpath(trashPath)
def cre... | to stat %s:"% name, e
# Remove empty directories if possible
for name in dirs:
try:
os.rmdir(os.path.join(root, name))
| except:
pass
candidates.sort()
# Now we have a list of ctime, candidates, size. Sorted by ctime (=deletion time)
print "[Trashcan] Bytes to remove:", bytesToRemove
print "[Trashcan] Size now:", size
for st_ctime, fn, st_size in candidates:
if bytesToRemove < 0:
break
enigma.eBackgroundFileEr... |
dedelost/py-demoList | python基础教程/第六章抽象/抽象.py | Python | mit | 215 | 0.013953 | # -*- coding:utf-8 -*-
def fib(num):
'fibs function'
fibs = [0,1]
| for x in range(num):
fibs.append(fibs[-2]+fibs[-1])
print fibs
print callable(fib)
fib(15)
print fib.__doc__
print he | lp(fib) |
resmo/ansible | lib/ansible/modules/network/slxos/slxos_facts.py | Python | gpl-3.0 | 13,825 | 0.000796 | #!/usr/bin/python
#
# (c) 2018 Extreme Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any late... | self.facts['memtotal_mb'] = int(round(int(self.parse_memtotal(data)) / 1024, 0))
self.facts[ | 'memfree_mb'] = int(round(int(self.parse_memfree(data)) / 1024, 0))
def parse_memtotal(self, data):
match = re.search(r'Total\s*Memory: (\d+)\s', data, re.M)
if match:
return match.group(1)
def parse_memfree(self, data):
match = re.search(r'Total Free: (\d+)\s', data, re.M)... |
vmendez/DIRAC | DataManagementSystem/private/FTS3/FTS3Placement.py | Python | gpl-3.0 | 5,870 | 0.028109 | from DIRAC import S_ERROR, S_OK, gLogger
from DIRAC.DataManagementSystem.private.FTSAbstractPlacement import FTSAbstractPlacement, FTSRoute
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getFTS3Servers
from DIRAC.ResourceStatusSystem.Client.ResourceStatus import ResourceStatus
import random
class FTS... | iloverServerPolicy( attempt = attempt )
else:
self.log.error( 'Unknown server policy %s. Using Random instead' % self.__serverPolicy )
fts3Server = self.__randomServerPolicy()
if not ftsServerStatus:
self.log.warn( 'FTS server %s is not in good shape. Choose another one' % fts3Serve... | t
if fts3Server:
return S_OK( fts3Server )
return S_ERROR ( "Could not find an FTS3 server (max attempt reached)" )
def findRoute( self, sourceSE, targetSE ):
""" Find the appropriate route from point A to B
:param sourceSE : source SE
:param targetSE : destination SE
:returns ... |
openSUSE/docmanager | src/docmanager/cli/cmd_analyze.py | Python | gpl-3.0 | 1,621 | 0.001234 | #
# Copyr | ight (c) 2015 SUSE Linux GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 3 of the GNU General Public License as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will b | e useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, contact SUSE LLC.
#
# To conta... |
autowitch/pypov | scenes/geomorphs/lib/geomorphs/edge_5x10_003.py | Python | mit | 1,577 | 0.013951 | from pypov.pov import Texture, Pigment, Intersection, Cylinder
from pypov.pov import Union, Difference, Object, Box, Sphere
from pypov.common import grey, white
fr | om pypov.colors import Colors
from lib.base import five_by_ten_edge
from lib.textures import cross_hatch, cross_hatch_2, wall_texture_1
from lib.metadata import Metadata
def edge_5x10_003_info():
| return Metadata("Non connected edge passages", "e3",
description="Non connected edge passages",
block_type="edge",
bottom=0, top=20,
size="5x10",
repeatable=True,
fully_connected=False,
dead_ends=False,
entrance=False,
... |
DjangoAdminHackers/django-link-report | link_report/management/commands/update_sentry_404s.py | Python | mit | 232 | 0 | from __fu | ture__ import unicode_literals
from django.core.management.base import BaseCommand
from ...utils import update_sentry_404s
class Command(BaseCommand):
    """Management command that refreshes the stored Sentry 404 data."""

    def handle(self, *args, **options):
        """Entry point: delegate straight to the shared update helper."""
        update_sentry_404s()
|
LPM-HMS/COSMOS2 | setup.py | Python | gpl-3.0 | 2,740 | 0.00146 | import os
import re
import sys
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), "cosmos/VERSION"), "r") as fh:
__version__ = fh.read().strip()
def find_all(path, reg_expr, inverse=False, remove_prefix=False):
if not path.endswith("/"):
path = path + "/"
... | me",
"black",
"pytest-timeout",
"pytest-xdist",
"ghp-import",
"sphinx",
"sphinx_rtd_theme",
]
},
packages=find_packages(),
include_package_data=True,
package_data=package_data,
# package_dir = {'cosmos': 'cosmos'},
c... | 3.7",
"Programming Language :: Python :: 3.8",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Operating System :: MacOS",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX",
"Programming... |
release-monitoring/anitya | anitya/wsgi.py | Python | gpl-2.0 | 835 | 0.001198 | # This file is part of the Anitya project.
# Copyright (C) 2017 Red Hat, Inc.
#
# This | program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITH... | d have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from .app import create
application = create()
|
jmaher/treeherder | tests/client/test_perfherder_client.py | Python | mpl-2.0 | 1,809 | 0.003317 | import unittest
import responses
from treeherder.client.thclient import PerfherderClient
class PerfherderClientTest(unittest.TestCase):
@responses.activate
def test_get_performance_signatures(self):
| pc = PerfherderClient()
url = pc._get_endpoint_url(pc.PERFORMANCE_SIGNATURES_ENDPOINT, project='mozilla-central')
content = {
'signature1': {'cheezburgers': 1},
'signature2': {'hamburgers': 2},
'signature3': {'cheezburgers': 2},
}
responses.add(r... | mozilla-central')
self.assertEqual(len(sigs), 3)
self.assertEqual(sigs.get_signature_hashes(), ['signature1', 'signature2', 'signature3'])
self.assertEqual(sigs.get_property_names(), set(['cheezburgers', 'hamburgers']))
self.assertEqual(sigs.get_property_values('cheezburgers'), set([1, 2... |
cjohnson98/transistor-pi | radio3.py | Python | gpl-3.0 | 2,243 | 0.027196 | #!/usr/bin/python
# selects stream from tuning dial position
# monitors battery condition
from __future__ import division
import spidev
import time
import os
import gc
import sys
import math
global tune1, tune2, tunerout, volts2, volume1, volume2, volumeout, IStream
tune1 = False
tune2 = False
tunerout = False
volts2... | ery
time.sleep(1)
sys.exit()
# read the tuning dial:
tune2 = ReadChannel(0)
if (tune2 == 0):
IStream = False
if (tune2 == 1023):
IStream = True
ditherfactor = int(tune2 / 50) + 3 # anti-dither
if ((tune2 < tune1 - ditherfactor) or (tune2 > tune1 + ditherfactor)): # tuning change?
tunerout = int(tune2... | the mpc instruction
os.popen("mpc clear -q") # stop play and clear the playlist
os.popen(tuneroutstring) # load the new playlist
os.popen("mpc play -q ") # start play
tune1 = tune2
time.sleep(.5)
# read the volume control:
volume2 = ReadChannel(1)
ditherfactor = int(volume2 / 50) + 1
if ((volume2 < vo... |
GNUDimarik/dimecoin | qa/rpc-tests/util.py | Python | mit | 5,261 | 0.008554 | # Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Helpful routines for regression testing
#
# Add python-bitcoinrpc to module search path:
import os
import sys
sys.path.append... | allets.
bitcoind and bitcoin-cli must be in search path.
"""
if not os.path.isdir(os.path.join("cache", "node0")):
devnull = open("/dev/null", "w+")
# Create cache directories, run bitcoinds:
for i in range(4):
datadir = os.path.join("cache", "node"+str(i))
o... | in.conf"), 'w') as f:
f.write("regtest=1\n");
f.write("rpcuser=rt\n");
f.write("rpcpassword=rt\n");
f.write("port="+str(START_P2P_PORT+i)+"\n");
f.write("rpcport="+str(START_RPC_PORT+i)+"\n");
args = [ "bitcoind", "-keypool=1", ... |
mesosphere/dcos-commons | frameworks/helloworld/tests/test_region_awareness.py | Python | apache-2.0 | 3,469 | 0.003171 | import logging
import os
import pytest
import sdk_cmd
import sdk_install
import sdk_marathon
import sdk_plan
impo | rt sdk_utils
from tests import config
log = logging.getLogger(__name__)
POD_NAMES = ["hello-0", "world-0", "world-1"]
REMOTE_REGION = os.environ.get("REMOTE_REGION") or None
def remote_region_enabled():
return REMOTE_REGION is not None
@pytest.fixture
def local_service():
try:
sdk_install.install(... | "MULTI_REGION", "allow_region_awareness": True}
},
)
yield
finally:
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
@pytest.fixture
def remote_service():
try:
sdk_install.install(
config.PACKAGE_NAME,
config.SERVICE_NAME,
... |
seraphln/onedrop | onedrop/scripts/batch_add_crawler_seeds.py | Python | gpl-3.0 | 999 | 0.005417 | # coding=utf8
#
"""
批量将采集任务的种子信息放到对应的采集队列里
"""
# 添加django的环境变量
import os
import sys
from os.path import dirname, join
sys.path.appen | d(join(dirname(__file__), '../'))
sys.path.append(join(dirname(__file__), '../../'))
os.environ['DJANGO_SETTINGS_MODULE'] = 'onedrop.settings'
import django
django.setup()
from datetime import datetime
from datetime import timedelta
from onedrop.odtasks.models import CrawlerSeeds
from onedrop.utils.redis_op impor... | eeds.objects.filter()
for seed in seeds:
seed.status = "crawling"
seed.modified_on = now
seed.last_crawl_on = now
seed.save()
print "Putting %s to redis" % seed.name
rop.add_task_queue("onedrop.crawler.seed", str(seed.id))
rop.add_task_queue("seed", str(see... |
sreichholf/python-coherence | coherence/backends/tracker_storage.py | Python | mit | 28,384 | 0.011133 | # Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2008, Frank Scholz <[email protected]>
import os.path
from twisted.internet import reactor, defer
from twisted.python import failure, util
from coherence.upnp.core import DIDLLite
from coherence.upnp.core.soap_service ... | self.parent_id = parent_id
self.path = unicode(file)
duration = str(duration).strip()
duration = duration.split('.')[0]
if len(duration) == 0:
duration = 0
seconds = int(duration)
hours = seconds / 3600
seconds = seconds - hours * 3600
minu... | 60
self.duration = ("%d:%02d:%02d") % (hours, minutes, seconds)
self.bitrate = 0
self.title = unicode(title)
self.artist = unicode(artist)
self.album = unicode(album)
self.genre = unicode(genre)
track_number = str(track_number).strip()
if len(track_numb... |
voer-platform/vp.repo | vpr/rest_framework/urls.py | Python | agpl-3.0 | 724 | 0.002762 | """
Login and logout views for the browseable API.
Add these to your root URLconf if you're using the browseable API and
your API requires authentication.
The urls must be namespaced a | s 'rest_framework', and you should make sure
your authentication settings include `SessionAuthentication`.
urlpatterns = patterns('',
...
url(r'^auth', include('rest_framework.urls', namespace='rest_framework'))
)
"""
from django.conf.urls.defaults import patterns, url
template_name = {'templ... | , template_name, name='logout'),
)
|
pbmanis/acq4 | acq4/devices/Stage/calibration.py | Python | mit | 4,016 | 0.003237 | from __future__ import print_function
import numpy as np
import scipy.stats, scipy.optimize
import acq4.pyqtgraph as pg
class StageCalibration(object):
    def __init__(self, stage):
        """Bind the stage device to be calibrated; frame delay starts unknown."""
        self.stage = stage
        self.framedelay = None  # not yet measured; presumably set during calibrate() — TODO confirm
def calibrate(self, camera):
import imreg_dft # FFT image regis... | ame.getImage(), self.frames[compareIndex].getImage())
px = self.camera.getPixelSize()
offset = self.offsets[compareIndex] + offset.astype(float) * [px.x(), px.y()]
self.offsets[index] = offset
# finish up if there are no more positions
if finished:
pg.disconn... | s.append(frame.getImage()[np.newaxis, ...])
# self.frameArray = np.concatenate(frames, axis=0)
# self.imageView = pg.image(self.frameArray)
# linear regression to determine scale between stage steps and camera microns
x = ((self.positions - self.positions[0])**2).sum(axis=1)**0.5
... |
inkenbrandt/EPAEN | prism/prism.py | Python | gpl-2.0 | 7,910 | 0.003287 | __author__ = 'jbellino'
import os
import csv
import gdal
import gdalconst
import zipfile as zf
import numpy as np
import pandas as pd
from unitconversion import *
prismGrid_shp = r'G:\archive\datasets\PRISM\shp\prismGrid_p.shp'
prismGrid_pts = r'G:\archive\datasets\PRISM\shp\prismGrid_p.txt'
prismProj = r'G:\archive\d... | mg = gdal.Open(self.bil_file, gdalconst.GA_ReadOnly)
band = img.GetRasterBand(1)
self.nodatavalue = band.GetNoDataValue()
self.data = band.ReadAsArray()
self.data = np.ma.masked_where(self.data==self.nodatavalue, self.data)
if mask is not None:
self.data = np.ma.maske... | .geotransform = img.GetGeoTransform()
def save_to_esri_grid(self, out_grid, conversion_factor=None, proj=None):
import arcpy
arcpy.env.overwriteOutput = True
arcpy.env.workspace = os.getcwd()
arcpy.CheckOutExtension('Spatial')
arcpy.env.outputCoordinateSystem = prismProj
... |
indhub/mxnet | python/mxnet/gluon/data/sampler.py | Python | apache-2.0 | 4,279 | 0.001636 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not u... | batch, self._prev = self._prev, []
for i | in self._sampler:
batch.append(i)
if len(batch) == self._batch_size:
yield batch
batch = []
if batch:
if self._last_batch == 'keep':
yield batch
elif self._last_batch == 'discard':
return
... |
bioasp/caspo | caspo/console/__init__.py | Python | gpl-3.0 | 714 | 0 | # Copyright (c) 2014-2016, Santiago Videla
#
# This file is part of caspo.
#
# caspo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Pub | lic License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# caspo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See ... | <http://www.gnu.org/licenses/>.
# -*- coding: utf-8 -*-
|
blaiseli/p4-phylogenetics | share/Examples/L_mcmc/G_posteriorSamples/Protein_2parts/sPostSamps.py | Python | gpl-2.0 | 1,242 | 0.004831 | read("d.nex")
read('sets.nex')
a = var.alignments[0]
a.setCharPartition('p1')
d = Data()
t = func.randomTree(taxNames=d.taxNames)
t.data = d
pNum=0
t.newComp(partNum=pNum, free=1, spec='wag')
t.newRMatrix(partNum=pNum, free=0, spec='wag')
t.setNGammaCat(partNum=pNum, nGammaCat=4)
t.newGdasrv(partNum=pNum, free=1, val=... | runNum=0, program='p4', verbose=3)
for sampNum in range(0,10):
t2 = ps.getSample(sampNum)
t2.data = d
t2.simulate()
ret = t2.data.simpleBigXSquared()
print ret[0], ret[1]
ps = PosteriorSamples(t, runNum=1, program='mrbayes', mbBaseName='mbout32 | ', verbose=3)
for sampNum in range(0,10):
t2 = ps.getSample(sampNum)
t2.data = d
t2.simulate()
ret = t2.data.simpleBigXSquared()
print ret[0], ret[1]
|
CingHu/neutron-ustack | neutron/db/firewall/firewall_db.py | Python | apache-2.0 | 23,675 | 0.000169 | # Copyright 2013 Big Switch Networks, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless... | ocol'],
'ip_version': firewall_rule['ip_version'],
'source_ip_address': firewall_rule['source_ip_address'],
| 'destination_ip_address':
firewall_rule['destination_ip_address'],
'source_port': src_port_range,
'destination_port': dst_port_range,
'action': firewall_rule['action'],
'position': position,
'enabled': firewall_rule['enabled']}
... |
paulmadore/Eric-IDE | 6-6.0.9/eric/Plugins/PluginWizardQMessageBox.py | Python | gpl-3.0 | 4,158 | 0.002165 | # -*- coding: utf-8 -*-
# Copyright (c) 2007 - 2015 Detlev Offenbach <[email protected]>
#
"""
Module implementing the QMessageBox wizard plugin.
"""
from __future__ import unicode_literals
from PyQt5.QtCore import QObject
from PyQt5.QtWidgets import QDialog
from E5Gui.E5Application import e5App
from E5Gui.... | is wizard opens a dialog for entering all the parameters"""
""" needed | to create a QMessageBox. The generated code is"""
""" inserted at the current cursor position.</p>"""
))
self.action.triggered.connect(self.__handle)
self.__ui.addE5Actions([self.action], 'wizards')
def __initMenu(self):
"""
Private method to add the ac... |
cirocosta/avisenchente | tests/test_fetcher.py | Python | mit | 1,113 | 0.006289 | import unittest
import samples
from src.utils import iotsdk
from src.utils import fetcher
class TestFetcher(unittest.TestCase):
""" Tests src.utils.fetcher """
def setUp(self):
self.fetcher = fetcher.Fetcher("dummy_token")
pass
def test_initialization(self):
self.assertEqual(sel... | Iot)
def test_toDatabaseMeasure(self):
self.assertFalse(self.fetcher._toDatabaseMeasure(dict()))
self.assertFalse(self.fetcher._toDatabaseMeasure(\
samples.SAMPLE_FALSE_MEASURE))
self.assertTrue(self.fetcher._toDatabaseMeasure(\
samples.SAMPLE_TRUE_MEASURE))
de... | samples.SAMPLE_FALSE_MEASURE_COLLECTION))
self.assertTrue(self.fetcher._toDatabaseMeasureCollection(\
samples.SAMPLE_TRUE_MEASURE_COLLECTION))
def test_fetch_data(self):
pass
if __name__ == "__main__":
unittest.main()
|
jldaniel/Gaia | Models/zdt2.py | Python | mit | 1,190 | 0.00084 | __author__ = 'jdaniel'
from GaiaSolve.model import Model
class ZDT2(Model):
    def __init__(self):
        # Delegate all setup to the Model base class; ZDT2 adds no state here.
        super(ZDT2, self).__init__()
def evaluate(self):
g = 1.0 + 9.0*sum(self.x[1:])/(len(self.x) - 1)
f1 = self.x[0]
f2 = g*(1.0 - (f1/g)**2)
self | .obj = [f1, f2]
self.eqcon = []
self.neqcon = []
    def number_of_design_variables(self):
        """Dimension of the design vector (ZDT2 is conventionally 30-D)."""
        return 30
    def lower_bound(self):
        """Per-variable lower bounds: 0.0 for each of the 30 variables."""
        return [0.0]*30
    def upper_bound(self):
        """Per-variable upper bounds: 1.0 for each of the 30 variables."""
        return [1.0]*30
    def number_of_objectives(self):
        """ZDT2 is a bi-objective problem (f1, f2)."""
        return 2
def has_equality_constraints(se... | straints(self):
return 0
def decision_variable_names(self):
x_names = []
for i in range(30):
x_names.append('x' + str(i))
return x_names
    def objective_variable_names(self):
        """Names of the two objective values produced by evaluate()."""
        return ['f1', 'f2']
def equality_constraint_variable_names(self):
return... |
crowning-/dash | contrib/testgen/base58.py | Python | mit | 2,973 | 0.007064 | # Copyright (c) 2012 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Bitcoin base58 encoding and decoding.
Based on https://bitcointalk.org/index.php?topic=1026.0 (public domain)
'''
import hashlib
... | ong_value >= 256:
div, mod = divmod(long_value, 256)
result = chr(mod) + result
long_value = div
result = chr(long_value) + result
nPad = 0
for c in v:
if c == __b58chars[0]: nPad += 1
else: break
result = chr(0)*nPad + result
if length is not None and len(r... | urn 32-bit checksum based on SHA256"""
return SHA256.new(SHA256.new(v).digest()).digest()[0:4]
def b58encode_chk(v):
    """Base58-encode v with its 4-byte (32-bit) checksum appended."""
    payload = v + checksum(v)
    return b58encode(payload)
def b58decode_chk(v):
"""decode a base58 string, check and remove checksum"""
result = b58decode(v)
... |
cnodell/mrps | mrps.py | Python | mit | 1,477 | 0.003385 | #!/usr/bin/env python
import sys
import configparser
import os
import | shutil
from PyQt5 import QtWidgets
from PyQt5 import QtWebKitWidgets
from PyQt5 import QtCore
# Read config file
home_dir = os.path.expanduser("~")
conf_path = os.path.join(home_dir, ".config/mrps/mrps.conf")
config = configparser.ConfigParser(delimiters=('='))
config.read(conf_path)
def clean_up():
os.remove(h... | ean_up)
if len(sys.argv) == 2:
o_file_full = os.path.abspath(sys.argv[1])
else:
o_file_full = QtWidgets.QFileDialog.getOpenFileName()[0]
if o_file_full:
o_file_dir = os.path.dirname(o_file_full)
o_file_name = os.path.basename(os.path.normpath(o_file_full))
o_file_name_bare = os.path.splitext(o_fi... |
indexofire/feincms-markup | feincms_markup/__init__.py | Python | mit | 446 | 0.004484 | # -*- coding: utf-8 -*-
VERSION = (0, 0, 1, | 'alpha', 0)
__version__ = '.'.join(map(str, VERSION))
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
| version = '%s %s %s' % (version, VERSION[3], VERSION[4])
return version
|
unnikrishnankgs/va | venv/lib/python3.5/site-packages/tensorflow/contrib/distributions/python/ops/bijectors/__init__.py | Python | bsd-2-clause | 2,469 | 0.002835 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may ob | tain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
| # See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Bijector Ops.
@@Affine
@@AffineLinearOperator
@@Bijector
@@Chain
@@CholeskyOuterProduct
@@ConditionalBijector
@@Exp
@@Identity
@@Inl... |
Jumpscale/web | pythonlib/eve/tests/test_version.py | Python | apache-2.0 | 70 | 0 | # -*- coding: utf-8 | -*-
API_VERSION = 'v1'
| DOMAIN = {'contacts': {}}
|
GLolol/PyLink | test/test_protocol_p10.py | Python | mpl-2.0 | 2,145 | 0.000932 | """
Tests for protocols/p10
"""
import unittest
from pylinkirc.protocols import | p10
class P10UIDGeneratorTest(unittest.TestCase):
def setUp(self):
self.uidgen = p10.P10UIDGenerator('HI')
def test_initial_UID(self):
expected = [
"HIAAA",
"HIAAB",
"HIAAC",
"HIAAD",
"HIAAE",
"HIAAF"
]
se... | self.assertEqual(expected, actual)
def test_rollover_first_lowercase(self):
expected = [
"HIAAY",
"HIAAZ",
"HIAAa",
"HIAAb",
"HIAAc",
"HIAAd",
]
self.uidgen.counter = 24
actual = [self.uidgen.next_uid() for i in... |
godiard/typing-turtle-activity | balloongame.py | Python | gpl-3.0 | 13,291 | 0.004439 | # Copyright 2008 by Kate Scheppke and Wade Brainerd.
# This file is part of Typing Turtle.
#
# Typing Turtle is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your opt... | rned a %(type)s medal!') % self.medal + '\n'
| report += '\n'
report += _('Press the ENTER key to continue.')
cr.set_source_rgb(0, 0, 0)
pango_layout = PangoCairo.create_layout(cr)
fd = Pango.FontDescription('Times')
fd.set_size(12 * Pango.SCALE)
pango_layout.set_font_description(fd)
pango_layout.set... |
johnnoone/salt-targeting | src/salt/utils/__init__.py | Python | mit | 537 | 0.001862 | '''
salt.utils
~~~~~~~~~~
'''
class lazy_property(object):
    '''
    Non-data descriptor for lazy evaluation of an object attribute.

    On first access the wrapped getter runs once and its result is cached
    on the instance under the same name; because this descriptor defines
    no __set__, the instance attribute then shadows it, so later accesses
    are plain lookups. The property should represent non-mutable data,
    as it replaces itself.
    http://stackoverflow.com/a/6849299/564003
    '''
    def __init__(self, fget):
        self.fget = fget
        self.func_name = fget.__name__

    def __get__(self, obj, cls):
        if obj is None:
            # Accessed on the class itself; nothing to compute or cache.
            return None
        value = self.fget(obj)
        setattr(obj, self.func_name, value)
        return value
|
marcsans/cnn-physics-perception | phy/lib/python2.7/site-packages/theano/compile/nanguardmode.py | Python | mit | 11,118 | 0.00018 | from __future__ import print_function
import collections
import logging
from six.moves import StringIO
import numpy as np
import theano
from theano.configparser import config
import theano.tensor as T
import theano.sandbox.cuda as cuda
from theano.compile import Mode
logger = logging.getLogger("theano.compile.nangua... | )
| except RuntimeError:
# This can happen if cuda is available, but the
# device is in exclusive mode and used by another
# process.
cuda_compile_failed = True
if inf_is_error and not cuda_compile_failed and f_gpumax is None:
try:
f_gpumax = theano.fu... |
almarklein/bokeh | sphinx/source/tutorial/exercises/scatter.py | Python | bsd-3-clause | 2,389 | 0.001674 | from __future__ import division
import numpy as np
from bokeh.plotting import figure, HBox, output_file, show, VBox
from bokeh.models import Range1d
# create some data using python lists
x1 = [1, 2, 5, 7, -8, 5, 2, 7, 1, -3, -5, 1.7, 5.4, -5]
y1 = [5, 6, -3, 1.5, 2, 1, 1, 9, 2.4, -3, 6, 8, 2, 4]
# creat... | ge` and `y_range` for each figure. Set different colors
# as well. Try setting line_color and fill_color instead of just color. You can
# also set alpha, line_alpha, and fill_alpha if you like. Set tools to TOOLS on
# the figures. Change the value of the 'marker' parameter, "circle", "square",
# "triangle", etc. One ex... | height=300)
p1.scatter(x1, y1, size=12, color="red", alpha=0.5)
# EXERCISE: Try panning and zooming one of the plots with another one visible!
# Set the plot_width and plot_height to smaller if necessary
# EXERCISE: create a new figure p4
# Lets plot 4000 circles, you can play around with this if you like
N = 4000
... |
laginha/yard | src/yard/version.py | Python | mit | 1,888 | 0.009534 | #!/usr/bin/env python
# encoding: utf-8
from yard.exceptions import NoDefaultVersion
from yard.consts import RESOURCE_VERSION_RE
import re
class Metaclass(type):
def __getattr__(self, attrname):
return getattr(self.versions[self.default], attrname)
class VersionController(object):
__metaclass__ = Me... | tp_accept )
if match and match.group(1):
return match.group(1)
return request.GET.get('version', self.default)
def handle_request(self, request, * | *kwargs):
def dispatch(resource):
return resource(self.routes).handle_request(request, **kwargs)
requested_version = self.get_version( request )
if requested_version in self.versions:
return dispatch( self.versions[requested_version] )
elif hasattr(self,... |
DXCanas/content-curation | performance/run_perftests.py | Python | mit | 975 | 0.002051 | import sys
import gevent
from locust.env import Environment
from locust.event import EventHook
from locust.log import setup_logging
from locust.stats import stats_printer
from locustfile import StudioDesktopBrowserUser
setup_logging("DEBUG", None)
def error_output(*args, **kwargs):
print("Error: {}, {}".format(ar... | v.create_web_ui("127.0.0.1", 8089)
# start a greenlet that periodically outputs the current stats
gevent.spawn(stats_printer(env.stats))
# star | t the test
env.runner.start(10, hatch_rate=10)
# in 60 seconds stop the runner
gevent.spawn_later(60, lambda: env.runner.quit())
# wait for the greenlets
env.runner.greenlet.join()
# stop the web server for good measures
env.web_ui.stop()
|
yichoi/jerryscript | tools/run-tests.py | Python | apache-2.0 | 16,290 | 0.002394 | #!/usr/bin/env python
# Copyright JS Foundation and other contributors, http://js.foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.... | ygen', action='store_true',
help='Run doxygen')
parser.add_argument('--check-pylint', action='store_true',
help='Run pylint')
parser.add_argument('--check-vera', action='store_true',
| help='Run vera check')
parser.add_argument('--check-license', action='store_true',
help='Run license check')
parser.add_argument('--check-magic-strings', action='store_true',
help='Run "magic string source code generator should be executed" check')
... |
rohitranjan1991/home-assistant | homeassistant/components/mqtt/debug_info.py | Python | mit | 8,644 | 0.000925 | """Helper to handle a set of topics to subscribe to."""
from __future__ import annotations
from collections import deque
from collections.abc import Callable
import datetime as dt
from functools import wraps
from typing import Any
import attr
from homeassistant.core import HomeAssistant
from homeassistant.helpers im... | .const import ATTR_DISCOVERY_PAYLOAD, ATTR_DISCOVERY_TOPIC
from .models import MessageCallbackType, PublishPayloadType
DATA_MQTT_DEBUG_INFO = "mqtt_debug_info"
STORED_MESSAGES = 10
def initialize(hass: HomeAssistant):
    """Initialize MQTT debug info."""
    # Two top-level buckets: per-entity and per-trigger debug records.
    hass.data[DATA_MQTT_DEBUG_INFO] = {"entities": {}, "triggers": {}}
def log_messages(
hass: HomeAssistant, entity_id: str
) -> Callable[[MessageCallbackType], MessageCallbackType]:
"""Wrap an MQTT message callback to support message logging."""
def _log_message(msg):
"""Log message."""
debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
messa... |
TakayukiSakai/tensorflow | tensorflow/contrib/layers/python/layers/feature_column_ops.py | Python | apache-2.0 | 14,753 | 0.003254 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | ing from feature column to tensors. 'string' key
means a base feature (not-transformed). It can have FeatureColumn as a
key too. That means that FeatureColumn is already transformed by input
pipeline. For example, `inflow` may have handled transformations.
feature_columns: A set containing all the... | rived by FeatureColumn.
weight_collections: List of graph collections to which weights are added.
name: The name for this operation is used to name operations and to find
variables. If specified it must be unique for this scope, otherwise a
unique name starting with "fully_connected" will be created... |
AudioCommons/ac-mediator | docs/conf.py | Python | apache-2.0 | 11,164 | 0.001164 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Audio Commons Service adaptor guidelines documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 8 13:46:45 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuratio... | " is shown in the HTML footer. Default is True.
#
html_show_copyright = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The val | ue of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'd... |
aroth-arsoft/arsoft-web-openvpn | arsoft/web/utils.py | Python | gpl-3.0 | 46,718 | 0.002055 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# kate: space-indent on; indent-width 4; mixedindent off; indent-mode python;
import sys
import types
import re
import os.path
import collections
def _get_system_language_code():
# Language code for this installation. All choices can be found here:
# http://www.i18nguy... | + str(in_devserver) + ' basepath=' + str(settings_obj.BASE_PATH))
if 'debug' in options:
settings_obj.DEBUG = option | s['debug']
else:
settings_obj.DEBUG = in_devserver
# If DISABLE_DEBUG_INFO_PAGE is set the
settings_obj.DISABLE_DEBUG_INFO_PAGE = False
settings_obj.ADMINS = _get_default_admin()
settings_obj.MANAGERS = settings_obj.ADMINS
# If you set this to False, Django will make some optimizati... |
grovesr/django-ims | ims/migrations/0011_auto_20160115_1328.py | Python | bsd-3-clause | 517 | 0.001934 | # -* | - coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-15 18:28
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ims', '0010_auto_20151227_1147'),
]
operations = [
migrations.AlterField(
... | ]
|
mezz64/home-assistant | homeassistant/components/keba/__init__.py | Python | apache-2.0 | 8,577 | 0.000816 | """Support for KEBA charging st | ations."""
import as | yncio
import logging
from keba_kecontact.connection import KebaKeContact
import voluptuous as vol
from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.help... |
stefan-caraiman/cloudbase-init | cloudbaseinit/shell.py | Python | apache-2.0 | 1,433 | 0 | # Copyright 2012 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.a | pache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissi... | struct.calcsize("P") == 8 and sys.platform == 'win32':
# This is needed by Nano Server.
# Set COINIT_MULTITHREADED only on x64 interpreters due to issues on x86.
import pythoncom
sys.coinit_flags = pythoncom.COINIT_MULTITHREADED
pythoncom.CoInitializeEx(pythoncom.COINIT_MULTITHREADED)
from oslo_lo... |
JeffRoy/mi-dataset | mi/dataset/driver/ctdmo_ghqr/sio/test/test_ctdmo_ghqr_sio_co_recovered_driver.py | Python | bsd-2-clause | 1,101 | 0.011807 |
__author__ = 'mworden'
from mi.core.log import get_logger
log = get_logger()
from mi.idk.config import Config
import unittest
import os
from mi.dataset.driver.ctdmo_ghqr.sio.ctdmo_ghqr_s | io_co_recovered_driver import parse
from mi.dataset.dataset_driver impor | t ParticleDataHandler
class SampleTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_one(self):
sourceFilePath = os.path.join('mi','dataset','driver','ctdmo_ghqr','sio',
'resource','CTD15906.DAT')
parti... |
dfdx2/ancfinder | scripts/update_building_permits.py | Python | cc0-1.0 | 1,929 | 0.016071 | import requests, zipfile, subprocess, csv, json
update_data_file = 'http://data.octo.dc.gov/feeds/dcra_building_permits/dcra_building_permits_current_csv.zip'
zip_filename = 'dcra_building_permits_current_csv.zip'
# Gather the f | ile
zip_request = requests.get('http://data.octo.dc.gov/feeds/dcra_building_permits/dcra_building_permits_current_csv.zip', stream=True)
# Download the file and save locally
with open(zip_filename, 'wb') as zip_file:
for chunk in zip_request.iter_content(chunk_size=1024):
if chunk:
| zip_file.write(chunk)
zip_file.flush()
# Unzip the locally saved file
zipfile.main(['-e', zip_filename, 'data'])
# Clean up downloaded file
subprocess.call('rm dcra_building_permits_current_csv.zip', shell=True)
# Calculate permits in each ANC
permits_read = csv.reader(open('data/dcra_buildin... |
WST/django-project-template | portal/middleware.py | Python | mit | 388 | 0.025773 | # -*- coding: utf-8 -*-
class PortalMiddleware:
def __init__(self, get_response):
self.get_response = get_response
| def __call__(self, request):
# Code to be executed for each request before
# the view (and later middleware) are called.
response = self.get_response(request)
# Cod | e to be executed for each request/response after
# the view is called.
return response
|
cmayes/md_utils | tests/test_press_dups.py | Python | bsd-3-clause | 1,638 | 0.001832 | # coding=utf-8
"""
Tests for wham_rad.
"""
import unittest
import os
from md_utils.md_common import silent_remove, diff_lines
from md_utils.press_dups import avg_rows, compress_dups, main
__author__ = 'mayes'
DATA_DIR = os.path.join(os.path.dirname(__file__), 'test_data')
DUPS_DIR = os.path.join(DATA_DIR, 'press_du... | .2, "c": 19.0}, {"a": 99, "b": 1.0, "c": -4.2},
{"a": -22, "b": 1.0, "c": -4.2}]
avg = compress_dups(data, "a")
self.assertEqual(3, len(avg))
class TestMainNoOutput(unittest.TestCase):
def testNoArg(self):
main([])
class TestMain(unittest.TestCase):
def testWithHead07... | D_RAW])
self.assertFalse(diff_lines(HEAD_STD, HEAD_PRESS))
finally:
silent_remove(HEAD_PRESS)
# pass
|
Yarrick13/hwasp | tests/asp/AllAnswerSets/aggregates/solitaire15.test.py | Python | apache-2.0 | 40,300 | 0.000149 | input = """
1 2 0 0
1 3 0 0
1 4 0 0
1 5 0 0
1 6 0 0
1 7 0 0
1 8 0 0
1 9 0 0
1 10 0 0
1 11 0 0
1 12 0 0
1 13 0 0
1 14 0 0
1 15 0 0
1 16 0 0
1 17 0 0
1 18 0 0
1 19 0 0
1 20 0 0
1 21 0 0
1 22 0 0
1 23 0 0
1 24 0 0
1 25 0 0
1 26 0 0
1 27 0 0
1 28 0 0
1 29 0 0
1 30 0 0
1 31 0 0
1 32 0 0
1 33 0 0
1 34 0 0
1 35 0 0
1 36 0 0
1... | 301 302 303 304 305 306 307 308 | 309 310 311 312 313 314 315 316 317 318 319 320 321 322
1 1 2 0 325 323
1 323 0 0
1 326 1 0 187
1 327 1 0 186
1 328 1 0 185
1 329 1 0 184
1 330 1 0 183
1 331 1 0 182
1 332 1 0 181
1 333 1 0 180
1 334 1 0 179
1 335 1 0 178
1 336 1 0 177
1 337 1 0 176
1 338 1 0 175
1 339 1 0 174
1 340 1 0 173
1 341 1 0 172
1 342 1 0 171
... |
sergeyf/scikit-learn | sklearn/decomposition/_kernel_pca.py | Python | bsd-3-clause | 21,772 | 0.000459 | """Kernel Principal Components Analysis."""
# Author: Mathieu Blondel <[email protected]>
# Sylvain Marie <[email protected]>
# License: BSD 3 clause
import numpy as np
from scipy import linalg
from scipy.sparse.linalg import eigsh
from ..utils._arpack import _init_arpack_v0
from ..util... | mator, TransformerMixin, _ClassNamePrefixFeaturesOutMixin
from ..preprocessing import KernelCenterer
from ..metrics.pairwise import pairwise_kernels
class KernelPCA(_ClassNamePrefixFeaturesOutMixin, TransformerMixin, BaseEstimator):
"""Kerne | l Principal component analysis (KPCA) [1]_.
Non-linear dimensionality reduction through the use of kernels (see
:ref:`metrics`).
It uses the :func:`scipy.linalg.eigh` LAPACK implementation of the full SVD
or the :func:`scipy.sparse.linalg.eigsh` ARPACK implementation of the
truncated SVD, dependin... |
ColumbiaCMB/kid_readout | apps/data_taking_scripts/old_scripts/highq_power_sweep_0813f12.py | Python | bsd-2-clause | 5,967 | 0.019105 | import matplotlib
from kid_readout.roach import baseband
matplotlib.use('agg')
import numpy as np
import time
import sys
from kid_readout.utils import data_file,sweeps
from kid_readout.analysis.resonator import fit_best_resonator
ri = baseband.RoachBasebandWide()
ri.initialize()
#ri.set_fft_gain(6)
#f0s = np.load('/... | once-(nf%atonce))+f0s.max()))
offsets = np.linspace(-4882.8125,4638.671875,20)#[5:15]
offsets = offsets
#offsets = np.concatenate(([-40e3,-20e3],offsets,[20e3,40e3]))/1e6
offsets = np.concatenate(([-40e3],offsets,[40e3]))/1e6
#offsets = offsets*4
nsamp = 2**18
step = 1
nstep = 80
f0binned = np.round(f0s*nsamp/512.0)*... | ,(nstep+1))*step
offsets = offset_bins*512.0/nsamp
offsets = np.concatenate(([offsets.min()-20e-3,],offsets,[offsets.max()+20e-3]))
print f0s
print offsets*1e6
print len(f0s)
if False:
from kid_readout.utils.parse_srs import get_all_temperature_data
while True:
temp = get_all_temperature_data()[1][-... |
kxepal/replipy | replipy/peer.py | Python | mit | 7,259 | 0.000138 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Alexander Shorin
# All rights reserved.
#
# This software is licensed as described in the file LICENSE, which
# you should have received as part of this distribution.
#
import functools
import json
import flask
import werkzeug.exceptions
import werkzeug.http
from flask im... | DB response in case of unsupported mime-type
return flask.abort(400)
doc['_id'] = docid
idx, rev = db.store(doc, rev, new_edits)
return make_response(201, {'ok': True, 'id': idx, 'rev': rev})
def delete():
idx, rev = db.remove(docid, fla | sk.request.args.get('rev', None))
return make_response(201, {'ok': True, 'id': idx, 'rev': rev})
db = app.dbs[dbname]
return locals()[flask.request.method.lower()]()
@replipy.route('/<dbname>/_design/<docid>',
methods=['HEAD', 'GET', 'PUT', 'DELETE'])
def design_document(dbname, docid)... |
volusion/vol-admin-bootstrap | test-infra/s3_cache.py | Python | mit | 8,567 | 0.010155 | #!/usr/bin/env python2.7
<<<<<<< HEAD
from __future__ import absol | ute_import, unicode_literals, print_function, division
from sys import a | rgv
from os import environ, stat, remove as _delete_file
from os.path import isfile, dirname, basename, abspath
from hashlib import sha256
from subprocess import check_call as run
=======
# pylint: disable=C0301
from __future__ import absolute_import, unicode_literals, print_function, division
from sys import argv
fro... |
Injabie3/lui-cogs | spoilers/spoilers.py | Python | gpl-3.0 | 8,453 | 0.002484 | """Spoilers cog
Filters out messages that start with a certain prefix, and store them for
later retrieval.
"""
from datetime import datetime, timedelta
import logging
import json
import os
import re
import discord
from discord.ext import commands
from cogs.utils.dataIO import dataIO
from cogs.utils impo... |
"check it and try again!")
return
try:
store = {}
store[KEY_MESSAGE] = msg
store[KEY_AUTHOR_ID] = ctx.message.author.id
store[KEY_AUTHOR_NAME] = "{0.name}#{0.discriminator}".format(ctx.message.author)
... | if data.type == 'image':
store[KEY_EMBED] = data.url
else:
imglinkPattern = r"(?i)http[^ ]+\.(?:png|jpg|jpeg|gif)"
match = re.search(imglinkPattern, msg)
if match:
store[KEY_EMBED] = match.group(0)
a... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.