| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
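The records that follow use the schema above: repository metadata first, the raw file text in `content`, then the numeric quality signals. A minimal sketch of how such records might be loaded and filtered is shown below; the file name `data.parquet` and the use of pandas are assumptions for illustration, not part of the dataset itself.

```python
# Minimal sketch (assumed setup): load records with the schema above and
# filter on a few of the quality-signal columns.
import pandas as pd

# Hypothetical file name; the dump does not specify how it is stored.
df = pd.read_parquet("data.parquet")

# Keep Python files with little duplicated 5-gram content and short average lines.
mask = (
    (df["lang"] == "Python")
    & (df["qsc_code_frac_chars_dupe_5grams_quality_signal"] < 0.5)
    & (df["avg_line_length"] < 100.0)
)
filtered = df[mask]

# Inspect one record: the source text lives in `content`,
# repository metadata in the `max_stars_repo_*` columns.
row = filtered.iloc[0]
print(row["max_stars_repo_name"], row["max_stars_repo_path"], row["size"])
```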
f24ac29d015f11200dad8879234dd7ab9c174313 | 2,003 | py | Python | N50.py | kstatebioinfo/stanford_swc | daa3f37bcbbe4a8a3cbe59a48b380603b9794634 | ["CC0-1.0"] | null | null | null | N50.py | kstatebioinfo/stanford_swc | daa3f37bcbbe4a8a3cbe59a48b380603b9794634 | ["CC0-1.0"] | null | null | null | N50.py | kstatebioinfo/stanford_swc | daa3f37bcbbe4a8a3cbe59a48b380603b9794634 | ["CC0-1.0"] | null | null | null |
#!/usr/bin/env python3
##########################################################################
# USAGE: import N50
# help(N50)
# N50.main(~/stanford_swc/fasta-o-matic/fasta/normal.fa)
# DESCRIPTION: Function that calculates N50 for a FASTA file
# Created by Jennifer M Shelton
##########################################################################
import sys
import re
def n50(lengths):
'''
Reverse sort list of lengths and return N50
'''
lengths = sorted(lengths, reverse = True) # reverse sort lengths large
# to small
cumulative_length = sum(lengths) # get total length
fraction = cumulative_length # set fraction of total to 100%
my_n50 = 0 # initialize n50
for seq_length in lengths:
if fraction > (cumulative_length/2.0):
fraction = fraction - seq_length
my_n50 = seq_length
else: # when the fraction has passed 50% total length get N50
return(my_n50)
def main():
'''
calculates N50 for a FASTA file
'''
script = sys.argv[0]
filename = sys.argv[1]
fasta = open(filename, 'r')
header_pattern = re.compile('^>.*') # pattern for a header line
## Initialize strings for headers and sequences and a list for lengths
lengths = []
dna = ''
header = ''
for line in fasta:
line = line.rstrip()
if header_pattern.match(line):
if not dna == '': # skip the first (empty record)
lengths.append(len(dna))
dna = ''
else:
dna = dna + line
else:
lengths.append(len(dna))
my_n50 = n50(lengths)
print(my_n50)
##########################################################################
##### Execute main unless script is simply imported ############
##### for individual functions ############
##########################################################################
if __name__ == '__main__':
main()
| 34.534483
| 74
| 0.495756
| 211
| 2,003
| 4.601896
| 0.454976
| 0.025747
| 0.032956
| 0.035015
| 0.053553
| 0.053553
| 0
| 0
| 0
| 0
| 0
| 0.027721
| 0.261608
| 2,003
| 57
| 75
| 35.140351
| 0.628803
| 0.324513
| 0
| 0.2
| 0
| 0
| 0.013598
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057143
| false
| 0
| 0.057143
| 0
| 0.114286
| 0.028571
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f24b0ee4bbb24e050ab403a0d1e6bf087f8143ee | 34,017 | py | Python | ColDoc/latex.py | mennucc/ColDoc_project | 947a79592b689f57e59652b37868cc22e520f724 | ["BSD-3-Clause"] | null | null | null | ColDoc/latex.py | mennucc/ColDoc_project | 947a79592b689f57e59652b37868cc22e520f724 | ["BSD-3-Clause"] | null | null | null | ColDoc/latex.py | mennucc/ColDoc_project | 947a79592b689f57e59652b37868cc22e520f724 | ["BSD-3-Clause"] | null | null | null |
#!/usr/bin/env python3
__all__ = ('main_by_args','latex_main','latex_uuid','latex_tree')
cmd_help="""
Command help:
blob
compile the blob(s) with --uuid=UUID,
tree
compile all the blobs starting from --uuid=UUID
main_public
compile the whole document, for the general public
main_private
compile the whole document, including protected material, visible to the editors
all
all of the above
"""
import os, sys, shutil, subprocess, json, argparse, pathlib, tempfile, hashlib, pickle, base64, re, json, dbm
from os.path import join as osjoin
if __name__ == '__main__':
for j in ('','.'):
if j in sys.path:
sys.stderr.write('Warning: deleting %r from sys.path\n',j)
del sys.path[sys.path.index(j)]
#
a = os.path.realpath(sys.argv[0])
a = os.path.dirname(a)
a = os.path.dirname(a)
assert os.path.isdir(a), a
if a not in sys.path:
sys.path.insert(0, a)
del a
#
from ColDoc import loggin
import logging
logger = logging.getLogger(__name__)
############## ColDoc stuff
#
ColDoc_latex_engines=[
('pdflatex','LaTeX'),
('xelatex','XeLaTeX'),
('lualatex','LuaLaTeX'),
]
#from ColDoc import config, utils
import ColDoc, ColDoc.utils, ColDoc.config, ColDoc.transform
import plasTeX
import plasTeX.TeX, plasTeX.Base.LaTeX, plasTeX.Context , plasTeX.Tokenizer , plasTeX.Base
from plasTeX.TeX import TeX
from plasTeX import TeXDocument, Command
import plasTeX.Base as Base
from plasTeX.Packages import amsthm , graphicx
# the package ColDocUUID.sty defines a LaTeX command \uuid , that can be overriden in the preamble
environments_we_wont_latex = ColDoc.config.ColDoc_environments_we_wont_latex
standalone_template=r"""\documentclass[varwidth=%(width)s]{standalone}
%(latex_macros)s
\def\uuidbaseurl{%(url_UUID)s}
\input{preamble.tex}
\usepackage{ColDocUUID}
\begin{document}
%(begin)s
\input{%(input)s}
%(end)s
\end{document}
"""
preview_template=r"""\documentclass %(documentclass_options)s {%(documentclass)s}
%(latex_macros)s
\def\uuidbaseurl{%(url_UUID)s}
\input{preamble.tex}
\usepackage{hyperref}
\usepackage{ColDocUUID}
\begin{document}
%(begin)s
\input{%(input)s}
%(end)s
\end{document}
"""
## TODO investigate, this generates an empty PDF
##\setlength\PreviewBorder{5pt}
##%\usepackage[active]{preview}
plastex_template=r"""\documentclass{article}
%(latex_macros)s
\def\uuidbaseurl{%(url_UUID)s}
\input{preamble.tex}
\usepackage{hyperref}
\usepackage{ColDocUUID}
\begin{document}
%(begin)s
\input{%(input)s}
%(end)s
\end{document}
"""
def latex_uuid(blobs_dir, uuid, lang=None, metadata=None, warn=True, options = {}):
" `latex` the blob identified `uuid`; if `lang` is None, `latex` all languages; ( `metadata` are courtesy , to avoid recomputing )"
log_level = logging.WARNING if warn else logging.DEBUG
if metadata is None:
uuid_, uuid_dir, metadata = ColDoc.utils.resolve_uuid(uuid=uuid, uuid_dir=None,
blobs_dir = blobs_dir,
coldoc = options.get('coldoc'),
metadata_class= options['metadata_class'])
else:
uuid_dir = None
#
if metadata.environ in environments_we_wont_latex :
## 'include_preamble' is maybe illegal LaTeX; 'usepackage' is not yet implemented
logger.log(warn, 'Cannot `pdflatex` environ=%r',metadata.environ)
return True
#
if metadata.environ == 'main_file':
logger.log(log_level, 'Do not need to `pdflatex` the main_file')
return True
#
if lang is not None:
langs=[lang]
else:
langs=metadata.get('lang')
if not langs:
logger.debug('No languages for blob %r in blobs_dir %r',uuid,blobs_dir)
return True
#
res = True
for l in langs:
rh, rp = latex_blob(blobs_dir, metadata=metadata, lang=l,
uuid_dir=uuid_dir, options = options)
res = res and rh and rp
if lang is None:
# update only if all languages were recomputed
metadata.latex_time_update()
metadata.save()
return res
def latex_blob(blobs_dir, metadata, lang, uuid_dir=None, options = {}, squash = True):
""" `latex` the blob identified by the `metadata`, for the given language `lang`.
( `uuid` and `uuid_dir` are courtesy , to avoid recomputing )
Optionally squashes all sublevels, replacing with \\uuidplaceholder """
uuid = metadata.uuid
if uuid_dir is None:
uuid_dir = ColDoc.utils.uuid_to_dir(uuid, blobs_dir=blobs_dir)
#
if lang is None or lang == '':
_lang=''
else:
_lang = '_' + lang
#
if squash is None:
squash = options.get('squash')
# note that extensions are missing
save_name = os.path.join(uuid_dir, 'view' + _lang)
save_abs_name = os.path.join(blobs_dir, save_name)
fake_texfile = tempfile.NamedTemporaryFile(prefix='fakelatex' + _lang + '_' + uuid + '_',
suffix='.tex', dir = blobs_dir , mode='w+', delete=False)
fake_abs_name = fake_texfile.name[:-4]
fake_name = os.path.basename(fake_abs_name)
#
D = {'uuiddir':uuid_dir, 'lang':lang, 'uuid':uuid,
'_lang':_lang,
'width':'4in',
'begin':'','end':'',
'url_UUID' : options['url_UUID'],
'latex_macros' : options.get('latex_macros',metadata.coldoc.latex_macros_uuid),
}
#
b = os.path.join(uuid_dir,'blob'+_lang+'.tex')
s = os.path.join(uuid_dir,'squash'+_lang+'.tex')
if squash:
ColDoc.transform.squash_latex(b, s, blobs_dir, options,
helper = options.get('squash_helper')(blobs_dir, metadata, options))
D['input'] = s
else:
D['input'] = b
#
environ = metadata.environ
if environ[:2] == 'E_' and environ not in ( 'E_document', ):
env = environ[2:]
D['begin'] = r'\begin{'+env+'}'
D['end'] = r'\end{'+env+'}'
if 'split_list' in options and env in options['split_list']:
D['begin'] += r'\item'
##
## create pdf
logger.debug('create pdf for %r',save_abs_name)
env = metadata.environ
if env == 'main_file':
# never used, the main_file is compiled with the latex_main() function
logger.error("should never reach this line")
fake_texfile.write(open(os.path.join(blobs_dir, uuid_dir, 'blob'+_lang+'.tex')).read())
fake_texfile.close()
else:
#
ltclsch = metadata.get('latex_documentclass_choice')
ltclsch = ltclsch[0] if ltclsch else 'auto'
ltcls = options.get('documentclass')
if ltclsch == 'auto':
if env in ColDoc.config.ColDoc_environments_sectioning or env == 'E_document':
ltclsch = 'main'
else:
ltclsch = 'standalone'
if ltclsch == 'main' and not ltcls:
logger.warning('When LaTeXing uuid %r, could not use latex_documentclass_choice = "main"', uuid)
ltclsch = 'standalone'
if ltclsch == 'main':
latextemplate = preview_template
D['documentclass'] = ltcls
elif ltclsch == 'standalone':
latextemplate = standalone_template
elif ltclsch in ('article','book'):
latextemplate = preview_template
D['documentclass'] = ltclsch
else:
raise RuntimeError("unimplemented latex_documentclass_choice = %r",ltclsch)
# from metadata or from coldoc
ltclsopt = metadata.get('documentclassoptions')
if ltclsopt:
ltclsopt = ltclsopt[0]
else:
ltclsopt = options.get('documentclassoptions')
ltclsopt = ColDoc.utils.parenthesizes(ltclsopt, '[]')
D['documentclass_options'] = ltclsopt
#
fake_texfile.write(latextemplate % D)
fake_texfile.close()
rp = pdflatex_engine(blobs_dir, fake_name, save_name, environ, options)
##
# rewrite log to replace temporary file name with final file name
for ext in '.log','.fls':
try:
a = open(save_abs_name+ext).read()
b = a.replace(fake_name,save_name)
open(save_abs_name+ext,'w').write(b)
except Exception as e:
logger.warning(e)
## create html
logger.debug('create html for %r',save_abs_name)
main_file = open(fake_abs_name+'.tex', 'w')
D['url_UUID'] = ColDoc.config.ColDoc_url_placeholder
main_file.write(plastex_template % D)
main_file.close()
rh = plastex_engine(blobs_dir, fake_name, save_name, environ, options)
# paux is quite large and it will not be used after this line
if os.path.isfile(save_abs_name+'_plastex.paux'):
os.unlink(save_abs_name+'_plastex.paux')
# TODO there is a fundamental mistake here. This function may be called to
# update the PDF/HTML view of only one language. This timestamp
# does not record which language was updated. We should have different timestamps
# for different languages.
if len(metadata.get('lang')) == 1:
metadata.latex_time_update()
#
retcodes = ColDoc.utils.json_to_dict(metadata.latex_return_codes)
j = (':'+lang) if (isinstance(lang,str) and lang) else ''
ColDoc.utils.dict_save_or_del( retcodes, 'latex'+j, rp)
ColDoc.utils.dict_save_or_del( retcodes, 'plastex'+j, rh)
metadata.latex_return_codes = ColDoc.utils.dict_to_json(retcodes)
#
metadata.save()
return rh, rp
def latex_anon(coldoc_dir, uuid='001', lang=None, options = {}, access='public', verbose_name=None, email_to=None):
#
assert access=='public'
#
if isinstance(options, (str,bytes) ):
# base64 accepts both bytes and str
options = pickle.loads(base64.b64decode(options))
#
metadata_class = options.get('metadata_class')
assert coldoc_dir == options.get('coldoc_dir',coldoc_dir)
coldoc = options.get('coldoc')
warn = options.get('warn')
#
n, anon_dir = ColDoc.utils.prepare_anon_tree(coldoc_dir, uuid=uuid, lang=lang,
metadata_class=metadata_class, coldoc=coldoc)
if anon_dir is not None:
assert isinstance(anon_dir, (str, pathlib.Path)), anon_dir
return latex_main(anon_dir, uuid=uuid, lang=lang, options = options, access='public')
else:
return False
def latex_main(blobs_dir, uuid='001', lang=None, options = {}, access=None, verbose_name=None, email_to=None):
"latex the main document, as the authors intended it ; save all results in UUID dir, as main.* "
#
assert access in ('public','private')
assert isinstance(blobs_dir, (str, pathlib.Path)), blobs_dir
assert os.path.isdir(blobs_dir)
#
if isinstance(options, (str,bytes) ):
# base64 accepts both bytes and str
options = pickle.loads(base64.b64decode(options))
#
metadata_class = options.get('metadata_class')
coldoc_dir = options.get('coldoc_dir')
coldoc = options.get('coldoc')
#
if coldoc_dir is not None:
options = prepare_options_for_latex(coldoc_dir, blobs_dir, metadata_class, coldoc, options)
#
uuid_, uuid_dir, metadata = ColDoc.utils.resolve_uuid(uuid=uuid, uuid_dir=None,
blobs_dir = blobs_dir,
coldoc = coldoc,
metadata_class = metadata_class)
environ = metadata.environ
#
if access =='public':
options['plastex_theme'] = 'blue'
latex_macros = metadata.coldoc.latex_macros_public
else:
options['plastex_theme'] = 'green'
latex_macros = metadata.coldoc.latex_macros_private
if lang is not None:
langs=[lang]
else:
langs=metadata.get('lang')
#
ret = True
coldoc = options.get('coldoc')
if coldoc is not None:
retcodes = ColDoc.utils.json_to_dict(coldoc.latex_return_codes)
#
for lang in langs:
#
_lang = ('_'+lang) if (isinstance(lang,str) and lang) else ''
lang_ = (':'+lang) if (isinstance(lang,str) and lang) else ''
#
uuid_dir = ColDoc.utils.uuid_to_dir(uuid, blobs_dir=blobs_dir)
# note that extensions are missing
save_name = os.path.join(uuid_dir, 'main' + _lang)
save_abs_name = os.path.join(blobs_dir, save_name)
fake_name = 'fakemain' + _lang
fake_abs_name = os.path.join(blobs_dir, fake_name)
#
a = os.path.join(blobs_dir, uuid_dir, 'blob'+_lang+'.tex')
prologue, preamble, body, epilogue = ColDoc.utils.split_blob(open(a))
if not(preamble):
logger.warning(r" cannot locate '\begin{document}' ")
if True:
preamble = [latex_macros] + preamble
import re
r = re.compile(r'\\usepackage{ColDocUUID}')
if not any(r.match(a) for a in preamble):
preamble += ['\\usepackage{ColDocUUID}\n']
logger.debug(r" adding \usepackage{ColDocUUID}")
a = (r'\def\uuidbaseurl{%s}'%(options['url_UUID'],)+'\n')
f_pdf = ''.join(prologue + preamble + [a] + body + epilogue)
a = (r'\def\uuidbaseurl{%s}'%(ColDoc.config.ColDoc_url_placeholder,)+'\n')
f_html = ''.join(prologue + preamble + [a] + body + epilogue)
#
open(fake_abs_name+'.tex','w').write(f_pdf)
rp = pdflatex_engine(blobs_dir, fake_name, save_name, environ, options)
ColDoc.utils.dict_save_or_del(retcodes, 'latex'+lang_+':'+access, rp)
try:
ColDoc.utils.os_rel_symlink(save_name+'.pdf','main'+_lang+'.pdf',
blobs_dir, False, True)
except:
logger.exception('while symlinking')
open(fake_abs_name+'.tex','w').write(f_html)
rh = plastex_engine(blobs_dir, fake_name, save_name, environ, options,
levels = True, tok = True, strip_head = False)
parse_plastex_html(blobs_dir, osjoin(blobs_dir, save_name+'_html'), save_abs_name+'_plastex.paux')
# paux is quite large and it will not be used after this line
os.unlink(save_abs_name+'_plastex.paux')
ColDoc.utils.dict_save_or_del(retcodes, 'plastex'+lang_+':'+access, rh)
try:
ColDoc.utils.os_rel_symlink(save_name+'_html','main'+_lang+'_html',
blobs_dir, True, True)
except:
logger.exception('while symlinking')
#
for e in ('.aux','.bbl','_plastex.paux'):
# keep a copy of the aux file
# TODO should encode by language
a,b = osjoin(blobs_dir,save_name+e), osjoin(blobs_dir,'main'+e)
if os.path.isfile(a):
logger.debug('Copy %r to %r',a,b)
shutil.copy(a,b)
#
ret = ret and rh and rp
#
if coldoc is not None:
if lang is None:
# update only if all languages were updated
coldoc.latex_time_update()
coldoc.latex_return_codes = ColDoc.utils.dict_to_json(retcodes)
coldoc.save()
return ret
def parse_plastex_paux(blobs_dir, paux):
if isinstance(paux,str):
if not os.path.isabs(paux):
paux = osjoin(blobs_dir, paux)
try:
paux = open(paux,'rb')
except OSError as e:
logger.error('Cannot open %r : %r',paux,e)
return {}
a = pickle.load(paux)
a = a['HTML5']
D = {}
for n in a:
try:
if n.startswith('UUID:'):
uuid = n[5:]
url = a[n]['url']
if '#' in url:
S,name = url.split('#')
D[uuid] = (S, '#' + name)
else:
D[uuid] = (url, '')
except:
logger.exception('vv')
return D
def parse_plastex_html(blobs_dir, html_dir, paux):
try:
from bs4 import BeautifulSoup
except ImportError:
logger.error('Please install BeautifulSoup4: pip3 install BeautifulSoup4')
return
D = parse_plastex_paux(blobs_dir, paux)
P = ColDoc.config.ColDoc_url_placeholder
for S in os.listdir(html_dir):
if S.endswith('html'):
name = href = uuid = None
soup = BeautifulSoup(open(osjoin(html_dir,S)).read(), 'html.parser')
for link in soup.find_all('a'):
h = link.get('href')
n = link.get('name')
if n:
if n.startswith('UUID:'):
uuid = n[5:]
D[uuid] = (S, n)
else:
name = n
if h and h.startswith(P):
uuid = h[len(P):]
if uuid not in D and name:
D[uuid] = (S, '#' + name)
#pickle.dump(D,open(osjoin(blobs_dir,'.UUID_html_mapping.pickle'),'wb'))
db = dbm.open(osjoin(blobs_dir,'.UUID_html_mapping.dbm'),'c')
for k,v in D.items():
db[k] = json.dumps(v)
db.close()
json.dump(D,open(osjoin(blobs_dir,'.UUID_html_mapping.json'),'w'),indent=1)
def get_specific_html_for_UUID(blobs_dir,UUID):
try:
db = dbm.open(osjoin(blobs_dir,'.UUID_html_mapping.dbm'))
return json.loads(db[UUID])
except KeyError:
logger.info('Cannot resolve uuid=%r in %r',UUID,blobs_dir)
return '',''
except:
logger.exception('Cannot resolve uuid=%r in %r',UUID,blobs_dir)
return '',''
def dedup_html(src, options):
replacements = []
dedup_root = options.get('dedup_root')
dedup_url = options.get('dedup_url')
if dedup_root is not None:
coldoc_site_root = options['coldoc_site_root']
for k in 'js', 'styles', 'symbol-defs.svg' :
k_ = osjoin(src,k)
if os.path.exists(k_):
dedup = ColDoc.utils.replace_with_hash_symlink(coldoc_site_root, src, dedup_root, k)
if os.path.isfile(k_):
replacements.append( (k, dedup_url + '/' + dedup) )
elif os.path.isdir(k_):
for dirpath, dirnames, filenames in os.walk(k_):
for f in filenames:
a = osjoin(dirpath,f)
o = a[(len(src)+1):]
r = a[(len(src)+len(k)+2):]
replacements.append( ( o, (dedup_url + '/' + dedup + '/' + r) ) )
return replacements
def plastex_engine(blobs_dir, fake_name, save_name, environ, options,
levels = False, tok = False, strip_head = True, plastex_theme=None):
" compiles the `fake_name` latex, and generates the `save_name` result ; note that extensions are missing "
save_abs_name = os.path.join(blobs_dir, save_name)
fake_abs_name = os.path.join(blobs_dir, fake_name)
#
plastex_theme = options.get('plastex_theme','green')
#
fake_support=[]
for es,ed in ColDoc.config.ColDoc_plastex_fakemain_reuse_extensions:
a = osjoin(blobs_dir,'main'+es)
if os.path.exists(a):
logger.debug("Re-using %r as %r",a,fake_abs_name+ed)
shutil.copy2(a,fake_abs_name+ed)
fake_support.append((a,fake_abs_name+ed))
elif os.path.exists(save_abs_name+es):
logger.debug("Re-using %r as %r",save_abs_name+es,fake_abs_name+ed)
shutil.copy(save_abs_name+es,fake_abs_name+ed)
fake_support.append((save_abs_name+es,fake_abs_name+ed))
#
F = fake_name+'.tex'
d = os.path.dirname(F)
#assert os.path.isfile(F),F
if d :
logger.warning("The argument of `plastex` is not in the blobs directory: %r", F)
#
a,b = os.path.split(save_abs_name+'_html')
save_name_tmp = tempfile.mkdtemp(dir=a,prefix=b)
#
argv = ['-d',save_name_tmp,"--renderer=HTML5", '--theme-css', plastex_theme]
if not levels :
argv += [ '--split-level', '-3']
if tok is False or (environ[:2] == 'E_' and tok == 'auto'):
argv.append( '--no-display-toc' )
#n = osjoin(blobs_dir,save_name+'_paux')
#if not os.path.isdir(n): os.mkdir(n)
## do not use ['--paux-dirs',save_name+'_paux'] until we understand what it does
argv += ['--log',F]
stdout_ = osjoin(blobs_dir,save_name+'_plastex.stdout')
ret = ColDoc.utils.plastex_invoke(cwd_ = blobs_dir ,
stdout_ = stdout_,
argv_ = argv,
logfile = fake_name+'.log')
if os.path.exists(save_abs_name+'_html') :
shutil.rmtree(save_abs_name+'_html')
os.rename(save_name_tmp, save_abs_name+'_html')
extensions = '.log','.paux','.tex','.bbl'
if ret :
logger.warning('Failed: cd %r ; plastex %s',blobs_dir,' '.join(argv))
for e in extensions:
if os.path.exists(save_abs_name+'_plastex'+e):
os.rename(save_abs_name+'_plastex'+e,save_abs_name+'_plastex'+e+'~')
if os.path.exists(fake_abs_name+e):
s,d = fake_abs_name+e,save_abs_name+'_plastex'+e
os.rename(s,d)
if ret: logger.warning(' rename %r to %r',s,d)
if os.path.isfile(osjoin(blobs_dir, save_name+'_html','index.html')):
logger.info('created html version of %r ',save_abs_name)
else:
logger.warning('no "index.html" in %r',save_name+'_html')
return False
#
replacements = dedup_html(osjoin(blobs_dir, save_name+'_html'), options)
# replace urls in html to point to dedup-ed stuff
for f in os.listdir(osjoin(blobs_dir, save_name+'_html')):
f = osjoin(blobs_dir, save_name+'_html', f)
if f[-5:]=='.html':
L = O = open(f).read()
# ok, regular expressions may be cooler
for p in 'href="' , 'src="' :
for e in '"', '#':
for o,r in replacements:
L = L.replace(p+o+e , p+r+e)
if L != O:
os.rename(f,f+'~')
open(f,'w').write(L)
#
if strip_head:
for f in os.listdir(osjoin(blobs_dir, save_name+'_html')):
f = osjoin(blobs_dir, save_name+'_html', f)
if f[-5:]=='.html':
logger.debug('stripping <head> of %r ',f)
os.rename(f,f+'~~')
L=open(f+'~~').readlines()
try:
ns, ne = None,None
for n,s in enumerate(L):
s = s.strip()
if s == '<body>': ns = n
if s == '</body>': ne = n
assert ns,ne
L = L[ns+1:ne]
F = open(f,'w')
for l in L:
if l[:7] != '<script':
F.write(l)
except:
logger.exception('ARGH')
return ret == 0
def pdflatex_engine(blobs_dir, fake_name, save_name, environ, options, repeat = None):
" If repeat is None, it will be run twice if bib data or aux data changed"
save_abs_name = os.path.join(blobs_dir, save_name)
fake_abs_name = os.path.join(blobs_dir, fake_name)
# 'main.aux' and 'main.bbl' are saved latex_main()
for e in ColDoc.config.ColDoc_pdflatex_fakemain_reuse_extensions:
a = os.path.join(blobs_dir,'main'+e)
if os.path.exists(save_abs_name+e):
logger.debug("Re-using %r for %r",save_abs_name+e,fake_abs_name+e)
shutil.copy2(save_abs_name+e, fake_abs_name+e)
elif os.path.exists(a):
logger.debug("Re-using %r for %r (hoping for the best)",a,fake_abs_name+e)
shutil.copy2(a,fake_abs_name+e)
else:
logger.debug("No %r file for this job",e)
#
extensions = ColDoc.config.ColDoc_pdflatex_fakemain_preserve_extensions
#
## dunno what this may be useful for
#for e in extensions:
# if e not in ('.tex','.aux','.bbl') and os.path.exists(fake_abs_name+e):
# logger.warning('Overwriting: %r',fake_abs_name+e)
#
engine = options.get('latex_engine','pdflatex')
logger.debug('Using engine %r',engine)
args = [engine,'-file-line-error','-interaction','batchmode',
'-recorder','-no-shell-escape','-no-parse-first-line',
##TODO may use -output-directory directory
## TODO TEST THIS
##( r"\def\uuidbaseurl{%s}" % (options['url_UUID'],)), r"\input",
## TODO for luatex may add --nosocket --safer
fake_name+'.tex']
#
p = subprocess.Popen(args,cwd=blobs_dir,stdin=open(os.devnull),
stdout=open(os.devnull,'w'),stderr=subprocess.STDOUT)
r=p.wait()
logger.debug('Engine result %r',r)
#
if r != 0:
logger.debug('LaTeX failed %r will not run BiBTeX',r)
elif environ in ( 'main_file', 'E_document') and \
os.path.isfile(fake_abs_name+'.aux') and \
'\\bibdata' in open(fake_abs_name+'.aux').read():
logger.debug('Running BiBTeX')
if os.path.isfile(fake_abs_name+'.bbl'):
file_md5 = hashlib.md5(open(fake_abs_name+'.bbl','rb').read()).hexdigest()
else:
file_md5 = None
p = subprocess.Popen(['bibtex',fake_name],
cwd=blobs_dir,stdin=open(os.devnull),
stdout=subprocess.PIPE ,stderr=subprocess.STDOUT)
a = p.stdout.read()
if p.wait() != 0:
logger.warning('bibtex fails, see %r'%(save_abs_name+'.blg',))
logger.warning('bibtex output: %r',a)
else:
if os.path.isfile(fake_abs_name+'.bbl'):
if file_md5 is None or file_md5 != hashlib.md5(open(fake_abs_name+'.bbl','rb').read()).hexdigest():
if repeat is None:
logger.debug('BibTeX changed the .bbl file, will rerun')
repeat = True
else:
logger.debug('BibTeX changed the .bbl file')
else:
logger.debug('BibTeX did not change the .bbl file')
else:
logger.warning('BiBTeX did not generate %r',fake_abs_name+'.bbl')
#
a = 'Rerun to get cross-references right'
if r == 0:
if repeat is None and a in open(fake_abs_name+'.log').read():
logger.debug('%r reports %r in log, will rerun',engine,a)
repeat = True
elif repeat is None:
logger.debug('%r does not report %r in log, will not rerun',engine,a)
#
if r == 0 and repeat:
logger.debug('Rerunning engine %r',engine)
p = subprocess.Popen(args,cwd=blobs_dir,stdin=open(os.devnull),
stdout=open(os.devnull,'w'),stderr=subprocess.STDOUT)
r = p.wait()
logger.debug('Engine result %r',r)
#
res = r == 0
if not res:
logger.warning('%r fails, see %r'%(engine,save_abs_name+'.log'))
#
for e in extensions:
if os.path.exists(save_abs_name+e):
os.rename(save_abs_name+e,save_abs_name+e+'~')
if os.path.exists(fake_abs_name+e):
if e == '.pdf':
siz=os.path.getsize(fake_abs_name+e)
if siz :
logger.info("Created pdf %r size %d"%(save_abs_name+e,siz))
else:
logger.warning("Created empty pdf %r "%(save_abs_name+e,))
a,b=fake_abs_name+e,save_abs_name+e
logger.debug('Rename %r to %r',a,b)
os.rename(a,b)
else:
if e not in ( '.pdf', '.aux' ) :
logger.debug("Missing :%r"%(fake_abs_name+e,))
else:
logger.warning("Missing :%r"%(fake_abs_name+e,))
if e=='.pdf': res=False
return res
def latex_tree(blobs_dir, uuid=None, lang=None, warn=False, options={}, verbose_name=None, email_to=None):
" latex the whole tree, starting from `uuid` "
log_level = logging.WARNING if warn else logging.DEBUG
#
if isinstance(options, (str,bytes) ):
# base64 accepts both bytes and str
options = pickle.loads(base64.b64decode(options))
#
metadata_class = options.get('metadata_class')
coldoc_dir = options.get('coldoc_dir')
coldoc = options.get('coldoc')
#
if coldoc_dir is not None:
options = prepare_options_for_latex(coldoc_dir, blobs_dir, metadata_class, coldoc, options)
#
if uuid is None:
logger.warning('Assuming root_uuid = 001')
uuid = '001'
uuid_, uuid_dir, metadata = ColDoc.utils.resolve_uuid(uuid=uuid, uuid_dir=None,
blobs_dir = blobs_dir,
coldoc = coldoc,
metadata_class=metadata_class)
#
ret = True
if metadata.environ in environments_we_wont_latex:
logger.log(log_level, 'Cannot `latex` environ %r , UUID = %r'%(metadata.environ, uuid,))
else:
r = latex_uuid(blobs_dir, uuid=uuid, metadata=metadata, lang=lang, warn=warn, options=options)
ret = ret and r
for u in metadata.get('child_uuid'):
logger.debug('moving down from node %r to node %r',uuid,u)
r = latex_tree(blobs_dir, uuid=u, lang=lang, warn=warn, options=options)
ret = ret and r
return ret
def prepare_options_for_latex(coldoc_dir, blobs_dir, metadata_class, coldoc=None, options = None):
if options is None:
options = {}
### get and set some options
if coldoc is None:
coldoc = options.get('coldoc')
else:
options['coldoc'] = coldoc
options['coldoc_dir'] = coldoc_dir
#
try:
blobinator_args = ColDoc.utils.get_blobinator_args(blobs_dir)
options.update(blobinator_args)
except:
logger.exception('No blobinator_args')
#
a = osjoin(coldoc_dir, 'coldoc.json')
if os.path.isfile( a ):
coldoc_args = json.load(open(a))
options.update(coldoc_args['fields'])
#
coldoc_root_uuid = options.get('root_uuid')
if isinstance(coldoc_root_uuid,int):
coldoc_root_uuid = ColDoc.utils.int_to_uuid(coldoc_root_uuid)
options['root_uuid'] = coldoc_root_uuid
#
root_metadata = metadata_class.load_by_uuid(uuid=coldoc_root_uuid, coldoc=coldoc, basepath=blobs_dir)
for a in ('documentclass', 'documentclassoptions'):
b = root_metadata.get(a)
if b:
options[a] = b[0]
logger.debug('In root uuid %r = %r',a,b)
else:
logger.warning('In root uuid no value for %r',a)
#
logger.debug('From %r options %r',a,options)
else:
logger.error('No %r',a)
#
return options
def prepare_parser(cmd_help=cmd_help):
# parse arguments
COLDOC_SITE_ROOT = os.environ.get('COLDOC_SITE_ROOT')
parser = argparse.ArgumentParser(description='Compile coldoc material, using `latex` and `plastex` ',
epilog=cmd_help,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('--verbose','-v',action='count',default=0)
parser.add_argument('--uuid',help='UUID to work on/start from')
parser.add_argument('command', help='specific command',nargs='+')
return parser
def main(argv):
parser = prepare_parser()
parser.add_argument('--blobs-dir',type=str,\
help='directory where the blob_ized output is saved',
required=True)
parser.add_argument('--url-UUID',type=str,\
help='URL of the website that will show the UUIDs, used by my \\uuid macro in PDF',
required=True)
args = parser.parse_args(argv[1:])
#
blobs_dir = args.blobs_dir
assert os.path.isdir(blobs_dir), blobs_dir
#
args.coldoc_dir = coldoc_dir = os.path.dirname(os.path.dirname(blobs_dir))
from ColDoc.utils import FMetadata
options = prepare_options_for_latex(coldoc_dir, blobs_dir, FMetadata)
options['url_UUID'] = args.url_UUID
#
options["squash_helper"] = ColDoc.transform.squash_input_uuid
options['metadata_class'] = ColDoc.utils.FMetadata
return main_by_args(args,options)
def main_by_args(args,options):
argv = args.command
blobs_dir = args.blobs_dir
coldoc_dir = args.coldoc_dir
logger.setLevel(logging.WARNING)
if args.verbose > 1 :
logger.setLevel(logging.DEBUG)
elif args.verbose > 0 :
logger.setLevel(logging.INFO)
#
if args.uuid is not None:
UUID = args.uuid
elif 'root_uuid' in options:
UUID = options['root_uuid']
else:
UUID = '001'
#
ret = True
if argv[0] == 'blob':
lang = None
if len(argv)>2:
lang = argv[2]
ret = latex_uuid(blobs_dir,UUID,lang=lang, options=options)
elif argv[0] == 'tree':
ret = latex_tree(blobs_dir,UUID, options=options)
elif argv[0] == 'main_private':
ret = latex_main(blobs_dir, uuid=UUID, options=options, access='private')
elif argv[0] == 'main_public':
ret = latex_anon(coldoc_dir, uuid=UUID, options=options, access='public')
elif argv[0] == 'all':
ret = latex_main(blobs_dir, uuid=UUID, options=options, access='private')
ret &= latex_anon(coldoc_dir, uuid=UUID, options=options, access='public')
ret &= latex_tree(blobs_dir,UUID, options=options)
else:
sys.stderr.write('Unknown command, see --help')
return False
return ret
if __name__ == '__main__':
ret = main(sys.argv)
sys.exit(0 if ret else 13)
| 39.010321
| 135
| 0.587559
| 4,420
| 34,017
| 4.350226
| 0.118552
| 0.037861
| 0.020595
| 0.01165
| 0.402798
| 0.3342
| 0.29041
| 0.257437
| 0.213647
| 0.192532
| 0
| 0.003825
| 0.28524
| 34,017
| 871
| 136
| 39.055109
| 0.786995
| 0.07752
| 0
| 0.298422
| 0
| 0.002869
| 0.164914
| 0.016864
| 0
| 0
| 0
| 0.002296
| 0.012912
| 1
| 0.021521
| false
| 0
| 0.021521
| 0
| 0.077475
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f24c7bebfc50062402e4f3d020937fffe8042def | 1,945 | py | Python | kivyx/uix/aspectratio.py | gottadiveintopython/kivyx.uix.aspectratio | e8b049fe76c9350b8c167ff1fb32299b8feceba7 | ["MIT"] | null | null | null | kivyx/uix/aspectratio.py | gottadiveintopython/kivyx.uix.aspectratio | e8b049fe76c9350b8c167ff1fb32299b8feceba7 | ["MIT"] | null | null | null | kivyx/uix/aspectratio.py | gottadiveintopython/kivyx.uix.aspectratio | e8b049fe76c9350b8c167ff1fb32299b8feceba7 | ["MIT"] | null | null | null |
__all__ = ('KXAspectRatio', )
from kivy.uix.layout import Layout
from kivy.properties import BoundedNumericProperty, OptionProperty
HALIGN_TO_ATTR = {
'center': 'center_x',
'middle': 'center_x',
'left': 'x',
'right': 'right',
}
VALIGN_TO_ATTR = {
'center': 'center_y',
'middle': 'center_y',
'bottom': 'y',
'top': 'top',
}
class KXAspectRatio(Layout):
aspect_ratio = BoundedNumericProperty(1, min=0)
halign = OptionProperty(
'center', options=('center', 'middle', 'left', 'right', ))
valign = OptionProperty(
'center', options=('center', 'middle', 'bottom', 'top', ))
def __init__(self, **kwargs):
super().__init__(**kwargs)
tl = self._trigger_layout
self.bind(
parent=tl, children=tl, size=tl, pos=tl,
aspect_ratio=tl, halign=tl, valign=tl)
def add_widget(self, *args, **kwargs):
if self.children:
raise Exception('KXAspectRatio can only have one child')
return super().add_widget(*args, **kwargs)
def do_layout(self, *args):
if not self.children:
return
c = self.children[0]
c_aspect_ratio = self.aspect_ratio
w = self.width
h = self.height
x_attr = HALIGN_TO_ATTR[self.halign]
y_attr = VALIGN_TO_ATTR[self.valign]
if c_aspect_ratio == 0 or w <= 0 or h <= 0:
c.width = 0
c.height = 0
setattr(c, x_attr, getattr(self, x_attr))
setattr(c, y_attr, getattr(self, y_attr))
else:
if (w / h) < c_aspect_ratio:
c.width = w
c.height = w / c_aspect_ratio
c.x = self.x
setattr(c, y_attr, getattr(self, y_attr))
else:
c.width = h * c_aspect_ratio
c.height = h
setattr(c, x_attr, getattr(self, x_attr))
c.y = self.y
| 29.469697
| 68
| 0.5491
| 239
| 1,945
| 4.251046
| 0.267782
| 0.086614
| 0.059055
| 0.038386
| 0.226378
| 0.122047
| 0.122047
| 0.122047
| 0.064961
| 0
| 0
| 0.006047
| 0.319794
| 1,945
| 65
| 69
| 29.923077
| 0.761905
| 0
| 0
| 0.107143
| 0
| 0
| 0.096708
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053571
| false
| 0
| 0.035714
| 0
| 0.196429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f24e4b499348b1e6839320b71759fce8e46d5cc8 | 4,006 | py | Python | src/analyze_img.py | IW276/IW276SS21-P13 | 851e220c34d55caa91f0967e02dc86c34deee2fa | ["MIT"] | null | null | null | src/analyze_img.py | IW276/IW276SS21-P13 | 851e220c34d55caa91f0967e02dc86c34deee2fa | ["MIT"] | null | null | null | src/analyze_img.py | IW276/IW276SS21-P13 | 851e220c34d55caa91f0967e02dc86c34deee2fa | ["MIT"] | null | null | null |
import cv2
import numpy as np
from matplotlib import pyplot as plt
brightness = {"DARK": 0,
"NORMAL": 1,
"LIGHT": 2}
contrast = {"HIGH": 2,
"NORMAL": 1,
"LOW": 0}
class ImageSetup:
def __init__(self):
self.brightness = None
self.contrast = None
self.gamma = 1
# grayscale values
self.average = -1
self.std_deviation = -1
self.threshold = -1
# saturation values
self.sat_average = -1
self.sat_std_deviation = -1
self.sat_threshold = -1
def average(img2d):
rows, cols = img2d.shape
m = np.mean(img2d[0:rows, 0:cols])
return m
def variance_std_deviation(img2d):
# variance
v = np.var(img2d)
# standard deviation
s = np.sqrt(v)
return v, s
def histogram(img2d, name=None, plot=False):
hist = cv2.calcHist([img2d], [0], None, [256], [0, 256])
if plot:
plt.hist(img2d.ravel(), 256, [0, 256])
plt.xlabel(name)
plt.show()
hist_norm = hist.ravel() / hist.sum()
return hist, hist_norm
def threshold(img2d):
# return is the threshold value followed by the result image
thr, o1 = cv2.threshold(img2d, 0, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C + cv2.THRESH_OTSU)
return thr
class Configuration:
def __init__(self, image):
self.img = image
self.imgGray = cv2.cvtColor(self.img, cv2.COLOR_BGR2GRAY)
self.imgHSV = cv2.cvtColor(self.img, cv2.COLOR_BGR2HSV)
self.rows, self.cols, self.cha = self.img.shape
self.pixels = self.cols * self.rows
self.imgSetup = ImageSetup()
def get_brightness(self):
m = average(self.imgGray)
if m < 100:
self.imgSetup.brightness = brightness["DARK"]
elif 100 < m < 150:
self.imgSetup.brightness = brightness["NORMAL"]
else:
self.imgSetup.brightness = brightness["LIGHT"]
self.imgSetup.average = m
def get_saturation(self):
m_sat = average(self.imgHSV[:, :, 1])
s2, s = variance_std_deviation(self.imgHSV[:, :, 1])
self.imgSetup.sat_average = m_sat
self.imgSetup.sat_std_deviation = s
def get_contrast(self):
s2, s = variance_std_deviation(self.imgGray)
if s >= 70:
self.imgSetup.contrast = contrast["HIGH"]
elif s >= 40:
self.imgSetup.contrast = contrast["NORMAL"]
else:
self.imgSetup.contrast = contrast["LOW"]
self.imgSetup.std_deviation = s
def get_thresholds(self):
gray_thresh = threshold(self.imgGray)
sat_thresh = threshold(self.imgHSV[:, :, 1])
self.imgSetup.threshold = gray_thresh
self.imgSetup.sat_threshold = sat_thresh
def print_values(self, do_print=True):
if do_print:
print("Average brightness: " + str(self.imgSetup.average))
print("Standard deviation: " + str(self.imgSetup.std_deviation))
print("Average saturation: " + str(self.imgSetup.sat_average))
print("Std. deviation sat: " + str(self.imgSetup.sat_std_deviation))
print("Threshold gray: " + str(self.imgSetup.threshold))
print("Threshold sat: " + str(self.imgSetup.sat_threshold))
print("Brightness: " + str(self.imgSetup.brightness))
print("Contrast: " + str(self.imgSetup.contrast))
def show(self, show=True):
if show:
cv2.imshow("Color", self.img)
cv2.waitKey(0)
cv2.imshow("Gray", self.imgGray)
cv2.waitKey(0)
cv2.imshow("Saturation", self.imgHSV[:, :, 1])
cv2.waitKey(0)
cv2.destroyAllWindows()
def evaluate(img):
c = Configuration(img)
c.get_brightness()
c.get_contrast()
histogram(c.imgGray, "gray")
histogram(c.imgHSV[:, :, 1], "saturation")
c.get_saturation()
c.get_thresholds()
c.print_values(False)
c.show(False)
return c.imgSetup
| 30.120301
| 92
| 0.595856
| 488
| 4,006
| 4.778689
| 0.213115
| 0.108062
| 0.051458
| 0.041166
| 0.126501
| 0.045455
| 0
| 0
| 0
| 0
| 0
| 0.02982
| 0.28008
| 4,006
| 132
| 93
| 30.348485
| 0.778779
| 0.030205
| 0
| 0.066667
| 0
| 0
| 0.057231
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12381
| false
| 0
| 0.028571
| 0
| 0.219048
| 0.104762
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2503cce75279fee15a3fc46cd4a46df58314fef | 3,799 | py | Python | models/game/bots/RandoMaxBot.py | zachdj/ultimate-tic-tac-toe | b8e6128d9d19628f6f889a3958d30854527a8645 | ["MIT"] | null | null | null | models/game/bots/RandoMaxBot.py | zachdj/ultimate-tic-tac-toe | b8e6128d9d19628f6f889a3958d30854527a8645 | ["MIT"] | null | null | null | models/game/bots/RandoMaxBot.py | zachdj/ultimate-tic-tac-toe | b8e6128d9d19628f6f889a3958d30854527a8645 | ["MIT"] | null | null | null |
import random
from models.game.bots.Bot import Bot
from models.game.Board import Board
class RandoMaxBot(Bot):
""" Semi-random bot
This is a minimax bot that scores moves randomly unless the end of the game is seen within a 2-ply lookahead
"""
def __init__(self, number, name=None):
if name is None:
name = "Rando-Max Bot"
Bot.__init__(self, number, name=name)
self.player_type = 'randomax'
random.seed()
def compute_next_move(self, board, valid_moves):
score, selected_move = self._max(board, valid_moves,-float('inf'), float('inf'), 2)
return selected_move
def _max(self, board, valid_moves, alpha, beta, max_depth):
"""
Private function which computes the move that a rational maximizing player would choose
:param board: GlobalBoard object representing the current state
:param valid_moves: list of valid moves that can be made on the board object
:param alpha: the current value of alpha (the best score that MAX can guarantee so far)
:param beta: the current value of beta (the best score that MIN can guarantee so far)
:return: the value (score) of the best move and the move object itself
"""
if board.board_completed: # termination test
if board.winner == Board.EMPTY or board.winner == Board.CAT:
return 0, None
elif board.winner == self.number:
return 10000000, None
else:
return -10000000, None
elif max_depth == 0:
# scores are computed from the perspective of the 'X' player, so they need to be flipped if our bot is 'O'
if self.number == Board.X:
return self.compute_score(board), None
else:
return -self.compute_score(board), None
a, b = alpha, beta
value = -float('inf')
best_move = None
for move in valid_moves:
child_board = board.clone()
child_board.make_move(move)
move_value, minimizing_move = self._min(child_board, child_board.get_valid_moves(move), a, b, max_depth-1)
if move_value > value:
value = move_value
best_move = move
if value >= b:
return value, best_move
a = max(a, move_value)
return value, best_move
def _min(self, board, valid_moves, alpha, beta, max_depth):
# test for stopping condition
if board.board_completed:
if board.winner == Board.EMPTY or board.winner == Board.CAT:
return 0, None
elif board.winner == self.number:
return 10000000, None
else:
return -10000000, None
elif max_depth == 0:
# scores are computed from the perspective of the 'X' player, so they need to be flipped if our bot is 'O'
if self.number == Board.X:
return self.compute_score(board), None
else:
return -self.compute_score(board), None
a, b = alpha, beta
value = float('inf')
best_move = None
for move in valid_moves:
child_board = board.clone()
child_board.make_move(move)
move_value, maximizing_move = self._max(child_board, child_board.get_valid_moves(move), a, b, max_depth - 1)
if move_value < value:
value = move_value
best_move = move
if value <= a:
return value, best_move
b = min(b, move_value)
return value, best_move
def compute_score(self, board):
return random.uniform(-1, 1)
def setup_bot(self, game):
pass
| 36.528846
| 120
| 0.589102
| 498
| 3,799
| 4.351406
| 0.230924
| 0.046147
| 0.035994
| 0.040609
| 0.522381
| 0.522381
| 0.522381
| 0.49377
| 0.460545
| 0.460545
| 0
| 0.016594
| 0.333772
| 3,799
| 103
| 121
| 36.883495
| 0.839589
| 0.225059
| 0
| 0.591549
| 0
| 0
| 0.01153
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.084507
| false
| 0.014085
| 0.042254
| 0.014085
| 0.366197
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f25ce39acdbb3d945528b6cb2be68ac5895f77bb | 1,241 | py | Python | backend/server.py | mugeshk97/billing-api | 3bf6899f62bee6db7870c3b6008a10c887eb3aa3 | ["MIT"] | null | null | null | backend/server.py | mugeshk97/billing-api | 3bf6899f62bee6db7870c3b6008a10c887eb3aa3 | ["MIT"] | null | null | null | backend/server.py | mugeshk97/billing-api | 3bf6899f62bee6db7870c3b6008a10c887eb3aa3 | ["MIT"] | null | null | null |
from flask import Flask, request, jsonify
from connection import get_sql_connection
from product import get_all_products, insert_product, delete_product
import json
from flask_cors import CORS
app = Flask(__name__)
CORS(app)
cnx = get_sql_connection()
@app.route('/getProducts', methods=['GET'])
def get_products():
products = get_all_products(cnx)
response = jsonify(products)
response.headers.add('Access-Control-Allow-Origin', '*')
return response
@app.route('/insertProduct', methods=['POST'])
def insert_prod():
request_payload = json.loads(request.form['data'])
print(request_payload)
product_id = insert_product(cnx, request_payload)
response = jsonify(
{'product_id': product_id}
)
response.headers.add('Access-Control-Allow-Origin', '*')
return response
@app.route('/deleteProduct', methods=['POST'])
def delete_prod():
request_payload = json.loads(request.form['product_id'])
return_id = delete_product(cnx, request_payload['product_id'])
response = jsonify(
{'product_id': return_id}
)
response.headers.add('Access-Control-Allow-Origin', '*')
return response
if __name__ == '__main__':
app.run(host= '0.0.0.0', port=5050, debug= True)
| 29.547619
| 68
| 0.709106
| 157
| 1,241
| 5.343949
| 0.312102
| 0.064362
| 0.064362
| 0.085816
| 0.31466
| 0.31466
| 0.31466
| 0.224076
| 0.224076
| 0.224076
| 0
| 0.007612
| 0.153102
| 1,241
| 42
| 69
| 29.547619
| 0.790676
| 0
| 0
| 0.228571
| 0
| 0
| 0.1562
| 0.065217
| 0
| 0
| 0
| 0
| 0
| 1
| 0.085714
| false
| 0
| 0.142857
| 0
| 0.314286
| 0.028571
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f25fca280607b95bdb378b87fdab5966ef3e46d2 | 555 | py | Python | api/restaurant_helper_functions.py | daniellespencer/stfu-and-eat | cb82b364ba226dd61f11547720a20a132c1562f6 | ["MIT"] | 1 | 2020-05-15T01:36:59.000Z | 2020-05-15T01:36:59.000Z | api/restaurant_helper_functions.py | daniellespencer/stfu-and-eat | cb82b364ba226dd61f11547720a20a132c1562f6 | ["MIT"] | null | null | null | api/restaurant_helper_functions.py | daniellespencer/stfu-and-eat | cb82b364ba226dd61f11547720a20a132c1562f6 | ["MIT"] | 2 | 2020-05-15T01:31:37.000Z | 2020-05-20T00:04:41.000Z |
import random
from api.config import restaurant_collection as restaurants
def organize_restaurant_output():
output = []
for q in restaurants.find():
output.append({
"id" : str(q['_id']),
'name' : q['name'],
'neighborhood' : q['neighborhood'],
'cuisine' : q['cuisine'],
'address' : q['address'],
'website' : q['website']
})
return output
def select_random_restaurant(options):
value = random.randint(0, len(options)-1)
return options[value]
| 26.428571
| 59
| 0.567568
| 58
| 555
| 5.327586
| 0.551724
| 0.07767
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005089
| 0.291892
| 555
| 21
| 60
| 26.428571
| 0.78117
| 0
| 0
| 0
| 0
| 0
| 0.142086
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0
| 0.117647
| 0
| 0.352941
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2647ec6e2d3b985a5cc52948c24f37ae5751457 | 3,973 | py | Python | stimuli.py | lieke2020/workmate_match | 803f4e3b1fa62280cc0d6a7cd61eb80929dae918 | ["MIT"] | null | null | null | stimuli.py | lieke2020/workmate_match | 803f4e3b1fa62280cc0d6a7cd61eb80929dae918 | ["MIT"] | null | null | null | stimuli.py | lieke2020/workmate_match | 803f4e3b1fa62280cc0d6a7cd61eb80929dae918 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed Dec 1 13:21:44 2021
This file holds the stimuli that are used in the world to represent cues.
obs_time --> Stimulus representing time
match_cifar --> Natural scenes for phase 1 learning
obs_cifar --> Natural scenes for phase 2 learning
match_alpha --> Alphabetic letters for phase 1 learning
obs_alpha --> Alphabetic letters for phase 2 learning
Detailed information on the stimuli can be found in README.txt
@author: Lieke Ceton
"""
#%% Dependencies
import numpy as np
import string
from random import sample
import csv
from sklearn.preprocessing import normalize
#%% Time cell coding
maxtime = 10
# Time vectors are created by convolving a response vector
# with an identity matrix, yielding [maxtime] rows of time cell responses,
# each peaking at a unique, consecutive time.
z = [0.1, 0.25, 0.5, 1, 0.5, 0.25, 0.1]
crop = int((len(z)-1)/2) # the '3'-cropping here removes edge artefacts from convolution;
# Time cell 0 (at row 0) peaks at the first moment in time (column 0).
tmat = np.vstack([np.convolve(z, t)[crop:maxtime + crop] for t in np.eye(maxtime)])
def obs_time(t=0):
"""Vector that represents time"""
return tmat[t]
#%% CIFAR-10 observations for both learning phases
#CIFAR-10 features are extracted from a pre-trained CNN (Caley Woy, see README)
#They are the activity vectors of the second fully connected layer.
#load .csv file
with open("CIFAR_10_kaggle_feature_2.csv", 'r') as f:
csv_features = list(csv.reader(f, delimiter=","))
all_feat = np.array(csv_features[1:], dtype=np.float) #get the first row out
match_dict = normalize(all_feat[:,1:-2]) #normalize
feat_sample = all_feat[0:500,1:-2] #Sample the first 500 features/images
cifar_dict = normalize(feat_sample) #normalise
def match_cifar():
"""Stimuli for phase 1 learning, random natural scenes from CIFAR-10 dataset"""
a = np.random.choice(match_dict.shape[1])
return match_dict[a]
def obs_cifar(obs=1):
"""Stimuli for phase 2 learning, a specific set of CIFAR-10 stimuli is selected"""
return cifar_dict[obs]
#%% Alpha observations for both learning phases
#Construct stimulus dictionary
stimbits = 10 #length of stimuli
#Construct binary stim_repres
binstr = '0{}b'.format(stimbits)
binstrings = [format(i, binstr) for i in range(2**stimbits)]
tobinarr = lambda s : np.array([float(c) for c in s])
Dx = np.vstack([tobinarr(i) for i in binstrings]) #--> a
shuffle = sample(range(len(Dx)),len(Dx)) #shuffle the rows randomly
Dx = Dx[shuffle,:]
# Dx now is a matrix of 128 x 7 bits. 'stimbits' is a dict that will order the
# first 52 of these in a lookup table, #why not choose 2**6 when you only use the first 52? (LJC)
chars = string.ascii_lowercase + string.ascii_uppercase
stimdict = dict(list(zip( chars, Dx )))
# Stimuli with these 5 letters are used in prosaccade/antisaccade, and here made
# linearly separable, cf. Rombouts et al., 2015
stimdict['g'] = np.zeros(stimbits)
stimdict['p'] = np.eye(stimbits)[0]
stimdict['a'] = np.eye(stimbits)[1]
stimdict['l'] = np.eye(stimbits)[2]
stimdict['r'] = np.eye(stimbits)[3] #why? this ruins the neat dictionary that you just made.. (LJC)
# digits, used in 12-AX, are added to the stimdict in a similar manner
digdict = dict(
[(d,Dx[i + 2**(stimbits-1) ]) for i,d in enumerate(string.digits) ])
stimdict.update(digdict)
len_Dx = Dx.shape[0]
def match_alpha():
"""Stimuli for phase 1 learning, random vector selected from binary stimuli"""
rand_int = np.random.choice(len_Dx)
return Dx[rand_int,:]
def obs_alpha(obs='A'):
"""Stimuli for phase 2 learning, all lower and uppercase letters (52 stimuli)"""
# return the row of activity from the selected stimdict index as the observation
return stimdict[obs]
| 37.838095
| 100
| 0.683614
| 620
| 3,973
| 4.327419
| 0.377419
| 0.023854
| 0.013418
| 0.025345
| 0.115542
| 0.022363
| 0
| 0
| 0
| 0
| 0
| 0.031409
| 0.206645
| 3,973
| 104
| 101
| 38.201923
| 0.819797
| 0.532343
| 0
| 0
| 0
| 0
| 0.02426
| 0.01716
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2684fd08fdc8ebf74875458af9886f1554c5e7c | 1,040 | py | Python | meilisearch/tests/test_synonyms_meilisearch.py | jtmiclat/meilisearch-python | b6a48a62bb64ae58181550a0ddc793dcdc0a2b06 | ["MIT"] | null | null | null | meilisearch/tests/test_synonyms_meilisearch.py | jtmiclat/meilisearch-python | b6a48a62bb64ae58181550a0ddc793dcdc0a2b06 | ["MIT"] | null | null | null | meilisearch/tests/test_synonyms_meilisearch.py | jtmiclat/meilisearch-python | b6a48a62bb64ae58181550a0ddc793dcdc0a2b06 | ["MIT"] | null | null | null |
import time
import meilisearch
from meilisearch.tests import BASE_URL, MASTER_KEY
class TestSynonyms:
client = meilisearch.Client(BASE_URL, MASTER_KEY)
index = None
new_synonyms = {
'hp': ['harry potter']
}
default_synonyms = {}
def setup_class(self):
self.index = self.client.create_index(uid='indexUID')
def teardown_class(self):
self.index.delete()
def test_update_synonyms(self):
response = self.index.update_synonyms(self.new_synonyms)
assert isinstance(response, object)
assert 'updateId' in response
def test_get_synonyms(self):
response = self.index.get_synonyms()
assert isinstance(response, object)
assert response == self.new_synonyms
def test_reset_synonyms(self):
response = self.index.reset_synonyms()
assert isinstance(response, object)
assert 'updateId' in response
time.sleep(2)
response = self.index.get_synonyms()
assert response == self.default_synonyms
| 28.888889
| 64
| 0.674038
| 120
| 1,040
| 5.658333
| 0.333333
| 0.079529
| 0.100147
| 0.106038
| 0.430044
| 0.326951
| 0.182622
| 0.182622
| 0.182622
| 0
| 0
| 0.001259
| 0.236538
| 1,040
| 35
| 65
| 29.714286
| 0.853904
| 0
| 0
| 0.241379
| 0
| 0
| 0.036538
| 0
| 0
| 0
| 0
| 0
| 0.241379
| 1
| 0.172414
| false
| 0
| 0.103448
| 0
| 0.448276
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f26f15c108eabe8ae9328cc4ea34ff13c08d0947 | 950 | py | Python | main.py | AbhigyanRanjan0505/dvigyuoixfhybiocthgnkfi | db1b5198f1a0902aff21c74c58578dcb1feda39d | ["MIT"] | null | null | null | main.py | AbhigyanRanjan0505/dvigyuoixfhybiocthgnkfi | db1b5198f1a0902aff21c74c58578dcb1feda39d | ["MIT"] | null | null | null | main.py | AbhigyanRanjan0505/dvigyuoixfhybiocthgnkfi | db1b5198f1a0902aff21c74c58578dcb1feda39d | ["MIT"] | null | null | null |
import plotly.figure_factory as figure_factory
import statistics
import random
import pandas
df = pandas.read_csv("data.csv")
data = df["reading_time"].tolist()
population_mean = statistics.mean(data)
print("Population mean :", population_mean)
def show_fig(mean_list):
df = mean_list
fig = figure_factory.create_distplot(
[df], ["reading_time"], show_hist=False)
fig.show()
def random_set_of_mean(counter):
dataset = []
for i in range(0, counter):
random_index = random.randint(0, len(data) - 1)  # randint is inclusive at both ends; len(data) would raise IndexError
value = data[random_index]
dataset.append(value)
mean = statistics.mean(dataset)
return mean
def setup():
mean_list = []
for i in range(0, 100):
set_of_means = random_set_of_mean(30)
mean_list.append(set_of_means)
show_fig(mean_list)
mean = statistics.mean(mean_list)
print("Sampling mean :", mean)
setup()
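# Illustrative extension (not part of the original script): per the central limit
# theorem, the spread of the sampling means printed by setup() should be roughly
# the population standard deviation divided by sqrt(sample size), i.e. 30 here.
import math
expected_standard_error = statistics.stdev(data) / math.sqrt(30)
print("Expected standard error :", expected_standard_error)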
| 22.093023
| 52
| 0.648421
| 126
| 950
| 4.650794
| 0.365079
| 0.081911
| 0.09215
| 0.051195
| 0.040956
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011127
| 0.243158
| 950
| 42
| 53
| 22.619048
| 0.803894
| 0
| 0
| 0
| 0
| 0
| 0.070485
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.133333
| 0
| 0.266667
| 0.066667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1 | 0 |
f2738d7e2edb6f5a98849ea7773345dc1a404833 | 1,409 | py | Python
hseling_lib_diachrony_webvectors/hseling_lib_diachrony_webvectors/strings_reader.py | wadimiusz/hseling-repo-diachrony-webvectors | 5488d74141df360a6a721637ae7c7577136172d7 | ["MIT"] | null | null | null |
hseling_lib_diachrony_webvectors/hseling_lib_diachrony_webvectors/strings_reader.py | wadimiusz/hseling-repo-diachrony-webvectors | 5488d74141df360a6a721637ae7c7577136172d7 | ["MIT"] | null | null | null |
hseling_lib_diachrony_webvectors/hseling_lib_diachrony_webvectors/strings_reader.py | wadimiusz/hseling-repo-diachrony-webvectors | 5488d74141df360a6a721637ae7c7577136172d7 | ["MIT"] | null | null | null |
#!/usr/bin/env python
# coding:utf8
"""
this module reads strings.csv, which contains all
the strings, and lets the main app use it
"""
import sys
import csv
import os
from flask import Markup
import configparser
config = configparser.RawConfigParser()
path = '../hseling_api_diachrony_webvectors/hseling_api_diachrony_webvectors/webvectors.cfg'
assert os.path.isfile(path), "Current path: {}".format(os.getcwd())
config.read(path)
root = config.get('Files and directories', 'root')
l10nfile = config.get('Files and directories', 'l10n')
# open the strings database:
csvfile = open("../hseling_lib_diachrony_webvectors/hseling_lib_diachrony_webvectors/" + l10nfile, 'rU')  # 'U' mode is Python 2 era; it was removed in Python 3.11, where plain 'r' suffices
acrobat = csv.reader(csvfile, dialect='excel', delimiter=',')
# initialize a dictionary for each language:
language_dicts = {}
langnames = config.get('Languages', 'interface_languages').split(',')
header = next(acrobat)
included_columns = []
for langname in langnames:
language_dicts[langname] = {}
included_columns.append(header.index(langname))
# read the csvfile, populate language_dicts:
for row in acrobat:
for i in included_columns: # range(1, len(row)):
# Markup() is used to prevent autoescaping in templates
if sys.version_info[0] < 3:
language_dicts[header[i]][row[0]] = Markup(row[i].decode('utf-8'))
else:
language_dicts[header[i]][row[0]] = Markup(row[i])
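# Illustrative usage sketch (not part of the original module): the main app can look
# up a localized string by interface language and string id once this module has run.
# The id 'title' and the language code 'en' below are hypothetical examples; the real
# keys come from the first column and the header row of the strings CSV.
#   import strings_reader
#   print(strings_reader.language_dicts['en']['title'])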
| 32.022727
| 104
| 0.721079
| 187
| 1,409
| 5.315508
| 0.524064
| 0.065392
| 0.038229
| 0.05835
| 0.124748
| 0.06841
| 0.06841
| 0.06841
| 0.06841
| 0
| 0
| 0.010815
| 0.146913
| 1,409
| 43
| 105
| 32.767442
| 0.81614
| 0.220724
| 0
| 0
| 0
| 0
| 0.239852
| 0.140221
| 0
| 0
| 0
| 0
| 0.038462
| 1
| 0
| false
| 0
| 0.192308
| 0
| 0.192308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1 | 0 |
f274273a939d4c8377fbaeb7efafd00e9604432e | 1,077 | py | Python
day 5&6/linked list.py | yogeshkhola/100daysofDSA | 93f0d30d718795e4e3eb5d8e677b87baebd0df7c | ["MIT"] | 3 | 2021-03-01T17:04:33.000Z | 2021-03-01T17:44:23.000Z |
day 5&6/linked list.py | yogeshkhola/100daysofDSA | 93f0d30d718795e4e3eb5d8e677b87baebd0df7c | ["MIT"] | null | null | null |
day 5&6/linked list.py | yogeshkhola/100daysofDSA | 93f0d30d718795e4e3eb5d8e677b87baebd0df7c | ["MIT"] | null | null | null |
class node:
def __init__(self,data):
self.data=data
self.next=None
class LinkedList:
def __init__(self):
self.start=None #(self/head)
def viewList(self):#this function print the whole list
if self.start==None:
print("list is empty")
else:
temp=self.start
while temp!=None:
print(temp.data,end=" ")
temp=temp.next
def deleteFirst(self):
if self.start==None:
print("Linked list is empty")
else:
# temp=self.start
self.start=self.start.next
def insertLast(self,value):
newNode=node(value)
if(self.start==None):
self.start=newNode
else:
temp=self.start
while temp.next!=None:
temp=temp.next
temp.next=newNode
mylist=LinkedList()
mylist.insertLast(10)
mylist.insertLast(20)
mylist.insertLast(17)
mylist.insertLast(18)
mylist.insertLast(60)
mylist.viewList()
print()
mylist.deleteFirst()
mylist.viewList()
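# Illustrative addition (not part of the original file): a helper that reuses the
# same traversal pattern as viewList() to count the remaining nodes.
def count_nodes(linked_list):
    count = 0
    temp = linked_list.start
    while temp is not None:
        count += 1
        temp = temp.next
    return count

print()
print("node count:", count_nodes(mylist))  # 4, since deleteFirst() removed one of the five inserted nodes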
| 21.54
| 58
| 0.571959
| 126
| 1,077
| 4.825397
| 0.285714
| 0.148026
| 0.085526
| 0.074013
| 0.215461
| 0.149671
| 0.092105
| 0
| 0
| 0
| 0
| 0.013587
| 0.31662
| 1,077
| 50
| 59
| 21.54
| 0.8125
| 0.056639
| 0
| 0.282051
| 0
| 0
| 0.033531
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.128205
| false
| 0
| 0
| 0
| 0.179487
| 0.102564
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1 | 0 |
f2765c1d1962f66a204431e4dc547e6e1d4a52be | 40,603 | py | Python
detex/getdata.py | d-chambers/Detex | 46602eb8e05e080a23111c8f2716065a016613c2 | ["BSD-3-Clause"] | 39 | 2015-08-15T20:10:14.000Z | 2022-03-17T00:41:57.000Z |
detex/getdata.py | d-chambers/Detex | 46602eb8e05e080a23111c8f2716065a016613c2 | ["BSD-3-Clause"] | 39 | 2015-09-28T23:50:59.000Z | 2019-07-16T20:38:31.000Z |
detex/getdata.py | d-chambers/Detex | 46602eb8e05e080a23111c8f2716065a016613c2 | ["BSD-3-Clause"] | 8 | 2015-10-08T20:43:40.000Z | 2020-08-05T22:47:45.000Z |
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 10 20:21:46 2015
@author: derrick
"""
from __future__ import print_function, absolute_import, unicode_literals, division
import glob
import itertools
import json
import os
import random
import numpy as np
import obspy
import pandas as pd
from six import string_types
import detex
# client imports
import obspy.clients.fdsn
import obspy.clients.neic
import obspy.clients.earthworm
conDirDefault = 'ContinuousWaveForms'
eveDirDefault = 'EventWaveForms'
# extension key to map obspy output type to extension. Add more here
formatKey = {'mseed': 'msd', 'pickle': 'pkl', 'sac': 'sac', 'Q': 'Q'}
def read(path):
"""
function to read a file from a path. If IOError or TypeError simply try
appending os.sep to the start
"""
try:
st = obspy.read(path)
except (IOError, TypeError):
try:
st = obspy.read(os.path.join(os.path.sep, path))
except (IOError, TypeError):
msg = 'Cannot read %s, the file may be corrupt, skipping it' % path
detex.log(__name__, msg, level='warn', pri=True)
return None
return st
def quickFetch(fetch_arg, **kwargs):
"""
Instantiate a DataFetcher using as little information as possible.
Parameters
----------
fetch_arg : str or DataFetcher instance
fetch_arg can be one of three things:
1. An instance of DataFetcher
2. A valid DataFetcher Method other than dir
3. A path to a directory containing waveform data
fetch_arg is checked in that order, so if you are trying to use a
data directory make sure it does not share names with a valid
DataFetcher method
kwargs are passed to the DataFetcher class, see DataFetcher
docs for details
Returns
-------
An instance of DataFetcher
Notes
--------
For client methods (eg 'uuss', 'iris') remove response is assumed True
with the default prelim. filter. If you don't want this make a custom
instance of DataFetcher.
"""
if isinstance(fetch_arg, DataFetcher):
dat_fet = fetch_arg
elif isinstance(fetch_arg, string_types):
if fetch_arg in DataFetcher.supMethods:
if fetch_arg == 'dir':
msg = 'If using method dir you must pass a path to directory'
detex.log(__name__, msg, level='error')
dat_fet = DataFetcher(fetch_arg, removeResponse=True, **kwargs)
else:
if not os.path.exists(fetch_arg):
msg = 'Directory %s does not exist' % fetch_arg
detex.log(__name__, msg, level='error')
dat_fet = DataFetcher('dir', directoryName=fetch_arg, **kwargs)
else:
msg = 'Input not understood, read docs and try again'
detex.log(__name__, msg, level='error')
return dat_fet
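# Illustrative usage sketch (not part of the original module): quickFetch builds a
# DataFetcher from minimal input, e.g.
#   fetcher = detex.getdata.quickFetch('iris')                 # FDSN client, response removal on
#   fetcher = detex.getdata.quickFetch('ContinuousWaveForms')  # falls back to the 'dir' method
# The returned object exposes getStream, getTemData and getConData as defined below.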
def makeDataDirectories(templateKey='TemplateKey.csv',
stationKey='StationKey.csv',
fetch='IRIS',
formatOut='mseed',
templateDir=eveDirDefault,
timeBeforeOrigin=1 * 60,
timeAfterOrigin=4 * 60,
conDir=conDirDefault,
secBuf=120,
conDatDuration=3600,
multiPro=False,
getContinuous=True,
getTemplates=True,
removeResponse=True,
opType='VEL',
prefilt=[.05, .1, 15, 20]):
"""
Function designed to fetch data needed for detex and store them in local
directories. StationKey.csv and TemplateKey.csv indicate which events to
download and for which stations. Organizes ContinuousWaveForms and
EventWaveForms directories.
Parameters
------------
template_key : str or pd DataFrame
The path to the TemplateKey csv
station_key : str or pd DataFrame
The path to the station key
fetch : str or FetchData instance
String for method argument of FetchData class or FetchData instance
formatOut : str
Seismic data file format, most obspy formats acceptable, options are:
'mseed','sac','GSE2','sacxy','q','sh_asc',' slist', 'tspair','segy',
'su', 'pickle', 'h5' (h5 only if obspyh5 module installed)
tempalateDir : str
The name of the template directory. Using the default is recommended
else the templateDir parameter will have to be set in calling most
other detex functions
timeBeforeOrigin: real number
The time in seconds before the reported origin of each template that
is downloaded.
timeAfterOrigin : real number(int, float, etc.)
The time in seconds to download after the origin time of each template.
conDir : str
The name of the continuous waveform directory. Using the default is
recommended
secBuf : real number (int, float, etc.)
The number of seconds to download after each hour of continuous data.
This might be non-zero in order to capture some detections that would
normally be overlooked if data did not overlap somewhat.
conDatDuration : real number (int, float, etc.)
The duration of the continuous data to download in seconds.
multiPro : bool
If True fork several processes to get data at once, potentially much
faster but a bit inconsiderate on the server hosting the data
getContinuous : bool
If True fetch continuous data with station and date ranges listed in
the station key
getTemplates : bool
If True get template data with stations listed in the station key
and events listed in the template key
removeResponse : bool
If true remove instrument response
opType : str
Output type after removing instrument response. Choices are:
"DISP" (m), "VEL" (m/s), or "ACC" (m/s**2)
prefilt : list 4 real numbers
Pre-filter parameters for removing instrument response, response is
flat from corners 2 to 3.
"""
temkey = detex.util.readKey(templateKey, 'template')
stakey = detex.util.readKey(stationKey, 'station')
# Check output type
if formatOut not in formatKey.keys():
msg = ('%s is not an acceptable format, choices are %s' %
(formatOut, formatKey.keys()))
detex.log(__name__, msg, level='error')
# Configure data fetcher
if isinstance(fetch, detex.getdata.DataFetcher):
fetcher = fetch
# Make sure DataFetcher is on same page as function inputs
fetcher.opType = opType
fetcher.removeResponse = removeResponse
fetcher.prefilt = prefilt
else:
fetcher = detex.getdata.DataFetcher(fetch,
removeResponse=removeResponse,
opType=opType,
prefilt=prefilt)
## Get templates
if getTemplates:
msg = 'Getting template waveforms'
detex.log(__name__, msg, level='info', pri=True)
_getTemData(temkey, stakey, templateDir, formatOut,
fetcher, timeBeforeOrigin, timeAfterOrigin)
## Get continuous data
if getContinuous:
msg = 'Getting continuous data'
detex.log(__name__, msg, level='info', pri=True)
_getConData(fetcher, stakey, conDir, secBuf, opType, formatOut,
duration=conDatDuration)
## Log finish
msg = "finished makeDataDirectories call"
detex.log(__name__, msg, level='info', close=True, pri=True)
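# Illustrative call sketch (not part of the original module), assuming TemplateKey.csv
# and StationKey.csv exist in the working directory:
#   detex.getdata.makeDataDirectories(templateKey='TemplateKey.csv',
#                                     stationKey='StationKey.csv',
#                                     fetch='IRIS', formatOut='mseed')
# This fills the EventWaveForms and ContinuousWaveForms directories and indexes them
# with indexDirectory (defined further below).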
def _getTemData(temkey, stakey, temDir, formatOut, fetcher, tb4, taft):
streamGenerator = fetcher.getTemData(temkey, stakey, tb4, taft,
returnName=True, temDir=temDir,
skipIfExists=True)
for st, name in streamGenerator:
netsta = st[0].stats.network + '.' + st[0].stats.station
fname = netsta + '.' + name + '.' + formatKey[formatOut]
fdir = os.path.join(temDir, name)
if not os.path.exists(fdir):
os.makedirs(fdir)
st.write(os.path.join(fdir, fname), formatOut)
if not os.path.exists(os.path.join(temDir, '.index.db')):
indexDirectory(temDir)
def _getConData(fetcher, stakey, conDir, secBuf, opType, formatOut,
duration=3600):
streamGenerator = fetcher.getConData(stakey,
secBuf,
returnName=True,
conDir=conDir,
skipIfExists=True,
duration=duration)
for st, path, fname in streamGenerator:
if st is not None: # if data were returned
if not os.path.exists(path):
os.makedirs(path)
fname = fname + '.' + formatKey[formatOut]
st.write(os.path.join(path, fname), formatOut)
if not os.path.exists(os.path.join(conDir, '.index.db')):
indexDirectory(conDir)
class DataFetcher(object):
"""
\n
Class to handle data acquisition
Parameters
----------
method : str or int
One of the approved methods for getting data as supported by detex
Options are:
"dir" : A data directory as created by makeDataDirectories
"client" : an obspy client can be passed to get data
useful if using an in-network database
"iris" : an iris client is initiated, also uses IRIS for inventory
"uuss" : A client attached to the university of utah
seismograph stations is initiated using CWB for waveforms
and IRIS is used for station inventories
client : An obspy client object
Client object used to get data, from obspy.clients
removeResponse : bool
If True remove response before returning stream.
inventoryArg : None, obspy client object, or obspy Inventory object
A separate client for station inventories, only used if
removeResponse == True, also supports keyword "iris" for iris client
directoryName : str
A path to the continuous waveforms directory or event waveforms
directory. If None is passed default names are used
(ContinuousWaveForms and EventWaveForms)
opType : str
Output type after removing instrument response. Choices are:
"DISP" (m), "VEL" (m/s), or "ACC" (m/s**2)
prefilt : list of real numbers
Pre-filter parameters for removing instrument response.
conDatDuration : int or float
Duration for continuous data in seconds
conBuff : int or float
The amount of data, in seconds, to download at the end of the
conDatDuration. Ideally should be equal to template length, important
in order to avoid missing potential events at the end of a stream
timeBeforeOrigin : int or float
Seconds before origin of each event to fetch (used in getTemData)
timeAfterOrigin : int or float
Seconds after origin of each event to fetch (used in getTemData)
checkData : bool
If True apply some data checks before returning streams, can be useful
for older data sets.
fillZeros : bool
If True fill data that are not available with 0s (provided some data are
available)
"""
supMethods = ['dir', 'client', 'uuss', 'iris']
def __init__(self, method, client=None, removeResponse=True,
inventoryArg=None, directoryName=None, opType='VEL',
prefilt=[.05, .1, 15, 20], conDatDuration=3600, conBuff=120,
timeBeforeOrigin=1 * 60, timeAfterOrigin=4 * 60, checkData=True,
fillZeros=False):
self.__dict__.update(locals()) # Instantiate all inputs
self.inventory = _getInventory(inventoryArg)
self._checkInputs()
if self.removeResponse and self.inventory is None:
if self.method == 'dir':
msg = ('Cannot remove response without a valid inventoryArg, '
'setting removeResponse to False')
detex.log(__name__, msg, level='warning', pri=True)
self.removeResponse = False
def _checkInputs(self):
if not isinstance(self.method, string_types):
msg = 'method must be a string. options:\n %s' % self.supMethods
detex.log(__name__, msg, level='error', e=TypeError)
self.method = self.method.lower() # parameter to lowercase
if not self.method in DataFetcher.supMethods:
msg = ('method %s not supported. Options are:\n %s' %
(self.method, self.supMethods))
detex.log(__name__, msg, level='error', e=ValueError)
if self.method == 'dir':
if self.directoryName is None:
self.directoryName = conDirDefault
dirPath = glob.glob(self.directoryName)
if len(dirPath) < 1:
msg = ('directory %s not found make sure path is correct' %
self.directoryName)
detex.log(__name__, msg, level='error', e=IOError)
else:
self.directory = dirPath[0]
self._getStream = _loadDirectoryData
elif self.method == "client":
if self.client is None:
msg = 'Method %s requires a valid obspy client' % self.method
detex.log(__name__, msg, level='error', e=ValueError)
self._getStream = _assignClientFunction(self.client)
elif self.method == "iris":
self.client = obspy.clients.fdsn.Client("IRIS")
self._getStream = _assignClientFunction(self.client)
elif self.method == 'uuss': # uuss setting
self.client = obspy.clients.neic.Client('128.110.129.227')
self._getStream = _assignClientFunction(self.client)
self.inventory = obspy.clients.fdsn.Client('iris') # use iris for resps
def getTemData(self, temkey, stakey, tb4=None, taft=None, returnName=True,
temDir=None, skipIfExists=False, skipDict=None,
returnTimes=False, phases=None):
"""
Take detex station keys and template keys and yield stream objects of
all possible combinations
Parameters
----------
temkey : pd DataFrame
Detex template key
stakey : pd DataFrame
Detex station key
tb4 : None, or real number
Time before origin (or first phase pick if phases is not None)
taft : None or real number
Time after origin (or first phase pick if phases is not None)
returnName : bool
If True return the name of the event as found in the template key
temDir : str or None
Name of template directory, used to check if exists
skipIfExists : bool
If True, don't yield the stream if a file for it already exists in temDir
skipDict : dict
Dictionary of stations (keys, net.sta) and events (values)
to skip
returnTimes : bool
If True return times of data
phases : None, str, or DataFrame
If not None must be a path to a phasePick file, in the same format
as detex.util.pickPhases, or a path to a saved csv of the same.
tb4 and taft will be referenced to the first arrival for each
event and station, or the origin if none are available.
Yields
--------
Stream objects of possible combination if data are fetchable and event
names if returnName == True or times of data if returnTimes == True
"""
if tb4 is None:
tb4 = self.timeBeforeOrigin
if taft is None:
taft = self.timeAfterOrigin
if skipDict is not None and len(skipDict.keys()) < 1:
skipDict = None
stakey = detex.util.readKey(stakey, key_type='station')
temkey = detex.util.readKey(temkey, key_type='template')
if phases is not None:
phases = detex.util.readKey(phases, "phases")
indexiter = itertools.product(stakey.index, temkey.index)
# iter through each station/event pair and fetch data
for stain, temin in indexiter:
ser = temkey.loc[temin].combine_first(stakey.loc[stain])
netsta = ser.NETWORK + '.' + ser.STATION
# Skip event/station combos in skipDict
if skipDict is not None and netsta in skipDict.keys():
vals = skipDict[netsta]
if ser.NAME in vals:
continue
# skip events that already have files
if skipIfExists:
pfile = glob.glob(os.path.join(temDir, ser.NAME, netsta + '*'))
if len(pfile) > 0:
continue
if isinstance(ser.TIME, string_types) and 'T' in ser.TIME:
time = ser.TIME
else:
time = float(ser.TIME)
net = ser.NETWORK
sta = ser.STATION
chan = ser.CHANNELS.split('-')
# if phases option is used then find first phase and use it
if phases is not None:
con1 = (phases.Event == ser.NAME)
con2 = (phases.Station == '%s.%s' % (net, sta))
curEve = phases[con1 & con2]
if len(curEve) < 1: # if event station pair not in phases
msg = (('%s on %s was not in phase file, using origin')
% (ser.NAME, sta))
detex.log(__name__, msg, level='info')
t = obspy.UTCDateTime(time)
else:
utcs = [obspy.UTCDateTime(x) for x in curEve.TimeStamp]
t = min(utcs)
else:
t = obspy.UTCDateTime(time)
start = t - tb4
end = t + taft
st = self.getStream(start, end, net, sta, chan, '??')
if st is None: # skip if returns nothing
continue
if returnName:
yield st, ser.NAME
elif returnTimes:
yield st, start, end
else:
yield st
def getConData(self, stakey, secBuff=None, returnName=False,
returnTimes=False, conDir=None, skipIfExists=False,
utcstart=None, utcend=None, duration=None, randSamps=None):
"""
Get continuous data defined by the stations and time range in
the station key
Parameters
-----------
stakey : str or pd.DataFrame
A path to the stationkey or a loaded DF of the stationkey
secBuff : int
A buffer in seconds to add to end of continuous data chunk
so that consecutive files overlap by secBuf
returnName : bool
If True return the name of the file and expected path
CondDir : str
Path to Continuous data directory if it exists. Used to check
if a file already exists so it can be skipped if skipIfExists
skipIfExists : bool
If True files already exists wont be downloaded again
utcstart : None, int, str or obspy.UTCDateTime instance
An object readable by obspy.UTCDateTime class which is the start
time of continuous data to fetch. If None use time in station key
utcend : None, int or str, or obspy.UTCDateTime instance
An object readable by obspy.UTCDateTime class which is the end
time of continuous data to fetch. If None use time in station key
duration : None, int, or float
The duration of each continuous data chunk to fetch, if None
use conDataDuration attribute of DataFetcher instance
randSamps : None or int
If not None, return random number of traces rather than whole
range
Yields
--------
Obspy trace and other requested parameters
"""
stakey = detex.util.readKey(stakey, 'station')
if secBuff is None:
secBuff = self.conBuff
if duration is None:
duration = self.conDatDuration
for num, ser in stakey.iterrows():
netsta = ser.NETWORK + '.' + ser.STATION
if utcstart is None:
ts1 = obspy.UTCDateTime(ser.STARTTIME)
else:
ts1 = utcstart
if utcend is None:
ts2 = obspy.UTCDateTime(ser.ENDTIME)
else:
ts2 = utcend
utcs = _divideIntoChunks(ts1, ts2, duration, randSamps)
for utc in utcs:
if conDir is not None:
path, fil = _makePathFile(conDir, netsta, utc)
if skipIfExists:
pfile = glob.glob(os.path.join(path, fil + '*'))
if len(pfile) > 0: # if already exists then skip
continue
start = utc
end = utc + self.conDatDuration + secBuff
net = ser.NETWORK
sta = ser.STATION
chan = ser.CHANNELS.split('-')
st = self.getStream(start, end, net, sta, chan, '*')
if st is None or len(st) < 1:
continue
if not utcend is None:
if utcend.timestamp < st[0].stats.endtime.timestamp: # trim if needed
st.trim(endtime=utcend)
if len(st) < 1:
continue
if returnName and returnTimes:
path, fname = _makePathFile(conDir, netsta, utc)
yield st, path, fname, start, end
elif returnName:
path, fname = _makePathFile(conDir, netsta, utc)
yield st, path, fname
elif returnTimes:
yield st, start, end
else:
yield st
def getStream(self, start, end, net, sta, chan='???', loc='??'):
"""
function for getting data.\n
Parameters
----------
start : obspy.UTCDateTime object
Start time to fetch
end : obspy.UTCDateTime object
End time to fetch
net : str
Network code, usually 2 letters
sta : str
Station code
chan : str or list of str (should support wildcard)
Channels to fetch
loc : str
Location code for station
Returns
---------
An instance of obspy.Stream populated with requested data, or None if
not available.
"""
# make sure start and end are UTCDateTimes
start = obspy.UTCDateTime(start)
end = obspy.UTCDateTime(end)
# check that chan input is ok
if not isinstance(chan, (list, tuple)):
if not isinstance(chan, string_types):
msg = 'chan must be a string or list of strings'
detex.log(__name__, msg, level='error')
chan = [chan]
# fetch stream
st = self._getStream(self, start, end, net, sta, chan, loc)
# perform checks if required
if self.checkData:
st = _dataCheck(st, start, end)
# if no data return None
if st is None or len(st) < 1:
return None
# attach response
if self.removeResponse and self.inventory is not None:
if not _hasResponse(st):
st = _attachResponse(self, st, start, end, net, sta, loc, chan)
# remove response
if self.removeResponse:
st = _removeInstrumentResponse(self, st)
if st is None: # return None if response removal failed
return None
# trims and zero fills
st.trim(starttime=start, endtime=end)
st.merge(1) # merge and split to overwrite overlaps
st = st.split()
st.detrend('linear')
if self.fillZeros:
st.trim(starttime=start, endtime=end, pad=True, fill_value=0.0)
st.merge(1, fill_value=0.0)
return st
########## Functions for loading data based on selected methods ###########
def _loadDirectoryData(fet, start, end, net, sta, chan, loc):
"""
Function to load continuous data from the detex directory structure
"""
# get times with slight buffer
t1 = obspy.UTCDateTime(start).timestamp
t2 = obspy.UTCDateTime(end).timestamp
buf = 3 * fet.conDatDuration
dfind = _loadIndexDb(fet.directoryName, net + '.' + sta, t1 - buf, t2 + buf)
if dfind is None:
t1p = obspy.UTCDateTime(t1)
t2p = obspy.UTCDateTime(t2)
msg = 'data from %s to %s on %s not found in %s' % (t1p, t2p, sta,
fet.directoryName)
detex.log(__name__, msg, level='warning', pri=False)
return None
# define conditions in which condata should not be loaded
# con1 and con2 - No overlap (other than 10%)
tra = t2 - t1 # time range
con1 = ((dfind.Starttime <= t1) & (dfind.Endtime - tra * .1 < t1) &
(dfind.Starttime < t2) & (dfind.Endtime < t2))
con2 = ((dfind.Starttime > t1) & (dfind.Endtime > t1) &
(dfind.Starttime + tra * .1 > t2) & (dfind.Endtime >= t2))
df = dfind[~(con1 | con2)]
if len(df) < 1:
t1p = obspy.UTCDateTime(t1)
t2p = obspy.UTCDateTime(t2)
msg = 'data from %s to %s on %s not found in %s' % (t1p, t2p, sta,
fet.directoryName)
detex.log(__name__, msg, level='warning', pri=False)
return None
st = obspy.core.Stream()
if len(df.Path) < 1: # if no event fits description
return None
for path, fname in zip(df.Path, df.FileName):
fil = os.path.join(path, fname)
st1 = read(fil)
if not st1 is None:
st += st1
# st.trim(starttime=start, endtime=end)
# check if chan variable is string else iterate
if isinstance(chan, string_types):
stout = st.select(channel=chan)
else:
stout = obspy.core.Stream()
for cha in chan:
stout += st.select(channel=cha)
loc = '*' if loc in ['???', '??'] else loc # convert ? to *
stout = stout.select(location=loc)
return stout
def _assignClientFunction(client):
"""
function to take an obspy client FDSN, NEIC, EW, etc. return the
correct loadFromClient function for getting data.
"""
if isinstance(client, obspy.clients.fdsn.Client):
return _loadFromFDSN
elif isinstance(client, obspy.clients.neic.Client):
return _loadFromNEIC
elif isinstance(client, obspy.clients.earthworm.Client):
return _loadFromEarthworm
else:
msg = 'Client type not supported'
detex.log(__name__, msg, level='error', e=TypeError)
## load from client functions, this is needed because the APIs are not the same
def _loadFromNEIC(fet, start, end, net, sta, chan, loc):
"""
Use obspy.neic.Client to fetch waveforms
"""
client = fet.client
# str reps of utc objects for error messages
startstr = str(start)
endstr = str(end)
st = obspy.Stream()
for cha in chan:
try: # try neic client
st += client.get_waveforms(net, sta, loc, cha, start, end)
except:
msg = ('Could not fetch data on %s from %s to %s' %
(net + '.' + sta, startstr, endstr))
detex.log(__name__, msg, level='warning', pri=False)
st = None
return st
def _loadFromEarthworm(fet, start, end, net, sta, chan, loc):
client = fet.client
startstr = str(start)
endstr = str(end)
st = obspy.Stream()
if '*' in loc or '?' in loc: # adjust for earthworm loc codes
loc = '--'
for cha in chan:
try: # try earthworm client
st += client.get_waveforms(net, sta, loc, cha, start, end)
except:
msg = ('Could not fetch data on %s from %s to %s' %
(net + '.' + sta, startstr, endstr))
detex.log(__name__, msg, level='warning', pri=False)
st = None
return st
def _loadFromFDSN(fet, start, end, net, sta, chan, loc):
"""
Use obspy.clients.fdsn.Client to fetch waveforms
"""
client = fet.client
# str reps of utc objects for error messages
startstr = str(start)
endstr = str(end)
# convert channels to correct format (list separated by ',')
if not isinstance(chan, string_types):
chan = ','.join(chan)
else:
if '-' in chan:
chan = ','.join(chan.split('-'))
# try to get waveforms, else return None
try:
st = client.get_waveforms(net, sta, loc, chan, start, end, attach_response=fet.removeResponse)
except:
msg = ('Could not fetch data on %s from %s to %s' %
(net + '.' + sta, startstr, endstr))
detex.log(__name__, msg, level='warning', pri=False)
st = None
return st
########## MISC functions #############
def _attachResponse(fet, st, start, end, net, sta, loc, chan):
"""
Function to attach response from inventory or client
"""
if not fet.removeResponse or fet.inventory is None:
return st
if isinstance(fet.inventory, obspy.core.inventory.Inventory):
st.attach_response(fet.inventory)
else:
inv = obspy.core.inventory.Inventory([], 'detex')
for cha in chan:
inv += fet.inventory.get_stations(starttime=start,
endtime=end,
network=net,
station=sta,
loc=loc,
channel=cha,
level="response")
st.attach_response(inv)
return st
def _getInventory(invArg):
"""
Take a string, Obspy client, or inventory object and return inventory
object used to attach responses to stream objects for response removal
"""
if isinstance(invArg, string_types):
if invArg.lower() == 'iris':
invArg = obspy.clients.fdsn.Client('IRIS')
elif not os.path.exists(invArg):
msg = ('if inventoryArg is str then it must be a client name, ie '
'IRIS, or a path to a station xml')
detex.log(__name__, msg, level='error')
else:
return obspy.read_inventory(invArg)
elif isinstance(invArg, obspy.station.inventory.Inventory):
return invArg
elif isinstance(invArg, obspy.clients.fdsn.Client):
return invArg
elif invArg is None:
return None
def _dataCheck(st, start, end):
# if none or empty return None
if st is None or len(st) < 1:
return None
netsta = st[0].stats.network + '.' + st[0].stats.station
time = str(st[0].stats.starttime).split('.')[0]
# check if data range is way off what was requested
utcmin = min([x.stats.starttime for x in st])
utcmax = max([x.stats.endtime for x in st])
if (end - start) - (utcmax - utcmin) > 60 * 10: # give 10 minutes tolerance
msg = '%s starting on %s is shorter than expected' % (netsta, time)
detex.log(__name__, msg, pri=True)
# Check sample rates
if any([tr.stats.sampling_rate % 1 != 0 for tr in st]):
for tr in st:
tr.stats.sampling_rate = np.round(tr.stats.sampling_rate)
msg = ('Found non-int sampling_rates, rounded to nearest \
int on %s around %s' % (netsta, time))
detex.log(__name__, msg, level='warning')
if any([not np.any(x.data) for x in st]):
msg = ('At least one channel is all 0s on %s around %s, skipping' %
(netsta, time))
detex.log(__name__, msg, level='warn', pri=True)
return None
return st
def _hasResponse(st):
"""
Test if all channels have responses of a stream, return bool
"""
return all([hasattr(tr.stats, 'response') for tr in st])
def _removeInstrumentResponse(fet, st):
if not fet.removeResponse: # pass stream back if no response removal
return st
st.detrend('linear') # detrend
st = _fftprep(st)
try:
st.remove_response(output=fet.opType, pre_filt=fet.prefilt)
except:
utc1 = str(st[0].stats.starttime).split('.')[0]
utc2 = str(st[0].stats.endtime).split('.')[0]
msg = 'RemoveResponse Failed for %s,%s, from %s to %s, skipping' % (
st[0].stats.network, st[0].stats.station, utc1, utc2)
detex.log(__name__, msg, level='warning', pri=True)
st = None
return st
def _fftprep(st):
data = st[0].data
"data is numpy vector, makes sure it is not of odd length or fft drags"
if len(data) % 2 != 0 and len(data) % 100 > 50:
data = np.insert(data, 0, data[0])
st[0].data = data
st[0].stats.starttime = st[0].stats.starttime - st[0].stats.delta
elif len(data) % 2 != 0 and len(data) % 100 < 50:
data = data[1:]
st[0].data = data
st[0].stats.starttime = st[0].stats.starttime + st[0].stats.delta
return st
def _divideIntoChunks(utc1, utc2, duration, randSamps):
"""
Function to take two utc date time objects and create a generator to yield
all times in between by intervals of duration. If randSamps is not None
it will return a random subsample of the chunks to make loading files
easier. The randSamps parameter can be at most the number of available chunks.
Inputs can be any obspy readable format
"""
utc1 = obspy.UTCDateTime(utc1)
utc2 = obspy.UTCDateTime(utc2)
# convert to time stamps (epoch time)
ts1 = utc1.timestamp - utc1.timestamp % duration
ts2 = utc2.timestamp - utc2.timestamp % duration
if randSamps is None:
t = ts1
while t <= ts2:
yield obspy.UTCDateTime(t) # yield a value
t += duration # add an hour
else:
utcList = np.arange(utc1.timestamp, utc2.timestamp, duration)
if randSamps > len(utcList) / 4:
msg = ('Population too small for %d random samples, taking %d' % (
randSamps, len(utcList)))
detex.log(__name__, msg, level='info')
randSamps = len(utcList)
ranutc = random.sample(utcList, randSamps)
rsamps = [obspy.UTCDateTime(x) for x in ranutc]
for samp in rsamps:
yield samp
def _makePathFile(conDir, netsta, utc):
"""
Make the expected filename to see if continuous data chunk exists
"""
utc = obspy.UTCDateTime(utc)
year = '%04d' % utc.year
jd = '%03d' % utc.julday
hr = '%02d' % utc.hour
mi = '%02d' % utc.minute
se = '%02d' % utc.second
path = os.path.join(conDir, netsta, year, jd)
fname = netsta + '.' + year + '-' + jd + 'T' + '-'.join([hr, mi, se])
return path, fname
###### Index directory functions ##########
def indexDirectory(dirPath):
"""
Create an index (.index.db) for a directory with stored waveform files
which also contains quality info of each file
Parameters
__________
dirPath : str
The path to the directory containing waveform data (any structure)
"""
columns = ['Path', 'FileName', 'Starttime', 'Endtime', 'Gaps', 'Nc', 'Nt',
'Duration', 'Station']
df = pd.DataFrame(columns=columns) # DataFrame for indexing
msg = 'indexing, or updating index for %s' % dirPath
detex.log(__name__, msg, level='info', pri=True)
# Create a list of possible path permutations to save space in database
pathList = [] # A list of lists with different path permutations
for dirpath, dirname, filenames in os.walk(dirPath):
dirList = os.path.abspath(dirpath).split(os.path.sep)
# Expand pathList if needed
while len(dirList) > len(pathList):
pathList.append([])
# loop and put info in pathList that isnt already there
for ind, value in enumerate(dirList):
if not isinstance(value, list):
value = [[value]]
for val in value:
for va in val:
if va not in pathList[ind]:
pathList[ind].append(va)
# Loop over file names perform quality checks
for fname in filenames:
if fname[0] == '.':
continue
fpath = os.path.join(*dirList)
fullpath = os.path.join(fpath, fname)
qualDict = _checkQuality(fullpath)
if qualDict is None: # If file is not obspy readable
msg = 'obspy failed to read %s , skipping' % fullpath
detex.log(__name__, msg, level='warning', pri=True)
continue # skip to next file
pathInts = [pathList[num].index(x) for num,
x in enumerate(dirList)]
df.loc[len(df), 'Path'] = json.dumps(pathInts)
for key, value in qualDict.items(): # items() works under both Python 2 and 3
df.loc[len(df) - 1, key] = value
df.loc[len(df) - 1, 'FileName'] = fname
# Create path index key
if len(pathList) < 1:
msg = 'No obspy readable files found in %s' % dirPath
detex.log(__name__, msg, level='error')
dfInd = _createIndexDF(pathList)
detex.util.saveSQLite(df, os.path.join(dirPath, '.index.db'), 'ind')
detex.util.saveSQLite(dfInd, os.path.join(dirPath, '.index.db'), 'indkey')
def _createIndexDF(pathList):
indLength = len(pathList)
colLength = max([len(x) for x in pathList])
ind = [x for x in range(indLength)]
cols = ['col_' + str(x) for x in range(colLength)]
df = pd.DataFrame(index=ind, columns=cols)
df.fillna(value='', inplace=True)
for ind1, pl in enumerate(pathList):
for ind2, item in enumerate(pl):
df.loc[ind1, 'col_' + str(ind2)] = item
return df
def _checkQuality(stPath):
"""
load a path to an obspy trace and check quality
"""
st = read(stPath)
if st is None:
return None
lengthStream = len(st)
gaps = st.get_gaps()
gapsum = np.sum([x[-2] for x in gaps])
starttime = min([x.stats.starttime.timestamp for x in st])
endtime = max([x.stats.endtime.timestamp for x in st])
duration = endtime - starttime
nc = len(list(set([x.stats.channel for x in st])))
netsta = st[0].stats.network + '.' + st[0].stats.station
outDict = {'Gaps': gapsum, 'Starttime': starttime, 'Endtime': endtime,
'Duration': duration, 'Nc': nc, 'Nt': lengthStream,
'Station': netsta}
return outDict
def _loadIndexDb(dirPath, station, t1, t2):
indexFile = glob.glob(os.path.join(dirPath, '.index.db'))
if len(indexFile) < 1:
msg = '%s is not currently indexed, indexing now' % dirPath
detex.log(__name__, msg, level='info', pri=True)
indexDirectory(dirPath)
indexFile = glob.glob(os.path.join(dirPath, '.index.db'))
sql = (('SELECT %s FROM %s WHERE Starttime>=%f AND ' +
'Endtime<=%f AND Station="%s"') %
('*', 'ind', t1, t2, station))
df = detex.util.loadSQLite(indexFile[0], 'ind', sql=sql, silent=False)
if df is None or len(df) < 1: # if not in database
return None
dfin = detex.util.loadSQLite(indexFile[0], 'indkey', convertNumeric=False)
dfin.columns = [int(x.split('_')[1]) for x in dfin.columns]
dfin.index = [int(x) for x in dfin.index]
# reconstruct path
df['Path'] = [_associatePathList(x, dfin) for x in df['Path']]
df.sort_values(by='FileName', inplace=True)
df.reset_index(drop=True, inplace=True)
return df
def _associatePathList(pathList, dfin):
pl = json.loads(pathList)
pat = []
for num, p in enumerate(pl):
pat.append(dfin.loc[num, p])
return os.path.join(*pat)
getAllData = makeDataDirectories
| 38.929051
| 102
| 0.582174
| 4,924
| 40,603
| 4.749594
| 0.152924
| 0.010604
| 0.015906
| 0.019883
| 0.263565
| 0.213537
| 0.177107
| 0.159661
| 0.13127
| 0.09672
| 0
| 0.009421
| 0.325493
| 40,603
| 1,042
| 103
| 38.966411
| 0.844525
| 0.292146
| 0
| 0.280788
| 0
| 0
| 0.078739
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045977
| false
| 0.001642
| 0.022989
| 0
| 0.131363
| 0.001642
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1 | 0 |
f2766a9a2df58d6c9fe0fc41dab441157d2a7a7d | 4,850 | py | Python
HouseHunter/core.py | JGBMichalski/House-Hunter | 7ad1e866907545b8e2302c1a775cadbd8f807ad9 | ["MIT"] | null | null | null |
HouseHunter/core.py | JGBMichalski/House-Hunter | 7ad1e866907545b8e2302c1a775cadbd8f807ad9 | ["MIT"] | null | null | null |
HouseHunter/core.py | JGBMichalski/House-Hunter | 7ad1e866907545b8e2302c1a775cadbd8f807ad9 | ["MIT"] | null | null | null |
import requests
import re
from bs4 import BeautifulSoup
import json
import HouseHunter.globals as Globals
from HouseHunter.ad import *
from pathlib import Path
class Core():
def __init__(self, filename="ads.json"):
self.filepath = Path().absolute().joinpath(filename) if filename else None
self.all_ads = {}
self.new_ads = {}
self.third_party_ads = []
self.load_ads()
# Reads given file and creates a dict of ads in file
def load_ads(self):
# If filepath is None, just skip local file
if self.filepath:
# If the file doesn't exist create it
if not self.filepath.exists():
ads_file = self.filepath.open(mode='w')
ads_file.write("{}")
ads_file.close()
return
with self.filepath.open(mode="r") as ads_file:
self.all_ads = json.load(ads_file)
# Save ads to file
def save_ads(self):
# If filepath is None, just skip local file
if self.filepath:
with self.filepath.open(mode="w") as ads_file:
json.dump(self.all_ads, ads_file)
def validate_origin(self, url):
for origin in Globals.SUPPORTED_ORIGINS:
if origin in url:
return Globals.SUPPORTED_ORIGINS.index(origin)
return -1
# Pulls page data from a given url and finds all ads on each page
def scrape_url_for_ads(self, url):
self.new_ads = {}
email_title = None
origin = self.validate_origin(url)
if origin < 0:
print("Site not supported: {}".format(url))
return self.new_ads, email_title
while url:
# Get the html data from the URL
page = requests.get(url)
soup = BeautifulSoup(page.content, "html.parser")
# If the email title doesn't exist yet, pull it from the html data
if email_title is None:
email_title = self.get_email_title(origin, soup)
# Find ads on the page
self.find_ads(soup, origin)
# Set url for next page of ads
# Depending on supported origins, this may not apply to all
url = soup.find("a", string="Next")
if not url:
url = soup.find("a", href=True, rel="next")
if url:
url = Globals.SUPPORTED_ORIGINS[origin] + url['href']
return self.new_ads, email_title
def find_ads(self, soup, origin):
# Finds all ad trees in page html.
ad_regex = re.compile('.*{}.*'.format(Globals.AD_ROOT_CLASS_NAMES[origin][Globals.PRIMARY]))
ads = soup.find_all(Globals.AD_ROOT_ELEMENT_TYPE[origin], {"class": ad_regex})
# If no ads use different class name
if not ads:
ad_regex = re.compile('.*{}.*'.format(Globals.AD_ROOT_CLASS_NAMES[origin][Globals.SECONDARY]))
ads = soup.find_all(Globals.AD_ROOT_ELEMENT_TYPE[origin], {"class": ad_regex})
# Create a dictionary of all ads with ad id being the key
for ad in ads:
if origin == 0:
current_ad = WFPAd(origin, ad)
elif origin == 1:
current_ad = RewAd(origin, ad)
else:
return
# Skip third-party ads and ads already found
if (current_ad.id not in self.all_ads):
self.new_ads[current_ad.id] = current_ad.info
self.all_ads[current_ad.id] = current_ad.info
def get_email_title(self, origin, soup):
if origin != 0:
# Used for origins that do not give any details about the search options
return Globals.SUPPORTED_FULL_NAMES[origin]
else:
# Depending on supported origins, this may not apply to all
email_title_location = soup.find('div', {"class": "results-info"}).find('h1')
if email_title_location:
# Depending on supported origins, this may not apply to all
return Globals.SUPPORTED_FULL_NAMES[origin] + " - " + self.format_title(email_title_location.text.split(' in ')[1].strip('"'))
else:
return Globals.SUPPORTED_FULL_NAMES[origin]
# Makes the first letter of every word upper-case
def format_title(self, title):
new_title = []
title = title.split()
for word in title:
new_word = ''
new_word += word[0].upper()
if len(word) > 1:
new_word += word[1:]
new_title.append(new_word)
return ' '.join(new_title)
# Returns a given list of words to lower-case words
def words_to_lower(self, words):
return [word.lower() for word in words]
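# Illustrative usage sketch (not part of the original module); the URL below is a
# placeholder and would have to point at a listing search on one of
# Globals.SUPPORTED_ORIGINS for any ads to be found.
if __name__ == '__main__':
    hunter = Core(filename="ads.json")
    new_ads, email_title = hunter.scrape_url_for_ads("https://example.com/search?query=house")
    print(email_title, "-", len(new_ads), "new ads")
    hunter.save_ads()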
| 34.15493
| 142
| 0.583711
| 639
| 4,850
| 4.283255
| 0.250391
| 0.04019
| 0.018268
| 0.021922
| 0.299963
| 0.267081
| 0.194008
| 0.174278
| 0.174278
| 0.174278
| 0
| 0.003361
| 0.325155
| 4,850
| 141
| 143
| 34.397163
| 0.832875
| 0.185979
| 0
| 0.168539
| 0
| 0
| 0.028775
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.101124
| false
| 0
| 0.089888
| 0.011236
| 0.325843
| 0.011236
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1 | 0 |
f27acd0b94f784d85a24a1358e2c015c3198e304 | 4,138 | py | Python
keras_med_io/utils/intensity_io.py | jchen42703/keras_med_io | 2113de64a448c90b66993d6ed4fdbba7971f3417 | ["MIT"] | null | null | null |
keras_med_io/utils/intensity_io.py | jchen42703/keras_med_io | 2113de64a448c90b66993d6ed4fdbba7971f3417 | ["MIT"] | 6 | 2019-03-24T02:39:43.000Z | 2019-04-10T01:15:14.000Z |
keras_med_io/utils/intensity_io.py | jchen42703/keras_med_io | 2113de64a448c90b66993d6ed4fdbba7971f3417 | ["MIT"] | null | null | null |
# coding: utf-8
# functions for quick testing
import numpy as np
# helper functions
def normalization(arr, normalize_mode, norm_range = [0,1]):
"""
Helper function: Normalizes the image based on the specified mode and range
Args:
arr: numpy array
normalize_mode: either "whiten", "normalize_clip", or "normalize" representing the type of normalization to use
norm_range: (Optional) Specifies the range for the numpy array values
Returns:
A normalized array based on the specifications
"""
# reiniating the batch_size dimension
if normalize_mode == "whiten":
return whiten(arr)
elif normalize_mode == "normalize_clip":
return normalize_clip(arr, norm_range = norm_range)
elif normalize_mode == "normalize":
return minmax_normalize(arr, norm_range = norm_range)
else:
raise NotImplementedError("Please use the supported modes.")
def normalize_clip(arr, norm_range = [0,1]):
"""
Args:
arr: numpy array
norm_range: list of 2 integers specifying normalizing range
based on https://stats.stackexchange.com/questions/178626/how-to-normalize-data-between-1-and-1
Returns:
Whitened and normalized array with outliers clipped in the specified range
"""
# whitens -> clips -> scales to [0,1]
# whiten
norm_img = np.clip(whiten(arr), -5, 5)
norm_img = minmax_normalize(norm_img, norm_range) # scale the clipped, whitened values, not the raw input
return norm_img
def whiten(arr):
"""
Mean-Var Normalization (Z-score norm)
* mean of 0 and standard deviation of 1
Args:
arr: numpy array
Returns:
A numpy array with a mean of 0 and a standard deviation of 1
"""
shape = arr.shape
arr = arr.flatten()
norm_img = (arr-np.mean(arr)) / np.std(arr)
return norm_img.reshape(shape)
def minmax_normalize(arr, norm_range = [0,1]):
"""
Args:
arr: numpy array
norm_range: list of 2 integers specifying normalizing range
based on https://stats.stackexchange.com/questions/178626/how-to-normalize-data-between-1-and-1
Returns:
Normalized array with outliers clipped in the specified range
"""
norm_img = ((norm_range[1]-norm_range[0]) * (arr - np.amin(arr)) / (np.amax(arr) - np.amin(arr))) + norm_range[0]
return norm_img
def clip_upper_lower_percentile(image, mask=None, percentile_lower=0.2, percentile_upper=99.8):
"""
Clipping values for positive class areas.
Args:
image:
mask:
percentile_lower:
percentile_upper:
Return:
Image with clipped pixel intensities
"""
# Copyright 2017 Division of Medical Image Computing, German Cancer Research Center (DKFZ)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===================================================================================================
# Changes: Added the ability to have the function clip at only the necessary percentiles with no mask and removed the
# automatic generation of a mask
# finding the percentile values
# use the whole image for the percentile cut-offs when no mask is given
masked_values = image.ravel() if mask is None else image[mask != 0].ravel()
cut_off_lower = np.percentile(masked_values, percentile_lower)
cut_off_upper = np.percentile(masked_values, percentile_upper)
# clipping based on percentiles
res = np.copy(image)
if mask is not None:
res[(res < cut_off_lower) & (mask !=0)] = cut_off_lower
res[(res > cut_off_upper) & (mask !=0)] = cut_off_upper
elif mask is None:
res[(res < cut_off_lower)] = cut_off_lower
res[(res > cut_off_upper)] = cut_off_upper
return res
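# Illustrative sketch (not part of the original module): the three normalization
# modes applied to a random volume.
if __name__ == '__main__':
    example = np.random.rand(8, 8, 8) * 100
    print(normalization(example, "whiten").std())          # close to 1 after z-scoring
    print(normalization(example, "normalize").max())       # 1.0 after min-max scaling to [0, 1]
    print(normalization(example, "normalize_clip").min())  # 0.0 after clipping and rescaling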
| 39.037736
| 121
| 0.656597
| 557
| 4,138
| 4.764811
| 0.321364
| 0.047476
| 0.027129
| 0.025622
| 0.283723
| 0.229842
| 0.214017
| 0.186134
| 0.165034
| 0.125094
| 0
| 0.016703
| 0.233204
| 4,138
| 105
| 122
| 39.409524
| 0.819729
| 0.556307
| 0
| 0.060606
| 0
| 0
| 0.036923
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.151515
| false
| 0
| 0.030303
| 0
| 0.424242
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1 | 0 |
f27c23356c06fcdc25ca581c0cf5398df4251dbf | 8,654 | py | Python
source/notebooks/sagemaker_predictive_maintenance/autoencoder_entry_point/autoencoder_entry_point.py | brightsparc/predictive-maintenance-using-machine-learning | fae69698750185bb58a3fa67ff8887f435f46458 | ["Apache-2.0"] | null | null | null |
source/notebooks/sagemaker_predictive_maintenance/autoencoder_entry_point/autoencoder_entry_point.py | brightsparc/predictive-maintenance-using-machine-learning | fae69698750185bb58a3fa67ff8887f435f46458 | ["Apache-2.0"] | null | null | null |
source/notebooks/sagemaker_predictive_maintenance/autoencoder_entry_point/autoencoder_entry_point.py | brightsparc/predictive-maintenance-using-machine-learning | fae69698750185bb58a3fa67ff8887f435f46458 | ["Apache-2.0"] | null | null | null |
# Autoencoder based on: https://towardsdatascience.com/predictive-maintenance-of-turbofan-engine-64911e39c367
import argparse
import pandas as pd
import numpy as np
import itertools
import logging
import random
import os
from scipy.spatial.distance import pdist, squareform
from sklearn.metrics import confusion_matrix, classification_report
from sklearn.preprocessing import MinMaxScaler, StandardScaler
import tensorflow as tf
from tensorflow.keras.models import *
from tensorflow.keras.layers import *
from tensorflow.keras.optimizers import *
from tensorflow.keras.utils import *
from tensorflow.keras.callbacks import *
def get_logger(name):
logger = logging.getLogger(name)
log_format = '%(asctime)s %(levelname)s %(name)s: %(message)s'
logging.basicConfig(format=log_format, level=logging.INFO)
return logger
def parse_args():
parser = argparse.ArgumentParser()
# model_dir is always passed in from SageMaker. By default this is a S3 path under the default bucket.
parser.add_argument('--model_dir', type=str)
parser.add_argument('--sm-model-dir', type=str, default=os.environ.get('SM_MODEL_DIR'))
parser.add_argument('--training_dir', type=str, default=os.environ['SM_CHANNEL_TRAIN'])
parser.add_argument('--num_datasets', type=int, default=1)
parser.add_argument('--batch_size', type=int, default=512)
parser.add_argument('--epochs', type=int, default=25)
parser.add_argument('--sequence_length', type=int, default=50) # AE
parser.add_argument('--validation_split', type=float, default=0.2) # AE
parser.add_argument('--patience', type=int, default=6) # AE
return parser.parse_args()
def read_train_data(training_dir, num_datasets):
train_dfs = [pd.read_csv(os.path.join(training_dir, 'train-{}.csv'.format(i))) for i in range(num_datasets)]
return train_dfs
def read_test_data(training_dir, num_datasets):
test_dfs = [pd.read_csv(os.path.join(training_dir, 'test-{}.csv'.format(i))) for i in range(num_datasets)]
return test_dfs
def gen_sequence(id_df, seq_length, seq_cols):
data_matrix = id_df[seq_cols].values
num_elements = data_matrix.shape[0]
# Iterate over two lists in parallel.
# For example id1 has 192 rows and sequence_length is equal to 50,
# so zip iterates over the two following ranges of numbers (0,142),(50,192)
# 0 50 (start stop) -> from row 0 to row 50
# 1 51 (start stop) -> from row 1 to row 51
# 2 52 (start stop) -> from row 2 to row 52
# ...
# 141 191 (start stop) -> from row 141 to 191
for start, stop in zip(range(0, num_elements-seq_length), range(seq_length, num_elements)):
yield data_matrix[start:stop, :]
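# Illustrative sketch (not part of the original script): for a toy DataFrame with
# 5 rows, one feature column 's1' and seq_length=3, gen_sequence yields two windows
# of shape (3, 1), for the (start, stop) pairs (0, 3) and (1, 4):
#   toy = pd.DataFrame({'s1': [0.1, 0.2, 0.3, 0.4, 0.5]})
#   windows = list(gen_sequence(toy, 3, ['s1']))   # len(windows) == 2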
def gen_labels(id_df, seq_length, label):
data_matrix = id_df[label].values
num_elements = data_matrix.shape[0]
# I have to remove the first seq_length labels
# because for one id the first sequence of seq_length size has as target
# the last label (the previous ones are discarded).
# All the next sequences of that id will have one label associated as target, step by step.
return data_matrix[seq_length:num_elements, :]
def rec_plot(s, eps=0.10, steps=10):
d = pdist(s[:,None])
d = np.floor(d/eps)
d[d>steps] = steps
Z = squareform(d)
return Z
def get_dataset(train_df, test_df, sequence_length):
# NOTE: Skipping processing besides labels which are included in this page
# see: https://github.com/awslabs/predictive-maintenance-using-machine-learning/blob/master/source/notebooks/sagemaker_predictive_maintenance/preprocess.py
### ADD NEW LABEL TRAIN ###
w1 = 45
w0 = 15
train_df['label1'] = np.where(train_df['RUL'] <= w1, 1, 0 )
train_df['label2'] = train_df['label1']
train_df.loc[train_df['RUL'] <= w0, 'label2'] = 2
### ADD NEW LABEL TEST ###
test_df['label1'] = np.where(test_df['RUL'] <= w1, 1, 0 )
test_df['label2'] = test_df['label1']
test_df.loc[test_df['RUL'] <= w0, 'label2'] = 2
### DROP NA DATA ###
train_df = train_df.dropna(axis=1)
test_df = test_df.dropna(axis=1)
### SEQUENCE COL: COLUMNS TO CONSIDER ###
sequence_cols = []
for col in train_df.columns:
if col[0] == 's':
sequence_cols.append(col)
#sequence_cols.append('cycle_norm')
logging.info('Sequence Cols: {}'.format(sequence_cols))
### GENERATE X TRAIN TEST ###
x_train, x_test = [], []
for engine_id in train_df.id.unique():
for sequence in gen_sequence(train_df[train_df.id==engine_id], sequence_length, sequence_cols):
x_train.append(sequence)
for sequence in gen_sequence(test_df[test_df.id==engine_id], sequence_length, sequence_cols):
x_test.append(sequence)
x_train = np.asarray(x_train)
x_test = np.asarray(x_test)
logging.info("X_Train shape: {}".format(x_train.shape))
logging.info("X_Test shape: {}".format(x_test.shape))
### GENERATE Y TRAIN TEST ###
y_train, y_test = [], []
for engine_id in train_df.id.unique():
for label in gen_labels(train_df[train_df.id==engine_id], sequence_length, ['label2'] ):
y_train.append(label)
for label in gen_labels(test_df[test_df.id==engine_id], sequence_length, ['label2']):
y_test.append(label)
y_train = np.asarray(y_train).reshape(-1,1)
y_test = np.asarray(y_test).reshape(-1,1)
### ENCODE LABEL ###
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)
logging.info("y_train shape: {}".format(y_train.shape))
logging.info("y_test shape: {}".format(y_test.shape))
### TRANSFORM X TRAIN TEST IN IMAGES ###
x_train_img = np.apply_along_axis(rec_plot, 1, x_train).astype('float16')
logging.info("x_train_image shape: {}".format(x_train_img.shape))
x_test_img = np.apply_along_axis(rec_plot, 1, x_test).astype('float16')
logging.info("x_test_image shape: {}".format(x_test_img.shape))
return x_train_img, y_train, x_test_img, y_test
def fit_model(x_train_img, y_train, batch_size=512, epochs=25, validation_split=0.2, patience=6):
input_shape = x_train_img.shape[1:]
logging.info("Input shape: {}".format(input_shape))
model = Sequential()
model.add(Conv2D(32, (3, 3), activation='relu', input_shape=input_shape))
model.add(Conv2D(32, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(3, activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
logging.info(model.summary())
### FIT ###
tf.random.set_seed(33)
np.random.seed(33)
random.seed(33)
session_conf = tf.compat.v1.ConfigProto(
intra_op_parallelism_threads=1,
inter_op_parallelism_threads=1
)
sess = tf.compat.v1.Session(
graph=tf.compat.v1.get_default_graph(),
config=session_conf
)
tf.compat.v1.keras.backend.set_session(sess)
es = EarlyStopping(monitor='val_accuracy', mode='auto', restore_best_weights=True, verbose=1, patience=patience)
model.fit(x_train_img, y_train, batch_size=batch_size, epochs=epochs, callbacks=[es],
validation_split=validation_split, verbose=2)
### EVAL ###
# NOTE: x_test_img and y_test are not parameters of fit_model; they are read from the
# module-level globals created under __main__ below, so this only works when fit_model
# is called after get_dataset() has run at module scope. Also note that
# model.predict_classes is only available in older tf.keras versions (removed in TF 2.6+).
logging.info('Evaluate: {}'.format(model.evaluate(x_test_img, y_test, verbose=2)))
logging.info(classification_report(np.where(y_test != 0)[1], model.predict_classes(x_test_img)))
return model
if __name__ == '__main__':
logging = get_logger(__name__)
logging.info('numpy version:{} Tensorflow version::{}'.format(np.__version__, tf.__version__))
args = parse_args()
# Read the first dataset
train_df = read_train_data(args.training_dir, args.num_datasets)[0]
test_df = read_test_data(args.training_dir, args.num_datasets)[0]
# Get the training dataset as an image
x_train_img, y_train, x_test_img, y_test = get_dataset(train_df, test_df, args.sequence_length)
model = fit_model(x_train_img, y_train,
batch_size=args.batch_size,
epochs=args.epochs,
validation_split=args.validation_split,
patience=args.patience)
logging.info('saving model to: {}...'.format(args.sm_model_dir))
model.save(os.path.join(args.sm_model_dir, '000000001'))
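The script above turns each windowed sensor sequence into an image with a `rec_plot` helper that is defined earlier in the file and not shown in this excerpt. As a hedged illustration only, a recurrence-plot helper of this kind is typically a thresholded pairwise-distance matrix; the sketch below is an assumption about its shape, not the original implementation.

import numpy as np

def rec_plot_sketch(series, eps=0.10, steps=10):
    # Pairwise absolute distances of a 1-D window, bucketed into `steps` levels.
    d = np.abs(series[:, None] - series[None, :])
    z = np.floor(d / eps)
    z[z > steps] = steps
    return z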
| 39.336364
| 159
| 0.686388
| 1,281
| 8,654
| 4.417642
| 0.234192
| 0.022265
| 0.027037
| 0.008835
| 0.238381
| 0.188373
| 0.171055
| 0.155151
| 0.131825
| 0.06821
| 0
| 0.026646
| 0.180379
| 8,654
| 220
| 160
| 39.336364
| 0.771183
| 0.152993
| 0
| 0.068027
| 0
| 0
| 0.084851
| 0.003311
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061224
| false
| 0
| 0.108844
| 0
| 0.22449
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f27c2659a6f08c68bf5a68b6f0434f1302972e63
| 437
|
py
|
Python
|
util/dump_cmudict_json.py
|
raygard/readability-rg
|
3e0820ee5def6ffccfdc1114e511bdf137ff9b04
|
[
"MIT"
] | null | null | null |
util/dump_cmudict_json.py
|
raygard/readability-rg
|
3e0820ee5def6ffccfdc1114e511bdf137ff9b04
|
[
"MIT"
] | null | null | null |
util/dump_cmudict_json.py
|
raygard/readability-rg
|
3e0820ee5def6ffccfdc1114e511bdf137ff9b04
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
# vim: set fileencoding=utf-8
import sys
import json
def main():
args = sys.argv[1:]
fn = args[0]
with open(fn) as fp:
d = json.load(fp)
# Using sorted() to get same results in Python 2 and 3.
for k, v in sorted(d.items()):
assert isinstance(v, list)
assert 0 < len(v) < 4
# print(k, v)
print('%-40s %s' % (k, ' '.join('%d' % n for n in v)))
main()
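For readability, here is a small, assumed example of the dictionary shape this dump script expects and of the line format it prints (the sample data is invented, not taken from cmudict):

sample = {"reading": [2, 3], "cat": [1]}
for k, v in sorted(sample.items()):
    # each key is printed left-justified to 40 characters, followed by its space-separated counts
    print('%-40s %s' % (k, ' '.join('%d' % n for n in v)))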
| 19.863636
| 62
| 0.533181
| 73
| 437
| 3.191781
| 0.671233
| 0.017167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029605
| 0.304348
| 437
| 21
| 63
| 20.809524
| 0.736842
| 0.263158
| 0
| 0
| 0
| 0
| 0.034591
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.083333
| false
| 0
| 0.166667
| 0
| 0.25
| 0.083333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f27e5faf956aa7b884e2d5afa37ca81bb25dcb92
| 1,328
|
py
|
Python
|
src/EvalShift.py
|
nekonyanneko/GA
|
328f37c421a8bd4857a0804b130c23bd7b98de19
|
[
"MIT"
] | null | null | null |
src/EvalShift.py
|
nekonyanneko/GA
|
328f37c421a8bd4857a0804b130c23bd7b98de19
|
[
"MIT"
] | null | null | null |
src/EvalShift.py
|
nekonyanneko/GA
|
328f37c421a8bd4857a0804b130c23bd7b98de19
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import Shift as shi
import Enum as enu
def evalShift(individual):
"""
This method is grobal method.
This method is evaluation.
If you need new evaluation method, you must define it as follows.
RETURN:
evaluation values
"""
shift = shi.Shift(individual) # Get indiviaual of shift
shift.employees = enu.EMPLOYEES # Get employees list
# 想定人数とアサイン人数の差
people_count_sub_sum = sum(shift.abs_people_between_need_and_actual()) / enu.EVA_1
# 応募していない時間帯へのアサイン数
not_applicated_count = shift.not_applicated_assign() / enu.EVA_2
# アサイン数が応募数の半分以下の従業員数
few_work_user = len(shift.few_work_user()) / enu.EVA_3
# 管理者が1人もいないコマ数
no_manager_box = len(shift.no_manager_box()) / enu.EVA_4
# a,bの全部にアサインされている
three_box_per_day = len(shift.three_box_per_day()) / enu.EVA_5
# 出勤日数(出勤日数は人によって異なるためget_work_day_num()の中で計算済み)
work_day = shift.get_work_day_num()
return (
not_applicated_count,
people_count_sub_sum,
few_work_user,
no_manager_box,
three_box_per_day,
work_day[0],
work_day[1],
work_day[2],
work_day[3],
work_day[4],
work_day[5],
work_day[6],
work_day[7],
work_day[8],
work_day[9],
work_day[10]
)
| 24.145455
| 83
| 0.652108
| 183
| 1,328
| 4.398907
| 0.404372
| 0.121739
| 0.040994
| 0.052174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019192
| 0.254518
| 1,328
| 54
| 84
| 24.592593
| 0.793939
| 0.267319
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034483
| false
| 0
| 0.068966
| 0
| 0.137931
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f280852bfea33f9eda7c3cbe87f494f3dbe4c0a3
| 238
|
py
|
Python
|
Bot.py
|
pythonNoobas/Python228
|
7c266acad5bb5ae45df10ac3fdea209831399729
|
[
"MIT"
] | null | null | null |
Bot.py
|
pythonNoobas/Python228
|
7c266acad5bb5ae45df10ac3fdea209831399729
|
[
"MIT"
] | null | null | null |
Bot.py
|
pythonNoobas/Python228
|
7c266acad5bb5ae45df10ac3fdea209831399729
|
[
"MIT"
] | null | null | null |
import telebot
bot = telebot.TeleBot("879497357:AAHxUAZR2ZMy7q1dsC12NoFOmvBnKo9a3FA")
@bot.message_handler(content_types=['text'])
def echo_all(message):
bot.send_message(message.chat.id, message.text)
bot.polling( none_stop = True )
| 23.8
| 70
| 0.794118
| 30
| 238
| 6.133333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073059
| 0.079832
| 238
| 10
| 71
| 23.8
| 0.767123
| 0
| 0
| 0
| 0
| 0
| 0.205021
| 0.188285
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.166667
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2808bb95000137789190b399e2a920a24f1f97a
| 2,980
|
py
|
Python
|
generator/address.py
|
leg020/python-training
|
f595b8b836ff60c68bdff9d881ca50c026762457
|
[
"Apache-2.0"
] | null | null | null |
generator/address.py
|
leg020/python-training
|
f595b8b836ff60c68bdff9d881ca50c026762457
|
[
"Apache-2.0"
] | null | null | null |
generator/address.py
|
leg020/python-training
|
f595b8b836ff60c68bdff9d881ca50c026762457
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from model.address import Address
import random
import string
import os.path
import json
import getopt
import sys
import jsonpickle
try:
opts, args = getopt.getopt(sys.argv[1:], 'n:f:', ['number of address', 'file'])
except getopt.GetoptError as err:
print(err)  # the getopt module has no usage(); report the parse error instead
sys.exit(2)
n = 5
f = 'data/address.json'
for o, a in opts:
if o == '-n':
n = int(a)
elif o == '-f':
f = a
def random_string(prefix, maxlen):
symbols = string.ascii_letters + string.digits + string.punctuation + " "*10
return prefix + "".join([random.choice(symbols) for i in range(random.randrange(maxlen))])
testdata = [Address(firstname="",
middlename="",
lastname="",
nickname="",
photo="",
title="",
company="",
address_home="",
home="",
mobile="",
work="",
fax="",
email="",
email2="",
email3="",
homepage="",
bday="",
bmonth="-",
byear="",
aday="",
amonth="-",
ayear="",
address2="",
phone2="",
notes="")] + \
[Address(firstname=random_string("firstname", 10),
middlename=random_string('middlename', 10),
lastname=random_string('lastname', 10),
nickname=random_string('nickname', 10),
photo="C:\\fakepath\\title.gif",
title=random_string('title', 10),
company=random_string('company', 10),
address_home=random_string('address_home', 10),
home=random_string('8', 10),
mobile=random_string('8', 10),
work=random_string('8', 10),
fax=random_string('8', 10),
email=random_string('8', 10),
email2=random_string('8', 10),
email3=random_string('8', 10),
homepage=random_string('8', 10),
bday=str(random.randrange(1, 32)),
bmonth="September",
byear=random_string('8', 10),
aday=str(random.randrange(1, 32)),
amonth="May",
ayear=random_string('8', 10),
address2=random_string('8', 10),
phone2=random_string('8', 10),
notes=random_string('8', 10))
for i in range(n)]
file = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', f)
with open(file, 'w') as out:
jsonpickle.set_encoder_options('json', indent=2)
out.write(jsonpickle.encode(testdata))
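A minimal round-trip sketch for the file written above, assuming it is run from the project root and that the same model.address.Address class is importable (jsonpickle needs it to rebuild the objects):

import jsonpickle

with open('data/address.json') as src:
    addresses = jsonpickle.decode(src.read())
print(len(addresses), 'generated addresses; first firstname:', addresses[0].firstname)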
| 33.483146
| 94
| 0.451342
| 283
| 2,980
| 4.64311
| 0.353357
| 0.191781
| 0.128615
| 0.148402
| 0.031963
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041903
| 0.407383
| 2,980
| 89
| 95
| 33.483146
| 0.702152
| 0.007047
| 0
| 0
| 0
| 0
| 0.055105
| 0.007776
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012821
| false
| 0
| 0.102564
| 0
| 0.128205
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f283d91585cbb97de4ca77780a488265da69f263
| 613
|
py
|
Python
|
scripts/test.py
|
darkmatter2222/Agar.AI
|
a757544581239a7b4c2b00bb7befa9b649d73f7f
|
[
"MIT"
] | 1
|
2020-01-02T13:49:51.000Z
|
2020-01-02T13:49:51.000Z
|
scripts/test.py
|
darkmatter2222/Agar.AI
|
a757544581239a7b4c2b00bb7befa9b649d73f7f
|
[
"MIT"
] | null | null | null |
scripts/test.py
|
darkmatter2222/Agar.AI
|
a757544581239a7b4c2b00bb7befa9b649d73f7f
|
[
"MIT"
] | 1
|
2020-01-24T19:17:38.000Z
|
2020-01-24T19:17:38.000Z
|
import scripts.screen_interface as si
import scripts.game_interface as gi
import ctypes
import os
import keyboard
import uuid
GI = gi.GameInterface()
# find center of screen
user32 = ctypes.windll.user32
screenSize = user32.GetSystemMetrics(0), user32.GetSystemMetrics(1)
centerPoint = tuple(i/2 for i in screenSize)
print('Screen Size X:%d y:%d' % screenSize)
print('Targeting Center X:%d y:%d' % centerPoint)
GI = gi.GameInterface()
SI = si.ScreenInterface()
GI.center_x = centerPoint[0]
GI.center_y = centerPoint[1]
GI.range_classifications = 10
while True:
angle = GI.get_mouse_class()
print(angle)
| 25.541667
| 67
| 0.761827
| 91
| 613
| 5.054945
| 0.483516
| 0.056522
| 0.073913
| 0.017391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028195
| 0.132137
| 613
| 24
| 68
| 25.541667
| 0.836466
| 0.034258
| 0
| 0.1
| 0
| 0
| 0.079526
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.3
| 0
| 0.3
| 0.15
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f284677f3d515ed6519b9b9782d95ab9e355ded5
| 4,052
|
py
|
Python
|
Controller/control/WorkerControl.py
|
th-nuernberg/ml-cloud
|
6d7527cbf6cceb7062e74dbc43d51998381aa6c8
|
[
"MIT"
] | null | null | null |
Controller/control/WorkerControl.py
|
th-nuernberg/ml-cloud
|
6d7527cbf6cceb7062e74dbc43d51998381aa6c8
|
[
"MIT"
] | 7
|
2020-07-19T03:29:21.000Z
|
2022-03-02T06:46:12.000Z
|
Controller/control/WorkerControl.py
|
th-nuernberg/ml-cloud
|
6d7527cbf6cceb7062e74dbc43d51998381aa6c8
|
[
"MIT"
] | null | null | null |
import json
import queue
from control.WorkerQueue import WorkerQueue as WQ
from data.StorageIO import StorageIO
'''
The WorkerControl coordinates workers and assigns jobs.
Workers register themselves at startup. The controller queues workers as well as jobs in two separate queues.
As soon as a worker and a job are available, they are taken from the queues and the job_id is sent to the worker
via MQTT. After the worker finishes its job, it will be put back into the queue.
'''
class WorkerControl:
config_queue = queue.Queue(-1) # infinite size
COMMAND_START = "start"
COMMAND_STOP = "stop"
commandIO = None
storageIO: StorageIO = None
worker_list = {} # "worker_id" : "job_id"
worker_job_mapping = {}
worker_queue = WQ()
def get_worker_info(self):
return self.worker_list
# Function called by external Thread !!!
def busy_changed_callback(self, worker_id, busy_message):
try:
if len(busy_message) == 0:
print("Worker LOST: " + worker_id)
self.worker_queue.remove_worker(worker_id)
self.worker_list.pop(worker_id, None)
if not worker_id in self.worker_job_mapping:
print("Unknown worker reported busy change! This should not happen")
else:
self.update_status(worker_id, "lost")
else:
message = json.loads(busy_message)
is_busy = message["busy"] # either False or the job_id
self.worker_list[worker_id] = is_busy
if is_busy == False:
if "job_id" in message:
self.update_status(worker_id, message["status"])
if worker_id in self.worker_job_mapping:
del self.worker_job_mapping[worker_id]
self.worker_queue.add_to_queue(worker_id)
else:
job_id = message["job_id"]
self.worker_queue.remove_worker(worker_id)
self.worker_job_mapping[worker_id] = job_id
self.update_status(worker_id, message["status"])
print("Worker is busy: " + worker_id)
except Exception as e:
print("An error occurred in MQTT callback: " + str(e))
def update_status(self, worker_id: str, status: str):
if not worker_id in self.worker_job_mapping:
print("ERROR. Tried to set status for unset worker!")
else:
self.storageIO.update_job_status(self.worker_job_mapping[worker_id], status)
def __init__(self, commandIO, storageIO: StorageIO):
self.commandIO = commandIO
self.storageIO = storageIO
self.commandIO.on_busy_changed(self.busy_changed_callback)
def modify_job_state(self, job_list, command: str):
for job in job_list:
config = {"job_id": job}
if command == self.COMMAND_START:
self.create_new_job(config)
else:
pass
# Function called by external Thread !!!
def create_new_job(self, job_config: dict):
try:
print("-> Job ready (ID=" + job_config["job_id"] + ")")
self.config_queue.put(job_config, timeout=1)
except:
return False
return True
def run(self):
while (True):
jsonConfig = self.config_queue.get()
job_id = jsonConfig["job_id"]
print("<- Job selected (ID=" + job_id + ")")
ready_worker = self.worker_queue.get_next_worker()
print("Starting new job (id: " + job_id + ")")
self.commandIO.start_new_job(ready_worker, json.dumps(jsonConfig))
if ready_worker in self.worker_job_mapping:
print("Removing orphaned job from worker job mapping")
del self.worker_job_mapping[ready_worker]
self.worker_job_mapping[ready_worker] = job_id
self.update_status(ready_worker, "assigned")
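The module docstring above describes pairing a worker queue with a job queue. As a toy illustration of that pairing idea only (plain in-memory queues instead of MQTT, invented names), the loop below matches the next free worker with the next job:

import queue

jobs, workers = queue.Queue(), queue.Queue()
for job_id in ("job-1", "job-2", "job-3"):
    jobs.put(job_id)
for worker_id in ("worker-a", "worker-b"):
    workers.put(worker_id)
while not jobs.empty() and not workers.empty():
    print(workers.get(), "<-", jobs.get())  # pair the next free worker with the next job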
| 38.226415
| 112
| 0.607601
| 505
| 4,052
| 4.641584
| 0.261386
| 0.064846
| 0.075085
| 0.076792
| 0.255973
| 0.225683
| 0.145051
| 0.107509
| 0.074232
| 0.074232
| 0
| 0.001073
| 0.30997
| 4,052
| 105
| 113
| 38.590476
| 0.837268
| 0.034798
| 0
| 0.164557
| 0
| 0
| 0.096583
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088608
| false
| 0.012658
| 0.050633
| 0.012658
| 0.291139
| 0.113924
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f28467f33870630c6d980108ee2deecf6e265916
| 986
|
py
|
Python
|
spammer/groupdmspam.py
|
00-00-00-11/Raid-Toolbox
|
4d24841de5ef112dc15b858f62607e0d6b5277cd
|
[
"0BSD"
] | null | null | null |
spammer/groupdmspam.py
|
00-00-00-11/Raid-Toolbox
|
4d24841de5ef112dc15b858f62607e0d6b5277cd
|
[
"0BSD"
] | null | null | null |
spammer/groupdmspam.py
|
00-00-00-11/Raid-Toolbox
|
4d24841de5ef112dc15b858f62607e0d6b5277cd
|
[
"0BSD"
] | 1
|
2021-05-15T11:32:24.000Z
|
2021-05-15T11:32:24.000Z
|
import discord
import sys
import random
import aiohttp
import logging
token = sys.argv[1]
group = sys.argv[2]
tokenno = sys.argv[3]
msgtxt = sys.argv[4]
useproxies = sys.argv[5]
logging.basicConfig(filename='RTB.log', filemode='w', format='Token {}'.format(str(tokenno))+' - %(levelname)s - %(message)s',level=logging.CRITICAL)
if useproxies == 'True':
proxy_list = open("proxies.txt").read().splitlines()
proxy = random.choice(proxy_list)
con = aiohttp.ProxyConnector(proxy="http://"+proxy)
client = discord.Client(connector=con)
else:
client = discord.Client()
@client.event
async def on_ready():
groupdm = client.get_channel(int(group))
while not client.is_closed():
try:
await groupdm.send(msgtxt)
except Exception:
pass
try:
client.run(token, bot=False)
except Exception as c:
logging.critical('Token {} Unable to login: {}'.format(str(tokenno),str(c)))
print (c)
| 28.171429
| 150
| 0.649087
| 127
| 986
| 5
| 0.582677
| 0.055118
| 0.050394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006369
| 0.203854
| 986
| 34
| 151
| 29
| 0.802548
| 0
| 0
| 0.064516
| 0
| 0
| 0.10084
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.032258
| 0.16129
| 0
| 0.16129
| 0.032258
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f28613b99f347cb3a0fc049c18db1898247d805e
| 522
|
py
|
Python
|
t2t_bert/distributed_encoder/gpt_encoder.py
|
yyht/bert
|
480c909e0835a455606e829310ff949c9dd23549
|
[
"Apache-2.0"
] | 34
|
2018-12-19T01:00:57.000Z
|
2021-03-26T09:36:37.000Z
|
t2t_bert/distributed_encoder/gpt_encoder.py
|
yyht/bert
|
480c909e0835a455606e829310ff949c9dd23549
|
[
"Apache-2.0"
] | 11
|
2018-12-25T03:37:59.000Z
|
2021-08-25T14:43:58.000Z
|
t2t_bert/distributed_encoder/gpt_encoder.py
|
yyht/bert
|
480c909e0835a455606e829310ff949c9dd23549
|
[
"Apache-2.0"
] | 9
|
2018-12-27T08:00:44.000Z
|
2020-06-08T03:05:14.000Z
|
from model.gpt import gpt
import tensorflow as tf
import numpy as np
def gpt_encoder(model_config, features, labels,
mode, target, reuse=tf.AUTO_REUSE):
input_ids = features["input_ids"]
past = features.get('past', None)
model = gpt.GPT(model_config)
if model_config.get("scope", None):
scope = model_config['scope']
else:
scope = 'model'
model_config['scope'] = scope
model.build_model(hparams=model_config,
X=input_ids,
past=past,
scope=scope,
reuse=reuse)
return model
| 20.88
| 48
| 0.695402
| 75
| 522
| 4.68
| 0.4
| 0.188034
| 0.068376
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189655
| 522
| 24
| 49
| 21.75
| 0.829787
| 0
| 0
| 0
| 0
| 0
| 0.06334
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0
| 0.157895
| 0
| 0.263158
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f28ae939117634bfbb4da17376ebc5f47320b58f
| 879
|
py
|
Python
|
quick_sort.py
|
MichaelLenghel/Sorting-Algorithms
|
b0aba03a7e5d95b4ca4038e8b53a9d544adeefb1
|
[
"MIT"
] | null | null | null |
quick_sort.py
|
MichaelLenghel/Sorting-Algorithms
|
b0aba03a7e5d95b4ca4038e8b53a9d544adeefb1
|
[
"MIT"
] | null | null | null |
quick_sort.py
|
MichaelLenghel/Sorting-Algorithms
|
b0aba03a7e5d95b4ca4038e8b53a9d544adeefb1
|
[
"MIT"
] | null | null | null |
def partition(a, start, end):
pivot = a[start]
left = start + 1
right = end
met = False
# Iterate until the left and right pointers meet in the middle
while not met:
while left <= right and a[left] <= pivot:
left = left + 1
while right >= left and a[right] >= pivot:
right = right - 1
if left >= right:
met = True
else:
a[left], a[right] = a[right], a[left]
# Swap the pivot into the position where right stopped
a[start], a[right] = a[right], a[start]
return right
def quick_sort(li, l, r):
if l < r:
split = partition(li, l, r)
quick_sort(li, l, split - 1)
quick_sort(li, split + 1, r)
if __name__ == '__main__':
li = [65, 72, 23, 36, 99, 20, 1, 44]
# [8, 2, 5, 13, 4, 19, 12, 6, 3, 11, 10, 7, 9]
print("Unsorted list: ", li)
quick_sort(li, 0, len(li) - 1)
print("Sorted list: ", li)
| 22.538462
| 49
| 0.531286
| 143
| 879
| 3.181818
| 0.447552
| 0.065934
| 0.061538
| 0.052747
| 0.057143
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06689
| 0.319681
| 879
| 39
| 50
| 22.538462
| 0.69398
| 0.134243
| 0
| 0
| 0
| 0
| 0.05
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0
| 0
| 0.115385
| 0.076923
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f28b677805cf2bdfc02ec0d719ce0fad31f82786
| 5,787
|
py
|
Python
|
astacus/coordinator/plugins/clickhouse/parts.py
|
aiven/astacus
|
2d64e1f33e01d50a41127f41d9da3d1ab0ce0387
|
[
"Apache-2.0"
] | 19
|
2020-06-22T12:17:59.000Z
|
2022-02-18T00:12:17.000Z
|
astacus/coordinator/plugins/clickhouse/parts.py
|
aiven/astacus
|
2d64e1f33e01d50a41127f41d9da3d1ab0ce0387
|
[
"Apache-2.0"
] | 7
|
2020-06-24T05:16:20.000Z
|
2022-02-28T07:35:31.000Z
|
astacus/coordinator/plugins/clickhouse/parts.py
|
aiven/astacus
|
2d64e1f33e01d50a41127f41d9da3d1ab0ce0387
|
[
"Apache-2.0"
] | 2
|
2020-09-05T21:23:08.000Z
|
2022-02-17T15:02:37.000Z
|
"""
Copyright (c) 2021 Aiven Ltd
See LICENSE for details
Algorithms to help with redistributing parts across servers for tables using the
Replicated family of table engines.
This does not support shards, but this is the right place to add support for them.
"""
from astacus.common.ipc import SnapshotFile
from astacus.coordinator.plugins.clickhouse.escaping import escape_for_file_name
from pathlib import Path
from typing import Dict, Iterable, List, Optional, Set, Tuple
import dataclasses
import re
import uuid
@dataclasses.dataclass
class PartFile:
snapshot_file: SnapshotFile
servers: Set[int]
@dataclasses.dataclass
class Part:
files: Dict[Path, PartFile]
total_size: int
@dataclasses.dataclass(frozen=True)
class PartKey:
table_uuid: uuid.UUID
part_name: str
def group_files_into_parts(snapshot_files: List[List[SnapshotFile]],
table_uuids: Set[uuid.UUID]) -> Tuple[List[Part], List[List[SnapshotFile]]]:
"""
Regroup all files that form a MergeTree table parts together in a `Part`.
Only parts from the provided list of `table_uuids` are regrouped.
Returns the list of `Part` and a separate list of list of `SnapshotFile` that
were not selected to make a `Part`.
The input and output list of lists will have the same length: the number
of servers in the cluster (the first list is for the first server, etc.)
"""
other_files: List[List[SnapshotFile]] = [[] for _ in snapshot_files]
keyed_parts: Dict[PartKey, Part] = {}
for server_index, server_files in enumerate(snapshot_files):
for snapshot_file in server_files:
if not add_file_to_parts(snapshot_file, server_index, table_uuids, keyed_parts):
other_files[server_index].append(snapshot_file)
return list(keyed_parts.values()), other_files
def add_file_to_parts(
snapshot_file: SnapshotFile, server_index: int, table_uuids: Set[uuid.UUID], parts: Dict[PartKey, Part]
) -> bool:
"""
If the `snapshot_file` is a file from a part of one of the tables listed in
`table_uuids`, add it to the corresponding Part in `parts`.
A file is from a part if its path starts with
"store/3_first_char_of_table_uuid/table_uuid/detached/part_name".
If a file already exists in a part, the `server_index` is added to the `server` set
of the `PartFile` for that file.
Raises a `ValueError` if a different file with the same name already exists in a
part: a `PartFile` must be identical on all servers where it is present.
Returns `True` if and only if the file was added to a `Part`.
"""
path_parts = snapshot_file.relative_path.parts
has_enough_depth = len(path_parts) >= 6
if not has_enough_depth:
return False
has_store_and_detached = path_parts[0] == "store" and path_parts[3] == "detached"
has_uuid_prefix = path_parts[1] == path_parts[2][:3]
has_valid_uuid = re.match(r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$", path_parts[2])
if not (has_store_and_detached and has_uuid_prefix and has_valid_uuid):
return False
table_uuid = uuid.UUID(path_parts[2])
if table_uuid not in table_uuids:
return False
part_key = PartKey(table_uuid=table_uuid, part_name=path_parts[4])
part = parts.setdefault(part_key, Part(files={}, total_size=0))
part_file = part.files.get(snapshot_file.relative_path)
if part_file is None:
part.files[snapshot_file.relative_path] = PartFile(snapshot_file=snapshot_file, servers={server_index})
part.total_size += snapshot_file.file_size
elif part_file.snapshot_file.equals_excluding_mtime(snapshot_file):
part_file.servers.add(server_index)
else:
raise ValueError(
f"Inconsistent part file {snapshot_file.relative_path} of part {part_key} "
f"between servers {part_file.servers} and server {server_index}:\n"
f" {part_file.snapshot_file}\n"
f" {snapshot_file}"
)
return True
def check_parts_replication(parts: Iterable[Part]):
"""
Checks that within a single part, all files are present on the same set of servers.
"""
for part in parts:
part_servers: Optional[Set[int]] = None
for file_path, file in part.files.items():
if part_servers is None:
part_servers = file.servers
elif part_servers != file.servers:
raise ValueError(
f"Inconsistent part, not all files are identically replicated: "
f"some files are on servers {part_servers} while {file_path} is on servers {file.servers}"
)
def distribute_parts_to_servers(parts: List[Part], server_files: List[List[SnapshotFile]]):
"""
Distributes each part to only one of the multiple servers where the part was
during the backup.
Parts are distributed to each server such as the total download size for each
server is roughly equal (using a greedy algorithm).
"""
total_file_sizes = [0 for _ in server_files]
for part in sorted(parts, key=lambda p: p.total_size, reverse=True):
server_index = None
for file in part.files.values():
if server_index is None:
server_index = min(file.servers, key=total_file_sizes.__getitem__)
total_file_sizes[server_index] += file.snapshot_file.file_size
server_files[server_index].append(file.snapshot_file)
def get_frozen_parts_pattern(freeze_name: str) -> str:
"""
Returns the glob pattern inside ClickHouse data dir where frozen table parts are stored.
"""
escaped_freeze_name = escape_for_file_name(freeze_name)
return f"shadow/{escaped_freeze_name}/store/**/*"
| 39.101351
| 111
| 0.697598
| 850
| 5,787
| 4.558824
| 0.248235
| 0.058839
| 0.024774
| 0.024774
| 0.056
| 0.018323
| 0.004903
| 0.004903
| 0.004903
| 0.004903
| 0
| 0.007061
| 0.216865
| 5,787
| 147
| 112
| 39.367347
| 0.84797
| 0.284258
| 0
| 0.085366
| 0
| 0.012195
| 0.11214
| 0.039299
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060976
| false
| 0
| 0.085366
| 0
| 0.329268
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f28ccbdb8a0ea7d42a8a232e4a98e01aac77cc9d
| 1,301
|
py
|
Python
|
tests/test_init.py
|
mds2/Rocket
|
53313677768159d13e6c2b7c69ad69ca59bb8c79
|
[
"MIT"
] | 16
|
2015-12-16T10:50:42.000Z
|
2020-06-04T10:39:20.000Z
|
tests/test_init.py
|
mds2/Rocket
|
53313677768159d13e6c2b7c69ad69ca59bb8c79
|
[
"MIT"
] | 6
|
2017-11-01T14:51:52.000Z
|
2019-01-01T22:12:27.000Z
|
tests/test_init.py
|
mds2/Rocket
|
53313677768159d13e6c2b7c69ad69ca59bb8c79
|
[
"MIT"
] | 13
|
2016-04-22T20:14:39.000Z
|
2021-12-21T22:52:02.000Z
|
# -*- coding: utf-8 -*-
# This file is part of the Rocket Web Server
# Copyright (c) 2012 Timothy Farrell
#
# See the included LICENSE.txt file for licensing details.
# Import System Modules
import sys
import unittest
# Import Custom Modules
import rocket
# Define Constants
PY3K = sys.version_info[0] > 2
# Define Tests
class RocketInitTest(unittest.TestCase):
def testMembers(self):
members = ["VERSION", "SERVER_NAME", "SERVER_SOFTWARE", "HTTP_SERVER_SOFTWARE", "BUF_SIZE", "IS_JYTHON", "IGNORE_ERRORS_ON_CLOSE", "DEFAULT_LISTEN_QUEUE_SIZE", "DEFAULT_MIN_THREADS", "DEFAULT_MAX_THREADS", "DEFAULTS", "PY3K", "u", "b", "Rocket", "CherryPyWSGIServer"]
for m in members:
self.assertTrue(hasattr(rocket, m),
msg="rocket module does not have %s" % m)
def testUnicode(self):
if PY3K:
self.skipTest("Not a valid test in Python 3")
self.assertEqual(rocket.u('abc'), eval("u'abc'"))
self.assertEqual(type(rocket.u('abc')), type(eval("u'abc'")))
def testBytes(self):
if PY3K:
self.skipTest("Not a valid test in Python 3")
self.assertEqual(rocket.b('abc'), 'abc')
self.assertEqual(type(rocket.b('abc')), type('abc'))
if __name__ == '__main__':
unittest.main()
| 32.525
| 275
| 0.647963
| 170
| 1,301
| 4.811765
| 0.523529
| 0.07335
| 0.02445
| 0.03423
| 0.227384
| 0.158924
| 0.158924
| 0.158924
| 0.158924
| 0.158924
| 0
| 0.012683
| 0.212145
| 1,301
| 39
| 276
| 33.358974
| 0.785366
| 0.176787
| 0
| 0.181818
| 0
| 0
| 0.298775
| 0.044298
| 0
| 0
| 0
| 0
| 0.227273
| 1
| 0.136364
| false
| 0
| 0.136364
| 0
| 0.318182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2925fa462ff21785df92756f554dc30e7733df7
| 1,450
|
py
|
Python
|
app/cipher_caesar.py
|
igorsilva3/cipher-of-caesar
|
2024dae7eb795f273785e9622d9e20a49cea089d
|
[
"MIT"
] | 2
|
2020-09-30T00:04:59.000Z
|
2020-10-02T14:33:56.000Z
|
app/cipher_caesar.py
|
igorsilva3/cipher-of-caesar
|
2024dae7eb795f273785e9622d9e20a49cea089d
|
[
"MIT"
] | null | null | null |
app/cipher_caesar.py
|
igorsilva3/cipher-of-caesar
|
2024dae7eb795f273785e9622d9e20a49cea089d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import string
class Caesar(object):
def __init__(self):
self.ALPHABET = string.ascii_letters
def character_type(self, character):
""" Returns the alphabet box """
if character.isupper():
return string.ascii_uppercase
return string.ascii_lowercase
def encrypt(self, text: str, key: int) -> str:
""" Returns the encrypted text """
ENCRYPT_TEXT = ""
for letter in text:
if letter in self.ALPHABET:
alphabet = self.character_type(letter)
index = alphabet.index(letter) + key
ENCRYPT_TEXT += alphabet[index % len(alphabet)]
if letter not in self.ALPHABET:
ENCRYPT_TEXT += letter
return ENCRYPT_TEXT
def decrypt(self, cipher: str, key: int) -> str:
""" Returns the decrypted text """
DECRYPT_TEXT = ""
for letter in cipher:
if letter in self.ALPHABET:
alphabet = self.character_type(letter)
index = alphabet.index(letter) - key
DECRYPT_TEXT += alphabet[index % len(alphabet)]
if letter not in self.ALPHABET:
DECRYPT_TEXT += letter
return DECRYPT_TEXT
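A quick usage sketch for the class above (shift of 3, expected values worked out by hand):

if __name__ == '__main__':
    caesar = Caesar()
    secret = caesar.encrypt("Attack at dawn!", 3)
    print(secret)                     # Dwwdfn dw gdzq!
    print(caesar.decrypt(secret, 3))  # Attack at dawn!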
| 29
| 63
| 0.512414
| 142
| 1,450
| 5.105634
| 0.302817
| 0.082759
| 0.077241
| 0.033103
| 0.427586
| 0.427586
| 0.366897
| 0.366897
| 0.366897
| 0.366897
| 0
| 0.001157
| 0.404138
| 1,450
| 50
| 64
| 29
| 0.837963
| 0.085517
| 0
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.035714
| 0
| 0.357143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f292e080e8bc6567932c91ed5f7d509146d3ac76
| 473
|
py
|
Python
|
programming-logic/teste.py
|
raulrosapacheco/python3-udemy
|
b84e6f82417aecd0e2a28c3fb3cb222e057a660b
|
[
"MIT"
] | null | null | null |
programming-logic/teste.py
|
raulrosapacheco/python3-udemy
|
b84e6f82417aecd0e2a28c3fb3cb222e057a660b
|
[
"MIT"
] | null | null | null |
programming-logic/teste.py
|
raulrosapacheco/python3-udemy
|
b84e6f82417aecd0e2a28c3fb3cb222e057a660b
|
[
"MIT"
] | null | null | null |
"""
Split: split a string
Join: join a list (of str)
Enumerate: enumerate the elements of a list (iterables)
"""
string ='O Brasil é o pais do futebol, o Brasil é penta.'
lista_1 = string.split(' ')
lista_2 = string.split(',')
print(lista_1)
print(lista_2)
palavra = ''
contagem = 0
for valor in lista_1:
print(f'The word {valor} appeared {lista_1.count(valor)}x in the sentence')
qtd_vezes = lista_1.count(valor)
if qtd_vezes > contagem:
contagem = qtd_vezes
| 23.65
| 73
| 0.69556
| 74
| 473
| 4.310811
| 0.527027
| 0.094044
| 0.050157
| 0.100313
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020672
| 0.181818
| 473
| 20
| 74
| 23.65
| 0.803618
| 0.213531
| 0
| 0
| 0
| 0
| 0.29589
| 0.063014
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f296278ff7fbbd618f4bc706e8d6af3757d8034e
| 2,547
|
py
|
Python
|
grizzly_cli/argparse/__init__.py
|
mgor/grizzly-cli
|
00da1a5a822baefedf61497120fd52dbb5203f12
|
[
"MIT"
] | null | null | null |
grizzly_cli/argparse/__init__.py
|
mgor/grizzly-cli
|
00da1a5a822baefedf61497120fd52dbb5203f12
|
[
"MIT"
] | null | null | null |
grizzly_cli/argparse/__init__.py
|
mgor/grizzly-cli
|
00da1a5a822baefedf61497120fd52dbb5203f12
|
[
"MIT"
] | 1
|
2021-11-02T09:36:21.000Z
|
2021-11-02T09:36:21.000Z
|
import sys
import re
from typing import Any, Optional, IO, Sequence
from argparse import ArgumentParser as CoreArgumentParser, Namespace, _SubParsersAction
from .markdown import MarkdownFormatter, MarkdownHelpAction
from .bashcompletion import BashCompletionAction, hook as bashcompletion_hook
ArgumentSubParser = _SubParsersAction
class ArgumentParser(CoreArgumentParser):
def __init__(self, markdown_help: bool = False, bash_completion: bool = False, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self.markdown_help = markdown_help
self.bash_completion = bash_completion
if self.markdown_help:
self.add_argument('--md-help', action=MarkdownHelpAction)
if self.bash_completion:
self.add_argument('--bash-completion', action=BashCompletionAction)
self._optionals.title = 'optional arguments'
def error_no_help(self, message: str) -> None:
sys.stderr.write('{}: error: {}\n'.format(self.prog, message))
sys.exit(2)
def print_help(self, file: Optional[IO[str]] = None) -> None:
'''Hook to make help more command-line friendly, if there are markdown markers in the text.
'''
if not self.markdown_help:
super().print_help(file)
return
if self.formatter_class is not MarkdownFormatter:
original_description = self.description
original_actions = self._actions
# code block "markers" are not really nice to have in cli help
if self.description is not None:
self.description = '\n'.join([line for line in self.description.split('\n') if '```' not in line])
self.description = self.description.replace('\n\n', '\n')
for action in self._actions:
if action.help is not None:
# remove any markdown link markers
action.help = re.sub(r'\[([^\]]*)\]\([^\)]*\)', r'\1', action.help)
super().print_help(file)
if self.formatter_class is not MarkdownFormatter:
self.description = original_description
self._actions = original_actions
def parse_args(self, args: Optional[Sequence[str]] = None, namespace: Optional[Namespace] = None) -> Namespace: # type: ignore
'''Hook to add `--bash-complete` to all parsers, if enabled for parser.
'''
if self.bash_completion:
bashcompletion_hook(self)
return super().parse_args(args, namespace)
| 38.014925
| 131
| 0.645465
| 292
| 2,547
| 5.489726
| 0.335616
| 0.065502
| 0.039925
| 0.024953
| 0.07985
| 0.052402
| 0.052402
| 0
| 0
| 0
| 0
| 0.001046
| 0.249313
| 2,547
| 66
| 132
| 38.590909
| 0.837343
| 0.110718
| 0
| 0.146341
| 0
| 0
| 0.042629
| 0.009769
| 0
| 0
| 0
| 0
| 0
| 1
| 0.097561
| false
| 0
| 0.146341
| 0
| 0.317073
| 0.073171
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f29854376d62be05bf8d63dd4375c7cfd29ed77c
| 6,192
|
py
|
Python
|
ipa_util/validate.py
|
koolspin/vipa
|
f5b79a6ab4ce60975ff5ee6f173b97eebaf99b14
|
[
"MIT"
] | null | null | null |
ipa_util/validate.py
|
koolspin/vipa
|
f5b79a6ab4ce60975ff5ee6f173b97eebaf99b14
|
[
"MIT"
] | null | null | null |
ipa_util/validate.py
|
koolspin/vipa
|
f5b79a6ab4ce60975ff5ee6f173b97eebaf99b14
|
[
"MIT"
] | null | null | null |
import plistlib
from pathlib import Path
from datetime import datetime, timezone, timedelta
class Validate:
"""
Validate an unpacked .ipa file in various ways
The following rules are enforced. All are treated as errors, except as noted:
req-001: The root must contain a sub-directory called 'Payload'
req-002: Payload must contain a single .app sub-directory
req-003: The .app root must contain an Info.plist file
req-004: The application-identifier prefix from the provisioning profile Entitlements section must match one of
the values in the ApplicationIdentifierPrefix array
req-005: WARNING: Should warn if the provisioning profile has expired
req-006: The app id from the Entitlements section must match the app id from Info.plist, taking wildcards into account.
req-007: Executable files should be in the correct format for iOS devices (armv7, armv7s, arm64, etc)
"""
def __init__(self, dest_path) -> None:
"""
__init__
:param dest_path: The path to the unpacked .ipa file (location of the Payload folder)
"""
super().__init__()
self._root_path = Path(dest_path)
self._payload_path = None
self._app_dir = None
self._plist_file = None
self._bundle_id = None
self._executable_file = None
@property
def app_dir(self):
return self._app_dir
@property
def executable_name(self):
return self._executable_file
@property
def executable_path(self):
return self._app_dir / self._executable_file
def validate_structure(self):
"""
Validates the basic structure of an .ipa file
:return:
"""
# req-001
self._payload_path = self._root_path / 'Payload'
if not self._payload_path.is_dir():
raise Exception("Root Payload path not found")
# req-002
app_dirs = sorted(self._payload_path.glob('*.app'))
if len(app_dirs) == 0:
raise Exception("No .app directories found within Payload")
if len(app_dirs) > 1:
raise Exception("Multiple .app directories found within Payload")
for dir1 in app_dirs:
if not dir1.is_dir():
raise Exception("{0} is not a directory".format(dir1))
# req-003
self._app_dir = dir1
print('Found app: {0}'.format(dir1))
self._plist_file = self._app_dir / 'Info.plist'
if not self._plist_file.is_file():
raise Exception("Info.plist file was not found in the app bundle")
def extract_plist(self):
"""
Extracts information from the Info.plist file
:return: Dictionary representation of Info.plist contents
"""
with self._plist_file.open('rb') as plist_fp:
p_dict = plistlib.load(plist_fp)
self._bundle_id = p_dict.get('CFBundleIdentifier')
self._executable_file = p_dict.get('CFBundleExecutable')
return p_dict
def extract_provisioning_plist(self, embedded_prov_plist_path):
"""
Extracts information from the Info.plist file
:param embedded_prov_plist_path: Full path to the plist file which is embedded in the provisioning profile
:return: Dictionary representation of embedded.mobileprovision contents
"""
with embedded_prov_plist_path.open('rb') as plist_fp:
p_dict = plistlib.load(plist_fp)
return p_dict
def validate_provisioning_plist(self, plist_dict):
"""
Validate the embedded provisioning plist which was extracted in a previous step.
:param plist_dict: Dictionary representation of the embedded.mobileprovision file
:return: None
"""
app_id_prefix_array = plist_dict['ApplicationIdentifierPrefix']
entitlements_dict = plist_dict['Entitlements']
app_identifier_raw = entitlements_dict.get('application-identifier')
ix = app_identifier_raw.find('.')
if ix >= 0:
app_identifier_prefix = app_identifier_raw[:ix]
app_id = app_identifier_raw[ix+1:]
else:
app_identifier_prefix = app_identifier_raw
app_id = ''
get_task_allow = entitlements_dict.get('get-task-allow')
keychain_groups = entitlements_dict.get('keychain-access-groups')
# req-004
if app_identifier_prefix not in app_id_prefix_array:
raise Exception('The entitlements application-identifier {0} does not match any of the given app id prefixes'.format(app_identifier_prefix))
# req-005
exp_date = plist_dict['ExpirationDate']
now = datetime.now()
if exp_date < now:
print('The embedded provisioning profile has expired on {0}'.format(exp_date))
# req-006
self._validate_app_id(self._bundle_id, app_id)
def _validate_app_id(self, app_id_from_info_plist, app_id_from_provisioning_file):
"""
Validate the app ids from the Info.plist and provisioning profile to see if they match, taking wildcards into account.
Examples:
com.acme.app1, com.acme.app1 => match
com.acme.app1, com.acme.app2 => fail
com.acme.app1, com.acme.* => match
com.acme.app1, * => match
:param app_id_from_info_plist: Full appid from the Info.plist file, ex: com.acme.app1
:param app_id_from_provisioning_file: App id (possibly wildcard) from the provisioning profile
:return: None
"""
has_wildcard = False
ix = app_id_from_provisioning_file.find('*')
if ix >= 0:
has_wildcard = True
match_app_id = app_id_from_provisioning_file[:ix]
else:
match_app_id = app_id_from_provisioning_file
if has_wildcard:
wc_len = len(match_app_id)
match = (app_id_from_info_plist[:wc_len] == match_app_id)
else:
match = (app_id_from_info_plist == match_app_id)
if not match:
raise Exception('Bundle ID does not match app ID from provisioning profile: {0}'.format(app_id_from_provisioning_file))
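The _validate_app_id docstring above lists the match/fail cases for wildcard app ids. The following is a standalone, simplified sketch of that rule (not part of the original module, and it assumes the wildcard only appears as a trailing '*'):

def app_id_matches(bundle_id: str, profile_app_id: str) -> bool:
    if profile_app_id.endswith('*'):
        return bundle_id.startswith(profile_app_id[:-1])
    return bundle_id == profile_app_id

assert app_id_matches('com.acme.app1', 'com.acme.app1')
assert not app_id_matches('com.acme.app1', 'com.acme.app2')
assert app_id_matches('com.acme.app1', 'com.acme.*')
assert app_id_matches('com.acme.app1', '*')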
| 41.837838
| 152
| 0.653424
| 805
| 6,192
| 4.782609
| 0.22236
| 0.035065
| 0.03039
| 0.038182
| 0.175065
| 0.088831
| 0.058701
| 0.038442
| 0.02026
| 0.02026
| 0
| 0.014346
| 0.268249
| 6,192
| 147
| 153
| 42.122449
| 0.835356
| 0.314599
| 0
| 0.144578
| 0
| 0
| 0.14664
| 0.023676
| 0
| 0
| 0
| 0
| 0
| 1
| 0.108434
| false
| 0
| 0.036145
| 0.036145
| 0.216867
| 0.024096
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f29a992ba965f8e9cb047c742d3ca46176d0fa03
| 3,012
|
py
|
Python
|
netests/comparators/facts_compare.py
|
Netests/netests
|
1a48bda461761c4ec854d6fa0c38629049009a4a
|
[
"MIT"
] | 14
|
2020-06-08T07:34:59.000Z
|
2022-03-14T08:52:03.000Z
|
netests/comparators/facts_compare.py
|
Netests/netests
|
1a48bda461761c4ec854d6fa0c38629049009a4a
|
[
"MIT"
] | null | null | null |
netests/comparators/facts_compare.py
|
Netests/netests
|
1a48bda461761c4ec854d6fa0c38629049009a4a
|
[
"MIT"
] | 3
|
2020-06-19T03:57:05.000Z
|
2020-06-22T22:46:42.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from nornir.core.task import Task
from netests import log
from netests.tools.file import open_file
from netests.protocols.facts import Facts
from netests.select_vars import select_host_vars
from netests.comparators.log_compare import log_compare, log_no_yaml_data
from netests.constants import NOT_SET, FACTS_WORKS_KEY, FACTS_DATA_HOST_KEY
from netests.exceptions.netests_exceptions import (
NetestsOverideTruthVarsKeyUnsupported
)
def _compare_transit_facts(task, options={}):
task.host[FACTS_WORKS_KEY] = _compare_facts(
host_keys=task.host.keys(),
hostname=task.host.name,
groups=task.host.groups,
facts_host_data=task.host.get(FACTS_DATA_HOST_KEY, None),
test=False,
options=options,
task=task
)
return task.host[FACTS_WORKS_KEY]
def _compare_facts(
host_keys,
hostname: str,
groups: list,
facts_host_data: Facts,
test=False,
options={},
task=Task
) -> bool:
if (
'own_vars' in options.keys() and
options.get('own_vars') is not None and
'enable' in options.get('own_vars').keys() and
options.get('own_vars').get('enable') is True
):
raise NetestsOverideTruthVarsKeyUnsupported()
else:
if test:
facts_yaml_data = open_file(
path="tests/features/src/facts_tests.yml"
).get(hostname)
else:
facts_yaml_data = select_host_vars(
hostname=hostname,
groups=groups,
protocol="facts"
)
log.debug(
"FACTS_DATA_HOST_KEY in host_keys="
f"{FACTS_DATA_HOST_KEY in host_keys}\n"
"facts_yaml_data is not None="
f"{facts_yaml_data is not None}"
)
if (
FACTS_DATA_HOST_KEY in host_keys and
facts_yaml_data is not None
):
verity_facts = Facts(
hostname=hostname,
domain=facts_yaml_data.get('domain', NOT_SET),
version=facts_yaml_data.get('version', NOT_SET),
build=facts_yaml_data.get('build', NOT_SET),
serial=facts_yaml_data.get('serial', NOT_SET),
base_mac=facts_yaml_data.get('base_mac', NOT_SET),
memory=facts_yaml_data.get('memory', NOT_SET),
vendor=facts_yaml_data.get('vendor', NOT_SET),
model=facts_yaml_data.get('model', NOT_SET),
interfaces_lst=facts_yaml_data.get('interfaces', list()),
options=facts_host_data.options
)
log_compare(verity_facts, facts_host_data, hostname, groups)
return verity_facts == facts_host_data
else:
log_no_yaml_data(
"facts",
FACTS_DATA_HOST_KEY,
"FACTS_DATA_HOST_KEY",
hostname,
groups
)
return True
| 31.705263
| 75
| 0.60259
| 363
| 3,012
| 4.688705
| 0.217631
| 0.075206
| 0.106933
| 0.084606
| 0.220917
| 0.145711
| 0.078731
| 0
| 0
| 0
| 0
| 0.000958
| 0.306773
| 3,012
| 94
| 76
| 32.042553
| 0.814176
| 0.014276
| 0
| 0.134146
| 0
| 0
| 0.097742
| 0.011459
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02439
| false
| 0
| 0.097561
| 0
| 0.158537
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f29b2579ee8dd83fbc2ef37d5767b8505b228c21
| 1,579
|
py
|
Python
|
graph.py
|
shinmura0/tkinter_kouza
|
1617a01591bf3cee808c4b3e62dc785cc76381f2
|
[
"MIT"
] | null | null | null |
graph.py
|
shinmura0/tkinter_kouza
|
1617a01591bf3cee808c4b3e62dc785cc76381f2
|
[
"MIT"
] | null | null | null |
graph.py
|
shinmura0/tkinter_kouza
|
1617a01591bf3cee808c4b3e62dc785cc76381f2
|
[
"MIT"
] | null | null | null |
# Boilerplate imports
from tkinter import Tk, Button, X, Frame, GROOVE, W, E, Label, Entry, END
import numpy as np
import os
from matplotlib import pyplot as plt
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
# Function that plots the given data
def graph(data):
# Create a figure of size (6, 3)
fig = plt.Figure(figsize=(6,3))
ax1 = fig.add_subplot(111)
# Plot the data that was passed in
ax1.plot(data)
# Draw the graph onto the tkinter canvas
canvas = FigureCanvasTkAgg(fig, frame_3)
canvas.draw()
canvas.get_tk_widget().grid(row=1, column=0)
return fig
# Read the input forms and plot their values
def plot():
# Read the input form values
a = box1.get()
b = box2.get()
c = box3.get()
# Convert to a list
result = []
result.append(int(a))
result.append(int(b))
result.append(int(c))
# Pass the data to the plotting function
graph(result)
# Boilerplate - the main body starts here
if __name__ == '__main__':
# tkinter setup
root = Tk()
# Button 1
frame_1 = Frame(root, bd=4, relief=GROOVE) # define the frame for button 1
frame_1.grid(row=0, column=0) # position of button 1
btn1 = Button(frame_1, text='描画', command=plot, font=("",20)) # callback when button 1 is pressed
btn1.pack(fill=X) # place button 1
# Graph
frame_3 = Frame(root, bd=4, relief=GROOVE) # frame that holds the graph
frame_3.grid(row=1, column=0)
canvas = FigureCanvasTkAgg(graph([]), frame_3)
# Input forms
box1 = Entry(width=3) # define the input form
box1.place(x=20, y=5) # position of the input form
box2 = Entry(width=3) # define the input form
box2.place(x=50, y=5) # position of the input form
box3 = Entry(width=3) # define the input form
box3.place(x=80, y=5) # position of the input form
# Start the tkinter main loop
root.mainloop()
| 24.292308
| 82
| 0.60038
| 210
| 1,579
| 4.428571
| 0.480952
| 0.025806
| 0.048387
| 0.064516
| 0.109677
| 0.077419
| 0.077419
| 0.077419
| 0
| 0
| 0
| 0.04766
| 0.269158
| 1,579
| 65
| 83
| 24.292308
| 0.757366
| 0.151995
| 0
| 0
| 0
| 0
| 0.008
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0
| 0.131579
| 0
| 0.210526
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2a0401693fdb2fa350f876989f4e1cc6a3ea3c3
| 698
|
py
|
Python
|
im3agents/tests/test_farmers.py
|
IMMM-SFA/im3agents
|
544e89803379a44108227e9cd83ce09f6974fe2d
|
[
"BSD-2-Clause"
] | null | null | null |
im3agents/tests/test_farmers.py
|
IMMM-SFA/im3agents
|
544e89803379a44108227e9cd83ce09f6974fe2d
|
[
"BSD-2-Clause"
] | 4
|
2020-05-27T18:50:29.000Z
|
2020-09-24T14:27:00.000Z
|
im3agents/tests/test_farmers.py
|
IMMM-SFA/im3agents
|
544e89803379a44108227e9cd83ce09f6974fe2d
|
[
"BSD-2-Clause"
] | null | null | null |
"""Farmer class tests.
:author: Someone
:email: [email protected]
License: BSD 2-Clause, see LICENSE and DISCLAIMER files
"""
import unittest
from im3agents import FarmerOne
class TestFarmers(unittest.TestCase):
def test_farmerone(self):
error_min_age = FarmerOne(age=-1)
error_max_age = FarmerOne(age=151)
valid = FarmerOne(age=32)
# expect value errors for exceeding min and max
with self.assertRaises(ValueError):
error_min_age.age
with self.assertRaises(ValueError):
error_max_age.age
# expect valid age
self.assertEqual(valid.age, 32)
if __name__ == '__main__':
unittest.main()
| 19.388889
| 56
| 0.659026
| 85
| 698
| 5.211765
| 0.541176
| 0.081264
| 0.049661
| 0.13544
| 0.158014
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019194
| 0.253582
| 698
| 35
| 57
| 19.942857
| 0.831094
| 0.270774
| 0
| 0.142857
| 0
| 0
| 0.016
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 1
| 0.071429
| false
| 0
| 0.142857
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2a1157fdb66b63890403106ad4f269358b5419e
| 1,744
|
py
|
Python
|
day-24/part-1/th-ch.py
|
evqna/adventofcode-2020
|
526bb9c87057d02bda4de9647932a0e25bdb3a5b
|
[
"MIT"
] | 12
|
2020-11-30T19:22:18.000Z
|
2021-06-21T05:55:58.000Z
|
day-24/part-1/th-ch.py
|
evqna/adventofcode-2020
|
526bb9c87057d02bda4de9647932a0e25bdb3a5b
|
[
"MIT"
] | 13
|
2020-11-30T17:27:22.000Z
|
2020-12-22T17:43:13.000Z
|
day-24/part-1/th-ch.py
|
evqna/adventofcode-2020
|
526bb9c87057d02bda4de9647932a0e25bdb3a5b
|
[
"MIT"
] | 3
|
2020-12-01T08:49:40.000Z
|
2022-03-26T21:47:38.000Z
|
from tool.runners.python import SubmissionPy
WHITE = 0
BLACK = 1
DIRECTIONS = {
"e": (-1, 0), # (x, y) with axes right/bottom
"se": (-0.5, 1),
"sw": (0.5, 1),
"w": (1, 0),
"nw": (0.5, -1),
"ne": (-0.5, -1),
}
class ThChSubmission(SubmissionPy):
def run(self, s):
flipped_tiles = {}
for line in s.split("\n"):
i = 0
x, y = (0, 0) # ref
while i < len(line):
if line[i] == "s" or line[i] == "n":
direction = line[i : i + 2]
i += 2
else:
direction = line[i]
i += 1
dx, dy = DIRECTIONS[direction]
x += dx
y += dy
flipped_tiles[(x, y)] = (flipped_tiles.get((x, y), WHITE) + 1) % 2
return sum(tile == BLACK for tile in flipped_tiles.values())
def test_th_ch():
"""
Run `python -m pytest ./day-24/part-1/th-ch.py` to test the submission.
"""
assert (
ThChSubmission().run(
"""
seeswwswswwnenewsewsw
neeenesenwnwwswnenewnwwsewnenwseswesw
seswneswswsenwwnwse
nwnwneseeswswnenewneswwnewseswneseene
swweswneswnenwsewnwneneseenw
eesenwseswswnenwswnwnwsewwnwsene
sewnenenenesenwsewnenwwwse
wenwwweseeeweswwwnwwe
wsweesenenewnwwnwsenewsenwwsesesenwne
neeswseenwwswnwswswnw
nenwswwsewswnenenewsenwsenwnesesenew
enewnwewneswsewnwswenweswnenwsenwsw
sweneswneswneneenwnewenewwneswswnese
swwesenesewenwneswnwwneseswwne
enesenwswwswneneswsenwnewswseenwsese
wnwnesenesenenwwnenwsewesewsesesew
nenewswnwewswnenesenwnesewesw
eneswnwswnwsenenwnwnwwseeswneewsenese
neswnwewnwnwseenwseesewsenwsweewe
wseweeenwnesenwwwswnew
""".strip()
)
== 10
)
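To make the DIRECTIONS table above easier to follow, here is a small worked walk using the same parsing rule as run() (the path string is just an example): "esew" moves east, then south-east, then west, and ends one tile south-east of the reference tile.

path, x, y = "esew", 0, 0
i = 0
while i < len(path):
    step = path[i:i + 2] if path[i] in "sn" else path[i]
    i += len(step)
    dx, dy = DIRECTIONS[step]
    x, y = x + dx, y + dy
print(x, y)  # -0.5 1, i.e. the "se" neighbour of the reference tile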
| 25.275362
| 78
| 0.614106
| 160
| 1,744
| 6.65625
| 0.55
| 0.007512
| 0.011268
| 0.028169
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024623
| 0.278096
| 1,744
| 68
| 79
| 25.647059
| 0.821287
| 0.06078
| 0
| 0
| 0
| 0
| 0.014184
| 0
| 0
| 0
| 0
| 0
| 0.027778
| 1
| 0.055556
| false
| 0
| 0.027778
| 0
| 0.138889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2a14427a74c318066628e0e58bdecded62e08df
| 259
|
py
|
Python
|
Python/tais_formula.py
|
mimseyedi/Kattis
|
a99ea2112544e89cc466feb7d81ffe6eb017f7e2
|
[
"MIT"
] | null | null | null |
Python/tais_formula.py
|
mimseyedi/Kattis
|
a99ea2112544e89cc466feb7d81ffe6eb017f7e2
|
[
"MIT"
] | null | null | null |
Python/tais_formula.py
|
mimseyedi/Kattis
|
a99ea2112544e89cc466feb7d81ffe6eb017f7e2
|
[
"MIT"
] | null | null | null |
n = int(input())
l1 = list()
l2 = list()
for _ in range(n):
t, v = input().split()
l1.append(int(t))
l2.append(float(v))
result = 0
for i in range(len(l1) - 1):
result += ((l2[i] + l2[i + 1]) / 2) * (l1[i + 1] - l1[i])
print(result / 1000)
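A worked example on assumed input (not the official Kattis test data) showing what the trapezoid loop above computes:

t = [100, 200]   # timestamps
v = [1.0, 2.0]   # flow readings
area = sum(((v[i] + v[i + 1]) / 2) * (t[i + 1] - t[i]) for i in range(len(t) - 1))
print(area / 1000)  # ((1.0 + 2.0) / 2) * 100 = 150, divided by 1000 -> 0.15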
| 17.266667
| 61
| 0.505792
| 47
| 259
| 2.765957
| 0.468085
| 0.107692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092784
| 0.250965
| 259
| 14
| 62
| 18.5
| 0.57732
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.090909
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2a1e765b746fab626eeae28ec0da8d5f9142f43
| 643
|
py
|
Python
|
modules/constant.py
|
aubravo/Clasificacion-de-actividad-volcanica
|
0f7be0d77509fa13948a0f714103ce6e6d8cb2ae
|
[
"MIT"
] | 1
|
2021-10-20T02:42:20.000Z
|
2021-10-20T02:42:20.000Z
|
modules/constant.py
|
aubravo/ActividadVolcanica
|
0f7be0d77509fa13948a0f714103ce6e6d8cb2ae
|
[
"MIT"
] | null | null | null |
modules/constant.py
|
aubravo/ActividadVolcanica
|
0f7be0d77509fa13948a0f714103ce6e6d8cb2ae
|
[
"MIT"
] | null | null | null |
"""----------------------------------------------------------------------------
This is the core of the parsing stage:
*re_findComments will search for everything between the $$ and EOL
*re_findDataLabels will search for everything between the start of a tag
(##) and the start of the next tag, ignoring the contents of the next tag,
while grouping into tag name and tag contents
----------------------------------------------------------------------------"""
re_findComments = r'\$\$[\s\S]*?(?=\n)'
re_findBlocks = r'(##TITLE\=[\W\w]*?##END=)'
re_findDataLabels = r'##([\w\W]*?)=([\w\W]*?(?=\n##[\w\W]))'
FILE = True
DIR = False
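A hedged demo of the patterns above on a tiny, invented JCAMP-like snippet (real files are larger; this only shows the grouping behaviour):

import re

sample = "$$ a comment line\n##TITLE=demo\n##XUNITS=1/CM\n##END=\n"
print(re.findall(re_findComments, sample))    # ['$$ a comment line']
print(re.findall(re_findBlocks, sample))      # ['##TITLE=demo\n##XUNITS=1/CM\n##END=']
print(re.findall(re_findDataLabels, sample))  # [('TITLE', 'demo'), ('XUNITS', '1/CM')]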
| 45.928571
| 79
| 0.494557
| 79
| 643
| 3.962025
| 0.506329
| 0.031949
| 0.083067
| 0.146965
| 0.210863
| 0.210863
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133748
| 643
| 13
| 80
| 49.461538
| 0.561939
| 0.724728
| 0
| 0
| 0
| 0
| 0.467836
| 0.362573
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2a3e15dbd9f5aecf7c8735a8a4cd1ee5164b116
| 5,879
|
py
|
Python
|
venv/lib/python3.6/site-packages/ansible_collections/f5networks/f5_modules/tests/unit/modules/network/f5/test_bigip_message_routing_transport_config.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 1
|
2020-01-22T13:11:23.000Z
|
2020-01-22T13:11:23.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/f5networks/f5_modules/tests/unit/modules/network/f5/test_bigip_message_routing_transport_config.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 12
|
2020-02-21T07:24:52.000Z
|
2020-04-14T09:54:32.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/f5networks/f5_modules/tests/unit/modules/network/f5/test_bigip_message_routing_transport_config.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.f5networks.f5_modules.plugins.modules.bigip_message_routing_transport_config import (
ApiParameters, ModuleParameters, ModuleManager, GenericModuleManager, ArgumentSpec
)
from ansible_collections.f5networks.f5_modules.tests.unit.compat import unittest
from ansible_collections.f5networks.f5_modules.tests.unit.compat.mock import Mock, patch
from ansible_collections.f5networks.f5_modules.tests.unit.modules.utils import set_module_args
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
name='foo',
partition='foobar',
description='my description',
profiles=['genericmsg', 'foo_udp'],
src_addr_translation=dict(
type='snat',
pool='some_pool1'
),
src_port=1023,
rules=['rule1', 'rule2'],
)
p = ModuleParameters(params=args)
assert p.name == 'foo'
assert p.partition == 'foobar'
assert p.description == 'my description'
assert p.profiles == ['/foobar/genericmsg', '/foobar/foo_udp']
assert p.snat_type == 'snat'
assert p.snat_pool == '/foobar/some_pool1'
assert p.src_port == 1023
assert p.rules == ['/foobar/rule1', '/foobar/rule2']
def test_api_parameters(self):
args = load_fixture('load_generic_transport_config.json')
p = ApiParameters(params=args)
assert p.name == 'gen1'
assert p.partition == 'Common'
assert p.profiles == ['/Common/diametersession', '/Common/tcp']
assert p.snat_type == 'snat'
assert p.src_port == 0
assert p.snat_pool == '/Common/test_snat'
assert p.rules == ['/Common/test']
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
self.p2 = patch('ansible_collections.f5networks.f5_modules.plugins.modules.bigip_message_routing_transport_config.tmos_version')
self.p3 = patch('ansible_collections.f5networks.f5_modules.plugins.modules.bigip_message_routing_transport_config.send_teem')
self.m2 = self.p2.start()
self.m2.return_value = '14.1.0'
self.m3 = self.p3.start()
self.m3.return_value = True
def tearDown(self):
self.p2.stop()
self.p3.stop()
def test_create_generic_transport(self, *args):
set_module_args(dict(
name='foo',
partition='foobar',
description='my description',
profiles=['genericmsg', 'foo_udp'],
src_addr_translation=dict(
type='snat',
pool='some_pool1'
),
src_port=1023,
rules=['rule1', 'rule2'],
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods in the specific type of manager
gm = GenericModuleManager(module=module)
gm.exists = Mock(return_value=False)
gm.create_on_device = Mock(return_value=True)
mm = ModuleManager(module=module)
mm.version_less_than_14 = Mock(return_value=False)
mm.get_manager = Mock(return_value=gm)
results = mm.exec_module()
assert results['changed'] is True
assert results['description'] == 'my description'
assert results['src_addr_translation'] == dict(type='snat', pool='/foobar/some_pool1')
assert results['src_port'] == 1023
assert results['rules'] == ['/foobar/rule1', '/foobar/rule2']
assert results['profiles'] == ['/foobar/genericmsg', '/foobar/foo_udp']
def test_update_generic_transport(self, *args):
set_module_args(dict(
name='gen1',
src_port=1024,
rules=['/Common/barfoo'],
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
current = ApiParameters(params=load_fixture('load_generic_transport_config.json'))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode,
)
# Override methods in the specific type of manager
gm = GenericModuleManager(module=module)
gm.exists = Mock(return_value=True)
gm.update_on_device = Mock(return_value=True)
gm.read_current_from_device = Mock(return_value=current)
mm = ModuleManager(module=module)
mm.version_less_than_14 = Mock(return_value=False)
mm.get_manager = Mock(return_value=gm)
results = mm.exec_module()
assert results['changed'] is True
assert results['src_port'] == 1024
assert results['rules'] == ['/Common/barfoo']
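# For reference, a hedged sketch of invoking this test module directly through
# pytest's Python API; the path below is an assumption about the layout of an
# ansible_collections checkout and may need adjusting.
if __name__ == '__main__':
    raise SystemExit(pytest.main([
        'ansible_collections/f5networks/f5_modules/tests/unit/modules/network/f5/'
        'test_bigip_message_routing_transport_config.py',
        '-v',
    ]))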
| 33.403409
| 136
| 0.628678
| 675
| 5,879
| 5.26963
| 0.272593
| 0.029519
| 0.037953
| 0.050604
| 0.57436
| 0.545122
| 0.494518
| 0.447287
| 0.403992
| 0.350576
| 0
| 0.019266
| 0.258377
| 5,879
| 175
| 137
| 33.594286
| 0.79656
| 0.042014
| 0
| 0.367647
| 0
| 0
| 0.159289
| 0.0544
| 0
| 0
| 0
| 0
| 0.176471
| 1
| 0.051471
| false
| 0.022059
| 0.073529
| 0
| 0.154412
| 0.007353
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2a4c80d5b858823c4ef9a8432cc56f697eb6900
| 3,618
|
py
|
Python
|
tests/test_builder_path_parameter.py
|
tabebqena/flask-open-spec
|
ee1fd9cd349e46e1d8295fc2799898731392af6a
|
[
"MIT"
] | null | null | null |
tests/test_builder_path_parameter.py
|
tabebqena/flask-open-spec
|
ee1fd9cd349e46e1d8295fc2799898731392af6a
|
[
"MIT"
] | null | null | null |
tests/test_builder_path_parameter.py
|
tabebqena/flask-open-spec
|
ee1fd9cd349e46e1d8295fc2799898731392af6a
|
[
"MIT"
] | null | null | null |
from ..open_oas.builder.builder import OasBuilder
from unittest import TestCase
from ..tests.schemas.schemas import PaginationSchema
from ..open_oas.decorators import Deferred, path_parameter
class TestPathParameter(TestCase):
def run_tests(self, builder: OasBuilder):
data = builder.get_data()
parameters = (
data.get("paths", {}).get("/gists", {})
# .get("get", {})
.get("parameters", [])
)
self.assertNotEqual(parameters, [])
for param in parameters:
self.assertEqual(
param.get("schema", {}).get("$ref", {}),
"#/components/schemas/Pagination",
)
def test_data(self):
data = {
"paths": {
"/gists": {
"parameters": [
{
"schema": PaginationSchema,
"in": "query",
"name": "offsetParam",
"required": False,
},
{
"schema": PaginationSchema,
"in": "query",
"name": "limitParam",
"required": False,
},
],
"get": {
"summary": "Gets a list of users.",
"responses": {"200": {"description": "OK"}},
},
}
},
}
builder = OasBuilder(data)
# pprint(builder.get_data())
self.run_tests(builder)
def test_data_dict_schema(self):
data = {
"paths": {
"/gists": {
"parameters": [
{
"schema": {"type": "object"},
"in": "query",
"name": "offsetParam",
"required": False,
},
{
"schema": {"type": "object"},
"in": "query",
"name": "limitParam",
"required": False,
},
],
"get": {
"summary": "Gets a list of users.",
"responses": {"200": {"description": "OK"}},
},
}
},
}
builder = OasBuilder(data)
# pprint(builder.get_data())
# self.run_tests(builder)
parameters = (
builder.get_data()
.get("paths", {})
.get("/gists", {})
# .get("get", {})
.get("parameters", [])
)
self.assertEqual(
parameters,
data.get("paths", {}).get("/gists", {})
# .get("get", {})
.get("parameters", []),
)
def test_decorator(self):
path_parameter(
["/gists"],
"query",
name="offsetParam",
schema=PaginationSchema,
description="",
)
path_parameter(
["/gists"],
"query",
name="limitParam",
schema=PaginationSchema,
description="",
)
builder = OasBuilder()
# pprint(builder.get_data())
self.run_tests(builder)
def tearDown(self) -> None:
Deferred._deferred = []
return super().tearDown()
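# For orientation, a hedged sketch (not the library's exact output) of the fragment
# that run_tests() above expects inside builder.get_data() once PaginationSchema has
# been resolved to a component reference; unrelated keys are omitted.
EXPECTED_GISTS_PARAMETERS = [
    {
        "name": "offsetParam",
        "in": "query",
        "required": False,
        "schema": {"$ref": "#/components/schemas/Pagination"},
    },
    {
        "name": "limitParam",
        "in": "query",
        "required": False,
        "schema": {"$ref": "#/components/schemas/Pagination"},
    },
]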
| 30.661017
| 68
| 0.369541
| 226
| 3,618
| 5.827434
| 0.265487
| 0.027335
| 0.053151
| 0.034169
| 0.558846
| 0.493546
| 0.426727
| 0.364465
| 0.364465
| 0.330296
| 0
| 0.003297
| 0.49696
| 3,618
| 117
| 69
| 30.923077
| 0.72033
| 0.042012
| 0
| 0.534653
| 0
| 0
| 0.133892
| 0.008965
| 0
| 0
| 0
| 0
| 0.029703
| 1
| 0.049505
| false
| 0
| 0.039604
| 0
| 0.108911
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2a7f8c88dbf4887b1d166b409dc1bae27f7d5b9
| 815
|
py
|
Python
|
tests/test_templates.py
|
knipknap/django_searchable
|
6fd9f8aa766477e8648fdbed720e966af1b216b7
|
[
"MIT"
] | 62
|
2018-11-05T09:06:39.000Z
|
2022-02-18T15:33:06.000Z
|
tests/test_templates.py
|
knipknap/django_searchable
|
6fd9f8aa766477e8648fdbed720e966af1b216b7
|
[
"MIT"
] | 4
|
2018-11-05T07:57:27.000Z
|
2021-05-30T00:37:35.000Z
|
tests/test_templates.py
|
knipknap/django_searchable
|
6fd9f8aa766477e8648fdbed720e966af1b216b7
|
[
"MIT"
] | 8
|
2018-11-08T16:10:04.000Z
|
2022-01-27T09:31:53.000Z
|
from django.test import TestCase
from django.test.client import RequestFactory
from django.template import Template, Context
from django.template.loader import render_to_string
from .models import Author, Book
expected_headers = '''
<tr>
<th>Name</th><th>The title</th><th>Comment</th><th>Stars</th><th>AuthorID</th>
</tr>
'''.strip()
class HeadersTest(TestCase):
def setUp(self):
self.maxDiff = None
self.context = {'object_list': Book.objects.all}
author = Author.objects.create(name='MyAuthor', rating=2)
for i in range(11):
Book.objects.create(author=author, title='B'+str(i), rating=10)
def testHeaders1(self):
result = render_to_string('django_find/headers.html', self.context)
self.assertEqual(result.strip(), expected_headers, result)
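# A hedged sketch (not part of the original suite) of rendering the same partial
# through the Template/Context API this module already imports; wrapping it in an
# inline {% include %} is an assumption, not documented django_find usage.
def render_headers_inline(books):
    template = Template("{% include 'django_find/headers.html' %}")
    return template.render(Context({'object_list': books}))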
| 32.6
| 78
| 0.69816
| 111
| 815
| 5.054054
| 0.495496
| 0.071301
| 0.049911
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008798
| 0.16319
| 815
| 24
| 79
| 33.958333
| 0.813783
| 0
| 0
| 0
| 0
| 0.05
| 0.165848
| 0.099509
| 0
| 0
| 0
| 0
| 0.05
| 1
| 0.1
| false
| 0
| 0.25
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2a86a4dc06766b095f7432edceef5b58b99f8ac
| 103,875
|
py
|
Python
|
diabolo_play/scripts/interactive_play.py
|
omron-sinicx/diabolo
|
a0258fdf634d27c7cf185b2e40c6b12699417d36
|
[
"BSD-3-Clause"
] | 11
|
2021-10-15T15:51:24.000Z
|
2021-12-26T16:43:17.000Z
|
diabolo_play/scripts/interactive_play.py
|
omron-sinicx/diabolo
|
a0258fdf634d27c7cf185b2e40c6b12699417d36
|
[
"BSD-3-Clause"
] | null | null | null |
diabolo_play/scripts/interactive_play.py
|
omron-sinicx/diabolo
|
a0258fdf634d27c7cf185b2e40c6b12699417d36
|
[
"BSD-3-Clause"
] | 1
|
2022-02-01T01:58:37.000Z
|
2022-02-01T01:58:37.000Z
|
#!/usr/bin/env python
import sys
import copy
import rospy
import tf_conversions
import tf.transformations as transform
import tf
from math import pi
import math
import thread
import os
import random
import geometry_msgs.msg
from geometry_msgs.msg import Pose, PoseArray
from trajectory_msgs.msg import JointTrajectory, JointTrajectoryPoint
import moveit_msgs.msg
import shape_msgs.msg
import visualization_msgs.msg
import diabolo_gazebo.msg
from diabolo_play.srv import SetInitialStickPositionsRequest, SetInitialStickPositions
from diabolo_play.srv import CreateSticksTrajectoryRequest, CreateSticksTrajectory
from diabolo_play.srv import CreateRobotTrajectory, CreateRobotTrajectoryRequest
from moveit_msgs.srv import GetPlanningScene, GetPlanningSceneRequest
import pandas as pd
import numpy as np
from gazebo_msgs.srv import (
DeleteModel,
DeleteModelRequest,
SpawnModel,
SpawnModelRequest,
)
from diabolo_play.msg import DiaboloMotionSplineSeeds
from diabolo_play.srv import GetDiaboloState, GetDiaboloStateRequest
from std_msgs.msg import String
from std_srvs.srv import Empty, EmptyRequest
import rospkg
from diabolo_gazebo.msg import DiaboloState
from scipy import interpolate
import matplotlib.pyplot as plt
from diabolo_play.motion_knot_points import KnotPointsServer
import yaml
import pickle
class PlayerClass:
def __init__(self):
rospy.init_node("diabolo_player", anonymous=True)
self._rospack = rospkg.RosPack()
self.diabolo_urdf_pack = self._rospack.get_path("diabolo_gazebo")
self.diabolo_urdf_file_path = os.path.join(
self.diabolo_urdf_pack, "urdf", "diabolo.urdf"
)
self._package_directory = self._rospack.get_path("diabolo_play")
self.tf_listener = tf.TransformListener()
self.tf_broadcaster = tf.TransformBroadcaster()
self.marker_count = 0
self.marker_pub = rospy.Publisher(
"visualization_markers", visualization_msgs.msg.Marker, queue_size=100
)
self.marker_array_pub = rospy.Publisher(
"visualization_marker_array",
visualization_msgs.msg.MarkerArray,
queue_size=100,
latch=True,
)
self.pub_stick_poses = rospy.Publisher(
"/diabolo_stick_poses",
geometry_msgs.msg.PoseArray,
queue_size=50,
latch=True,
)
self.pub_diabolo_position = rospy.Publisher(
"/experiment_diabolo_position", geometry_msgs.msg.Pose, queue_size=50
)
self.diabolo_state_pub = rospy.Publisher(
"/experiment_diabolo_state", DiaboloState, queue_size=1
)
self.a_bot_command_pub = rospy.Publisher(
"/a_bot/scaled_pos_joint_traj_controller/command",
JointTrajectory,
queue_size=1,
)
self.b_bot_command_pub = rospy.Publisher(
"/b_bot/scaled_pos_joint_traj_controller/command",
JointTrajectory,
queue_size=1,
)
self.set_robots_initial_position_service = rospy.ServiceProxy(
"/initialize_robots_from_stick_positions", SetInitialStickPositions
)
self.command_robot_trajectory_service = rospy.ServiceProxy(
"/command_robot_traj_from_stick_traj", CreateRobotTrajectory
)
self.a_bot_display_traj_pub = rospy.Publisher(
"/display_a_bot_bioik_trajectory",
moveit_msgs.msg.DisplayTrajectory,
queue_size=1,
)
self.b_bot_display_traj_pub = rospy.Publisher(
"/display_b_bot_bioik_trajectory",
moveit_msgs.msg.DisplayTrajectory,
queue_size=1,
)
self.pause_gazebo_service = rospy.ServiceProxy("/gazebo/pause_physics", Empty)
self.unpause_gazebo_service = rospy.ServiceProxy(
"/gazebo/unpause_physics", Empty
)
self.get_diabolo_state_service = rospy.ServiceProxy(
"/get_observed_diabolo_state", GetDiaboloState
)
self.generate_trajectory_service = rospy.ServiceProxy(
"/generate_stick_trajectory", CreateSticksTrajectory
)
self.get_planning_scene_service = rospy.ServiceProxy(
"/get_planning_scene", GetPlanningScene
)
self.latest_diabolo_state = None
self.create_markers()
self.sim_recorder = None # This will be filled with a DiaboloSimRecorder type if running automated trials
self.current_rot_velocity = 0.0 # TODO: Calculate rotational velocity from experiment data and store here
self.left_traj_plan_marker = None
self.right_traj_plan_marker = None
# self.timer = rospy.Timer(rospy.Duration(0.01), self.get_observed_diabolo_state , oneshot=False)
# If this is true, the intermediate frames are displayed. Useful if the calibration seems off, or the rotations are not correct.
self.pub_rate = 50.0
rospy.set_param("/stick_pose_publish_rate", self.pub_rate)
# This parameter is set by the gazebo launch file if robots are being spawned in gazebo
# If the parameter is true, the program will wait for the service to initialize robot positions
self.constrain_to_plane = True # If true, ignore motion x coordinates
self.DEFAULT_X_COORD = (
0.55 # Set robots to this coordinate if motion constrained to plane
)
self.filename = ""
# This is a dictionary of the functions gotten by spline interpolation from the data
self.motion_functions = {}
# This is the name of the current motion being executed.
# The function(s) to be used can be extracted using this by appending
# "_sl" (for left stick)
# "_sr" (for right stick)
# and using it as the key for the self.motion_functions dictionary
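# e.g. self.motion_functions[self.current_motion + "_sl"] is the left-stick entry
# for the active motion, and the same key with "_sr" is the right-stick entry.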
self.current_motion = ""
self.frame_rate = 120.0
self.last_stick_positions = {}
self.read_transformed_motion_data(
folder=("experiments/output/2020-09-14_motion_extraction/")
)
self.initialize_motion_functions()
self.get_stick_tips_from_tf()
self.create_knot_server()
self.stop_motion_flag = True
self.tilt_offset = 0.0
self.changed_tilt_offset_flag = False
print("Started experiment playback class")
def get_stick_tips_from_tf(self):
# Get initial stick positions from tf.
# If not available, assume robots are at 'diabolo_ready' position
a_t = geometry_msgs.msg.Point()
b_t = geometry_msgs.msg.Point()
try:
self.tf_listener.waitForTransform(
"/world", "/a_bot_diabolo_stick_tip", rospy.Time(), rospy.Duration(1.0)
)
print("Got stick tip positions from tf")
(a_trans, a_rot) = self.tf_listener.lookupTransform(
"/world", "/a_bot_diabolo_stick_tip", rospy.Time(0)
)
(b_trans, b_rot) = self.tf_listener.lookupTransform(
"/world", "/b_bot_diabolo_stick_tip", rospy.Time(0)
)
a_t.x = a_trans[0]
a_t.y = a_trans[1]
a_t.z = a_trans[2]
b_t.x = b_trans[0]
b_t.y = b_trans[1]
b_t.z = b_trans[2]
except:
print("Transforms not available")
print("Initializing with initial position for this robot")
pose_left = self.motion_functions[self.current_motion + "_sl"][
"initial_pose"
]
pose_right = self.motion_functions[self.current_motion + "_sr"][
"initial_pose"
]
a_t = pose_right.position
b_t = pose_left.position
self.last_stick_positions = {"pos_left": b_t, "pos_right": a_t}
def create_knot_server(self):
"""
Create a knot server with the number of points given by the number of knots in the current motion
"""
interactive_knot_points = len(
self.motion_functions[self.current_motion + "_sl"]["time_seed"]
)
if (
self.motion_functions[self.current_motion + "_sl"]["motion_type"]
== "periodic"
):
interactive_knot_points -= 1
pos_left = self.last_stick_positions["pos_left"]
pos_right = self.last_stick_positions["pos_right"]
left_seed_positions = []
right_seed_positions = []
left_seed_positions.append(pos_left)
right_seed_positions.append(pos_right)
left_dict = self.motion_functions[self.current_motion + "_sl"]
right_dict = self.motion_functions[self.current_motion + "_sr"]
for i in range(interactive_knot_points):
# Fill the seed position arrays with the initial seeds if available
left_seed = geometry_msgs.msg.Point(
left_dict["x_knot_seed"][i],
left_dict["y_knot_seed"][i],
left_dict["z_knot_seed"][i],
)
right_seed = geometry_msgs.msg.Point(
right_dict["x_knot_seed"][i],
right_dict["y_knot_seed"][i],
right_dict["z_knot_seed"][i],
)
left_seed_positions.append(left_seed)
right_seed_positions.append(right_seed)
self.knot_point_server = KnotPointsServer(
interactive_knot_points, [left_seed_positions, right_seed_positions]
)
def get_observed_diabolo_state(self, timer):
# Store the real pose/simulated pose of the diabolo to use for prediction
try:
req = GetDiaboloStateRequest()
req.header.stamp = rospy.Time.now()
resp = self.get_diabolo_state_service(req)
if resp.success:
self.latest_diabolo_state = resp.state
# else:
# self.latest_diabolo_pose = None
# self.latest_diabolo_trans_vel = None
except:
self.latest_diabolo_state = None
def _make_marker_from_mesh(
self,
mesh_filename="package://diabolo_play/meshes/diabolo_shell.stl",
namespace="diabolo",
scale=(1, 1, 1),
color=(1, 1, 1),
alpha=1.0,
):
"""
Based on the 'makeMesh()' function from 'moveit_commander/planning_scene_interface.py'
pose is a PoseStamped object.
"""
marker = visualization_msgs.msg.Marker()
marker.header.frame_id = "world"
marker.header.stamp = rospy.Time.now()
marker.ns = namespace
marker.id = self.marker_count
self.marker_count = self.marker_count + 1
marker.type = visualization_msgs.msg.Marker.MESH_RESOURCE
marker.action = visualization_msgs.msg.Marker.ADD
marker.pose.orientation.w = 1.0
marker.scale.x = scale[0]
marker.scale.y = scale[1]
marker.scale.z = scale[2]
marker.color.a = alpha
marker.color.r = color[0]
marker.color.g = color[1]
marker.color.b = color[2]
marker.mesh_resource = mesh_filename
return marker
def create_markers(self):
# Create marker objects from the meshes
self.diabolo_shell_marker = self._make_marker_from_mesh(
"package://diabolo_scene_description/meshes/diabolo_shell.stl",
color=(1, 0, 0),
scale=[0.001, 0.001, 0.001],
namespace="",
)
self.diabolo_fixator_marker = self._make_marker_from_mesh(
"package://diabolo_scene_description/meshes/diabolo_fixators.stl",
color=(0.1, 0.1, 0.1),
scale=[0.001, 0.001, 0.001],
namespace="",
)
self.diabolo_axis_marker = self._make_marker_from_mesh(
"package://diabolo_scene_description/meshes/diabolo_axis.stl",
color=(0.7, 0.7, 0.7),
scale=[0.001, 0.001, 0.001],
namespace="",
)
self.stick_left_marker = self._make_marker_from_mesh(
"package://diabolo_scene_description/meshes/diabolo_stick.stl",
color=(153 / 255.0, 75 / 255.0, 0.1),
scale=[0.001, 0.001, 0.001],
namespace="",
)
self.stick_right_marker = self._make_marker_from_mesh(
"package://diabolo_scene_description/meshes/diabolo_stick.stl",
color=(153 / 255.0, 75 / 255.0, 0.1),
scale=[0.001, 0.001, 0.001],
namespace="",
)
self.holder_marker = self._make_marker_from_mesh(
"package://diabolo_scene_description/meshes/diabolo_mount.stl",
color=(1, 1, 200 / 255.0),
scale=[0.001, 0.001, 0.001],
namespace="",
)
# Add the string
self.line_segments_marker = self._make_marker_from_mesh(
"", color=(204 / 255.0, 100 / 255.0, 0), namespace=""
)
self.line_segments_marker.type = visualization_msgs.msg.Marker.LINE_STRIP
self.line_segments_marker.points.append(
geometry_msgs.msg.Point(1, 1, 1)
) # The left stick tip
self.line_segments_marker.points.append(
geometry_msgs.msg.Point(0, 0, 0)
) # The diabolo center
self.line_segments_marker.points.append(
geometry_msgs.msg.Point(-1, -1, 1)
) # The right stick tip
self.line_segments_marker.scale.x = 0.005 # line width
self.sphere_marker_1 = self._make_marker_from_mesh(
"", color=(0.0, 0.0, 1.0), scale=[0.08, 0.08, 0.08], namespace=""
)
self.sphere_marker_1.type = visualization_msgs.msg.Marker.SPHERE
self.sphere_marker_2 = self._make_marker_from_mesh(
"", color=(0.0, 0.0, 1.0), scale=[0.08, 0.08, 0.08], namespace=""
)
self.sphere_marker_2.type = visualization_msgs.msg.Marker.SPHERE
def update_and_publish_markers(self, poses):
"""
poses needs to be a dict containing "diabolo", "stick_left", "stick_right" poses as geometry_msgs.msg.Pose
"""
self.sphere_marker_1.pose = poses["stick_left"]
self.sphere_marker_2.pose = poses["stick_right"]
# Flip orientations for correct display of the sticks
marker_array = [self.sphere_marker_1, self.sphere_marker_2]
self.marker_array_pub.publish(marker_array)
def read_transformed_motion_data(
self, folder="experiments/output/2020-09-14_motion_extraction/"
):
# This is a different function because the header is formatted differently in the transformed CSV file
linear_accel_file = "linear_accel_stick_motion.csv"
circular_accel_right_file = "circular_accel_right_stick_motion.csv"
circular_accel_left_file = "circular_accel_left_stick_motion.csv"
self.motion_data_dict = {}
# Get linear acceleration stick positions
motion_df = pd.read_csv(
os.path.join(self._package_directory, folder, linear_accel_file),
header=[0, 1, 2],
)
self.motion_data_dict["lin_accel_sl"] = motion_df["stick_left"]
self.motion_data_dict["lin_accel_sr"] = motion_df["stick_right"]
# Get circular acceleration stick positions
motion_df = pd.read_csv(
os.path.join(self._package_directory, folder, circular_accel_right_file),
header=[0, 1, 2],
)
self.motion_data_dict["circ_accel_sr"] = motion_df["stick_right"]
self.motion_data_dict["circ_accel_sl"] = motion_df["stick_left"]
def force_add_motion_function_(self):
"""
This is a helper function to add and overwrite motion functions in the database.
"""
self.current_motion = "lin_accel"
self.motion_functions["circ_accel_sr"]["time_seed"] = (
0.35,
0.65,
0.95,
1.25,
1.55,
1.85,
)
self.motion_functions["circ_accel_sr"]["motion_type"] = "periodic"
self.motion_functions["circ_accel_sl"]["time_seed"] = (
0.35,
0.65,
0.95,
1.25,
1.55,
1.85,
)
self.motion_functions["circ_accel_sl"]["motion_type"] = "periodic"
### For horizontal impulse
left_pose = Pose()
right_pose = Pose()
left_pose.position.x = self.DEFAULT_X_COORD
right_pose.position.x = self.DEFAULT_X_COORD
left_pose.position.y = 0.05
right_pose.position.y = -0.05
left_pose.position.z = 1.25
right_pose.position.z = 1.25
self.motion_functions["horizontal_impulse_short_left_sl"] = {
"x_knot_seed": (0.0, 0.0),
"y_knot_seed": (-0.23, -0.1),
"z_knot_seed": (0.0, 0.0),
"time_seed": (0.6, 1.0, 1.7),
"initial_pose": copy.deepcopy(left_pose),
"motion_type": "periodic",
}
self.motion_functions["horizontal_impulse_short_left_sr"] = {
"x_knot_seed": (0.0, 0.0),
"y_knot_seed": (-0.23, -0.1),
"z_knot_seed": (0.0, 0.0),
"time_seed": (0.8, 1.0, 1.7),
"initial_pose": copy.deepcopy(right_pose),
"motion_type": "periodic",
}
### For lin_accel
self.motion_functions["lin_accel_sr"]["time_seed"] = (0.25, 0.5, 0.9, 1.2)
self.motion_functions["lin_accel_sr"]["motion_type"] = "periodic"
self.motion_functions["lin_accel_sl"]["time_seed"] = (0.25, 0.5, 0.9, 1.2)
self.motion_functions["lin_accel_sl"]["motion_type"] = "periodic"
self.motion_functions["lin_accel_sr"]["x_knot_seed"] = (0.0, 0.0, 0.0)
self.motion_functions["lin_accel_sr"]["y_knot_seed"] = (0.05, 0.0, 0.05)
self.motion_functions["lin_accel_sr"]["z_knot_seed"] = (0.2, 0.4, 0.2)
self.motion_functions["lin_accel_sl"]["x_knot_seed"] = (0.0, 0.0, 0.0)
self.motion_functions["lin_accel_sl"]["y_knot_seed"] = (-0.05, 0.0, -0.05)
self.motion_functions["lin_accel_sl"]["z_knot_seed"] = (-0.15, -0.3, -0.15)
self.motion_functions["lin_accel_sl"][
"initial_pose"
].position.x = self.DEFAULT_X_COORD
self.motion_functions["lin_accel_sl"]["initial_pose"].position.y = 0.1
self.motion_functions["lin_accel_sl"]["initial_pose"].position.z = 1.47
self.motion_functions["lin_accel_sr"][
"initial_pose"
].position.x = self.DEFAULT_X_COORD
self.motion_functions["lin_accel_sr"]["initial_pose"].position.y = -0.1
self.motion_functions["lin_accel_sr"]["initial_pose"].position.z = 1.07
### For vertical throw
left_pose = Pose()
right_pose = Pose()
left_pose.position.x = self.DEFAULT_X_COORD
right_pose.position.x = self.DEFAULT_X_COORD
left_pose.position.y = 0.05
right_pose.position.y = -0.05
left_pose.position.z = 1.25
right_pose.position.z = 1.25
### throw_1.bag and throw_1b.bag settings
# self.motion_functions["vertical_throw_sl"] = {"x_knot_seed":(0.0, 0.0, 0.0), \
# "y_knot_seed":(0.2, 0.65, 0.728), \
# "time_seed": (0.4, 0.8, 0.96), \
# self.motion_functions["vertical_throw_sr"] = {"x_knot_seed":(0.0, 0.0, 0.0), \
# "y_knot_seed":(-0.2, -0.65, -0.728), \
# "time_seed": (0.4, 0.8, 0.96), \
### throw_2.bag settings
# self.motion_functions["vertical_throw_sl"] = {"x_knot_seed":(0.0, 0.0, 0.0), \
# "y_knot_seed":(0.2, 0.65, 0.729), \
# "z_knot_seed":(0.0, 0.0, 0.0), \
# "time_seed": (0.4, 0.8, 0.94), \
# self.motion_functions["vertical_throw_sr"] = {"x_knot_seed":(0.0, 0.0, 0.0), \
# "y_knot_seed":(-0.2, -0.65, -0.729), \
# "z_knot_seed":(0.0, 0.0, 0.0), \
# "time_seed": (0.4, 0.8, 0.94), \
self.motion_functions["vertical_throw_sl"] = {
"x_knot_seed": (0.0, 0.0, 0.0),
"y_knot_seed": (0.2, 0.65, 0.731),
"z_knot_seed": (0.0, 0.0, 0.0),
"time_seed": (0.4, 0.8, 0.92),
"flight_time": 0.5,
"initial_pose": copy.deepcopy(left_pose),
"motion_type": "oneshot",
}
self.motion_functions["vertical_throw_sr"] = {
"x_knot_seed": (0.0, 0.0, 0.0),
"y_knot_seed": (-0.2, -0.65, -0.731),
"z_knot_seed": (0.0, 0.0, 0.0),
"time_seed": (0.4, 0.8, 0.92),
"flight_time": 0.5,
"initial_pose": copy.deepcopy(right_pose),
"motion_type": "oneshot",
}
def add_motion_function(self, name, num_knot_points=5):
# TODO: Create a new motion and add it to self.motion_list and self.motion_functions under that key
# TODO: Use self.motion_functions.keys instead of maintaining self.motion_list
left_pose = Pose()
right_pose = Pose()
left_pose.position.x = self.DEFAULT_X_COORD
right_pose.position.x = self.DEFAULT_X_COORD
left_pose.position.y = 0.05
right_pose.position.y = -0.05
left_pose.position.z = 1.25
right_pose.position.z = 1.25
self.motion_functions[name + "_sl"] = {
"x_knot_seed": [0.0] * num_knot_points,
"y_knot_seed": range(0.1, (num_knot_points + 1) * 0.1, 0.1),
"z_knot_seed": range(0.05, (num_knot_points + 1) * 0.05, 0.05),
"time_seed": range(0.5, (num_knot_points + 1) * 0.5, 0.5),
"initial_pose": copy.deepcopy(left_pose),
"motion_type": "periodic",
}
self.motion_functions[name + "_sr"] = {
"x_knot_seed": [0.0] * num_knot_points,
"y_knot_seed": range(0.1, (num_knot_points + 1) * 0.1, 0.1),
"z_knot_seed": range(0.05, (num_knot_points + 1) * 0.05, 0.05),
"time_seed": range(0.5, (num_knot_points + 1) * 0.5, 0.5),
"initial_pose": copy.deepcopy(right_pose),
"motion_type": "periodic",
}
def initialize_motion_functions(
self, use_saved_values=True, filename="default.pkl"
):
# First add the period motions, for which there is motion capture data
self.motion_functions = {}
path = os.path.join(self._package_directory, "config", filename)
if os.path.exists(path) and use_saved_values:
print("Using stored motion function values")
with open(path, "r") as f:
self.motion_functions = pickle.load(f)
else:
print("Using hardcoded values")
self.motion_list = []
# Make the last position in the data the same as the first position, to make the motion cyclic
for key in self.motion_data_dict:
pos_data = np.array(self.motion_data_dict[key]["Position"])
delta_x = pos_data[-1] - pos_data[0]
total_steps = pos_data.shape[0]
for i in range(total_steps):
pos_data[i] = pos_data[i] - delta_x * (
float(i) / float(total_steps - 1)
)
# Using two "cycles" of the data for interpolation to ensure I get the correct slope at the end points
# That is why pos_data is made by appending two of the data arrays
pos_data = np.append(pos_data, pos_data).reshape(
pos_data.shape[0] * 2, -1
)
time_steps = np.arange(pos_data.shape[0]) / self.frame_rate
# Create spline functions by interpolating between the data positions, ignoring the nan values
good_indices = np.where(np.isfinite(pos_data))[0].reshape(-1, 3)[
:, 0
] # Indices where array is finite
# Store the functions returning spline functions, time period of this motion and the initial position of the motion
self.motion_functions[key] = {
"X": interpolate.InterpolatedUnivariateSpline(
time_steps[good_indices], pos_data[good_indices, 0]
),
"Y": interpolate.InterpolatedUnivariateSpline(
time_steps[good_indices], pos_data[good_indices, 1]
),
"Z": interpolate.InterpolatedUnivariateSpline(
time_steps[good_indices], pos_data[good_indices, 2]
),
"period": pos_data.shape[0] / (2.0 * self.frame_rate),
}
self.motion_functions[key]["initial_pose"] = self.stick_pose_at_time(
function=self.motion_functions[key], time=0
)
self.motion_list.append(key)
# There are 6 knot points for circular accel and linear accel, but the last point is the same as the initial position
# Therefore, the number of interactive markers should be len(time_seed)-1 for circular motions
self.motion_functions["circ_accel_sr"]["time_seed"] = (
0.3,
0.6,
0.9,
1.2,
1.5,
1.8,
)
self.motion_functions["circ_accel_sr"]["motion_type"] = "periodic"
self.motion_functions["circ_accel_sl"]["time_seed"] = (
0.3,
0.6,
0.9,
1.2,
1.5,
1.8,
)
self.motion_functions["circ_accel_sl"]["motion_type"] = "periodic"
self.motion_functions["circ_accel_sr"]["x_knot_seed"] = (
0.0,
0.0,
0.0,
0.0,
0.0,
)
self.motion_functions["circ_accel_sr"]["y_knot_seed"] = (
-0.042,
-0.24,
-0.41,
-0.371,
-0.163,
)
self.motion_functions["circ_accel_sr"]["z_knot_seed"] = (
0.20,
0.30,
0.2,
0.0,
-0.076,
)
self.motion_functions["circ_accel_sl"]["x_knot_seed"] = (
0.0,
0.0,
0.0,
0.0,
0.0,
)
self.motion_functions["circ_accel_sl"]["y_knot_seed"] = (
-0.061,
0.0592,
0.2619,
0.3100,
0.1410,
)
self.motion_functions["circ_accel_sl"]["z_knot_seed"] = (
0.1801,
0.3820,
0.344,
0.15914,
0.03543,
)
self.motion_functions["lin_accel_sr"]["time_seed"] = (0.3, 0.6, 0.9, 1.2)
self.motion_functions["lin_accel_sr"]["motion_type"] = "periodic"
self.motion_functions["lin_accel_sl"]["time_seed"] = (0.3, 0.6, 0.9, 1.2)
self.motion_functions["lin_accel_sl"]["motion_type"] = "periodic"
self.motion_functions["lin_accel_sr"]["x_knot_seed"] = (0.0, 0.0, 0.0)
self.motion_functions["lin_accel_sr"]["y_knot_seed"] = (-0.05, 0.0, -0.05)
self.motion_functions["lin_accel_sr"]["z_knot_seed"] = (0.1, 0.2, 0.1)
self.motion_functions["lin_accel_sl"]["x_knot_seed"] = (0.0, 0.0, 0.0)
self.motion_functions["lin_accel_sl"]["y_knot_seed"] = (0.05, 0.0, 0.05)
self.motion_functions["lin_accel_sl"]["z_knot_seed"] = (-0.1, -0.2, -0.1)
self.motion_functions["lin_accel_sl"][
"initial_pose"
].position.x = self.DEFAULT_X_COORD
self.motion_functions["lin_accel_sl"]["initial_pose"].position.y = 0.1
self.motion_functions["lin_accel_sl"]["initial_pose"].position.z = 1.42
self.motion_functions["lin_accel_sr"][
"initial_pose"
].position.x = self.DEFAULT_X_COORD
self.motion_functions["lin_accel_sr"]["initial_pose"].position.y = -0.1
self.motion_functions["lin_accel_sr"]["initial_pose"].position.z = 1.12
left_pose = Pose()
right_pose = Pose()
left_pose.position.x = self.DEFAULT_X_COORD
left_pose.position.y = 0.21
left_pose.position.z = 1.27
right_pose.position.x = self.DEFAULT_X_COORD
right_pose.position.y = -0.28
right_pose.position.z = 1.27
# Now store the initial position for throwing motions
# self.motion_functions["circ_accel_sr"]["initial_pose"] = circ_accel_initial_pose_right
left_pose = Pose()
right_pose = Pose()
left_pose.position.x = self.DEFAULT_X_COORD
right_pose.position.x = self.DEFAULT_X_COORD
left_pose.position.y = 0.05
right_pose.position.y = -0.05
left_pose.position.z = 1.25
right_pose.position.z = 1.25
self.motion_functions["vertical_throw_sl"] = {
"x_knot_seed": (0.0, 0.0, 0.0),
"y_knot_seed": (0.1, 0.74, 0.748),
"z_knot_seed": (0.0, 0.0, 0.0),
"time_seed": (0.2, 0.55, 0.6),
"flight_time": 0.5,
"initial_pose": copy.deepcopy(left_pose),
"motion_type": "oneshot",
}
self.motion_functions["vertical_throw_sr"] = {
"x_knot_seed": (0.0, 0.0, 0.0),
"y_knot_seed": (-0.1, -0.74, -0.748),
"z_knot_seed": (0.0, 0.0, 0.0),
"time_seed": (0.2, 0.55, 0.6),
"flight_time": 0.5,
"initial_pose": copy.deepcopy(right_pose),
"motion_type": "oneshot",
}
left_pose.position.y = 0.15
right_pose.position.y = -0.15
left_pose.position.z = 1.20
right_pose.position.z = 1.30
self.motion_functions["right_throw_sl"] = {
"x_knot_seed": (0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
"y_knot_seed": (-0.1, 0.33, 0.46),
"z_knot_seed": (0.2, 0.42, 0.439),
"time_seed": (0.1, 0.2, 0.3, 0.4, 0.5, 0.6),
"flight_time": 0.5,
"initial_pose": copy.deepcopy(left_pose),
"motion_type": "oneshot",
}
self.motion_functions["right_throw_sr"] = {
"x_knot_seed": (0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
"y_knot_seed": (0.1, -0.33, -0.46),
"z_knot_seed": (-0.2, -0.42, -0.439),
"time_seed": (0.1, 0.2, 0.3, 0.4, 0.5, 0.6),
"flight_time": 0.5,
"initial_pose": copy.deepcopy(right_pose),
"motion_type": "oneshot",
}
self.motion_functions["left_throw_sl"] = {
"x_knot_seed": (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
"y_knot_seed": (-0.1, 0.2, 0.304652, 0.4, 0.5, 0.6, 0.7, 0.8),
"z_knot_seed": (0.1, -0.22814, -0.38441, -0.4, -0.5, -0.6, -0.7, -0.8),
"time_seed": (0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8),
"flight_time": 0.5,
"initial_pose": copy.deepcopy(left_pose),
"motion_type": "oneshot",
}
self.motion_functions["left_throw_sr"] = {
"x_knot_seed": (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
"y_knot_seed": (0.1, -0.22814, -0.38441, -0.4, -0.5, -0.6, -0.7, -0.8),
"z_knot_seed": (0.1, 0.2, 0.304652, 0.4, 0.5, 0.6, 0.7, 0.8),
"time_seed": (0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8),
"flight_time": 0.5,
"initial_pose": copy.deepcopy(right_pose),
"motion_type": "oneshot",
}
self.motion_list.append("vertical_throw")
self.motion_list.append("right_throw")
self.motion_list.append("left_throw")
self.motion_list = [
m.replace("_sr", "").replace("_sl", "") for m in self.motion_list
]
self.motion_list = list(set(self.motion_list))
#
print("Available motions are: " + str(self.motion_list))
self.motion_list = []
for key in self.motion_data_dict:
self.motion_list.append(key)
self.motion_list.append("vertical_throw")
self.motion_list.append("right_throw")
self.motion_list.append("left_throw")
self.motion_list = [
m.replace("_sr", "").replace("_sl", "") for m in self.motion_list
]
self.motion_list = list(set(self.motion_list))
left_pose = Pose()
right_pose = Pose()
left_pose.position.x = self.DEFAULT_X_COORD
right_pose.position.x = self.DEFAULT_X_COORD
left_pose.position.y = 0.05
right_pose.position.y = -0.05
left_pose.position.z = 1.25
right_pose.position.z = 1.25
self.motion_functions["horizontal_impulse_sl"] = {
"x_knot_seed": (0.0, 0.0, 0.0, 0.0),
"y_knot_seed": (0.23, 0.12, -0.12, 0.23),
"z_knot_seed": (0.0, 0.0, 0.0, 0.0),
"time_seed": (0.9, 1.4, 2.0, 2.8, 3.5),
"initial_pose": copy.deepcopy(left_pose),
"motion_type": "periodic",
}
self.motion_functions["horizontal_impulse_sr"] = {
"x_knot_seed": (0.0, 0.0, 0.0, 0.0),
"y_knot_seed": (0.23, 0.12, -0.12, 0.23),
"z_knot_seed": (0.0, 0.0, 0.0, 0.0),
"time_seed": (0.9, 1.4, 2.0, 2.8, 3.5),
"initial_pose": copy.deepcopy(right_pose),
"motion_type": "periodic",
}
self.motion_functions["vertical_throw_sr"]["y_knot_seed"] = (
-0.1,
-0.725,
-0.735,
)
self.motion_functions["vertical_throw_sl"]["y_knot_seed"] = (0.1, 0.725, 0.735)
left_pose.position.z = 1.35
right_pose.position.z = 1.35
self.motion_functions["left_throw_sl"]["initial_pose"] = left_pose
self.motion_functions["left_throw_sr"]["initial_pose"] = right_pose
self.current_motion = "circ_accel"
def get_traj_for_transition_to_motion(self, desired_motion):
"""
Return stick poses between the current position and start point of the desired motion
Parameters:
desired_motion: A string naming the desired motion. This must be one of the accepted names contained in the self.motion_list list
If current motion is the same as desired motion, returns without doing anything
"""
# if(self.current_motion == desired_motion):
# print("Already executing this. Returning")0.9, 1.2, 1.6
# Get start position of desired motion for each arm
sr_target_pose = self.motion_functions[desired_motion + "_sl"]["initial_pose"]
sl_target_pose = self.motion_functions[desired_motion + "_sr"]["initial_pose"]
# Get direction of travel for both sticks as a vector
sr_target_pose_vec = np.array(
(
sr_target_pose.position.x,
sr_target_pose.position.y,
sr_target_pose.position.z,
)
)
sl_target_pose_vec = np.array(
(
sl_target_pose.position.x,
sl_target_pose.position.y,
sl_target_pose.position.z,
)
)
sr_current_pose_vec = np.array(
(
self.last_stick_positions["pos_right"].x,
self.last_stick_positions["pos_right"].y,
self.last_stick_positions["pos_right"].z,
)
)
sl_current_pose_vec = np.array(
(
self.last_stick_positions["pos_left"].x,
self.last_stick_positions["pos_left"].y,
self.last_stick_positions["pos_left"].z,
)
)
# Get directions of travel for both arms
left_dir = sl_target_pose_vec - sl_current_pose_vec
right_dir = sr_target_pose_vec - sr_current_pose_vec
if np.linalg.norm(left_dir) < 0.01 or np.linalg.norm(right_dir) < 0.01:
self.initialize_robot_positions()
return False, False
time_to_target = 0.8 # Set a constant time to get to the target
steps = int(time_to_target * self.pub_rate) # the number of steps to target
left_step_length = np.linalg.norm(left_dir) / steps
right_step_length = np.linalg.norm(right_dir) / steps
print(
"Step lengths are : \n Left: "
+ str(left_step_length)
+ "\n Right: "
+ str(right_step_length)
)
# Get normal of direction vectors
if left_step_length != 0.0:
left_dir = left_dir / np.linalg.norm(left_dir)
if right_step_length != 0.0:
right_dir = right_dir / np.linalg.norm(right_dir)
left_stick_pose_array = PoseArray()
right_stick_pose_array = PoseArray()
for i in range(0, steps + 1):
pose_l = Pose()
pose_r = Pose()
# Get next waypoint by adding normal direction vector * step length to current position
sr_current_pose_vec = sr_current_pose_vec + right_dir * right_step_length
sl_current_pose_vec = sl_current_pose_vec + left_dir * left_step_length
pose_r.position.x = sr_current_pose_vec[0]
pose_r.position.y = sr_current_pose_vec[1]
pose_r.position.z = sr_current_pose_vec[2]
pose_l.position.x = sl_current_pose_vec[0]
pose_l.position.y = sl_current_pose_vec[1]
pose_l.position.z = sl_current_pose_vec[2]
left_stick_pose_array.poses.append(copy.deepcopy(pose_l))
right_stick_pose_array.poses.append(copy.deepcopy(pose_r))
# The last pose published should be the target pose
self.current_motion = desired_motion
return left_stick_pose_array, right_stick_pose_array
# self.start_publish()
def stick_pose_at_time(self, function, time, rate=1.0):
"""
Parameters:
function: The motion function(s) to use. Expects a dictionary containing a function object for each X, Y and Z coordinates at time t
as well as the time period of the function
rate: Rate of rotation. Greater than one for faster motion
time: The time at which to calculate the coordinates
"""
t = (rate * time) % function["period"] # Time
pose = Pose()
if self.constrain_to_plane:
pose.position.x = self.DEFAULT_X_COORD
else:
pose.position.x = function["X"](t) + 0.3
pose.position.y = function["Y"](t) + 0.2
pose.position.z = function["Z"](t) - 0.1
return pose
def initialize_robot_positions(self):
print("Waiting for robot arm initialization service")
try:
rospy.wait_for_service(
"/initialize_robots_from_stick_positions", timeout=1.0
)
pose_left = self.motion_functions[self.current_motion + "_sl"][
"initial_pose"
]
pose_right = self.motion_functions[self.current_motion + "_sr"][
"initial_pose"
]
req = SetInitialStickPositionsRequest()
req.left_stick_position = pose_left.position
req.right_stick_position = pose_right.position
self.set_robots_initial_position_service(req)
self.last_stick_positions = {
"pos_left": pose_left.position,
"pos_right": pose_right.position,
}
except rospy.ROSException:
print(
"Service not found. Did you start the stick position to joint converter node?"
)
self.create_knot_server()
def start_publish(self, loop=False, speed_factor=1.0):
print("Starting publish")
# self.check_amplitude()
thread.start_new_thread(self._run_publish, (loop, speed_factor))
def stop_publish(self):
self.exit_publish_flag = True
def _run_publish(self, loop=False, speed_factor=1.0):
"This function is meant to be called in a separate thread by play_experiment"
print("Starting publish 2")
self.exit_publish_flag = False
self.pause_publish_flag = False
r = rospy.Rate(self.pub_rate)
initial_time = rospy.get_time()
motion = self.current_motion
while True:
time = rospy.get_time() - initial_time
# Adding an empty pose as the robot controller requires a pose array msg
pose_array = PoseArray()
pose_l = self.stick_pose_at_time(
function=self.motion_functions[motion + "_sl"],
time=time,
rate=speed_factor,
)
pose_r = self.stick_pose_at_time(
function=self.motion_functions[motion + "_sr"],
time=time,
rate=speed_factor,
)
pose_array.poses.append(pose_l)
pose_array.poses.append(pose_r)
self.pub_stick_poses.publish(pose_array)
self.last_stick_positions = {
"pos_left": pose_l.position,
"pos_right": pose_r.position,
}
self.update_and_publish_markers(
{"stick_left": pose_l, "stick_right": pose_r}
)
if self.pause_publish_flag:
rospy.loginfo("Publishing stick poses is paused!")
while self.pause_publish_flag:
rospy.sleep(0.2)
rospy.loginfo("Publishing stick poses is resumed!")
if self.exit_publish_flag or rospy.is_shutdown():
print("Done with thread while loop")
break
r.sleep()
rospy.loginfo("Stopping...")
return
# Takes diabolo sim parameters as argument.
# parameters[0] = /pv_pre_cap_scaling_factor
# parameters[1] = /pv_cap_scaling_factor
# parameters[2] = /pv_post_cap_scaling_factor
# parameters[3] = /constrained_velocity_scaling_factor
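# Example (hypothetical) call that keeps the defaults except for a lower pull-velocity cap:
#   self.initialize_sim_diabolo(parameters=(1.0, 0.07, 1.0, 1.0))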
def initialize_sim_diabolo(self, parameters=(1.0, 1.0, 1.0, 1.0)):
# Set the pull velocity parameters
rospy.set_param("/pv_pre_cap_scaling_factor", parameters[0])
rospy.set_param("/pv_cap_scaling_factor", parameters[1])
rospy.set_param("/pv_post_cap_scaling_factor", parameters[2])
rospy.set_param("/constrained_velocity_scaling_factor", parameters[3])
# Delete existing diabolo if present
delete_model = rospy.ServiceProxy("/gazebo/delete_model", DeleteModel)
rospy.wait_for_service("/gazebo/delete_model")
req = DeleteModelRequest()
req.model_name = "diabolo"
try:
if not delete_model(req):
print("There was no diabolo spawned")
except:
raise
# Set initial positions as parameters on the parameter server
pose_left = self.motion_functions[self.current_motion + "_sl"]["initial_pose"]
pose_right = self.motion_functions[self.current_motion + "_sr"]["initial_pose"]
left_pos = pose_left.position
right_pos = pose_right.position
rospy.set_param(
"/right_stick_initial_position",
[
float(self.last_stick_positions["pos_right"].x),
float(self.last_stick_positions["pos_right"].y),
float(self.last_stick_positions["pos_right"].z),
],
)
rospy.set_param(
"/left_stick_initial_position",
[
float(self.last_stick_positions["pos_left"].x),
float(self.last_stick_positions["pos_left"].y),
float(self.last_stick_positions["pos_left"].z),
],
)
# The initial rotational velocity of the diabolo
rospy.set_param("/diabolo_initial_rot_velocity", 25.0)
print("Done setting params")
self._spawn_diabolo_in_gazebo()
return True
def _spawn_diabolo_in_gazebo(self):
# Create service proxy
spawn_model = rospy.ServiceProxy("/gazebo/spawn_urdf_model", SpawnModel)
rospy.wait_for_service("/gazebo/spawn_urdf_model")
# Load URDF
with open(self.diabolo_urdf_file_path, "r") as f:
poses = dict()
model_xml = f.read()
# Spawn model
req = SpawnModelRequest()
req.model_name = "diabolo"
# req.initial_pose = diabolo_pose
pose = Pose()
pose.position.x = 0.7
pose.position.y = 0.0
pose.position.z = 0.7
req.initial_pose.position = pose.position
req.model_xml = model_xml
req.robot_namespace = "/"
req.reference_frame = "world"
if spawn_model(req).success:
print("Spawning diabolo in gazebo")
rospy.sleep(0.2)
def execute_periodic_trajectory_(
self,
a_bot_trajectory,
b_bot_trajectory,
speed_factor=0.5,
confirm_execution=True,
start_time=None,
):
req = GetPlanningSceneRequest()
req.components.components = req.components.ROBOT_STATE
planning_scene = self.get_planning_scene_service(req)
try:
display_a_bot_traj = moveit_msgs.msg.DisplayTrajectory()
display_b_bot_traj = moveit_msgs.msg.DisplayTrajectory()
a_bot_robot_traj = moveit_msgs.msg.RobotTrajectory()
b_bot_robot_traj = moveit_msgs.msg.RobotTrajectory()
a_bot_robot_traj.joint_trajectory = a_bot_trajectory
b_bot_robot_traj.joint_trajectory = b_bot_trajectory
display_a_bot_traj.trajectory.append(a_bot_robot_traj)
display_b_bot_traj.trajectory.append(b_bot_robot_traj)
display_a_bot_traj.trajectory_start = planning_scene.scene.robot_state
display_b_bot_traj.trajectory_start = planning_scene.scene.robot_state
self.a_bot_display_traj_pub.publish(display_a_bot_traj)
self.b_bot_display_traj_pub.publish(display_b_bot_traj)
time_to_start = start_time
if not time_to_start:
time_to_start = rospy.Time.now()
if confirm_execution:
print("Execute this trajectory? y/n")
e = raw_input()
if e == "y":
time_to_start = rospy.Time.now() + rospy.Duration(0.1)
a_bot_trajectory.header.stamp = time_to_start
b_bot_trajectory.header.stamp = time_to_start
self.a_bot_command_pub.publish(a_bot_trajectory)
self.b_bot_command_pub.publish(b_bot_trajectory)
else:
# print("Auto execution selected. Executing")
a_bot_trajectory.header.stamp = time_to_start
b_bot_trajectory.header.stamp = time_to_start
# if(time_to_start.to_sec() > rospy.Time.now().to_sec()):
# print("Time in header = " + str(time_to_start.to_sec()))
# print("Published time = " + str(rospy.Time.now().to_sec()))
# else:
# rospy.logerr("Time in header = " + str(time_to_start.to_sec()))
# rospy.logerr("Published time = " + str(rospy.Time.now().to_sec()))
self.a_bot_command_pub.publish(a_bot_trajectory)
self.b_bot_command_pub.publish(b_bot_trajectory)
return time_to_start + a_bot_trajectory.points[-1].time_from_start
except:
raise
def execute_throw_trajectory_(
self,
a_bot_trajectory,
b_bot_trajectory,
time_of_flight=0.5,
speed_factor=1.0,
reverse=False,
confirm_execution=True,
start_time=None,
):
a_bot_whole_trajectory = copy.deepcopy(a_bot_trajectory)
b_bot_whole_trajectory = copy.deepcopy(b_bot_trajectory)
last_time = rospy.Duration(0)
old_last_time = rospy.Duration(0)
new_a_bot_trajectory = copy.deepcopy(a_bot_whole_trajectory)
for i in range(len(a_bot_whole_trajectory.points)):
if not i == 0:
step_length = (
a_bot_whole_trajectory.points[i].time_from_start
- a_bot_whole_trajectory.points[i - 1].time_from_start
)
else:
step_length = a_bot_whole_trajectory.points[i].time_from_start
new_step_length = step_length / speed_factor
new_a_bot_trajectory.points[i].time_from_start = new_step_length + last_time
last_time = new_step_length + last_time
last_time = rospy.Duration(0)
old_last_time = rospy.Duration(0)
new_b_bot_trajectory = copy.deepcopy(b_bot_whole_trajectory)
for i in range(len(b_bot_whole_trajectory.points)):
if not i == 0:
step_length = (
b_bot_whole_trajectory.points[i].time_from_start
- b_bot_whole_trajectory.points[i - 1].time_from_start
)
else:
step_length = b_bot_whole_trajectory.points[i].time_from_start
new_step_length = step_length / speed_factor
new_b_bot_trajectory.points[i].time_from_start = new_step_length + last_time
last_time = new_step_length + last_time
time_to_start = start_time
req = GetPlanningSceneRequest()
req.components.components = req.components.ROBOT_STATE
planning_scene = self.get_planning_scene_service(req)
a_bot_display_traj = moveit_msgs.msg.DisplayTrajectory()
b_bot_display_traj = moveit_msgs.msg.DisplayTrajectory()
a_bot_robot_traj = moveit_msgs.msg.RobotTrajectory()
a_bot_robot_traj.joint_trajectory = copy.deepcopy(new_a_bot_trajectory)
b_bot_robot_traj = moveit_msgs.msg.RobotTrajectory()
b_bot_robot_traj.joint_trajectory = copy.deepcopy(new_b_bot_trajectory)
a_bot_display_traj.trajectory.append(a_bot_robot_traj)
b_bot_display_traj.trajectory.append(b_bot_robot_traj)
a_bot_display_traj.trajectory_start = planning_scene.scene.robot_state
b_bot_display_traj.trajectory_start = planning_scene.scene.robot_state
self.a_bot_display_traj_pub.publish(a_bot_display_traj)
self.b_bot_display_traj_pub.publish(b_bot_display_traj)
if confirm_execution:
print("Execute this trajectory? y/n")
e = raw_input()
if e == "y":
now = rospy.Time.now() + rospy.Duration(1.0)
new_a_bot_trajectory.header.stamp = now
new_b_bot_trajectory.header.stamp = now
self.a_bot_command_pub.publish(new_a_bot_trajectory)
self.b_bot_command_pub.publish(new_b_bot_trajectory)
else:
print("Auto execution selected. Executing")
new_a_bot_trajectory.header.stamp = time_to_start
new_b_bot_trajectory.header.stamp = time_to_start
self.a_bot_command_pub.publish(new_a_bot_trajectory)
self.b_bot_command_pub.publish(new_b_bot_trajectory)
return time_to_start + new_a_bot_trajectory.points[-1].time_from_start
# if(reverse):
# self.last_stick_positions["pos_left"] = left_stick_traj.poses[0].position
# self.last_stick_positions["pos_right"] = right_stick_traj.poses[0].position
# Increase the time from start for all the
# rospy.sleep(time_of_flight)
# TEMP: Reverse motion back to starting point of the trajectory
# req = CreateRobotTrajectoryRequest()
# number_of_poses = len(left_stick_traj.poses)
# resp = self.command_robot_trajectory_service(req)
# if(resp.success):
# print("Reverse trajectory executed!")
def make_prediction_request_msg_(
self, planned_left_poses=None, planned_right_poses=None
):
# Goal positions and velocities are arrays of the appropriate type
req = CreateSticksTrajectoryRequest()
################### Set current Sim Config
req.current_sim_config.pv_pre_cap_scale = 0.13
req.current_sim_config.pv_post_cap_scale = 0.13
req.current_sim_config.pv_cap_scale = 0.07
req.current_sim_config.velocity_diffusion_factor = 0.9999
if (
self.motion_functions[self.current_motion + "_sl"]["motion_type"]
== "oneshot"
):
req.motion_flag = CreateSticksTrajectoryRequest.THROW
elif (
self.motion_functions[self.current_motion + "_sl"]["motion_type"]
== "periodic"
):
req.motion_flag = CreateSticksTrajectoryRequest.LOOP
# Diabolo constant parameters
req.current_sim_config.mass = 0.2
req.current_sim_config.axle_radius = 0.0065
req.current_sim_config.string_length = 1.58
if planned_left_poses and planned_right_poses:
req.planned_left_stick_poses = copy.deepcopy(planned_left_poses)
req.planned_right_stick_poses = copy.deepcopy(planned_right_poses)
return req
def run_oneshot_motion(
self,
interactive=True,
confirm_execution=True,
preparatory_motion="horizontal_impulse",
):
planned_left_poses = None
planned_right_poses = None
# The trajectory begins one second from now
trajectory_start_time = rospy.Time.now() + rospy.Duration(1.0)
prediction_time = 0
diab_state_req = GetDiaboloStateRequest()
diab_state_req.header.stamp = rospy.Time.now()
diabolo_state_resp = copy.deepcopy(
self.get_diabolo_state_service(diab_state_req)
)
self.latest_diabolo_state = copy.deepcopy(diabolo_state_resp.state)
# Get diabolo orientation here, to handle pitch and yaw
dp = self.latest_diabolo_state.pose.orientation
self.get_stick_tips_from_tf()
left_stick_start_pos = self.last_stick_positions["pos_left"]
right_stick_start_pos = self.last_stick_positions["pos_right"]
# Execute a pre-defined motion (e.g. to give a horizontal impulse for sideways throws)
if self.current_motion == "left_throw" or self.current_motion == "right_throw":
motion = copy.deepcopy(self.current_motion)
self.current_motion = preparatory_motion
prediction_start_time = rospy.Time.now()
(
a_bot_trajectory,
b_bot_trajectory,
left_stick_poses,
right_stick_poses,
) = self.call_prediction_service(
interactive=False,
planned_left_poses=planned_left_poses,
planned_right_poses=planned_right_poses,
left_stick_start_pos=left_stick_start_pos,
right_stick_start_pos=right_stick_start_pos,
plan=False,
)
trajectory_end_time = self.execute_periodic_trajectory_(
a_bot_trajectory,
b_bot_trajectory,
1.0,
True,
start_time=trajectory_start_time,
)
safe_prediction_time = rospy.Duration(0.5)
sleep_time = (trajectory_end_time - rospy.Time.now()) - safe_prediction_time
rospy.sleep(sleep_time)
diab_state_req = GetDiaboloStateRequest()
diab_state_req.header.stamp = rospy.Time.now()
diabolo_state_resp = copy.deepcopy(
self.get_diabolo_state_service(diab_state_req)
)
self.latest_diabolo_state = copy.deepcopy(diabolo_state_resp.state)
trajectory_start_time = trajectory_end_time
self.current_motion = motion
(
a_bot_trajectory,
b_bot_trajectory,
left_stick_poses,
right_stick_poses,
) = self.call_prediction_service(
interactive=True,
planned_left_poses=planned_left_poses,
planned_right_poses=planned_right_poses,
left_stick_start_pos=left_stick_start_pos,
right_stick_start_pos=right_stick_start_pos,
plan=False,
)
if a_bot_trajectory:
trajectory_end_time = self.execute_throw_trajectory_(
a_bot_trajectory,
b_bot_trajectory,
1.0,
1.0,
True,
False,
start_time=trajectory_start_time,
)
# Create reverse trajectory
# TODO: Add the initial point to the reversed trajectory. The calculated trajectory does not have the first point.
# There is probably also not much point keeping the last point of the old trajectory (That is the current position)
reverse_a_bot_trajectory = JointTrajectory()
reverse_b_bot_trajectory = JointTrajectory()
# print(len(a_bot_trajectory.points))
reverse_a_bot_trajectory.joint_names = a_bot_trajectory.joint_names
reverse_b_bot_trajectory.joint_names = b_bot_trajectory.joint_names
for i in range(len(a_bot_trajectory.points)):
# print("Now adding " + str(a_bot_trajectory.points[i].time_from_start.to_sec()) + " time from start reverse traj")
reverse_a_bot_trajectory.points.append(
copy.deepcopy(
a_bot_trajectory.points[len(a_bot_trajectory.points) - 1 - i]
)
)
reverse_a_bot_trajectory.points[
i
].time_from_start = a_bot_trajectory.points[i].time_from_start
for i in range(len(b_bot_trajectory.points)):
reverse_b_bot_trajectory.points.append(
copy.deepcopy(
b_bot_trajectory.points[len(b_bot_trajectory.points) - 1 - i]
)
)
reverse_b_bot_trajectory.points[
i
].time_from_start = b_bot_trajectory.points[i].time_from_start
# rospy.logwarn("Forward trajectory")
# print(a_bot_trajectory)
# rospy.logwarn("Reverse trajectory")
# print(reverse_a_bot_trajectory)
print("Reverse trajectory? y/n?")
e = raw_input()
if e == "y":
trajectory_start_time = rospy.Time.now() + rospy.Duration(0.01)
trajectory_end_time = self.execute_throw_trajectory_(
reverse_a_bot_trajectory,
reverse_b_bot_trajectory,
1.0,
0.8,
False,
False,
start_time=trajectory_start_time,
)
else:
rospy.logerr("Could not find a trajectory")
def start_periodic_motion(
self,
interactive=True,
confirm_execution=True,
preparatory_motion="horizontal_impulse",
):
self.stop_motion_flag = False
thread.start_new_thread(
self.run_periodic_motion,
(interactive, confirm_execution, preparatory_motion),
)
def stop_periodic_motion(self):
self.stop_motion_flag = True
def run_periodic_motion(
self,
interactive=False,
confirm_execution=True,
preparatory_motion="horizontal_impulse",
):
#### IMPORTANT: This assumes that the points in the trajectory are evenly spaced
#### That must be assured by the node providing the stick trajectory/robot trajectory generating service
planned_left_poses = None
planned_right_poses = None
# The trajectory begins one second from now
trajectory_start_time = rospy.Time.now() + rospy.Duration(1.0)
prediction_time = 0
diab_state_req = GetDiaboloStateRequest()
diab_state_req.header.stamp = rospy.Time.now()
diabolo_state_resp = copy.deepcopy(
self.get_diabolo_state_service(diab_state_req)
)
self.latest_diabolo_state = copy.deepcopy(diabolo_state_resp.state)
# Get diabolo orientation here, to handle pitch and yaw
dp = self.latest_diabolo_state.pose.orientation
self.get_stick_tips_from_tf()
left_stick_start_pos = self.last_stick_positions["pos_left"]
right_stick_start_pos = self.last_stick_positions["pos_right"]
# First, execute the pre-defined horizontal motion
motion = copy.deepcopy(self.current_motion)
if preparatory_motion:
self.current_motion = preparatory_motion
(
a_bot_trajectory,
b_bot_trajectory,
left_stick_poses,
right_stick_poses,
) = self.call_prediction_service(
interactive=False,
planned_left_poses=None,
planned_right_poses=None,
left_stick_start_pos=left_stick_start_pos,
right_stick_start_pos=right_stick_start_pos,
plan=False,
)
trajectory_end_time = self.execute_periodic_trajectory_(
a_bot_trajectory,
b_bot_trajectory,
1.0,
confirm_execution,
start_time=trajectory_start_time,
)
# In this part of the code, "prediction" means motion generation
safe_prediction_time = rospy.Duration(0.5)
prediction_start_time = (
trajectory_end_time - trajectory_start_time
) - safe_prediction_time
print("Prediction start time = " + str(prediction_start_time.to_sec()))
# Break out if there is not enough time to plan
if prediction_start_time.to_sec() < 0.0:
print(
"Prediction time is too long. Is = "
+ str(safe_prediction_time.to_sec())
)
prediction_start_time = trajectory_end_time
planned_left_poses = None
planned_right_poses = None
last_traj_end_time = rospy.Time.now() + rospy.Duration(1.0)
else:
# rospy.logwarn("prediction_start_time is " + str(prediction_start_time.to_sec()))
# rospy.logwarn("Trajectory length is " + str((trajectory_end_time - trajectory_start_time).to_sec()))
# Find the point from which to get planned poses
planned_left_poses = geometry_msgs.msg.PoseArray()
planned_right_poses = geometry_msgs.msg.PoseArray()
# This is the id of the last pose to execute in the sent trajectory
last_pose_to_execute = 0
for j in range(len(a_bot_trajectory.points) - 1):
# print("Time from start for j = " + str(j) + " is" + str(a_bot_trajectory.points[j].time_from_start.to_sec()))
if (
a_bot_trajectory.points[j].time_from_start
<= prediction_start_time
and a_bot_trajectory.points[j + 1].time_from_start
> prediction_start_time
):
# pass the left and right poses from the ith position onwards as planned trajectories to the prediction node
planned_left_poses.poses = left_stick_poses.poses[j:]
planned_right_poses.poses = right_stick_poses.poses[j:]
last_pose_to_execute = j
break
# Store end position of start trajectory as start position of old trajectory
print(
"last_pose to execute is "
+ str(last_pose_to_execute)
+ " at time "
+ str(
a_bot_trajectory.points[
last_pose_to_execute
].time_from_start.to_sec()
)
)
left_stick_start_pos = planned_left_poses.poses[0].position
right_stick_start_pos = planned_right_poses.poses[0].position
planned_left_poses.poses = planned_left_poses.poses[1:]
planned_right_poses.poses = planned_right_poses.poses[1:]
# Sleep until the next trajectory is at left_stick_start_pos
now = rospy.Time.now()
sleep_time1 = (
trajectory_start_time - now
) # Until current trajectory is over
sleep_time2 = (
trajectory_end_time - trajectory_start_time
) - safe_prediction_time
# Until next trajectory is at left_stick_start_pos
sleep_time = sleep_time1 + sleep_time2
rospy.sleep(sleep_time)
# Change the current motion back to what it was before applying the impulse
self.current_motion = motion
trajectory_start_time = trajectory_end_time
else:
trajectory_start_time = rospy.Time.now() + rospy.Duration(1.0)
prediction_time = 0
diab_state_req = GetDiaboloStateRequest()
diab_state_req.header.stamp = rospy.Time.now()
diabolo_state_resp = copy.deepcopy(
self.get_diabolo_state_service(diab_state_req)
)
self.latest_diabolo_state = copy.deepcopy(diabolo_state_resp.state)
while True:
prediction_start_time = rospy.Time.now()
(
a_bot_trajectory,
b_bot_trajectory,
left_stick_poses,
right_stick_poses,
) = self.call_prediction_service(
interactive=True,
planned_left_poses=planned_left_poses,
planned_right_poses=planned_right_poses,
left_stick_start_pos=left_stick_start_pos,
right_stick_start_pos=right_stick_start_pos,
plan=False,
)
# If user-set flag is true, stop moving the arms
if self.stop_motion_flag or rospy.is_shutdown():
break
# Ensure that the prediction service found something
if a_bot_trajectory:
reverse = False
if (
self.motion_functions[self.current_motion + "_sl"]["motion_type"]
== "periodic"
):
# Queue up the trajectory in the driver, so it will be executed next
trajectory_end_time = self.execute_periodic_trajectory_(
a_bot_trajectory,
b_bot_trajectory,
1.0,
confirm_execution,
start_time=trajectory_start_time,
)
# Time that this prediction/motion generation took
prediction_time = rospy.Time.now() - prediction_start_time
# Add safety buffer
safe_prediction_time = prediction_time + prediction_time * 0.3
# This should be the time from start for the first pose for the list of "planned poses"
# when planning the next trajectory
prediction_start_time = (
trajectory_end_time - trajectory_start_time
) - safe_prediction_time
# Don't plan another trajectory if there is not enough time to plan it
if prediction_start_time.to_sec() < 0.0:
# print("Prediction time is too long. Is = " + str(safe_prediction_time.to_sec()))
prediction_start_time = trajectory_end_time
planned_left_poses = None
planned_right_poses = None
last_traj_end_time = rospy.Time.now() + rospy.Duration(1.0)
break
else: # Prepare next loop
# rospy.logwarn("prediction_start_time is " + str(prediction_start_time.to_sec()))
# rospy.logwarn("Trajectory length is " + str((trajectory_end_time - trajectory_start_time).to_sec()))
# Find the point from which to get planned poses
planned_left_poses = geometry_msgs.msg.PoseArray()
planned_right_poses = geometry_msgs.msg.PoseArray()
# Find the last point in the stick trajectory to be executed before beginning the prediction
# for the diabolo state at the end of the current trajectory
# Trim planned poses to contain only remainder of next trajectory. This will be used during the
# next iteration.
last_pose_to_execute = 0
for j in range(len(a_bot_trajectory.points) - 1):
if (
a_bot_trajectory.points[j].time_from_start
<= prediction_start_time
and a_bot_trajectory.points[j + 1].time_from_start
> prediction_start_time
):
# pass the left and right poses from the ith position onwards as planned trajectories to the prediction node
# print("len(a_bot_trajectory.points)", len(a_bot_trajectory.points))
# print("len(left_stick_poses.poses)", len(left_stick_poses.poses))
planned_left_poses.poses = left_stick_poses.poses[j:]
planned_right_poses.poses = right_stick_poses.poses[j:]
last_pose_to_execute = j
break
# Store the end position of this trajectory as the start position for the next planning cycle
left_stick_start_pos = planned_left_poses.poses[0].position
right_stick_start_pos = planned_right_poses.poses[0].position
planned_left_poses.poses = planned_left_poses.poses[1:]
planned_right_poses.poses = planned_right_poses.poses[1:]
# Sleep until the next trajectory is at left_stick_start_pos
now = rospy.Time.now()
sleep_time1 = (
trajectory_start_time - now
) # Until current trajectory is over
sleep_time2 = (
trajectory_end_time - trajectory_start_time
) - safe_prediction_time
# Until next trajectory is at left_stick_start_pos
sleep_time = sleep_time1 + sleep_time2
rospy.sleep(sleep_time)
# self.pause_gazebo_service()
diab_state_req = GetDiaboloStateRequest()
diab_state_req.header.stamp = (
trajectory_start_time
+ a_bot_trajectory.points[last_pose_to_execute].time_from_start
)
self.latest_diabolo_state = copy.deepcopy(
self.get_diabolo_state_service(diab_state_req).state
)
trajectory_start_time = trajectory_end_time
# self.unpause_gazebo_service()
return
def get_diabolo_waypoint_goals(
self,
goal_velocity=geometry_msgs.msg.Point(),
goal_position=geometry_msgs.msg.Point(),
):
# This is the initial position. Do not add to request waypoints
goal_states = []
goal_state = DiaboloState()
diabolo_goal_pos = geometry_msgs.msg.Point()
diabolo_goal_vel = geometry_msgs.msg.Point()
# if self.latest_diabolo_pose:
# diabolo_goal_pos = copy.deepcopy(self.latest_diabolo_pose.position)
# else:
if (
self.motion_functions[self.current_motion + "_sl"]["motion_type"]
== "periodic"
):
diabolo_goal_pos = geometry_msgs.msg.Point()
diabolo_goal_pos.x = self.DEFAULT_X_COORD
diabolo_goal_pos.y = -0.0382
diabolo_goal_pos.z = 0.51991
## First waypoint
diabolo_goal_pos.x = self.DEFAULT_X_COORD
diabolo_goal_pos.y = diabolo_goal_pos.y + 0.3
diabolo_goal_pos.z = diabolo_goal_pos.z + 0.2
diabolo_goal_vel.x = 0.0
diabolo_goal_vel.y = -0.5
diabolo_goal_vel.z = 1.0
goal_state.trans_velocity = copy.deepcopy(diabolo_goal_vel)
goal_state.pose.position = copy.deepcopy(diabolo_goal_pos)
goal_state.pose.orientation.w = 1.0
goal_states.append(copy.deepcopy(goal_state))
## Second waypoint
diabolo_goal_pos.x = self.DEFAULT_X_COORD
diabolo_goal_pos.y = diabolo_goal_pos.y - 0.1
diabolo_goal_pos.z = diabolo_goal_pos.z + 0.2
diabolo_goal_vel.x = 0.0
diabolo_goal_vel.y = -1.0
diabolo_goal_vel.z = 0.1
goal_state.trans_velocity = copy.deepcopy(diabolo_goal_vel)
goal_state.pose.position = copy.deepcopy(diabolo_goal_pos)
goal_state.pose.orientation.w = 1.0
goal_states.append(copy.deepcopy(goal_state))
## Third waypoint
diabolo_goal_pos.x = self.DEFAULT_X_COORD
diabolo_goal_pos.y = diabolo_goal_pos.y - 0.5
diabolo_goal_pos.z = diabolo_goal_pos.z + 0.0
diabolo_goal_vel.x = 0.0
diabolo_goal_vel.y = -0.5
diabolo_goal_vel.z = -0.5
goal_state.trans_velocity = copy.deepcopy(diabolo_goal_vel)
goal_state.pose.position = copy.deepcopy(diabolo_goal_pos)
goal_state.pose.orientation.w = 1.0
goal_states.append(copy.deepcopy(goal_state))
## Fourth waypoint
diabolo_goal_pos.x = self.DEFAULT_X_COORD
diabolo_goal_pos.y = diabolo_goal_pos.y - 0.1
diabolo_goal_pos.z = diabolo_goal_pos.z - 0.2
diabolo_goal_vel.x = 0.0
diabolo_goal_vel.y = 1.0
diabolo_goal_vel.z = -0.5
goal_state.trans_velocity = copy.deepcopy(diabolo_goal_vel)
goal_state.pose.position = copy.deepcopy(diabolo_goal_pos)
goal_state.pose.orientation.w = 1.0
goal_states.append(copy.deepcopy(goal_state))
# End of the periodic-motion branch
else:
diabolo_goal_pos = geometry_msgs.msg.Point(
x=self.DEFAULT_X_COORD, y=0.0, z=1.25
)
diabolo_goal_vel = geometry_msgs.msg.Point(x=0.0, y=0.0, z=1.4) # 0.1 m
# diabolo_goal_vel = geometry_msgs.msg.Point(x=0.0, y=0.0, z=2.0) # 0.2 m
# diabolo_goal_vel = geometry_msgs.msg.Point(x=0.0, y=0.0, z=2.8) # 0.4 m
# diabolo_goal_vel = geometry_msgs.msg.Point(x=0.0, y=0.0, z=3.4) # 0.6 m
# diabolo_goal_vel = geometry_msgs.msg.Point(x=0.0, y=0.0, z=3.97) # 0.8 m
# diabolo_goal_vel = geometry_msgs.msg.Point(x=0.0, y=0.0, z=4.4) # 1.0 m
goal_state.trans_velocity = copy.deepcopy(diabolo_goal_vel)
goal_state.pose.position = copy.deepcopy(diabolo_goal_pos)
goal_states.append(goal_state)
return goal_states
def save_current_knot_points(self, filename="default.pkl"):
path = os.path.join(self._package_directory, "config", filename)
print("Saving to " + path)
with open(path, "w") as f:
pickle.dump(self.motion_functions, f)
def call_prediction_service(
self,
planned_left_poses=None,
planned_right_poses=None,
interactive=False,
left_stick_start_pos=None,
right_stick_start_pos=None,
plan=True,
):
"""
Call the service that returns a robot trajectory for a given set of diabolo goal states.
The service starts planning a new motion starting from a point in the future.
planned_left_poses, planned_right_poses are the stick trajectories that will be executed
until that point in the future.
The current diabolo state is added inside this method, and the diabolo state in the future
estimated using the planned_poses.
left_stick_start_pos, right_stick_start_pos are the stick positions *before* that prediction,
or the start position to plan from if the planned_poses are empty.
"""
## Set diabolo goal states
# rospy.logwarn("Entered prediction service function")
## TODO: Change this to allow multiple waypoint goals
req = self.make_prediction_request_msg_(planned_left_poses, planned_right_poses)
req.goal_states = self.get_diabolo_waypoint_goals()
## Set current sim config
# Diabolo velocity and pose
diabolo_pose = Pose()
diabolo_vel = geometry_msgs.msg.Point()
if self.latest_diabolo_state:
# print("Using actual diabolo starting position")
diabolo_pose = self.latest_diabolo_state.pose
diabolo_vel = self.latest_diabolo_state.trans_velocity
else:
rospy.logwarn("Using default diabolo coordinates for prediction service")
diabolo_pose.position.x = self.DEFAULT_X_COORD
diabolo_pose.position.y = 0.053
diabolo_pose.position.z = 0.554
req.current_sim_config.trans_velocity = geometry_msgs.msg.Point()
sl_pose = Pose()
sr_pose = Pose()
# self.get_stick_tips_from_tf()
## TODO: Get the actual current stick poses. This is temporary
sl_pose.position = left_stick_start_pos
sr_pose.position = right_stick_start_pos
req.current_sim_config.initial_poses.poses.append(diabolo_pose)
req.current_sim_config.trans_velocity = diabolo_vel
req.current_sim_config.initial_poses.poses.append(sl_pose)
req.current_sim_config.initial_poses.poses.append(sr_pose)
# IMPORTANT: Must give stick update rate and sim time step
req.stick_update_time_step = 1.0 / self.pub_rate
req.current_sim_config.time_step = 0.002
req.optimize = plan
req.constrain_to_YZ = True
# Set spline knot point seeds
# Set seeds for left stick
time_seed = self.motion_functions[self.current_motion + "_sl"]["time_seed"]
# Plot the stick trajectories/splines
# These are the seeds for the chosen motion
if not interactive:
"""
If not interactive, use the motion seeds precalculated for the current motion
"""
left_motion = self.motion_functions[self.current_motion + "_sl"]
right_motion = self.motion_functions[self.current_motion + "_sr"]
# The number of knot points should correspond to the number of time seeds
for i in range(len(time_seed)):
left_knot_point = geometry_msgs.msg.Point()
right_knot_point = geometry_msgs.msg.Point()
if (
i == len(time_seed) - 1
and self.motion_functions[self.current_motion + "_sl"][
"motion_type"
]
== "periodic"
):
# If this is a periodic motion, the last knot point must be at the initial robot position
left_knot_point = geometry_msgs.msg.Point()
right_knot_point = geometry_msgs.msg.Point()
else:
left_knot_point.x = copy.deepcopy(left_motion["x_knot_seed"][i])
left_knot_point.y = copy.deepcopy(left_motion["y_knot_seed"][i])
left_knot_point.z = copy.deepcopy(left_motion["z_knot_seed"][i])
right_knot_point.x = copy.deepcopy(right_motion["x_knot_seed"][i])
right_knot_point.y = copy.deepcopy(right_motion["y_knot_seed"][i])
right_knot_point.z = copy.deepcopy(right_motion["z_knot_seed"][i])
if self.changed_tilt_offset_flag:
# This means the tilt offset has changed since the last trajectory.
# Must add tilt over the course of this new trajectory
print(
"Setting left x knot seed at "
+ str(self.tilt_offset * ((float(i + 1)) / len(time_seed)))
)
left_knot_point.x -= self.tilt_offset * (
float((i + 1.0)) / len(time_seed)
)
right_knot_point.x += self.tilt_offset * (
float((i + 1.0)) / len(time_seed)
)
req.knot_seeds.left_seed.append(copy.deepcopy(left_knot_point))
req.knot_seeds.right_seed.append(copy.deepcopy(right_knot_point))
req.knot_seeds.time_seed.append(copy.deepcopy(time_seed[i]))
else:
"""
If interactive, set the motion seeds to the points gotten from the interactive markers
"""
marker_positions = copy.deepcopy(
self.knot_point_server.get_marker_positions(relative=True)
)
# Replace current knot seeds with newly set knot seeds
self.motion_functions[self.current_motion + "_sl"]["x_knot_seed"] = []
self.motion_functions[self.current_motion + "_sl"]["y_knot_seed"] = []
self.motion_functions[self.current_motion + "_sl"]["z_knot_seed"] = []
self.motion_functions[self.current_motion + "_sr"]["x_knot_seed"] = []
self.motion_functions[self.current_motion + "_sr"]["z_knot_seed"] = []
self.motion_functions[self.current_motion + "_sr"]["y_knot_seed"] = []
left_positions = marker_positions[0]
right_positions = marker_positions[1]
# print(left_positions)
for i in range(len(time_seed)):
left_knot_point = geometry_msgs.msg.Point()
right_knot_point = geometry_msgs.msg.Point()
if (
i == len(time_seed) - 1
and self.motion_functions[self.current_motion + "_sl"][
"motion_type"
]
== "periodic"
):
# If this is a periodic motion, the last knot point must be at the initial robot position
left_knot_point = geometry_msgs.msg.Point()
right_knot_point = geometry_msgs.msg.Point()
else:
left_knot_point = copy.deepcopy(left_positions[i])
right_knot_point = copy.deepcopy(right_positions[i])
self.motion_functions[self.current_motion + "_sl"][
"x_knot_seed"
].append(left_knot_point.x)
self.motion_functions[self.current_motion + "_sl"][
"y_knot_seed"
].append(left_knot_point.y)
self.motion_functions[self.current_motion + "_sl"][
"z_knot_seed"
].append(left_knot_point.z)
self.motion_functions[self.current_motion + "_sr"][
"x_knot_seed"
].append(right_knot_point.x)
self.motion_functions[self.current_motion + "_sr"][
"y_knot_seed"
].append(right_knot_point.y)
self.motion_functions[self.current_motion + "_sr"][
"z_knot_seed"
].append(right_knot_point.z)
if self.changed_tilt_offset_flag:
# This means the tilt offset has changed since the last trajectory.
# Must add tilt over the course of this new trajectory
print(
"Setting left x knot seed at "
+ str(self.tilt_offset * ((float(i + 1)) / len(time_seed)))
)
left_knot_point.x -= self.tilt_offset * (
float((i + 1.0)) / len(time_seed)
)
right_knot_point.x += self.tilt_offset * (
float((i + 1.0)) / len(time_seed)
)
req.knot_seeds.left_seed.append(copy.deepcopy(left_knot_point))
req.knot_seeds.right_seed.append(copy.deepcopy(right_knot_point))
req.knot_seeds.time_seed.append(copy.deepcopy(time_seed[i]))
self.changed_tilt_offset_flag = False
# rospy.logwarn("Calling prediction service")
resp = self.generate_trajectory_service(req)
# rospy.logwarn("Prediction service returned")
if resp.success:
# print("Got trajectories!")
self.marker_count = 0
marker_array = []
self.left_traj_plan_marker = self._make_marker_from_mesh(
mesh_filename="",
namespace="left_stick_plan",
scale=(0.01, 1, 1),
color=(1, 0, 0),
)
self.left_traj_plan_marker.type = visualization_msgs.msg.Marker.LINE_STRIP
for i in resp.left_stick_poses.poses:
self.left_traj_plan_marker.points.append(i.position)
self.right_traj_plan_marker = self._make_marker_from_mesh(
mesh_filename="",
namespace="right_stick_plan",
scale=(0.01, 1, 1),
color=(0, 0, 1),
)
self.right_traj_plan_marker.type = visualization_msgs.msg.Marker.LINE_STRIP
for i in resp.right_stick_poses.poses:
self.right_traj_plan_marker.points.append(i.position)
marker_array.append(copy.deepcopy(self.left_traj_plan_marker))
marker_array.append(copy.deepcopy(self.right_traj_plan_marker))
self.sphere_marker_1.pose = sr_pose
self.sphere_marker_1.pose.orientation.w = 1.0
self.sphere_marker_1.ns = "right_stick_plan"
self.sphere_marker_1.color.r = 0
self.sphere_marker_1.color.g = 0
self.sphere_marker_1.color.b = 1
self.sphere_marker_2.pose = sl_pose
self.sphere_marker_2.ns = "left_stick_plan"
self.sphere_marker_2.pose.orientation.w = 1.0
self.sphere_marker_2.color.r = 1
self.sphere_marker_2.color.g = 0
self.sphere_marker_2.color.b = 0
marker_array.append(copy.deepcopy(self.sphere_marker_1))
marker_array.append(copy.deepcopy(self.sphere_marker_2))
# Display diabolo start state with a white marker
initial_pos_shell_marker = self._make_marker_from_mesh(
"package://diabolo_scene_description/meshes/diabolo_shell.stl",
color=(1.0, 1.0, 1.0),
scale=[0.001, 0.001, 0.001],
namespace="initial_pos",
)
initial_pos_shell_marker.pose = diabolo_pose
initial_pos_fixator_marker = self._make_marker_from_mesh(
"package://diabolo_scene_description/meshes/diabolo_fixators.stl",
color=(1.0, 1.0, 1.0),
scale=[0.001, 0.001, 0.001],
namespace="initial_pos",
)
initial_pos_fixator_marker.pose = diabolo_pose
initial_pos_axis_marker = self._make_marker_from_mesh(
"package://diabolo_scene_description/meshes/diabolo_axis.stl",
color=(1.0, 1.0, 1.0),
scale=[0.001, 0.001, 0.001],
namespace="initial_pos",
)
initial_pos_axis_marker.pose = diabolo_pose
marker_array.append(copy.deepcopy(initial_pos_shell_marker))
marker_array.append(copy.deepcopy(initial_pos_fixator_marker))
marker_array.append(copy.deepcopy(initial_pos_axis_marker))
initial_diabolo_vel_marker = self._make_marker_from_mesh(
"",
color=(1.0, 1.0, 1.0),
scale=[0.03, 0.02, 0.02],
namespace="initial_pos",
)
initial_vel_base = geometry_msgs.msg.Point()
initial_vel_tip = geometry_msgs.msg.Point()
initial_diabolo_vel_marker.type = visualization_msgs.msg.Marker.ARROW
initial_vel_base = diabolo_pose.position
initial_vel_tip.x = initial_vel_base.x + (diabolo_vel.x) / 2.0
initial_vel_tip.y = initial_vel_base.y + (diabolo_vel.y) / 2.0
initial_vel_tip.z = initial_vel_base.z + (diabolo_vel.z) / 2.0
initial_diabolo_vel_marker.points.append(initial_vel_base)
initial_diabolo_vel_marker.points.append(initial_vel_tip)
marker_array.append(copy.deepcopy(initial_diabolo_vel_marker))
marker_count = self.marker_count
diabolo_shell_marker = self._make_marker_from_mesh(
"package://diabolo_scene_description/meshes/diabolo_shell.stl",
color=(0.0, 1.0, 0.0),
scale=[0.001, 0.001, 0.001],
namespace="",
)
diabolo_fixator_marker = self._make_marker_from_mesh(
"package://diabolo_scene_description/meshes/diabolo_fixators.stl",
color=(0.1, 0.1, 0.1),
scale=[0.001, 0.001, 0.001],
namespace="",
)
diabolo_axis_marker = self._make_marker_from_mesh(
"package://diabolo_scene_description/meshes/diabolo_axis.stl",
color=(0.7, 0.7, 0.7),
scale=[0.001, 0.001, 0.001],
namespace="",
)
goal_diabolo_shell_marker = self._make_marker_from_mesh(
"package://diabolo_scene_description/meshes/diabolo_shell.stl",
color=(100.0 / 255.0, 255.0 / 255.0, 50.0 / 255.0),
scale=[0.001, 0.001, 0.001],
namespace="",
)
goal_diabolo_fixator_marker = self._make_marker_from_mesh(
"package://diabolo_scene_description/meshes/diabolo_fixators.stl",
color=(0.1, 0.1, 0.1),
scale=[0.001, 0.001, 0.001],
namespace="",
)
goal_diabolo_axis_marker = self._make_marker_from_mesh(
"package://diabolo_scene_description/meshes/diabolo_axis.stl",
color=(0.7, 0.7, 0.7),
scale=[0.001, 0.001, 0.001],
namespace="",
)
goal_diabolo_vel_marker = self._make_marker_from_mesh(
"",
color=(100.0 / 255.0, 255.0 / 255.0, 50.0 / 255.0),
scale=[0.02, 0.02, 0.02],
namespace="",
)
diabolo_to_goal_marker = self._make_marker_from_mesh(
"",
color=(1.0, 1.0, 1.0),
scale=[0.01, 0.02, 0.02],
namespace="",
alpha=0.5,
)
self.marker_count = marker_count + 1
for i in range(len(req.goal_states)):
ns = "waypoint_" + str(i)
# diabolo_shell_marker.pose = resp.diabolo_states[i].pose
# diabolo_fixator_marker.pose = resp.diabolo_states[i].pose
# diabolo_axis_marker.pose = resp.diabolo_states[i].pose
self.sphere_marker_g = self._make_marker_from_mesh(
"",
color=(240.0 / 255.0, 230.0 / 255.0, 50.0 / 255.0),
scale=[0.05, 0.05, 0.05],
namespace="closest_point_to_goal",
)
self.sphere_marker_g.type = visualization_msgs.msg.Marker.SPHERE
self.sphere_marker_g.pose = resp.diabolo_states[i].pose
self.sphere_marker_g.id = self.marker_count
self.marker_count += 1
marker_array.append(copy.deepcopy(self.sphere_marker_g))
goal_diabolo_shell_marker.pose = req.goal_states[i].pose
goal_diabolo_shell_marker.id = self.marker_count
goal_diabolo_shell_marker.ns = "goal_states"
self.marker_count += 1
goal_diabolo_fixator_marker.pose = req.goal_states[i].pose
goal_diabolo_fixator_marker.id = self.marker_count
goal_diabolo_fixator_marker.ns = "goal_states"
self.marker_count += 1
goal_diabolo_axis_marker.pose = req.goal_states[i].pose
goal_diabolo_axis_marker.id = self.marker_count
goal_diabolo_axis_marker.ns = "goal_states"
self.marker_count += 1
marker_array.append(copy.deepcopy(goal_diabolo_shell_marker))
marker_array.append(copy.deepcopy(goal_diabolo_fixator_marker))
marker_array.append(copy.deepcopy(goal_diabolo_axis_marker))
## The goal state velocity
# goal_vel_base = geometry_msgs.msg.Point()
# goal_vel_tip = geometry_msgs.msg.Point()
# goal_diabolo_vel_marker.type = visualization_msgs.msg.Marker.ARROW
# goal_vel_base = req.goal_states[i].pose.position
# goal_vel_tip.x = goal_vel_base.x + (req.goal_states[i].trans_velocity.x)/2.0
# goal_vel_tip.y = goal_vel_base.y + (req.goal_states[i].trans_velocity.y)/2.0
# goal_vel_tip.z = goal_vel_base.z + (req.goal_states[i].trans_velocity.z)/2.0
# goal_diabolo_vel_marker.points.append(goal_vel_base)
# goal_diabolo_vel_marker.points.append(goal_vel_tip)
## The distance between the goal state and the closest point
# FIXME: This doesn't seem to point to the closest point.
diabolo_to_goal_base = geometry_msgs.msg.Point()
diabolo_to_goal_tip = geometry_msgs.msg.Point()
diabolo_to_goal_marker.type = visualization_msgs.msg.Marker.ARROW
diabolo_to_goal_base = req.goal_states[i].pose.position
diabolo_to_goal_tip = resp.diabolo_states[i].pose.position
diabolo_to_goal_marker.points.append(diabolo_to_goal_base)
diabolo_to_goal_marker.points.append(diabolo_to_goal_tip)
diabolo_to_goal_marker.id = self.marker_count
diabolo_to_goal_marker.ns = "from_goal_to_closest_point"
self.marker_count += 1
marker_array.append(copy.deepcopy(diabolo_to_goal_marker))
# predicted_diabolo_vel_marker = self._make_marker_from_mesh("", color=(0.,1.,0.), scale=[0.02, 0.02, 0.02], namespace=ns)
# predicted_vel_base = geometry_msgs.msg.Point()
# predicted_vel_tip = geometry_msgs.msg.Point()
# predicted_diabolo_vel_marker.type = visualization_msgs.msg.Marker.ARROW
# predicted_vel_base = resp.diabolo_states[i].pose.position
# predicted_vel_tip.x = predicted_vel_base.x + (resp.diabolo_states[i].trans_velocity.x)/2.0
# predicted_vel_tip.y = predicted_vel_base.y + (resp.diabolo_states[i].trans_velocity.y)/2.0
# predicted_vel_tip.z = predicted_vel_base.z + (resp.diabolo_states[i].trans_velocity.z)/2.0
# predicted_diabolo_vel_marker.points.append(predicted_vel_base)
# predicted_diabolo_vel_marker.points.append(predicted_vel_tip)
# marker_array.append(copy.deepcopy(predicted_diabolo_vel_marker))
self.marker_array_pub.publish(marker_array)
# time_of_flight = 2.0*(resp.diabolo_trans_vel.z)/9.81
# rospy.logwarn("Returning trajectories")
return (
resp.a_bot_trajectory,
resp.b_bot_trajectory,
resp.left_stick_poses,
resp.right_stick_poses,
)
else:
# print("Trajectory not found. Aborting")
return None, None, None, None
if __name__ == "__main__":
try:
c = PlayerClass()
i = 1
# print(c.motion_functions)
c.force_add_motion_function_()
prep_motions = ["None", "horizontal_impulse", "horizontal_impulse_short_left"]
# prep_motion = prep_motions[2]
prep_motion = ""
while not rospy.is_shutdown():
rospy.loginfo("Enter 1 to load motion data")
rospy.loginfo(
"Enter 2 to initialize the motion functions with hardcoded values."
)
rospy.loginfo("Enter 3 to initialize the robot positions.")
rospy.loginfo("Enter d to spawn diabolo in simulation")
rospy.loginfo("Enter sx to start playback at custom rate.")
rospy.loginfo("Enter m to change the motion being executed")
rospy.loginfo("Enter n to change the preparatory motion")
rospy.loginfo("Enter ox to start oneshot motion")
rospy.loginfo("Enter px to start continuous periodic motion")
rospy.loginfo("Enter t to stop motion.")
rospy.loginfo("Enter f to tilt the diabolo forward.")
rospy.loginfo("Enter b to tilt the diabolo backward.")
rospy.loginfo("Enter k to save the current knot points")
rospy.loginfo("Enter x to exit.")
i = raw_input()
if i == "1":
c.read_transformed_motion_data(
folder=("experiments/output/2020-09-14_motion_extraction/")
)
elif i == "2":
c.initialize_motion_functions(use_saved_values=False)
elif i == "3":
c.initialize_robot_positions()
elif i == "d" or i == "D":
print("Default parameters are (0.13, 0.13, 0.07, .9999). Change? y/n")
a = raw_input()
if a == "y":
print("Enter the parameters, seperated by spaces")
p = raw_input().split()
if len(p) >= 4:
print(
"New parameters are: "
+ p[0]
+ " "
+ p[1]
+ " "
+ p[2]
+ " "
+ p[3]
)
c.initialize_sim_diabolo(
parameters=(
float(p[0]),
float(p[1]),
float(p[2]),
float(p[3]),
)
)
else:
print("Not enough parameters")
else:
c.initialize_sim_diabolo(
parameters=(0.13, 0.13, 0.07, 0.9999)
) # Set the diabolo plugin parameters and spawn the diabolo
# One-shot / continuous motion execution call
elif i == "ox" or i == "OX":
print(
"This will execute the motion without asking for confirmation. \n Meant for execution in simulation \n Are you sure? y/n?"
)
e = raw_input()
if e == "y":
# TODO: pass preparatory_motion?
c.run_oneshot_motion(interactive=True, confirm_execution=False)
else:
print("Aborting")
elif i == "px" or i == "PX":
print(
"This will execute the motion without asking for confirmation. \n Meant for execution in simulation \n Are you sure? y/n?"
)
e = raw_input()
if e == "y":
c.start_periodic_motion(
interactive=True,
confirm_execution=False,
preparatory_motion=prep_motion,
)
else:
print("Aborting")
elif i == "T" or i == "t":
c.stop_periodic_motion()
elif i == "f":
# To tilt the diabolo forward, the right hand goes forward
c.tilt_offset = 0.03
c.changed_tilt_offset_flag = True
elif i == "b":
c.tilt_offset = -0.03
c.changed_tilt_offset_flag = True
## Changing motion / prep. motion
elif i == "m" or i == "M":
print("The current motion is " + c.current_motion)
print("Change? y/n")
i = raw_input()
if i == "y":
print("List of available functions is as follows: ")
print(
"Enter the appropriate index number to choose the motion to change to"
)
for i in range(len(c.motion_list)):
print(str(i) + ": " + str(c.motion_list[i]))
i = raw_input()
try:
c.current_motion = c.motion_list[int(i)]
except:
print("Incorrect index. Aborting")
raise
elif i == "n" or i == "N":
print("The current preparatory motion is " + prep_motion)
print("Change? y/n")
i = raw_input()
if i == "y":
print("List of available motions: ")
print(
"Enter the appropriate index number to choose the motion to change to"
)
for i in range(len(prep_motions)):
print(str(i) + ": " + str(prep_motions[i]))
i = raw_input()
try:
prep_motion = prep_motions[int(i)]
if prep_motion == "None":
prep_motion = ""
except:
print("Incorrect index. Aborting")
raise
elif i == "r":
c.tilt_offset = 0.0
elif i == "k" or i == "K":
c.save_current_knot_points()
elif i == "x":
# c.stop_publish()
break
elif i == "":
continue
except rospy.ROSInterruptException:
pass
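# A scripted (non-interactive) call sequence, sketched from the menu handlers
# above for illustration; it assumes the same simulation setup the interactive
# loop expects (all method names appear in the code above).
#   c = PlayerClass()
#   c.force_add_motion_function_()
#   c.initialize_motion_functions(use_saved_values=False)
#   c.initialize_robot_positions()
#   c.initialize_sim_diabolo(parameters=(0.13, 0.13, 0.07, 0.9999))
#   c.start_periodic_motion(interactive=True, confirm_execution=False, preparatory_motion="")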
======================================================================
hexsha: f2a9847b819084a601442dc4d30086db0ba4a8ad | size: 1378 | ext: py | lang: Python
path: genius.py
repo: fedecalendino/alfred-lyrics-finder @ 771eb9ddcd1849b6095b2e7b16a2335d25c74f30 | licenses: ["MIT"]
stars: 3 (2020-09-14T01:07:11Z to 2021-03-12T09:43:12Z) | issues: null | forks: null
======================================================================
from workflow import web
class APIException(Exception):
def __init__(self, status, message, url):
self.status = status
self.message = message
self.url = url
super(APIException, self).__init__(
"{status} > {message}".format(
status=self.status,
message=self.message
)
)
class Genius:
BASE_URL = "https://api.genius.com"
def __init__(self, access_token):
assert access_token
self.access_token = "Bearer {access_token}".format(access_token=access_token)
def __call__(self, service, **params):
url = "{base_url}/{service}".format(base_url=self.BASE_URL, service=service)
params["text_format"] = "plain"
response = web.get(
url=url,
params=params,
headers={"Authorization": self.access_token}
).json()
meta = response["meta"]
if meta["status"] != 200:
raise APIException(meta["status"], meta["message"], url)
return response["response"]
def search(self, text, page=1, per_page=20):
assert text
assert page > 0
assert 21 > per_page > 1
result = self("search", q=text, page=page, per_page=per_page)
return map(
lambda hit: hit["result"],
result.get("hits", [])
)
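# A minimal usage sketch (not part of the original module). It assumes the
# Alfred-Workflow package providing `workflow.web` is importable and that a
# valid Genius API token is set in the GENIUS_ACCESS_TOKEN environment
# variable; the "full_title" field follows the public Genius search response.
if __name__ == "__main__":
    import os
    genius = Genius(os.environ["GENIUS_ACCESS_TOKEN"])
    for hit in genius.search("bohemian rhapsody", page=1, per_page=5):
        print(hit.get("full_title"))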
======================================================================
hexsha: f2b2b69ac9c8d9c5d5b9c1cb7f1d8d0174255511 | size: 2310 | ext: py | lang: Python
path: utils/html_markup.py
repo: carlboudreau007/BlockChain_Demo @ fb90212e9a401aa3b757e49af7fd28d250bafbc4 | licenses: ["MIT"]
stars: null | issues: null | forks: null
======================================================================
import glob
from flask import Markup
SERVER_OPTIONS = [{'text': 'Local Host', 'value': '127.0.0.1'},
{'text': 'Test weved23962', 'value': '10.201.144.167'},
{'text': 'Stage weves31263', 'value': '10.50.8.130'},
{'text': 'Prod wevep31172', 'value': '10.48.164.198'}
]
def server_options(ip_address: str) -> Markup:
return Markup(SERVER_OPTIONS)
def sql_options(base_dir: str) -> [Markup, str]:
"""Create an option list based on files in the directory.
:param base_dir: where the sql files are located
:return: list of options
"""
pattern = f'{base_dir}/*.sql'
files = glob.glob(pattern, recursive=True)
options = ''
first = True
first_file = ''
for file in files:
file = file.replace('\\', '/')
description = file.replace('.sql', '').replace('_', ' ')
last_count = description.rfind('/') + 1
description = description[last_count:]
# print(description)
if first:
options += f'<option value="{file}" selected="selected">{description}</option>\n'
first_file = file
first = False
else:
options += f'<option value="{file}">{description}</option>\n'
return Markup(options), first_file
def vue_sql_select(base_dir: str) -> [Markup, str]:
"""Create an option list based on files in the directory.
:param base_dir: where the sql files are located
:return: list of options
"""
pattern = f'{base_dir}/*.sql'
files = glob.glob(pattern, recursive=True)
options = []
first = True
first_file = ''
for file in files:
file = file.replace('\\', '/')
description = file.replace('.sql', '').replace('_', ' ')
last_count = description.rfind('/') + 1
description = description[last_count:]
# print(description)
if first:
first_file = file
first = False
# options += f"{{text: '{description}', value: '{file}'}},"
options.append({'text': f'{description}', 'value': f'{file}'})
return Markup(options), first_file
if __name__ == '__main__':
print(vue_sql_select('../sql/pa_related/care_guidance'))
print(sql_options('../sql/pa_related/care_guidance'))
| 32.535211
| 93
| 0.577056
| 269
| 2,310
| 4.806691
| 0.297398
| 0.032483
| 0.029389
| 0.024749
| 0.672854
| 0.521268
| 0.521268
| 0.521268
| 0.521268
| 0.521268
| 0
| 0.030534
| 0.262771
| 2,310
| 70
| 94
| 33
| 0.728714
| 0.155411
| 0
| 0.565217
| 0
| 0
| 0.211216
| 0.075996
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065217
| false
| 0
| 0.043478
| 0.021739
| 0.173913
| 0.043478
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2b606f246e1cf267d985e5ff3efcca86aeda8cd
| 2,237
|
py
|
Python
|
streamlit_app.py
|
sebastiandres/xkcd_streamlit
|
68b1c01dd8eca34135126ebb33a2d539a0d25650
|
[
"MIT"
] | 1
|
2021-07-21T03:20:52.000Z
|
2021-07-21T03:20:52.000Z
|
streamlit_app.py
|
sebastiandres/xkcd_streamlit
|
68b1c01dd8eca34135126ebb33a2d539a0d25650
|
[
"MIT"
] | null | null | null |
streamlit_app.py
|
sebastiandres/xkcd_streamlit
|
68b1c01dd8eca34135126ebb33a2d539a0d25650
|
[
"MIT"
] | null | null | null |
import streamlit as st
from xkcd import xkcd_plot
from shared import translate, LANGUAGE_DICT
# Set page properties for the app
st.set_page_config(
page_title="Streamlit & XKCD",
layout="wide",
initial_sidebar_state="expanded",
)
# Initialize the session states - f_list has functions and colors
if 'f_list' not in st.session_state:
st.session_state['f_list'] = [
("5*exp(-x**2)", "g"),
("sin(5*x)/x", "b"),
]
if 'SLANG' not in st.session_state:
st.session_state['SLANG'] = list(LANGUAGE_DICT.keys())[0]
# The side bar
language_title = st.sidebar.empty() # Hack so the title gets updated before selection is made
st.session_state['SLANG'] = st.sidebar.selectbox("",
list(LANGUAGE_DICT.keys())
)
language_title.subheader(translate("language_title"))
# Delete
SLANG_DICT = LANGUAGE_DICT[st.session_state['SLANG']]
st.sidebar.subheader(translate("parameters_title"))
with st.sidebar.expander(translate("functions_expander")):
f = st.text_input(translate("equation"), "sin(5*x)/x")
c = st.color_picker(translate("function_color"), "#0000FF")
col1, col2 = st.columns(2)
if col1.button(translate("add_function")):
st.session_state['f_list'].append( (f, c) )
if col2.button(translate("clean_functions")):
st.session_state['f_list'] = []
st.write(translate("functions_link"))
with st.sidebar.expander(translate("graph_expander")):
title = st.text_input(translate("title_text"), translate("title_value"))
xlabel = st.text_input(translate("xlabel_text"), "x")
ylabel = st.text_input(translate("ylabel_text"), "y")
xmin = st.number_input(translate("xmin_text"), value=-5)
xmax = st.number_input(translate("xmax_text"), value=+5)
st.sidebar.markdown(translate("links_md"))
# The main view
try:
fig = xkcd_plot(st.session_state['f_list'], title, xlabel, ylabel, xmin, xmax, Nx=1001)
st.pyplot(fig)
except Exception as e:
st.session_state['f_list'] = []
st.error(translate("error_warning"))
st.warning(translate("error_advice"))
st.exception(e)
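# To run this app locally (assuming streamlit is installed and the sibling
# xkcd and shared modules from the repository are on the Python path):
#   streamlit run streamlit_app.py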
======================================================================
hexsha: f2b7c2a6955082c094b447b57a5e843a6c763e15 | size: 4693 | ext: py | lang: Python
path: cyclegan/data/celeba/mask_face_region_with_avail_kpts.py
repo: dingyanna/DepthNets @ a13b05e315b0732b6a28594b1343a6940bbab229 | licenses: ["MIT"]
stars: 114 (2018-11-27T19:34:13Z to 2022-03-26T19:39:00Z) | issues: 9 (2018-12-11T09:05:22Z to 2021-07-02T21:27:34Z)
forks: 32 (2018-12-03T00:52:54Z to 2021-08-30T01:45:31Z) via kdh4672/Face_Recognition_With_Augmentation @ b0795b97c94bbba1a1e3310670d0868f3eacb479
======================================================================
"""
This module masks faces using kpts already detected
"""
import numpy as np
import argparse
import cv2
#from RCN.preprocessing.tools import BGR2Gray
from PIL import Image
import h5py
def get_parsed_keypoints(path):
with open(path) as f:
x = f.read()
y=x.split('\n')
z=[[int(i) for i in k.split()] for k in y if k != '']
return np.array(z)
def read_kpts(kpts_dir, imgs_ids):
kpts_list = []
for img_id in imgs_ids:
img_path = '%s/%s_crop.txt' % (kpts_dir, img_id)
kpts = get_parsed_keypoints(img_path)
kpts_list.append(kpts)
return np.array(kpts_list)
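# mask_out_face (below) expects images as HxWxC arrays and pred_kpts as matching
# 68-point landmark arrays; points 0-16 (jawline, reversed) and 17-26 (eyebrows)
# are joined into a 27-point polygon that is filled with zeros to black out the face.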
def mask_out_face(imgs, pred_kpts):
mask_imgs = []
for img, kpts in zip(imgs, pred_kpts):
# mask_img = cv2.fillPoly(img, kpts)
kpts = kpts.astype(np.int32)
# reordering #1 to #17 kpts to form a polygon
kpts_mask = np.concatenate((kpts[:17][::-1], kpts[17:27]), axis=0)
img_mask = img.copy()
#cv2.fillConvexPoly(img_mask, kpts_mask, 0)
cv2.fillPoly(img_mask, kpts_mask.reshape(1,27,2), 0)
mask_imgs.append(img_mask)
return mask_imgs
def plot_cross(img, kpt, color, lnt=1):
kpt = map(int, kpt)
x, y = kpt
cv2.line(img=img, pt1=(x-lnt, y-lnt), pt2=(x+lnt, y+lnt), color=color)
cv2.line(img=img, pt1=(x-lnt, y+lnt), pt2=(x+lnt, y-lnt), color=color)
return img
def draw_kpts(img, kpts, color):
for kpt in kpts:
x_i = int(kpt[0])
y_i = int(kpt[1])
img = plot_cross(img, kpt=(x_i, y_i), color=color)
return img
def convert_np_to_PIL(np_img):
img_rev = np_img[:, :, ::-1].copy()
rescaled = (255.0 / img_rev.max() * (img_rev - img_rev.min())).astype(np.uint8)
im = Image.fromarray(rescaled)
return im
def tile_images(img, img_mask, img_depthNet, row_size, col_size):
rows = 1
cols = 3
gap_sz = 5
gap_cols = (cols - 1) * gap_sz
gap_rows = (rows - 1) * gap_sz
index = 0
new_im = Image.new('RGB', (cols*col_size + gap_cols,
rows*row_size + gap_rows), "white")
for i in range(0, rows * row_size + gap_rows, row_size + gap_sz):
for jj in range(0, cols * col_size + gap_cols, col_size + gap_sz):
if jj == 0:
new_im.paste(img, (jj, i))
elif jj == col_size + gap_sz:
new_im.paste(img_mask, (jj, i))
else:
new_im.paste(img_depthNet, (jj, i))
return new_im
if __name__ == "__main__":
#parser = argparse.ArgumentParser(description='Getting keypoint prediction\
# using a trained model.')
#parser.add_argument('--img_path', type=str, help='the complete path to the\
# pickle file that contains pre-processed images',
# required=True)
#kpts_path = '/home/honari/libs/test_RCN/RCN/plotting/keypoints'
kpts_path = "./keypoints"
#args = parser.parse_args()
#img_path = args.img_path
imgs_path = 'celebA.h5'
#fp = open(img_path, 'r')
fp = h5py.File(imgs_path, 'a')
#dset = pickle.load(fp)
imgs = fp['src_GT']
#imgs_depthNet = fp['src_depthNet']
imgs_ids = fp['src_id'][:].astype("U6")
print('getting kpts')
#pred_kpts = get_kpts(imgs, path)
pred_kpts = read_kpts(kpts_path, imgs_ids)
print('getting masks')
masked_face = mask_out_face(imgs, pred_kpts)
"""
data_dict = OrderedDict()
data_dict['img_orig'] = imgs
data_dict['img_mask'] = masked_face
pickle.dump('mask_faces.pickle', data_dict)
"""
src_GT_mask_face = np.array(masked_face).astype(np.uint8)
#img_path_out = img_path.split('.pickle')[0] + '_with_mask.pickle'
#with open(img_path_out, 'wb') as fp:
# pickle.dump(dset, fp)
fp.create_dataset('src_GT_mask_face', data=src_GT_mask_face)
src_depthNet = fp['src_depthNet']
fp.create_dataset('src_depthNet_and_mask',
data=np.concatenate((src_depthNet, src_GT_mask_face), axis=-1))
'''
print('plotting samples')
n_sample = 50
for img, img_mask, img_depthNet, img_id in \
zip(imgs, masked_face, imgs_depthNet, np.arange(n_sample)):
row_size, col_size, _ = img.shape
img_PIL = convert_np_to_PIL(img)
img_mask_PIL = convert_np_to_PIL(img_mask)
img_depthNet_PIL = convert_np_to_PIL(img_depthNet)
img_new = tile_images(img_PIL, img_mask_PIL, img_depthNet_PIL,
row_size, col_size)
img_new.save('./sample_mask_imgs/img_%s.png' % (img_id))
'''
fp.close()
print('done!')
======================================================================
hexsha: f2b7d3d40db3233a8eadd8a94f91fbf6d7c9b69b | size: 589 | ext: py | lang: Python
path: task1/task1.py
repo: ZHN202/opencv_learning @ f0725955e6e525d3918c1117763bf0aaa4299777 | licenses: ["MIT"]
stars: 1 (2021-11-04T03:41:04Z) | issues: null | forks: null
======================================================================
import cv2 as cv
import numpy as np
img = cv.imread('test.png')
# Resize the image to 1920x1080
img = cv.resize(img, dsize=(1920, 1080), fx=1, fy=1, interpolation=cv.INTER_NEAREST)
# Convert to the HSV color space
hsvimg = cv.cvtColor(img, cv.COLOR_BGR2HSV)
lower_y = np.array([20, 43, 46])
upper_y = np.array([34, 255, 220])
mask = cv.inRange(hsvimg, lower_y, upper_y)
# Probabilistic Hough line detection
lines = cv.HoughLinesP(mask, 1, np.pi / 180, 127, minLineLength=500, maxLineGap=1)
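# Arguments above follow cv.HoughLinesP(image, rho, theta, threshold, ...):
# rho = 1 px, theta = 1 degree, accumulator threshold = 127,
# with minLineLength=500 px and maxLineGap=1 px.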
for line in lines:
x1, y1, x2, y2 = line[0]
cv.line(img, (x1, y1), (x2, y2), (0, 255, 0), 1)
cv.imshow('img', img)
cv.waitKey(0)
======================================================================
hexsha: f2ba3f6c4a26d42ba28e90efe9fded89ad4b027a | size: 385 | ext: py | lang: Python
path: Importing_&_Managing_Financial_Data/Importing_financial_data_from_the_web/Visualize_a_stock_price_trend.py
repo: RKiddle/python_finance @ 7c0ed2998c0f82a0998ba0cb06225453ba8ee3fe | licenses: ["MIT"]
stars: 1 (2021-04-28T01:26:38Z) | issues: null | forks: null
======================================================================
# Import matplotlib.pyplot and the data-access helpers used below
import matplotlib.pyplot as plt
from datetime import date
from pandas_datareader.data import DataReader
# Set start and end dates
start = date(2016, 1, 1)
end = date(2016, 12, 31)
# Set the ticker and data_source
ticker = 'FB'
data_source = 'google'
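# Note: the 'google' daily-data source is no longer supported by
# pandas-datareader; 'stooq' (or Yahoo via the yfinance package) is a
# common replacement.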
# Import the data using DataReader
stock_prices = DataReader(ticker, data_source, start, end)
# Plot Close
stock_prices['Close'].plot(title=ticker)
# Show the plot
plt.show()
======================================================================
hexsha: f2bdc8d9084d26a302efcbe7ca92780a65ffbfe3 | size: 3722 | ext: py | lang: Python
path: src/resnet/resnetv2_3stage_gaussian.py
repo: googleinterns/out-of-distribution @ 84a2d5af59462f0943f629f742090b485ed50e61 | licenses: ["Apache-2.0"]
stars: null | issues: null | forks: null
======================================================================
from typing import List, Union
import torch
from torch import nn
from torch.nn import functional as F
from src.modules.max_mahalanobis import MaxMahalanobis, GaussianResult
from src.modules.normalize import Normalize
from src.resnet.bottleneck_block_v2s3 import create_bottleneck_stage_v2s3
from src.resnet.shared import GaussianMode, ResNet_Gaussian
class ResNetV2_3Stage_Gaussian(ResNet_Gaussian):
"""
Implements Max-Mahalanobis center loss for classification on 32x32 RGB images (e.g. CIFAR-10).
Reference papers:
- Identity Mappings in Deep Residual Networks (https://arxiv.org/abs/1603.05027)
- Rethinking Softmax Cross-Entropy Loss For Adversarial Robustness (https://arxiv.org/pdf/1905.10626.pdf)
Reference implementations:
- Official code (https://github.com/P2333/Max-Mahalanobis-Training/blob/master/train.py)
"""
normalize: Normalize
conv1: nn.Conv2d
stage2: nn.Sequential
stage3: nn.Sequential
stage4: nn.Sequential
bn_post: nn.BatchNorm2d
avgpool: nn.AdaptiveAvgPool2d
fc: nn.Linear
max_mahalanobis: MaxMahalanobis
out_channels: int
def __init__(self, stage_sizes: List[int], radius: float, n_classes: int):
super().__init__()
if len(stage_sizes) != 3:
raise ValueError("Stage_sizes must have length 3!")
if radius <= 0:
raise ValueError("Radius must be positive!")
if n_classes <= 1:
raise ValueError("N_classes must be greater than 1!")
self.init_layers(stage_sizes, radius, n_classes)
self.reset_parameters()
self.out_channels = n_classes
def init_layers(self, stage_sizes: List[int], radius: float, n_classes: int) -> None:
self.normalize = Normalize(3)
self.conv1 = nn.Conv2d(3, 16, 3, padding=1, bias=False)
self.stage2 = create_bottleneck_stage_v2s3(stage_sizes[0], 16, 16, 64, 1)
self.stage3 = create_bottleneck_stage_v2s3(stage_sizes[1], 64, 32, 128, 2)
self.stage4 = create_bottleneck_stage_v2s3(stage_sizes[2], 128, 64, 256, 2)
self.bn_post = nn.BatchNorm2d(256)
self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
self.fc = nn.Linear(256, 256)
self.max_mahalanobis = MaxMahalanobis(radius, 256, n_classes)
def reset_parameters(self) -> None:
for module in self.modules():
if isinstance(module, (nn.Conv2d, nn.Linear)):
nn.init.kaiming_normal_(module.weight, nonlinearity="relu")
def forward(self, x: torch.Tensor, mode: GaussianMode) -> Union[torch.Tensor, GaussianResult]:
if x.shape[1:] != (3, 32, 32):
raise ValueError("Input tensor must have shape [N, C=3, H=32, W=32]!")
x = self.normalize(x)
x = self.conv1(x)
x = self.stage2(x)
x = self.stage3(x)
x = self.stage4(x)
x = self.bn_post(x)
x = F.relu(x, inplace=True)
x = self.avgpool(x)
x = torch.flatten(x, 1)
x = self.fc(x)
x = self.max_mahalanobis(x, mode)
return x
class ResNet29V2_Gaussian(ResNetV2_3Stage_Gaussian):
def __init__(self, radius: float, n_classes: int):
super().__init__([3, 3, 3], radius, n_classes)
class ResNet47V2_Gaussian(ResNetV2_3Stage_Gaussian):
def __init__(self, radius: float, n_classes: int):
super().__init__([5, 5, 5], radius, n_classes)
class ResNet65V2_Gaussian(ResNetV2_3Stage_Gaussian):
def __init__(self, radius: float, n_classes: int):
super().__init__([7, 7, 7], radius, n_classes)
class ResNet83V2_Gaussian(ResNetV2_3Stage_Gaussian):
def __init__(self, radius: float, n_classes: int):
super().__init__([9, 9, 9], radius, n_classes)
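# A minimal construction sketch (not part of the original file), assuming the
# src.resnet and src.modules packages are importable; it only checks that the
# model builds, since the GaussianMode value required by forward() is
# project-specific.
if __name__ == "__main__":
    model = ResNet29V2_Gaussian(radius=10.0, n_classes=10)
    n_params = sum(p.numel() for p in model.parameters())
    print("ResNet29V2_Gaussian parameters:", n_params)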
======================================================================
hexsha: f2c0ef753b4cd8675d6db691f0d1c053e49d0236 | size: 504 | ext: py | lang: Python
path: assignments/Exercise_Lecture73_Phumeth.P.py
repo: ZnoKunG/PythonProject @ 388b5dfeb0161aee66094e7b2ecc2d6ed13588bd | licenses: ["MIT"]
stars: null | issues: null | forks: null
======================================================================
systemMenu = {"ไก่ทอด": 35, "เป็ดทอด": 45, "ปลาทอด": 55, "ผักทอด": 20}
menuList = []
def showBill():
print("---- My Food----")
totalPrice = 0
for number in range(len(menuList)):
print(menuList[number][0],menuList[number][1])
totalPrice += int(menuList[number][1])
print("Totalprice :", totalPrice)
while True:
menuName = input("Please Enter Menu :")
if(menuName.lower() == "exit"):
break
else:
menuList.append([menuName, systemMenu[menuName]])
showBill()
======================================================================
hexsha: f2c15988d8527886dc69eb42d21e16810aed3ba2 | size: 2229 | ext: py | lang: Python
path: FetchTextFromRISE.py
repo: RISE-MPIWG/hylg @ 7d49e7aed0623d9730d5c8933030954fa8f729b0 | licenses: ["MIT"]
stars: 1 (2020-05-30T02:29:36Z) | issues: null | forks: null
======================================================================
# -*- coding: utf-8 -*-
import requests
import os
# 6000 is a large number to make sure we get all the components of a collection. Please do note that RISE also has a pagination feature,
# which can be implemented by clients if they wish.
per_page = 6000
# getting the list of collections that the user has access to:
collections_response = requests.get(f'https://rise.mpiwg-berlin.mpg.de/api/collections?per_page={per_page}')
collections = collections_response.json()
# each accessible collection has a name, a uuid, and a number of resources.
# print(collections)
idx = 1
for collection in collections:
print(f'collection at index: {idx}')
idx += 1
print(collection)
# picking a collection by its index
# collection_index = 1
# collection = collections[collection_index]
results = list(filter(lambda collection: collection['name'] == 'MPIWG - 哈佛燕京圖書館藏珍稀方志', collections))
collection = results[0]
print(collection['uuid'])
collection_uuid = collection['uuid']
# we grab all resources for this collection
resources_response = requests.get(f'https://rise.mpiwg-berlin.mpg.de/api/collections/{collection_uuid}/resources?per_page={per_page}')
corpus_path = './corpus'
if not os.path.exists(corpus_path):
os.makedirs(corpus_path)
for resource in resources_response.json():
uuid = resource['uuid']
resource_name = resource['name']
print(resource_name)
if not os.path.exists(corpus_path + "/" + resource_name):
os.makedirs(corpus_path + "/" + resource_name)
sections = requests.get("https://rise.mpiwg-berlin.mpg.de/api/resources/"+ resource['uuid'] +"/sections")
for section in sections.json():
print(section)
print(section['uuid'])
section_name = section['name']
section_path = corpus_path + "/" + resource_name + "/" + section_name
file = open(section_path +".txt", "w")
content_units = requests.get("https://rise.mpiwg-berlin.mpg.de/api/sections/"+ section['uuid'] +"/content_units?per_page=6000")
for content_unit in content_units.json():
print(content_unit)
file.write(content_unit['content'])
file.close()
======================================================================
hexsha: f2c4872a061796a24a75f519586680551cd85468 | size: 348 | ext: py | lang: Python
path: data.py
repo: alantess/DDQN-BTC @ 0fff185200dd1c16088dc322cbb7790b848c1e6d | licenses: ["MIT"]
stars: 2 (2021-01-12T08:59:54Z to 2022-02-07T23:41:49Z) | issues: null | forks: null
======================================================================
import pandas as pd
import matplotlib.pyplot as plt
def retrieve_data():
train_data = 'data/Nov_btc.csv'
test_data = 'data/btc_test_data.csv'
df = pd.read_csv(test_data)
df = df.drop(columns=['date', 'weighted','volume'])
# Columns are set at close, high, low and open.
df = df.dropna()
data = df.values
return data
======================================================================
hexsha: f2c664d27fab22d77e93ebebd90d26fccfda0d77 | size: 4715 | ext: py | lang: Python
path: main.py
repo: lucaswerner90/upc_dl_project_2021 @ c02061da0e25a0b24a9b742074b87ac30f36586d | licenses: ["MIT"]
stars: 2 (2021-07-15T12:30:43Z to 2021-11-04T07:50:16Z) | issues: 30 (2021-05-03T07:37:37Z to 2021-07-01T18:53:23Z) | forks: 1 (2021-06-21T11:12:32Z)
======================================================================
import argparse
import os
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader
from torchvision import transforms
from dataset.main import Flickr8kDataset
from dataset.caps_collate import CapsCollate
from dataset.download import DownloadDataset
from model.main import ImageCaptioningModel,ViTImageCaptioningModel
from train import train, split_subsets
from transformers import ViTFeatureExtractor
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
use_ViT_Enc = True
def main(args):
if use_ViT_Enc:
print("It is using ViT encoder!!!!")
transform = None
feature_extractor = ViTFeatureExtractor.from_pretrained('google/vit-base-patch16-224-in21k')
else:
feature_extractor = None
transform = transforms.Compose([
transforms.ToTensor(),
transforms.Resize((args['image_size'], args['image_size'])),
# The normalize parameters depends on the model we're gonna use
# If we apply transfer learning from a model that used ImageNet, then
# we should use the ImageNet values to normalize the dataset.
# Otherwise we could just normalize the values between -1 and 1 using the
# standard mean and standard deviation
transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
])
dataset = Flickr8kDataset(dataset_folder='data', transform=transform,
reduce=True, vocab_max_size=args['vocabulary_size'],feature_extractor=feature_extractor)
# Create the model
if use_ViT_Enc:
model = ViTImageCaptioningModel(
embed_size=args['embedding_dimension'],
vocab = dataset.vocab,
caption_max_length=args['captions_max_length'],
).to(device)
else:
model = ImageCaptioningModel(
image_features_dim=args['image_features_dimension'],
embed_size=args['embedding_dimension'],
vocab = dataset.vocab,
caption_max_length=args['captions_max_length'],
).to(device)
# Perform the split of the dataset
train_split, test_split = split_subsets(dataset,all_captions=True)
train_loader = DataLoader(train_split, shuffle=True, batch_size=args['batch_size'], collate_fn=CapsCollate(
pad_idx=dataset.vocab.word_to_index['<PAD>'], batch_first=True))
test_loader = DataLoader(test_split, shuffle=True, batch_size=args['batch_size'], collate_fn=CapsCollate(
pad_idx=dataset.vocab.word_to_index['<PAD>'], batch_first=True))
optimizer = optim.Adam(model.parameters(), lr=args['learning_rate'], betas=(0.9, 0.98), eps=1e-9)
criterion = nn.CrossEntropyLoss(ignore_index=dataset.vocab.word_to_index['<PAD>'])
train(
num_epochs=args['epochs'],
model=model,
train_loader=train_loader,
test_loader=test_loader,
optimizer=optimizer,
criterion=criterion,
device=device,
log_interval=args['log_interval']
)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Image captioning model setup')
parser.add_argument('-bsz','--batch-size',type=int, required=False, choices=[4,8,16,32,64], default=64, help='Number of images to process on each batch')
parser.add_argument('-vocab','--vocabulary-size',type=int, required=False, default=5000, help='Number of words that our model will use to generate the captions of the images')
parser.add_argument('-image-feature','--image-features-dimension',type=int, choices=[256,512,1024], required=False, default=512, help='Number of features that the model will take for each image')
parser.add_argument('-attn-dim','--attention-dimension',type=int, choices=[256,512,1024], required=False, default=256, help='Dimension of the attention tensor')
parser.add_argument('-embed-dim','--embedding-dimension',type=int, choices=[256,512,1024], required=False, default=256, help='Dimension of the word embedding tensor')
parser.add_argument('-epochs','--epochs',type=int, required=False, default=100, help='Number of epochs that our model will run')
parser.add_argument('-captions-length','--captions-max-length',type=int, required=False, default=28, help='Max size of the predicted captions')
parser.add_argument('-lr','--learning-rate',type=float, required=False, choices=[1e-1,1e-2,1e-3,1e-4],default=1e-3, help='Learning rate used by the optimizer')
parser.add_argument('-img-size','--image-size',type=int, required=False, choices=[224,256,320], default=224, help='Size of the input image that our model will process')
parser.add_argument('-log','--log-interval',type=int, required=False, default=5, help='During training, every X epochs, we log the results')
args = parser.parse_args()
variables = vars(args)
if not os.path.exists('data'):
print('Downloading Flickr8k dataset...')
filepath = os.path.join(os.getcwd(),'data')
DownloadDataset.download(filepath)
main(variables)
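A minimal sketch (hypothetical values, not part of the original script) of driving main() programmatically with the same dictionary that argparse would produce; it assumes the Flickr8k data has already been downloaded into ./data.
example_args = {
    'batch_size': 32,
    'vocabulary_size': 5000,
    'image_features_dimension': 512,
    'attention_dimension': 256,
    'embedding_dimension': 256,
    'epochs': 1,
    'captions_max_length': 28,
    'learning_rate': 1e-3,
    'image_size': 224,
    'log_interval': 5,
}
# main(example_args)  # uncomment to launch a short training run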
| 45.336538
| 196
| 0.759915
| 671
| 4,715
| 5.208644
| 0.304024
| 0.025751
| 0.048641
| 0.034335
| 0.25608
| 0.224034
| 0.198856
| 0.198856
| 0.198856
| 0.198856
| 0
| 0.029545
| 0.109862
| 4,715
| 103
| 197
| 45.776699
| 0.803193
| 0.074019
| 0
| 0.202532
| 0
| 0
| 0.241854
| 0.033502
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012658
| false
| 0
| 0.177215
| 0
| 0.189873
| 0.025316
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2c78a6895c6f2bb08f5bc34684b1ca6a132fd79
| 2,050
|
py
|
Python
|
tests/FasterSubsetSumTests/test_randomizedBase.py
|
joakiti/Benchmark-SubsetSums
|
a875b5adf7f800d26b73516452904031c73ec29d
|
[
"MIT"
] | null | null | null |
tests/FasterSubsetSumTests/test_randomizedBase.py
|
joakiti/Benchmark-SubsetSums
|
a875b5adf7f800d26b73516452904031c73ec29d
|
[
"MIT"
] | null | null | null |
tests/FasterSubsetSumTests/test_randomizedBase.py
|
joakiti/Benchmark-SubsetSums
|
a875b5adf7f800d26b73516452904031c73ec29d
|
[
"MIT"
] | null | null | null |
import unittest
from unittest import TestCase
from Implementations.FastIntegersFromGit import FastIntegersFromGit
from Implementations.helpers.Helper import ListToPolynomial, toNumbers
from Implementations.FasterSubsetSum.RandomizedBase import NearLinearBase
from benchmarks.test_distributions import Distributions as dist
class RandomizedBaseTester(TestCase):
@classmethod
def setUp(cls):
cls.fasterSubset = NearLinearBase(False, 1)
def test_faster_sumset_base_returns_correct_sumset(self):
vals = [1, 15, 3, 8, 120, 290, 530, 420, 152, 320, 150, 190]
T = 11
sums = self.fasterSubset.fasterSubsetSum(vals, T, 0.2)
self.assertListEqual(sums, [0, 1, 3, 4, 8, 9, 11])
def test_color_coding_base_returns_correct_sumset(self):
vals = [1, 15, 3, 8, 120, 290, 530, 420, 152, 320, 150, 190]
T = 11
characteristic = ListToPolynomial(vals)
sums = self.fasterSubset.color_coding(characteristic, T, len(vals), 0.2)
self.assertListEqual(toNumbers(sums), [0, 1, 3, 4, 8, 9, 11])
@unittest.skip("Not currently working.")
def test_faster_sumset_returns_correct_sumset_multiples(self):
vals = [1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3]
T = 11
sums = self.fasterSubset.fasterSubsetSum(vals, T, 0.2)
self.assertListEqual(sums, [0, 1, 3, 4])
@unittest.skip("Not currently working. I.e some of the speed ups we done means this does not work properly")
def test_faster_simple(self):
vals = [8, 10]
T = 18
a = list(set(vals))
delta = 0.0001
fast = self.fasterSubset.fasterSubsetSum(a, T, delta)
self.assertListEqual(fast, [0, 8, 10, 18])
@unittest.skip("comment in for benchmark.")
def test_me(self):
delta = 0.0001
i = 500
a, T = dist.evenDistribution(i)
fast = self.fasterSubset.fasterSubsetSum(a, T, delta)
# expertSolution = FastIntegersFromGit().run(a, T)
# self.assertListEqual(fast, expertSolution)
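A minimal sketch (inputs are illustrative) of calling the solver directly outside the unittest harness, assuming the Implementations package is importable as in the tests above.
from Implementations.FasterSubsetSum.RandomizedBase import NearLinearBase

solver = NearLinearBase(False, 1)
print(solver.fasterSubsetSum([1, 3, 8], 11, 0.2))  # attainable subset sums up to T = 11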
| 38.679245
| 112
| 0.661463
| 271
| 2,050
| 4.918819
| 0.346863
| 0.022506
| 0.031508
| 0.03901
| 0.335334
| 0.288822
| 0.288822
| 0.225806
| 0.213803
| 0.213803
| 0
| 0.083386
| 0.227805
| 2,050
| 52
| 113
| 39.423077
| 0.758686
| 0.04439
| 0
| 0.268293
| 0
| 0
| 0.070041
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 1
| 0.146341
| false
| 0
| 0.146341
| 0
| 0.317073
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2ca9fdc60f3ee0343b7c18df16ab40ecebc987e
| 4,744
|
py
|
Python
|
web/fabric_utils/deploy.py
|
kbarnes3/guidcoin
|
c9011a00f18bbd181a538a553950dbc0e8c1a05e
|
[
"BSD-2-Clause"
] | null | null | null |
web/fabric_utils/deploy.py
|
kbarnes3/guidcoin
|
c9011a00f18bbd181a538a553950dbc0e8c1a05e
|
[
"BSD-2-Clause"
] | null | null | null |
web/fabric_utils/deploy.py
|
kbarnes3/guidcoin
|
c9011a00f18bbd181a538a553950dbc0e8c1a05e
|
[
"BSD-2-Clause"
] | null | null | null |
from fabric.api import cd, run, settings, sudo
configurations = {
'daily': {
'branch': 'master',
'ssl': False,
},
'dev': {
'branch': 'master',
'ssl': False,
},
'prod': {
'branch': 'prod',
'ssl': False,
},
'staging': {
'branch': 'prod',
'ssl': False,
},
}
def deploy(config):
configuration = configurations[config]
branch = configuration['branch']
use_ssl = configuration['ssl']
PYTHON_DIR = '/var/www/python'
repo_dir = '{0}/guidcoin-{1}'.format(PYTHON_DIR, config)
web_dir = '{0}/web'.format(repo_dir)
config_dir = '{0}/config/ubuntu-14.04'.format(repo_dir)
uwsgi_dir = '{0}/uwsgi'.format(config_dir)
nginx_dir = '{0}/nginx'.format(config_dir)
virtualenv_python = '{0}/venv/bin/python'.format(repo_dir)
_update_source(repo_dir, branch)
_compile_source(config, repo_dir, web_dir, virtualenv_python)
_reload_code(config, uwsgi_dir)
_reload_web(config, nginx_dir, use_ssl)
_run_tests(config, web_dir, virtualenv_python)
def _update_source(repo_dir, branch):
with cd(repo_dir):
sudo('chgrp -R webadmin .')
sudo('chmod -R ug+w .')
run('git fetch origin')
# Attempt to checkout the target branch. This might fail if we've
# never deployed from this branch before in this deployment. In that case,
# just create the branch then try again.
with settings(warn_only=True):
result = sudo('git checkout {0}'.format(branch))
if result.failed:
sudo('git branch {0}'.format(branch))
sudo('git checkout {0}'.format(branch))
sudo('git reset --hard origin/{0}'.format(branch))
def _compile_source(config, repo_dir, web_dir, virtualenv_python):
with cd(repo_dir):
sudo('venv/bin/pip install --requirement=requirements.txt')
with cd(web_dir):
sudo('find . -iname "*.pyc" -exec rm {} \;')
sudo('{0} -m compileall .'.format(virtualenv_python))
sudo('{0} manage_{1}.py collectstatic --noinput'.format(virtualenv_python, config))
def _reload_code(config, uwsgi_dir):
with cd(uwsgi_dir):
sudo('cp guidcoin-{0}.ini /etc/uwsgi/apps-enabled'.format(config))
sudo('chmod 755 /etc/uwsgi/apps-enabled/guidcoin-{0}.ini'.format(config))
sudo('/etc/init.d/uwsgi start guidcoin-{0}'.format(config))
sudo('/etc/init.d/uwsgi reload guidcoin-{0}'.format(config))
def _reload_web(config, nginx_dir, ssl):
with cd(nginx_dir):
sudo('cp {0}-guidcoin-com /etc/nginx/sites-enabled/'.format(config))
if ssl:
sudo('cp ssl/{0}.guidcoin.com.* /etc/nginx/ssl'.format(config))
sudo('chown root /etc/nginx/ssl/{0}.guidcoin.com.*'.format(config))
sudo('chgrp root /etc/nginx/ssl/{0}.guidcoin.com.*'.format(config))
sudo('chmod 644 /etc/nginx/ssl/{0}.guidcoin.com.*'.format(config))
sudo('/etc/init.d/nginx reload')
def _run_tests(config, web_dir, virtualenv_python):
with cd(web_dir):
run('{0} manage_{1}.py test'.format(virtualenv_python, config))
def deploy_global_config(config):
global_dir = '/var/www/python/guidcoin-{0}/config/ubuntu-14.04/global'.format(config)
SHARED_MEM = '/etc/sysctl.d/30-postgresql-shm.conf'
NGINX_CONF = '/etc/nginx/nginx.conf'
POSTGRES_HBA = '/etc/postgresql/9.3/main/pg_hba.conf'
POSTGRES_CONF = '/etc/postgresql/9.3/main/postgresql.conf'
with cd(global_dir):
sudo('cp 30-postgresql-shm.conf {0}'.format(SHARED_MEM))
_update_permissions(SHARED_MEM, 'root', 'root', '644')
sudo('cp nginx.conf {0}'.format(NGINX_CONF))
_update_permissions(NGINX_CONF, 'root', 'root', '644')
sudo('cp pg_hba.conf {0}'.format(POSTGRES_HBA))
_update_permissions(POSTGRES_HBA, 'postgres', 'postgres', '640')
sudo('cp postgresql.conf {0}'.format(POSTGRES_CONF))
_update_permissions(POSTGRES_CONF, 'postgres', 'postgres', '644')
sudo('/etc/init.d/nginx restart')
sudo('/etc/init.d/postgresql restart')
def _update_permissions(path, owner, group, mode):
sudo('chown {0}:{1} {2}'.format(owner, group, path))
sudo('chmod {0} {1}'.format(mode, path))
def shutdown(config):
configuration = configurations[config]
branch = configuration['branch']
PYTHON_DIR = '/var/www/python'
repo_dir = '{0}/guidcoin-{1}'.format(PYTHON_DIR, config)
nginx_dir = '{0}/config/ubuntu-14.04/nginx/shutdown'.format(repo_dir)
_update_source(repo_dir, branch)
_reload_web(config, nginx_dir, configuration['ssl'])
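A minimal sketch of a hypothetical fabfile.py (Fabric 1.x style) that wires these tasks up; the host name and import path are assumptions for illustration.
from fabric.api import env
from fabric_utils.deploy import deploy, deploy_global_config, shutdown

env.hosts = ['deploy.example.com']
env.use_ssh_config = True
# From a shell, tasks would then be invoked as, e.g.:  fab deploy:staging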
| 35.669173
| 92
| 0.620784
| 617
| 4,744
| 4.60778
| 0.226904
| 0.032008
| 0.039395
| 0.021104
| 0.450229
| 0.308477
| 0.236722
| 0.145973
| 0.11924
| 0.071755
| 0
| 0.020563
| 0.220911
| 4,744
| 132
| 93
| 35.939394
| 0.748647
| 0.036889
| 0
| 0.222222
| 0
| 0
| 0.309948
| 0.120009
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.010101
| 0
| 0.10101
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2cace32420ebacd10ddd9012cee72a53278a13e
| 1,863
|
py
|
Python
|
sorts/4.Tree_sort.py
|
18-2-SKKU-OSS/2018-2-OSS-E5--
|
8bb7e4c239f5bd95f4635b442bb8b2838e76fb36
|
[
"MIT"
] | 4
|
2018-12-02T14:21:02.000Z
|
2019-02-28T04:15:42.000Z
|
sorts/4.Tree_sort.py
|
18-2-SKKU-OSS/2018-2-OSS-E5
|
8bb7e4c239f5bd95f4635b442bb8b2838e76fb36
|
[
"MIT"
] | 25
|
2018-11-27T10:00:05.000Z
|
2018-12-11T01:58:46.000Z
|
sorts/4.Tree_sort.py
|
18-2-SKKU-OSS/2018-2-OSS-E5--
|
8bb7e4c239f5bd95f4635b442bb8b2838e76fb36
|
[
"MIT"
] | null | null | null |
"""
This is an implementation of Tree Sort in Python.
To be precise, it implements a Binary Search Tree.
A Binary Search Tree has the following properties:
Each node holds a value.
A root node exists.
The left subtree of a node contains only nodes whose values are smaller than that node's value.
The right subtree of a node contains only nodes whose values are greater than or equal to that node's value.
The left and right subtrees must each also be a Binary Search Tree.
"""
from __future__ import print_function
class node(): # class implementing a Binary Search Tree
def __init__(self, val): # store the initial value in the node at creation
self.val = val
self.left = None
self.right = None
def insert(self,val): # insert a value into the tree
if self.val:
if val < self.val: # smaller than this node's value: go to the left subtree
if self.left is None:
self.left = node(val)
else:
self.left.insert(val)
elif val > self.val: # larger than this node's value: insert into the right subtree
if self.right is None:
self.right = node(val)
else:
self.right.insert(val)
else:
self.val = val
"""
To print the Binary Search Tree in ascending order,
the values are collected in inorder order, so an inorder function was added.
"""
def inorder(root, res):
if root:
inorder(root.left,res)
res.append(root.val)
inorder(root.right,res)
def treesort(arr):
# Build the Binary Search Tree.
if len(arr) == 0:
return arr
root = node(arr[0])
for i in range(1,len(arr)):
root.insert(arr[i])
# Use the inorder traversal to produce the values in ascending order.
res = []
inorder(root,res)
return res
if __name__ == '__main__':
try:
raw_input # Python 2
except NameError:
raw_input = input # Python 3
for i in range(3):
user_input = raw_input('Enter numbers separated by a comma:\n').strip()
unsorted = [int(item) for item in user_input.split(',')]
print(treesort(unsorted))
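A minimal usage sketch of treesort called directly, bypassing the interactive loop above; note that insert overwrites equal values, so duplicates do not appear in the output.
print(treesort([5, 2, 9, 1]))  # -> [1, 2, 5, 9]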
| 28.661538
| 79
| 0.570585
| 265
| 1,863
| 3.928302
| 0.45283
| 0.04707
| 0.065322
| 0.028818
| 0.034582
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004882
| 0.340311
| 1,863
| 64
| 80
| 29.109375
| 0.842148
| 0.258722
| 0
| 0.116279
| 0
| 0
| 0.036249
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.093023
| false
| 0
| 0.023256
| 0
| 0.186047
| 0.046512
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2cb9232d1beaf4ae9243ec51c0966d350c75625
| 446
|
py
|
Python
|
rules/helpers.py
|
prokoptsev/rules
|
436348004aa34c2e50d71960dad2076719fc433b
|
[
"MIT"
] | null | null | null |
rules/helpers.py
|
prokoptsev/rules
|
436348004aa34c2e50d71960dad2076719fc433b
|
[
"MIT"
] | 1
|
2017-02-01T08:56:08.000Z
|
2017-02-01T08:56:08.000Z
|
rules/helpers.py
|
prokoptsev/rules
|
436348004aa34c2e50d71960dad2076719fc433b
|
[
"MIT"
] | 1
|
2019-11-08T10:44:43.000Z
|
2019-11-08T10:44:43.000Z
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
_NOTSET = type(
b"NotSet",
(object,),
{"__repr__": lambda self: "<ValueNotSet>"}
)()
def get_by_path(keys, source_dict):
if "." in keys:
key, tail_keys = keys.split(".", 1)
if key not in source_dict:
return _NOTSET
return get_by_path(tail_keys, source_dict[key])
else:
return source_dict.get(keys, _NOTSET)
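A minimal usage sketch of get_by_path with a dotted key path; the dictionary is illustrative.
config = {"db": {"host": "localhost", "port": 5432}}
print(get_by_path("db.port", config))      # -> 5432
print(get_by_path("db.password", config))  # -> <ValueNotSet>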
| 24.777778
| 56
| 0.632287
| 59
| 446
| 4.389831
| 0.559322
| 0.15444
| 0.069498
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005952
| 0.246637
| 446
| 18
| 57
| 24.777778
| 0.764881
| 0.029148
| 0
| 0
| 0
| 0
| 0.06713
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.071429
| 0
| 0.357143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b2bca30173574ead32b90a8d29f7a356f54d612
| 3,030
|
py
|
Python
|
e2e/Vectors/Generation/Consensus/Beaten.py
|
kayabaNerve/Currency
|
260ebc20f1704f42ad6183fee39ad58ec6d07961
|
[
"CC0-1.0"
] | 66
|
2019-01-14T08:39:52.000Z
|
2022-01-06T11:39:15.000Z
|
e2e/Vectors/Generation/Consensus/Beaten.py
|
kayabaNerve/Currency
|
260ebc20f1704f42ad6183fee39ad58ec6d07961
|
[
"CC0-1.0"
] | 228
|
2019-01-16T15:42:44.000Z
|
2022-02-05T07:48:07.000Z
|
e2e/Vectors/Generation/Consensus/Beaten.py
|
kayabaNerve/Currency
|
260ebc20f1704f42ad6183fee39ad58ec6d07961
|
[
"CC0-1.0"
] | 19
|
2019-01-14T08:53:04.000Z
|
2021-11-03T20:19:28.000Z
|
from typing import List
import json
import e2e.Libs.Ristretto.Ristretto as Ristretto
from e2e.Libs.BLS import PrivateKey
from e2e.Classes.Transactions.Transactions import Claim, Send, Transactions
from e2e.Classes.Consensus.Verification import SignedVerification
from e2e.Classes.Consensus.VerificationPacket import VerificationPacket
from e2e.Classes.Consensus.SpamFilter import SpamFilter
from e2e.Classes.Merit.Merit import Block, Merit
from e2e.Vectors.Generation.PrototypeChain import PrototypeBlock, PrototypeChain
edPrivKey: Ristretto.SigningKey = Ristretto.SigningKey(b'\0' * 32)
edPubKey: bytes = edPrivKey.get_verifying_key()
transactions: Transactions = Transactions()
sendFilter: SpamFilter = SpamFilter(3)
proto: PrototypeChain = PrototypeChain(40, keepUnlocked=True)
proto.add(1)
merit: Merit = Merit.fromJSON(proto.toJSON())
#Create a Claim.
claim: Claim = Claim([(merit.mints[-1], 0)], edPubKey)
claim.sign(PrivateKey(0))
transactions.add(claim)
merit.add(
PrototypeBlock(
merit.blockchain.blocks[-1].header.time + 1200,
packets=[VerificationPacket(claim.hash, list(range(2)))]
).finish(0, merit)
)
sends: List[Send] = [
#Transaction which will win.
Send([(claim.hash, 0)], [(bytes(32), claim.amount)]),
#Transaction which will be beaten.
Send([(claim.hash, 0)], [(edPubKey, claim.amount // 2), (edPubKey, claim.amount // 2)])
]
#Children. One which will have a Verification, one which won't.
sends += [
Send([(sends[1].hash, 0)], [(edPubKey, claim.amount // 2)]),
Send([(sends[1].hash, 1)], [(edPubKey, claim.amount // 2)])
]
#Send which spends the remaining descendant of the beaten Transaction.
sends.append(Send([(sends[2].hash, 0)], [(bytes(32), claim.amount // 2)]))
for s in range(len(sends)):
sends[s].sign(edPrivKey)
sends[s].beat(sendFilter)
if s < 3:
transactions.add(sends[s])
verif: SignedVerification = SignedVerification(sends[2].hash, 1)
verif.sign(1, PrivateKey(1))
merit.add(
PrototypeBlock(
merit.blockchain.blocks[-1].header.time + 1200,
packets=[
VerificationPacket(sends[0].hash, [0]),
VerificationPacket(sends[1].hash, [1])
]
).finish(0, merit)
)
merit.add(
PrototypeBlock(
merit.blockchain.blocks[-1].header.time + 1200,
packets=[VerificationPacket(sends[2].hash, [0])]
).finish(0, merit)
)
for _ in range(4):
merit.add(
PrototypeBlock(merit.blockchain.blocks[-1].header.time + 1200).finish(0, merit)
)
blockWBeatenVerif: Block = PrototypeBlock(
merit.blockchain.blocks[-1].header.time + 1200,
packets=[VerificationPacket(sends[2].hash, [1])]
).finish(0, merit)
merit.add(
PrototypeBlock(merit.blockchain.blocks[-1].header.time + 1200).finish(0, merit)
)
with open("e2e/Vectors/Consensus/Beaten.json", "w") as vectors:
vectors.write(json.dumps({
"blockchain": merit.toJSON(),
"transactions": transactions.toJSON(),
"sends": [send.toJSON() for send in sends],
"verification": verif.toSignedJSON(),
"blockWithBeatenVerification": blockWBeatenVerif.toJSON()
}))
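A minimal sketch of reading the generated vector file back in, once this script has written e2e/Vectors/Consensus/Beaten.json.
import json

with open("e2e/Vectors/Consensus/Beaten.json", "r") as f:
    vectors = json.load(f)
print(sorted(vectors.keys()))  # blockWithBeatenVerification, blockchain, sends, transactions, verification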
| 29.705882
| 89
| 0.718482
| 381
| 3,030
| 5.706037
| 0.249344
| 0.022539
| 0.080037
| 0.096596
| 0.295768
| 0.282889
| 0.23873
| 0.23873
| 0.23873
| 0.23873
| 0
| 0.032867
| 0.126403
| 3,030
| 101
| 90
| 30
| 0.78844
| 0.067657
| 0
| 0.236842
| 0
| 0
| 0.03617
| 0.021277
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.131579
| 0
| 0.131579
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b2c8fbe001a03db6be5e0e2f8295d8600500dd8
| 5,105
|
py
|
Python
|
main/pythonDev/TestModels/sphericalJointTest.py
|
eapcivil/EXUDYN
|
52bddc8c258cda07e51373f68e1198b66c701d03
|
[
"BSD-3-Clause-Open-MPI"
] | 1
|
2020-10-06T08:06:25.000Z
|
2020-10-06T08:06:25.000Z
|
main/pythonDev/TestModels/sphericalJointTest.py
|
eapcivil/EXUDYN
|
52bddc8c258cda07e51373f68e1198b66c701d03
|
[
"BSD-3-Clause-Open-MPI"
] | null | null | null |
main/pythonDev/TestModels/sphericalJointTest.py
|
eapcivil/EXUDYN
|
52bddc8c258cda07e51373f68e1198b66c701d03
|
[
"BSD-3-Clause-Open-MPI"
] | null | null | null |
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# This is an EXUDYN example
#
# Details: Simulate Chain with 3D rigid bodies and SphericalJoint;
# Also test MarkerNodePosition
#
# Author: Johannes Gerstmayr
# Date: 2020-04-09
#
# Copyright:This file is part of Exudyn. Exudyn is free software. You can redistribute it and/or modify it under the terms of the Exudyn license. See 'LICENSE.txt' for more details.
#
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
import sys
sys.path.append('../TestModels') #for modelUnitTest as this example may be used also as a unit test
import exudyn as exu
from exudyn.itemInterface import *
from exudyn.utilities import *
from exudyn.graphicsDataUtilities import *
from modelUnitTests import ExudynTestStructure, exudynTestGlobals
SC = exu.SystemContainer()
mbs = SC.AddSystem()
nBodies = 4
color = [0.1,0.1,0.8,1]
s = 0.1 #width of cube
sx = 3*s #length of cube/body
cPosZ = 0.1 #offset of constraint in z-direction
zz = sx * (nBodies+1)*2 #max size of background
background0 = GraphicsDataRectangle(-zz,-zz,zz,sx,color)
oGround=mbs.AddObject(ObjectGround(referencePosition= [0,0,0],
visualization=VObjectGround(graphicsData= [background0])))
mPosLast = mbs.AddMarker(MarkerBodyPosition(bodyNumber = oGround,
localPosition=[-sx,0,cPosZ*0]))
#create a chain of bodies:
for i in range(nBodies):
f = 0 #factor for initial velocities
omega0 = [0,50.*f,20*f] #arbitrary initial angular velocity
ep0 = eulerParameters0 #no rotation
ep_t0 = AngularVelocity2EulerParameters_t(omega0, ep0)
p0 = [-sx+i*2*sx,0.,0] #reference position
v0 = [0.2*f,0.,0.] #initial translational velocity
nRB = mbs.AddNode(NodeRigidBodyEP(referenceCoordinates=p0+ep0,
initialVelocities=v0+list(ep_t0)))
#nRB = mbs.AddNode(NodeRigidBodyEP(referenceCoordinates=[0,0,0,1,0,0,0], initialVelocities=[0,0,0,0,0,0,0]))
oGraphics = GraphicsDataOrthoCubeLines(-sx,-s,-s, sx,s,s, [0.8,0.1,0.1,1])
oRB = mbs.AddObject(ObjectRigidBody(physicsMass=2,
physicsInertia=[6,1,6,0,0,0],
nodeNumber=nRB,
visualization=VObjectRigidBody(graphicsData=[oGraphics])))
mMassRB = mbs.AddMarker(MarkerBodyMass(bodyNumber = oRB))
mbs.AddLoad(Gravity(markerNumber = mMassRB, loadVector=[0.,-9.81,0.])) #gravity in negative y-direction
if i==0:
#mPos = mbs.AddMarker(MarkerBodyPosition(bodyNumber = oRB, localPosition = [-sx*0,0.,cPosZ*0]))
mPos = mbs.AddMarker(MarkerNodePosition(nodeNumber=nRB))
else:
mPos = mbs.AddMarker(MarkerBodyPosition(bodyNumber = oRB, localPosition = [-sx,0.,cPosZ]))
#alternative with spring-damper:
#mbs.AddObject(ObjectConnectorCartesianSpringDamper(markerNumbers = [mPosLast, mPos],
# stiffness=[k,k,k], damping=[d,d,d])) #gravity in negative z-direction
axes = [1,1,1]
if (i==0):
axes = [0,1,1]
mbs.AddObject(SphericalJoint(markerNumbers = [mPosLast, mPos], constrainedAxes=axes))
#marker for next chain body
mPosLast = mbs.AddMarker(MarkerBodyPosition(bodyNumber = oRB, localPosition = [sx,0.,cPosZ]))
mbs.Assemble()
#exu.Print(mbs)
simulationSettings = exu.SimulationSettings() #takes currently set values or default values
fact = 1000
simulationSettings.timeIntegration.numberOfSteps = 1*fact
simulationSettings.timeIntegration.endTime = 0.001*fact
simulationSettings.solutionSettings.solutionWritePeriod = simulationSettings.timeIntegration.endTime/fact*10
simulationSettings.timeIntegration.verboseMode = 1
simulationSettings.timeIntegration.newton.useModifiedNewton = True
simulationSettings.timeIntegration.generalizedAlpha.useIndex2Constraints = False
simulationSettings.timeIntegration.generalizedAlpha.useNewmark = False
simulationSettings.timeIntegration.generalizedAlpha.spectralRadius = 0.6 #0.6 works well
simulationSettings.solutionSettings.solutionInformation = "rigid body tests"
SC.visualizationSettings.nodes.defaultSize = 0.05
#simulationSettings.displayComputationTime = True
#simulationSettings.displayStatistics = True
if exudynTestGlobals.useGraphics:
exu.StartRenderer()
mbs.WaitForUserToContinue()
SC.TimeIntegrationSolve(mbs, 'GeneralizedAlpha', simulationSettings)
#+++++++++++++++++++++++++++++++++++++++++++++
sol = mbs.systemData.GetODE2Coordinates();
solref = mbs.systemData.GetODE2Coordinates(configuration=exu.ConfigurationType.Reference);
#exu.Print('sol=',sol)
u = 0
for i in range(14): #take coordinates of first two bodies
u += abs(sol[i]+solref[i])
exu.Print('solution of sphericalJointTest=',u)
exudynTestGlobals.testError = u - (4.409004179180698) #2020-04-04: 4.409004179180698
if exudynTestGlobals.useGraphics:
#SC.WaitForRenderEngineStopFlag()
exu.StopRenderer() #safely close rendering window!
| 40.84
| 181
| 0.681097
| 560
| 5,105
| 6.203571
| 0.419643
| 0.009787
| 0.007772
| 0.046056
| 0.117444
| 0.058146
| 0.056131
| 0.056131
| 0.056131
| 0
| 0
| 0.039886
| 0.174927
| 5,105
| 124
| 182
| 41.169355
| 0.7849
| 0.326934
| 0
| 0.028169
| 0
| 0
| 0.022399
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.084507
| 0
| 0.084507
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b315d99b885f67bca9bd8f9e32645470a5d8448
| 1,915
|
py
|
Python
|
inference/online_inference/src/app.py
|
made-ml-in-prod-2021/marina-zav
|
7b4b6e5f333707001e36dfb014dcd36bf975d969
|
[
"FTL"
] | null | null | null |
inference/online_inference/src/app.py
|
made-ml-in-prod-2021/marina-zav
|
7b4b6e5f333707001e36dfb014dcd36bf975d969
|
[
"FTL"
] | null | null | null |
inference/online_inference/src/app.py
|
made-ml-in-prod-2021/marina-zav
|
7b4b6e5f333707001e36dfb014dcd36bf975d969
|
[
"FTL"
] | null | null | null |
import logging
import sys
import time
from typing import List, Optional
import uvicorn
from fastapi import FastAPI
from fastapi.exceptions import RequestValidationError
from fastapi.responses import PlainTextResponse
from sklearn.pipeline import Pipeline
from src.entities import (
read_app_params,
HeartDiseaseModelRequest,
HeartDiseaseModelResponse,
)
from src.models import make_predict, load_model
logger = logging.getLogger(__name__)
handler = logging.StreamHandler(sys.stdout)
logger.setLevel(logging.INFO)
logger.addHandler(handler)
DEFAULT_CONFIG_PATH = "configs/app_config.yaml"
model: Optional[Pipeline] = None
app = FastAPI()
@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request, exc):
return PlainTextResponse(str(exc), status_code=400)
@app.get("/")
def main():
return "it is entry point of our predictor"
@app.on_event("startup")
def load_app_model():
time.sleep(30)
app_params = read_app_params("configs/app_config.yaml")
logger.info("Start loading model")
global model
model = load_model(app_params.model_path)
logger.info("Model loaded")
@app.get("/predict/", response_model=List[HeartDiseaseModelResponse])
def predict(request: HeartDiseaseModelRequest):
return make_predict(request.data, request.features, model)
@app.get("/predict_new/", response_model=List[HeartDiseaseModelResponse])
def predict_new(request: HeartDiseaseModelRequest):
# For checking new code version (new docker image)
return make_predict(request.data, request.features, model)
@app.get("/healthz")
def health() -> bool:
return not (model is None)
def setup_app():
app_params = read_app_params(DEFAULT_CONFIG_PATH)
logger.info(f"Running app on {app_params.host} with port {app_params.port}")
uvicorn.run(app, host=app_params.host, port=app_params.port)
if __name__ == "__main__":
setup_app()
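A minimal client sketch; the host and port are assumptions for a local run, since the real values come from configs/app_config.yaml.
import requests

response = requests.get("http://localhost:8000/healthz")
print(response.json())  # True once the model has finished loading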
| 26.232877
| 80
| 0.769191
| 245
| 1,915
| 5.812245
| 0.379592
| 0.063202
| 0.027388
| 0.02809
| 0.223315
| 0.192416
| 0.192416
| 0.192416
| 0.075843
| 0.075843
| 0
| 0.003001
| 0.130026
| 1,915
| 72
| 81
| 26.597222
| 0.851741
| 0.025065
| 0
| 0.078431
| 0
| 0
| 0.116354
| 0.024665
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0
| 0.215686
| 0.078431
| 0.431373
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b34659c04f2dfee8c71b653e9b765ff930cf91e
| 8,040
|
py
|
Python
|
serverCollector.py
|
VertexC/pipot-server
|
0e2c9b0e34a589d9813301765ef8d2433ef67869
|
[
"ISC"
] | 4
|
2019-02-11T12:43:08.000Z
|
2019-03-23T06:59:38.000Z
|
serverCollector.py
|
VertexC/pipot-server
|
0e2c9b0e34a589d9813301765ef8d2433ef67869
|
[
"ISC"
] | 25
|
2019-02-26T17:16:58.000Z
|
2019-08-19T03:36:56.000Z
|
serverCollector.py
|
VertexC/pipot-server
|
0e2c9b0e34a589d9813301765ef8d2433ef67869
|
[
"ISC"
] | 5
|
2019-01-15T06:32:21.000Z
|
2020-01-10T11:58:43.000Z
|
import hashlib
import hmac
import json
import datetime
from abc import ABCMeta, abstractmethod
from twisted.internet import protocol
from mod_config.models import Rule, Actions
from mod_honeypot.models import PiPotReport, Deployment
from pipot.encryption import Encryption
from pipot.notifications import NotificationLoader
from pipot.services import ServiceLoader
class ICollector:
"""
Interface that represents a uniform collector.
"""
__metaclass__ = ABCMeta
def __init__(self):
pass
@abstractmethod
def process_data(self, data):
"""
Server-side processing of received data.
:param data: A JSONified version of the data.
:type data: str
:return: None
:rtype: None
"""
pass
@abstractmethod
def queue_data(self, service_name, data):
"""
Client-side processing of data to send
:param service_name: The name of the service.
:type service_name: str
:param data: A JSON collection of data
:type data: dict
:return: None
:rtype: None
"""
pass
class ServerCollector(ICollector):
def __init__(self, db):
super(ServerCollector, self).__init__()
self.db = db
def queue_data(self, service_name, data):
pass
def process_data(self, data):
print("Received a message: %s" % data)
# Attempt to deserialize the data
try:
data = json.loads(data)
except ValueError:
print('Message not valid JSON; discarding')
return
# Check if JSON contains the two required fields
if 'data' not in data or 'instance' not in data:
print('Invalid JSON (information missing; discarding)')
return
""":type : mod_honeypot.models.Deployment"""
honeypot = Deployment.query.filter(
Deployment.instance_key == data['instance']).first()
if honeypot is not None:
# Attempt to decrypt content
decrypted = Encryption.decrypt(honeypot.encryption_key,
data['data'])
try:
decrypted_data = json.loads(decrypted)
except ValueError:
print('Decrypted data is not JSON; discarding')
return
if 'hmac' not in decrypted_data or \
'content' not in decrypted_data:
print('Decrypted data misses info; discarding')
return
# Verify message authenticity
mac = hmac.new(
str(honeypot.mac_key),
str(json.dumps(decrypted_data['content'], sort_keys=True)),
hashlib.sha256
).hexdigest()
try:
authentic = hmac.compare_digest(
mac, decrypted_data['hmac'].encode('utf8'))
except AttributeError:
# Older python version? Fallback which is less safe
authentic = mac == decrypted_data['hmac']
if authentic:
print('Data authenticated; processing')
# Determine service
for entry in decrypted_data['content']:
# Entry exists out of timestamp, service & data elements
timestamp = datetime.datetime.utcnow()
try:
timestamp = datetime.datetime.strptime(
entry['timestamp'], '%Y-%m-%d %H:%M:%S')
except ValueError:
pass
if entry['service'] == 'PiPot':
# Store
row = PiPotReport(honeypot.id, entry['data'],
timestamp)
self.db.add(row)
self.db.commit()
print('Stored PiPot entry in the database')
else:
# Get active services through the deployment profile
for p_service in honeypot.profile.services:
if p_service.service.name != entry['service']:
continue
print('Valid service for profile: %s' %
entry['service'])
# Valid service
service = ServiceLoader.get_class_instance(
entry['service'], self,
p_service.get_service_config()
)
# Convert JSON back to object
service_data = service.create_storage_row(
honeypot.id, entry['data'], timestamp)
notification_level = \
service.get_notification_level(service_data)
# Get rules that apply here
rules = Rule.query.filter(
Rule.service_id ==
p_service.service_id
).order_by(Rule.level.asc())
rule_parsed = False
for rule in rules:
if not rule.matches(notification_level):
continue
# Process message according to rule
notifier = \
NotificationLoader.get_class_instance(
rule.notification.name,
rule.get_notification_config()
)
notifier.process(
service_data.get_message_for_level(
notification_level
)
)
if rule.action == Actions.drop:
rule_parsed = True
break
if not rule_parsed:
# Store in DB
self.db.add(service_data)
self.db.commit()
print('Processed message; stored in DB')
else:
print('Processed message; dropping due to '
'rules')
if len(honeypot.profile.services) == 0:
print('There are no services configured for '
'this honeypot; discarding')
else:
print('Message not authentic; discarding')
# print('Expected: %s, got %s' % (mac, decrypted_data[
# 'hmac']))
# print('Payload: %s' % json.dumps(decrypted_data['content']))
else:
print('Unknown honeypot instance (%s); discarding' %
data['instance'])
class SSLCollector(protocol.Protocol):
def __init__(self, factory):
self.factory = factory
def connectionMade(self):
pass
def connectionLost(self, reason=protocol.connectionDone):
pass
def dataReceived(self, data):
if 'collector' in self.factory.__dict__:
self.factory.collector.process_data(data)
else:
print('No collector present!')
class SSLFactory(protocol.Factory):
def __init__(self, collector):
self.collector = collector
def buildProtocol(self, addr):
return SSLCollector(self)
class UDPCollector(protocol.DatagramProtocol):
def __init__(self, collector):
self.collector = collector
def datagramReceived(self, data, addr):
self.collector.process_data(data)
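A minimal wiring sketch (hypothetical port numbers, no real DB session) showing how these collectors could be attached to a Twisted reactor; a real deployment would pass a database session and use listenSSL with a context factory instead of plain TCP.
from twisted.internet import reactor

collector = ServerCollector(None)  # stand-in for a real DB session handle
reactor.listenUDP(9999, UDPCollector(collector))
reactor.listenTCP(9998, SSLFactory(collector))
reactor.run()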
| 37.924528
| 78
| 0.486318
| 691
| 8,040
| 5.526773
| 0.287988
| 0.037444
| 0.014402
| 0.015711
| 0.093218
| 0.039801
| 0.039801
| 0.023566
| 0
| 0
| 0
| 0.001114
| 0.441915
| 8,040
| 211
| 79
| 38.104265
| 0.850011
| 0.114801
| 0
| 0.25
| 0
| 0
| 0.093275
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.094595
| false
| 0.047297
| 0.074324
| 0.006757
| 0.243243
| 0.094595
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b3702971613873f8e0d3ea487888d2084d6acd1
| 4,852
|
py
|
Python
|
pyhttptest/decorators.py
|
NickMitin/pyhttptest
|
5116caf3962dab63d62bffe94b0659f435b3e2d3
|
[
"BSD-3-Clause"
] | 142
|
2019-10-22T11:19:44.000Z
|
2021-11-09T11:05:27.000Z
|
pyhttptest/decorators.py
|
NickMitin/pyhttptest
|
5116caf3962dab63d62bffe94b0659f435b3e2d3
|
[
"BSD-3-Clause"
] | 5
|
2019-10-22T14:43:39.000Z
|
2020-10-09T13:25:24.000Z
|
pyhttptest/decorators.py
|
NickMitin/pyhttptest
|
5116caf3962dab63d62bffe94b0659f435b3e2d3
|
[
"BSD-3-Clause"
] | 14
|
2019-10-23T18:27:58.000Z
|
2020-09-22T01:07:39.000Z
|
from sys import modules
from functools import wraps
from jsonschema import validate
from pyhttptest.constants import (
HTTP_METHOD_NAMES,
JSON_FILE_EXTENSION,
)
from pyhttptest.exceptions import (
FileExtensionError,
HTTPMethodNotSupportedError
)
from pyhttptest.http_schemas import ( # noqa
get_schema,
post_schema,
put_schema,
delete_schema
)
def check_file_extension(func):
"""A decorator responsible for checking whether
the file extension is supported.
An inner :func:`_decorator` slices the last five
characters of the passed ``file_path`` parameter and
checking whether they are equal to JSON file extension(.json).
If there is equality, decorated function business logic is
performed otherwise, the exception for not supported file extension
is raised.
Usage:
.. code-block:: python
@check_file_extension
def load_content_from_json_file(file_path):
...
:raises FileExtensionError: If the file extension is not '.json'.
"""
@wraps(func)
def _decorator(file_path):
file_extension = file_path[-5:]
if file_extension != JSON_FILE_EXTENSION:
raise FileExtensionError(file_extension)
return func(file_path)
return _decorator
def validate_extract_json_properties_func_args(func):
"""A validation decorator, ensuring that arguments
passed to the decorated function are with proper types.
An inner :func:`_decorator` does checking of arguments
types. If the types of the arguments are different than allowing
ones, the exception is raised, otherwise decorated function
is processed.
Usage:
.. code-block:: python
@validate_extract_json_properties_func_args
def extract_properties_values_from_json(data, keys):
...
:raises TypeError: If the data is not a `dict`.
:raises TypeError: If the keys is not a type of (`tuple`, `list`, `set`).
"""
@wraps(func)
def _decorator(data, keys):
if not isinstance(data, dict):
raise TypeError(
(
"Passed 'data' param argument, must be of "
"data type 'dict'. Not a type of {type}.".format(
type=type(data)
)
)
)
if not isinstance(keys, (tuple, list, set)):
raise TypeError(
(
"Passed 'keys' param argument, must be one of: "
"(tuple, list, set) data types. Not a type of {type}.".format(
type=type(keys)
)
)
)
return func(data, keys)
return _decorator
def validate_data_against_json_schema(func):
"""A validation decorator, ensuring that data is
covering JSON Schema requirements.
An inner :func:`_decorator` does checking of data
type, HTTP Method support along with appropriate JSON Schema,
that can validate passed data. If one of the checks doesn't match,
the exception is raised, otherwise, data validation is run against
JSON Schema and decorated function is processed.
Usage:
.. code-block:: python
@validate_data_against_json_schema
def extract_json_data(data):
...
:raises TypeError: If the data is not a `dict`.
:raises HTTPMethodNotSupportedError: If an HTTP Method is not supported.
:raises TypeError: If lack of appropriate JSON Schema to validate data.
"""
@wraps(func)
def _decorator(data):
if not isinstance(data, dict):
raise TypeError(
(
"Passed 'data' param argument, must be of "
"data type 'dict'. Not a type of {type}.".format(
type=type(data)
)
)
)
if 'verb' not in data or data['verb'].lower() not in HTTP_METHOD_NAMES:
raise HTTPMethodNotSupportedError(data.get('verb', 'None'))
http_schema_name = '_'.join([data['verb'].lower(), 'schema'])
# The key is used to extract module loaded in sys.modules
http_schema_module_key = '.'.join(
['pyhttptest.http_schemas', http_schema_name]
)
# Extract the module instance
http_schema_module = modules[http_schema_module_key]
if not hasattr(http_schema_module, http_schema_name):
raise ValueError(
(
'There is no appropriate JSON Schema to '
'validate data against it.'
)
)
http_schema_instance = getattr(http_schema_module, http_schema_name)
validate(instance=data, schema=http_schema_instance)
return func(data)
return _decorator
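A minimal usage sketch of check_file_extension; the decorated function below is illustrative and not part of the module.
from pyhttptest.decorators import check_file_extension

@check_file_extension
def describe(file_path):
    return 'would load {0}'.format(file_path)

print(describe('suite.json'))   # passes the extension check
# describe('suite.yaml')        # would raise FileExtensionError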
| 31.303226
| 82
| 0.61789
| 561
| 4,852
| 5.187166
| 0.244207
| 0.049141
| 0.027491
| 0.013746
| 0.341581
| 0.269416
| 0.17732
| 0.153952
| 0.14433
| 0.105842
| 0
| 0.000298
| 0.307708
| 4,852
| 154
| 83
| 31.506494
| 0.866032
| 0.395095
| 0
| 0.217949
| 0
| 0
| 0.135834
| 0.008376
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0.038462
| 0.076923
| 0
| 0.230769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b3983c191ae8db18994072c0ce7b31ca01543db
| 12,081
|
py
|
Python
|
python/test/lib/zk/cache_test.py
|
cschutijser/scion
|
054cef53b31a577ed224a090d6a4fd3883fd520b
|
[
"Apache-2.0"
] | 1
|
2018-03-18T14:46:34.000Z
|
2018-03-18T14:46:34.000Z
|
python/test/lib/zk/cache_test.py
|
cschutijser/scion
|
054cef53b31a577ed224a090d6a4fd3883fd520b
|
[
"Apache-2.0"
] | 1
|
2020-03-20T01:28:56.000Z
|
2020-03-20T01:28:56.000Z
|
python/test/lib/zk/cache_test.py
|
cschutijser/scion
|
054cef53b31a577ed224a090d6a4fd3883fd520b
|
[
"Apache-2.0"
] | 2
|
2020-03-14T16:03:27.000Z
|
2020-03-18T08:13:19.000Z
|
# Copyright 2015 ETH Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:mod:`cache_test` --- lib.zk.cache unit tests
======================================================
"""
# Stdlib
from unittest.mock import call, patch
# External packages
import nose
import nose.tools as ntools
from kazoo.exceptions import (
ConnectionLoss,
NoNodeError,
NodeExistsError,
SessionExpiredError,
)
# SCION
from lib.zk.errors import ZkNoConnection, ZkNoNodeError
from lib.zk.cache import ZkSharedCache
from test.testcommon import assert_these_calls, create_mock
class TestZkSharedCacheStore(object):
"""
Unit tests for lib.zk.cache.ZkSharedCache.store
"""
def _setup(self):
inst = ZkSharedCache("zk", "path", "handler")
inst._path = "/path"
inst._zk = create_mock(["is_connected"])
inst._kazoo = create_mock(["create", "set"])
inst._incoming_entries = create_mock(["append"])
return inst
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def test_not_connected(self, init):
inst = self._setup()
inst._zk.is_connected.return_value = False
# Call
ntools.assert_raises(ZkNoConnection, inst.store, 'n', 'v')
# Tests
inst._zk.is_connected.assert_called_once_with()
@patch("lib.zk.cache.time.time", autospec=True)
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def test_set(self, init, time_):
inst = self._setup()
# Call
inst.store('n', 'v')
# Tests
inst._kazoo.set.assert_called_once_with("/path/n", "v")
ntools.assert_false(inst._kazoo.create.called)
inst._incoming_entries.append.assert_called_once_with(
("n", time_.return_value))
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def _check_set_conn_loss(self, excp, init):
inst = self._setup()
inst._kazoo.set.side_effect = excp
# Call
ntools.assert_raises(ZkNoConnection, inst.store, 'n', 'v')
def test_set_conn_loss(self):
for excp in ConnectionLoss, SessionExpiredError:
yield self._check_set_conn_loss, excp
@patch("lib.zk.cache.time.time", autospec=True)
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def test_create(self, init, time_):
inst = self._setup()
inst._kazoo.set.side_effect = NoNodeError
# Call
inst.store('n', 'v')
# Tests
inst._kazoo.create.assert_called_once_with("/path/n", "v",
makepath=True)
inst._incoming_entries.append.assert_called_once_with(
("n", time_.return_value))
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def test_suddenly_exists(self, init):
inst = self._setup()
inst._kazoo.set.side_effect = NoNodeError
inst._kazoo.create.side_effect = NodeExistsError
# Call
inst.store('n', 'v')
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def _check_create_conn_loss(self, excp, init):
inst = self._setup()
inst._kazoo.set.side_effect = NoNodeError
inst._kazoo.create.side_effect = excp
# Call
ntools.assert_raises(ZkNoConnection, inst.store, 'n', 'v')
def test_create_conn_loss(self):
for excp in ConnectionLoss, SessionExpiredError:
yield self._check_create_conn_loss, excp
class TestZkSharedCacheProcess(object):
"""
Unit tests for lib.zk.cache.ZkSharedCache.process
"""
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def test_not_connected(self, init):
inst = ZkSharedCache("zk", "path", "handler")
inst._zk = create_mock(["is_connected"])
inst._zk.is_connected.return_value = False
# Call
ntools.assert_raises(ZkNoConnection, inst.process)
# Tests
inst._zk.is_connected.assert_called_once_with()
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def test_full(self, init):
inst = ZkSharedCache("zk", "path", "handler")
inst._zk = create_mock(["conn_epoch", "is_connected"])
inst._incoming_entries = create_mock(["__bool__", "popleft"])
inst._incoming_entries.__bool__.side_effect = True, True, False
inst._incoming_entries.popleft.side_effect = ("inc0", 1), ("inc1", 0)
inst._entries = {"inc0": 0, "old0": 0}
inst._list_entries = create_mock()
inst._list_entries.return_value = "inc0", "inc1", "new0"
inst._handle_entries = create_mock()
inst._path = "/path"
# Call
inst.process()
# Tests
ntools.eq_(inst._entries, {"inc0": 0, "inc1": 0})
inst._handle_entries.assert_called_once_with({"new0"})
class TestZkSharedCacheGet(object):
"""
Unit tests for lib.zk.cache.ZkSharedCache._get
"""
@patch("lib.zk.cache.time.time", autospec=True)
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def test_success(self, init, time_):
inst = ZkSharedCache("zk", "path", "handler")
inst._path = "/path"
inst._kazoo = create_mock(["get"])
inst._kazoo.get.return_value = ("data", "meta")
inst._entries = create_mock(["setdefault"])
# Call
ntools.eq_(inst._get("name"), "data")
# Tests
inst._kazoo.get.assert_called_once_with("/path/name")
inst._entries.setdefault.assert_called_once_with(
"name", time_.return_value)
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def test_no_entry(self, init):
inst = ZkSharedCache("zk", "path", "handler")
inst._path = "/path"
inst._kazoo = create_mock(["get"])
inst._kazoo.get.side_effect = NoNodeError
inst._entries = create_mock(["pop"])
# Call
ntools.assert_raises(ZkNoNodeError, inst._get, "name")
# Tests
inst._kazoo.get.assert_called_once_with("/path/name")
inst._entries.pop.assert_called_once_with("name", None)
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def _check_exception(self, excp, expected, init):
inst = ZkSharedCache("zk", "path", "handler")
inst._path = "/path"
inst._kazoo = create_mock(["get"])
inst._kazoo.get.side_effect = excp
# Call
ntools.assert_raises(expected, inst._get, "name")
def test_exceptions(self):
for excp, expected in (
(ConnectionLoss, ZkNoConnection),
(SessionExpiredError, ZkNoConnection),
):
yield self._check_exception, excp, expected
class TestZkSharedCacheListEntries(object):
"""
Unit tests for lib.zk.cache.ZkSharedCache._list_entries
"""
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def test_success(self, init):
inst = ZkSharedCache("zk", "path", "handler")
inst._path = "/path"
inst._kazoo = create_mock(["get_children"])
inst._kazoo.get_children.return_value = [
"node0", "node1", "node2", "node3"]
# Call
ntools.eq_(inst._list_entries(),
{"node0", "node1", "node2", "node3"})
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def test_no_cache(self, init):
inst = ZkSharedCache("zk", "path", "handler")
inst._path = "/path"
inst._kazoo = create_mock(["get_children"])
inst._kazoo.get_children.side_effect = NoNodeError
# Call
ntools.eq_(inst._list_entries(), set())
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def _check_children_exception(self, excp, expected, init):
inst = ZkSharedCache("zk", "path", "handler")
inst._path = "/path"
inst._kazoo = create_mock(["get_children"])
inst._kazoo.get_children.side_effect = excp
# Call
ntools.assert_raises(expected, inst._list_entries)
def test_children_exceptions(self):
for excp, expected in (
(ConnectionLoss, ZkNoConnection),
(SessionExpiredError, ZkNoConnection),
):
yield self._check_children_exception, excp, expected
class TestZkSharedCacheHandleEntries(object):
"""
Unit test for lib.zk.cache.ZkSharedCache._handle_entries
"""
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def test(self, init):
inst = ZkSharedCache("zk", "path", "handler")
entry_names = ["entry0", "entry1", "entry2", "entry3"]
inst._get = create_mock()
inst._get.side_effect = [
"data0", ZkNoNodeError, "data2", ZkNoConnection
]
inst._path = "/path"
inst._handler = create_mock()
# Call
ntools.eq_(inst._handle_entries(entry_names), 2)
# Tests
assert_these_calls(inst._get, ([call(i) for i in entry_names]))
inst._handler.assert_called_once_with(["data0", "data2"])
class TestZkSharedCacheExpire(object):
"""
Unit test for lib.zk.cache.ZkSharedCache.expire
"""
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def test_not_connected(self, init):
inst = ZkSharedCache("zk", "path", "handler")
inst._zk = create_mock(["is_connected"])
inst._zk.is_connected.return_value = False
# Call
ntools.assert_raises(ZkNoConnection, inst.expire, 42)
# Tests
inst._zk.is_connected.assert_called_once_with()
def _setup(self, time_, entries):
inst = ZkSharedCache("zk", "path", "handler")
inst._zk = create_mock(["is_connected"])
time_.return_value = 1000
inst._entries = entries
inst._kazoo = create_mock(["delete"])
inst._path = "/path"
return inst
@patch("lib.zk.cache.time.time", autospec=True)
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def test_success(self, init, time_):
entries = {}
for last_seen in 1000, 999, 996, 995, 994, 990, 1001:
entries["entry%d" % last_seen] = last_seen
inst = self._setup(time_, entries)
# Call
inst.expire(5)
# Tests
assert_these_calls(inst._kazoo.delete, [
call("/path/entry994"), call("/path/entry990")
], any_order=True)
@patch("lib.zk.cache.time.time", autospec=True)
@patch("lib.zk.cache.ZkSharedCache.__init__", autospec=True,
return_value=None)
def _check_exception(self, excp, expected, init, time_):
inst = self._setup(time_, {"entry1": 0})
inst._kazoo.delete.side_effect = excp
# Call
ntools.assert_raises(expected, inst.expire, 5)
def test_exceptions(self):
for excp, expected in (
(NoNodeError, ZkNoNodeError),
(ConnectionLoss, ZkNoConnection),
(SessionExpiredError, ZkNoConnection),
):
yield self._check_exception, excp, expected
if __name__ == "__main__":
nose.run(defaultTest=__name__)
| 36.279279
| 77
| 0.631653
| 1,397
| 12,081
| 5.163207
| 0.143164
| 0.022182
| 0.042978
| 0.076528
| 0.66089
| 0.634271
| 0.61708
| 0.60086
| 0.552752
| 0.516013
| 0
| 0.008697
| 0.238556
| 12,081
| 332
| 78
| 36.388554
| 0.77541
| 0.093866
| 0
| 0.541126
| 0
| 0
| 0.126319
| 0.06848
| 0
| 0
| 0
| 0
| 0.112554
| 1
| 0.108225
| false
| 0
| 0.030303
| 0
| 0.17316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b3b08e4408a36e23ecf7b49e3efe15dedf8336d
| 2,347
|
py
|
Python
|
scripts/macro-f1-tag.py
|
shuoyangd/stenella
|
a677c67c602f2229e4452ed7f38b778897df51c0
|
[
"MIT"
] | 1
|
2021-11-09T04:57:24.000Z
|
2021-11-09T04:57:24.000Z
|
scripts/macro-f1-tag.py
|
shuoyangd/stenella
|
a677c67c602f2229e4452ed7f38b778897df51c0
|
[
"MIT"
] | null | null | null |
scripts/macro-f1-tag.py
|
shuoyangd/stenella
|
a677c67c602f2229e4452ed7f38b778897df51c0
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright © 2021 Shuoyang Ding <[email protected]>
# Created on 2021-02-11
#
# Distributed under terms of the MIT license.
import argparse
import logging
import math
import sys
logging.basicConfig(
format='%(asctime)s %(levelname)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S', level=logging.INFO)
logging.getLogger().setLevel(logging.INFO)
opt_parser = argparse.ArgumentParser(description="")
opt_parser.add_argument("--tag-file", "-tf", required=True, help="file that holds system predictions, one label per line")
opt_parser.add_argument("--ref-file", "-rf", required=True, help="file that holds reference ok/bad labels, one label per line")
def f1(prec, recl):
return 2 * (prec * recl) / (prec + recl)
def main(options):
tf = open(options.tag_file, 'r')
rf = open(options.ref_file, 'r')
ok_correct = 0
ok_label_total = 0
ok_pred_total = 0
bad_correct = 0
bad_label_total = 0
bad_pred_total = 0
for idx, (tl, rl) in enumerate(zip(tf, rf)):
tag = tl.strip()
rl = rl.strip()
if tag == "OK":
ok_pred_total += 1
if rl == "OK":
ok_correct += 1
ok_label_total += 1
else:
bad_label_total += 1
elif tag == "BAD":
bad_pred_total += 1
if rl == "BAD":
bad_correct += 1
bad_label_total += 1
else:
ok_label_total += 1
else:
logging.error("line {0}: tag should either have value OK/BAD, but has value {1}".format(idx, tag))
if not (tf.read() == rf.read() == ''):
logging.error("Your tag and reference file are of different length. You should fix that first.")
ok_prec = ok_correct / ok_pred_total
ok_recl = ok_correct / ok_label_total
ok_f1 = f1(ok_prec, ok_recl)
sys.stdout.write("p/r/f for ok label: {0:.4f}/{1:.4f}/{2:.4f}\n".format(ok_prec, ok_recl, ok_f1))
bad_prec = bad_correct / bad_pred_total
bad_recl = bad_correct / bad_label_total
bad_f1 = f1(bad_prec, bad_recl)
sys.stdout.write("p/r/f for bad label: {0:.4f}/{1:.4f}/{2:.4f}\n".format(bad_prec, bad_recl, bad_f1))
sys.stdout.write("macro-f1: {0:.4f}\n".format(ok_f1 * bad_f1))
if __name__ == "__main__":
ret = opt_parser.parse_known_args()
options = ret[0]
if ret[1]:
logging.warning(
"unknown arguments: {0}".format(
opt_parser.parse_known_args()[1]))
main(options)
| 27.611765
| 127
| 0.647635
| 377
| 2,347
| 3.835544
| 0.334218
| 0.055325
| 0.033195
| 0.03112
| 0.17704
| 0.102351
| 0.062241
| 0.062241
| 0.029046
| 0
| 0
| 0.030818
| 0.198125
| 2,347
| 84
| 128
| 27.940476
| 0.736982
| 0.059651
| 0
| 0.116667
| 0
| 0.033333
| 0.222273
| 0.022727
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033333
| false
| 0
| 0.066667
| 0.016667
| 0.116667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b3bc0d2b44d013537c672eed0453f853feeca74
| 8,865
|
py
|
Python
|
Stack/Solutions_Two.py
|
daniel-zeiler/potential-happiness
|
1c9d503a52c35dab8b031f72e63725578735ac73
|
[
"MIT"
] | null | null | null |
Stack/Solutions_Two.py
|
daniel-zeiler/potential-happiness
|
1c9d503a52c35dab8b031f72e63725578735ac73
|
[
"MIT"
] | null | null | null |
Stack/Solutions_Two.py
|
daniel-zeiler/potential-happiness
|
1c9d503a52c35dab8b031f72e63725578735ac73
|
[
"MIT"
] | null | null | null |
import collections
from typing import List
def maxDepth(s: str) -> int:
stack = []
max_depth = 0
for character in s:
if character == '(':
stack.append(character)
elif character == ')':
stack.pop()
max_depth = max(max_depth, len(stack))
return max_depth
def removeOuterParentheses(s: str) -> str:
stack = []
result = ''
for i, character in enumerate(s):
if character == '(':
stack.append(i)
else:
if len(stack) == 1:
result += s[stack[0] + 1:i]
stack.pop()
return result
def removeDuplicates(s: str) -> str:
stack = []
for character in s:
if not stack:
stack.append(character)
elif character == stack[-1]:
stack.pop()
else:
stack.append(character)
return ''.join(stack)
def calPoints(ops: List[str]) -> int:
stack = []
total_points = 0
for operation in ops:
if operation == 'C':
total_points -= stack.pop()
else:
if operation == '+':
stack.append(stack[-1] + stack[-2])
elif operation == 'D':
stack.append(stack[-1] * 2)
else:
stack.append(int(operation))
total_points += stack[-1]
return total_points
def makeGood(s: str) -> str:
stack = []
for character in s:
if not stack:
stack.append(character)
elif stack[-1] == character.lower() and stack[-1] != character:
stack.pop()
else:
stack.append(character)
return ''.join(stack)
def backspaceCompare(s: str, t: str) -> bool:
s_list = []
t_list = []
for character in s:
if character == '#':
if s_list:
s_list.pop()
else:
s_list.append(character)
for character in t:
if character == '#':
if t_list:
t_list.pop()
else:
t_list.append(character)
return s_list == t_list
def isValid(s: str) -> bool:
    # Counting open/close pairs misses interleaved input such as "([)]",
    # so keep an explicit stack of the opening brackets seen so far.
    pairs = {')': '(', ']': '[', '}': '{'}
    stack = []
    for character in s:
        if character in pairs:
            if not stack or stack.pop() != pairs[character]:
                return False
        else:
            stack.append(character)
    return not stack
def minAddToMakeValid(s: str) -> int:
min_add = 0
stack = 0
for character in s:
if character == ')':
if not stack:
min_add += 1
else:
stack -= 1
else:
stack += 1
return min_add + stack
def reverseParentheses(s: str) -> str:
s = list(s)
stack = []
result_list = []
for index, character in enumerate(s):
if character == ')':
while stack[-1] != '(':
result_list.append(stack.pop())
stack.pop()
stack.extend(result_list)
result_list = []
else:
stack.append(character)
return ''.join(stack)
def validateStackSequences(pushed: List[int], popped: List[int]) -> bool:
stack = []
pushed = collections.deque(pushed)
popped = collections.deque(popped)
while pushed:
if not stack or stack[-1] != popped[0]:
stack.append(pushed.popleft())
else:
popped.popleft()
stack.pop()
while popped and stack[-1] == popped[0]:
stack.pop()
popped.popleft()
return len(pushed) == len(popped) == 0
def minRemoveToMakeValid(s: str) -> str:
stack = []
remove_set = set()
for index, character in enumerate(s):
if character == ')':
if not stack:
remove_set.add(index)
else:
stack.pop()
elif character == '(':
stack.append(index)
remove_set.update(stack)
result = ''
for index, character in enumerate(s):
if index not in remove_set:
result += character
return result
def is_valid_abc(s: str) -> bool:
stack = []
for character in s:
stack.append(character)
while len(stack) >= 3 and stack[-3] + stack[-2] + stack[-1] == 'abc':
stack.pop()
stack.pop()
stack.pop()
return not stack
def remove_duplicate_value(s: str, k: int) -> str:
stack = []
for character in s:
stack.append(character)
if len(stack) >= k:
if set(stack[-k:]) == set(character):
for _ in range(k):
stack.pop()
return ''.join(stack)
def decodeString(s: str) -> str:
stack = []
for character in s:
stack.append(character)
if stack[-1] == ']':
stack.pop()
decode = ''
while stack[-1] != '[':
decode = stack.pop() + decode
stack.pop()
number = ''
while stack and stack[-1].isnumeric():
number += stack.pop()
decode = int(number[::-1]) * decode
stack.extend(list(decode))
return ''.join(stack)
def isanumber(a):
try:
float(repr(a))
bool_a = True
except ValueError:
bool_a = False
return bool_a
def evalRPN(tokens: List[str]) -> int:
stack = []
for i, token in enumerate(tokens):
if token.lstrip('-').isnumeric():
stack.append(int(token))
else:
first_pop = stack.pop()
second_pop = stack.pop()
if token == '/':
stack.append(int(second_pop / first_pop))
elif token == '+':
stack.append(second_pop + first_pop)
elif token == '*':
stack.append(second_pop * first_pop)
elif token == '-':
stack.append(second_pop - first_pop)
return stack[-1]
def finalPrices(prices: List[int]) -> List[int]:
result = [price for price in prices]
stack = []
for i, price in enumerate(prices):
if not stack:
stack.append(i)
else:
while stack and prices[stack[-1]] >= price:
index = stack.pop()
result[index] = prices[index] - price
stack.append(i)
return result
def nextGreaterElement(nums1: List[int], nums2: List[int]) -> List[int]:
result_map = {}
stack = []
for i, number in enumerate(nums2):
while stack and nums2[stack[-1]] < number:
index = stack.pop()
result_map[nums2[index]] = number
stack.append(i)
for i, num in enumerate(nums1):
if num in result_map:
nums1[i] = result_map[num]
else:
nums1[i] = -1
return nums1
def dailyTemperatures(temperatures: List[int]) -> List[int]:
result = [0 for _ in range(len(temperatures))]
stack = []
for index, temperature in enumerate(temperatures):
while stack and temperatures[stack[-1]] < temperature:
stack_index = stack.pop()
result[stack_index] = index - stack_index
stack.append(index)
return result
def nextGreaterElements(nums: List[int]) -> List[int]:
stack = []
result = [-1 for _ in range(len(nums))]
for i, num in enumerate(nums):
while stack and nums[stack[-1]] < num:
index = stack.pop()
result[index] = num
stack.append(i)
for i, num in enumerate(nums):
while stack and nums[stack[-1]] < num:
index = stack.pop()
result[index] = num
if not stack:
return result
return result
def exclusiveTime(n: int, logs: List[str]) -> List[int]:
res = [0] * n
stack = []
for log in logs:
ID, op, time = log.split(':')
ID = int(ID)
time = int(time)
if op == 'start':
if stack:
res[stack[-1][0]] += time - stack[-1][1]
stack.append([ID, time])
else:
prev = stack.pop()
res[ID] += time - prev[1] + 1
if stack:
stack[-1][1] = time + 1
return res
def validSubarrays(nums: List[int]) -> int:
result = 0
pointer_a = 0
while pointer_a < len(nums):
pointer_b = pointer_a
temp_result = []
while pointer_b < len(nums):
if nums[pointer_b] < nums[pointer_a]:
break
temp_result.append(nums[pointer_b])
result += 1
pointer_b += 1
pointer_a += 1
return result
nums = [1, 4, 2, 5, 3]
print(validSubarrays(nums))  # prints 11: subarrays whose first element is their minimum
| 24.901685
| 77
| 0.502538
| 1,006
| 8,865
| 4.360835
| 0.125249
| 0.065193
| 0.031912
| 0.030773
| 0.300433
| 0.249829
| 0.226579
| 0.181901
| 0.134488
| 0.134488
| 0
| 0.014521
| 0.370784
| 8,865
| 355
| 78
| 24.971831
| 0.771961
| 0
| 0
| 0.417808
| 0
| 0
| 0.003835
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075342
| false
| 0
| 0.006849
| 0
| 0.164384
| 0.003425
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b3c016c7ef444898f5da6f026c91b333cec123a
| 4,684
|
py
|
Python
|
scripts/pklhisto2root.py
|
umd-lhcb/lhcb-ntuples-gen
|
d306895a0dc6bad2def19ca3d7d1304a5a9be239
|
[
"BSD-2-Clause"
] | null | null | null |
scripts/pklhisto2root.py
|
umd-lhcb/lhcb-ntuples-gen
|
d306895a0dc6bad2def19ca3d7d1304a5a9be239
|
[
"BSD-2-Clause"
] | 105
|
2018-12-20T19:09:19.000Z
|
2022-03-19T09:53:06.000Z
|
scripts/pklhisto2root.py
|
umd-lhcb/lhcb-ntuples-gen
|
d306895a0dc6bad2def19ca3d7d1304a5a9be239
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python3
#
# Stolen almost verbatim from:
# https://gitlab.cern.ch/lhcb-rta/pidcalib2/-/blob/master/src/pidcalib2/pklhisto2root.py
###############################################################################
# (c) Copyright 2021 CERN for the benefit of the LHCb Collaboration #
# #
# This software is distributed under the terms of the GNU General Public #
# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
# #
# In applying this licence, CERN does not waive the privileges and immunities #
# granted to it by virtue of its status as an Intergovernmental Organization #
# or submit itself to any jurisdiction. #
###############################################################################
"""Convert pickled PIDCalib2 histograms to TH*D & save them in a ROOT file.
Only 1D, 2D, and 3D histograms are supported by ROOT. Attempting to convert
higher-dimensional histograms will result in an exception.
"""
import itertools
import math
import pathlib
import pickle
import sys
import boost_histogram as bh
import ROOT
def convert_to_root_histo(name: str, bh_histo: bh.Histogram):
"""Convert boost_histogram histogram to a ROOT histogram.
Only 1D, 2D, and 3D histograms are supported by ROOT. Attempting to convert
higher-dimensional histograms will result in an exception.
Furthermore, the boost histogram must have a storage type that stores
variance, e.g., Weight.
Args:
name: Name of the new ROOT histogram.
bh_histo: The histogram to convert.
Returns:
The converted ROOT histogram. Type depends on dimensionality.
"""
histo = None
if len(bh_histo.axes) == 1:
histo = ROOT.TH1D(name, name, 3, 0, 1)
histo.SetBins(bh_histo.axes[0].size, bh_histo.axes[0].edges)
histo.GetXaxis().SetTitle(bh_histo.axes[0].metadata["name"])
elif len(bh_histo.axes) == 2:
histo = ROOT.TH2D(name, name, 3, 0, 1, 3, 0, 1)
histo.SetBins(
bh_histo.axes[0].size,
bh_histo.axes[0].edges,
bh_histo.axes[1].size,
bh_histo.axes[1].edges,
)
histo.GetXaxis().SetTitle(bh_histo.axes[0].metadata["name"])
histo.GetYaxis().SetTitle(bh_histo.axes[1].metadata["name"])
elif len(bh_histo.axes) == 3:
histo = ROOT.TH3D(name, name, 3, 0, 1, 3, 0, 1, 3, 0, 1)
histo.SetBins(
bh_histo.axes[0].size,
bh_histo.axes[0].edges,
bh_histo.axes[1].size,
bh_histo.axes[1].edges,
bh_histo.axes[2].size,
bh_histo.axes[2].edges,
)
histo.GetXaxis().SetTitle(bh_histo.axes[0].metadata["name"])
histo.GetYaxis().SetTitle(bh_histo.axes[1].metadata["name"])
histo.GetZaxis().SetTitle(bh_histo.axes[2].metadata["name"])
else:
raise Exception(f"{len(bh_histo.axes)}D histograms not supported by ROOT")
indices_ranges = [list(range(n)) for n in bh_histo.axes.size]
for indices_tuple in itertools.product(*indices_ranges):
root_indices = [index + 1 for index in indices_tuple]
histo.SetBinContent(
histo.GetBin(*root_indices), bh_histo[indices_tuple].value # type: ignore
)
histo.SetBinError(
histo.GetBin(*root_indices), math.sqrt(bh_histo[indices_tuple].variance) # type: ignore # noqa
)
return histo
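# Usage sketch (added; assumes a working ROOT + boost_histogram installation,
# the histogram below is invented for illustration):
#
#   h = bh.Histogram(
#       bh.axis.Regular(5, 0.0, 1.0, metadata={"name": "x"}),
#       storage=bh.storage.Weight(),
#   )
#   h.fill([0.1, 0.2, 0.7])
#   th1 = convert_to_root_histo("example", h)  # returns a ROOT.TH1D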
def convert_pklfile_to_rootfile(path: str, output_path: str):
pkl_path = pathlib.Path(path)
root_path = pathlib.Path(output_path)
eff_histos = {}
with open(pkl_path, "rb") as f:
eff_histos["eff"] = pickle.load(f)
eff_histos["passing"] = pickle.load(f)
eff_histos["total"] = pickle.load(f)
for item in eff_histos.values():
assert isinstance(item, bh.Histogram)
root_file = ROOT.TFile(str(root_path), "RECREATE")
eff_histo = convert_to_root_histo("eff", eff_histos["eff"])
eff_histo.Write()
passing_histo = convert_to_root_histo("passing", eff_histos["passing"])
passing_histo.Write()
total_histo = convert_to_root_histo("total", eff_histos["total"])
total_histo.Write()
root_file.Close()
def main():
file_in = sys.argv[1]
try:
file_out = sys.argv[2]
except IndexError:
file_out = pathlib.Path(sys.argv[1]).with_suffix('.root')
convert_pklfile_to_rootfile(file_in, file_out)
if __name__ == "__main__":
main()
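# Typical invocation (added note; file names are examples): the output path is
# optional and defaults to the input name with a .root suffix.
#   ./pklhisto2root.py effhists.pkl
#   ./pklhisto2root.py effhists.pkl out.root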
| 35.755725
| 107
| 0.604825
| 608
| 4,684
| 4.508224
| 0.305921
| 0.068953
| 0.092302
| 0.039402
| 0.305728
| 0.263043
| 0.263043
| 0.249909
| 0.242977
| 0.242977
| 0
| 0.018036
| 0.25427
| 4,684
| 130
| 108
| 36.030769
| 0.766676
| 0.308924
| 0
| 0.2
| 0
| 0
| 0.048602
| 0.006991
| 0
| 0
| 0
| 0
| 0.013333
| 1
| 0.04
| false
| 0.04
| 0.093333
| 0
| 0.146667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b42581465b8edd2e244428913cc73b52bb89dd0
| 1,964
|
py
|
Python
|
ASC/Teme/tema1/consumer.py
|
mihai-constantin/ACS
|
098c99d82dad8fb5d0e909da930c72f1185a99e2
|
[
"Apache-2.0"
] | null | null | null |
ASC/Teme/tema1/consumer.py
|
mihai-constantin/ACS
|
098c99d82dad8fb5d0e909da930c72f1185a99e2
|
[
"Apache-2.0"
] | null | null | null |
ASC/Teme/tema1/consumer.py
|
mihai-constantin/ACS
|
098c99d82dad8fb5d0e909da930c72f1185a99e2
|
[
"Apache-2.0"
] | null | null | null |
"""
This module represents the Consumer.
Computer Systems Architecture Course
Assignment 1
March 2020
"""
from threading import Thread
from time import sleep
class Consumer(Thread):
"""
Class that represents a consumer.
"""
def __init__(self, carts, marketplace, retry_wait_time, **kwargs):
"""
Constructor.
:type carts: List
:param carts: a list of add and remove operations
:type marketplace: Marketplace
:param marketplace: a reference to the marketplace
:type retry_wait_time: Time
:param retry_wait_time: the number of seconds that the consumer must wait
until the Marketplace becomes available
:type kwargs:
:param kwargs: other arguments that are passed to the Thread's __init__()
"""
Thread.__init__(self, **kwargs)
self.carts = carts
self.marketplace = marketplace
self.retry_wait_time = retry_wait_time
self.name = kwargs["name"]
self.cart_id = -1
def run(self):
for current_cart in self.carts:
self.cart_id = self.marketplace.new_cart(self.name)
for current_order in current_cart:
product_type = current_order["type"]
product = current_order["product"]
quantity = current_order["quantity"]
if product_type == "add":
while quantity > 0:
while True:
ret = self.marketplace.add_to_cart(self.cart_id, product)
if ret:
break
sleep(self.retry_wait_time)
quantity -= 1
else:
while quantity > 0:
self.marketplace.remove_from_cart(self.cart_id, product)
quantity -= 1
self.marketplace.place_order(self.cart_id)
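# Usage sketch (added; Marketplace and the exact cart entries are assumptions
# inferred from how run() reads them, not part of this module):
#
#   carts = [[{"type": "add", "product": "prod1", "quantity": 2},
#             {"type": "remove", "product": "prod1", "quantity": 1}]]
#   consumer = Consumer(carts, marketplace, retry_wait_time=0.1, name="cons1")
#   consumer.start()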
| 30.215385
| 85
| 0.563646
| 211
| 1,964
| 5.042654
| 0.341232
| 0.050752
| 0.073308
| 0.031955
| 0.039474
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008032
| 0.36609
| 1,964
| 64
| 86
| 30.6875
| 0.846586
| 0.26833
| 0
| 0.133333
| 0
| 0
| 0.019712
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.066667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b42f8a4c4ed9dadeb6bc01da50d750be154d614
| 978
|
py
|
Python
|
rnn_based/model2.py
|
gunkaynar/heart_anomaly
|
94ea2700e2c4d79028e0448022f6857df3c35e04
|
[
"MIT"
] | null | null | null |
rnn_based/model2.py
|
gunkaynar/heart_anomaly
|
94ea2700e2c4d79028e0448022f6857df3c35e04
|
[
"MIT"
] | null | null | null |
rnn_based/model2.py
|
gunkaynar/heart_anomaly
|
94ea2700e2c4d79028e0448022f6857df3c35e04
|
[
"MIT"
] | null | null | null |
import torch.nn as nn
import torch
import numpy as np
from torch.autograd import Variable
class RNNModel(nn.Module):
def __init__(self, input_dim, hidden_dim, layer_dim, output_dim):
super(RNNModel, self).__init__()
self.hidden_dim = hidden_dim
self.layer_dim = layer_dim
self.rnn = nn.RNN(input_dim, hidden_dim, layer_dim, batch_first=True, nonlinearity='relu')
self.fc = nn.Linear(hidden_dim, output_dim)
def forward(self, x):
h0 = Variable(torch.zeros(self.layer_dim, x.size(0), self.hidden_dim))
out, hn = self.rnn(x, h0)
out = self.fc(out[:, -1, :])
return out
def shuffle_torch(x, y):
p = np.random.permutation(x.shape[0])
return x[p], y[p]
def batch_generator_torch(x, y, batch_size, shuffle=True):
if shuffle:
x, y = shuffle_torch(x, y)
n_samples = x.shape[0]
for i in range(0, n_samples, batch_size):
yield x[i:i + batch_size], y[i:i + batch_size]
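# Smoke-test sketch (added; the sizes are arbitrary): with batch_first=True the
# RNN expects input shaped (batch, seq_len, input_dim).
#
#   model = RNNModel(input_dim=1, hidden_dim=32, layer_dim=2, output_dim=2)
#   x = torch.randn(8, 20, 1)
#   out = model(x)                      # -> shape (8, 2)
#   y = torch.randint(0, 2, (8,))
#   for xb, yb in batch_generator_torch(x, y, batch_size=4):
#       print(xb.shape, yb.shape)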
| 31.548387
| 98
| 0.649284
| 158
| 978
| 3.803797
| 0.341772
| 0.08985
| 0.0599
| 0.056572
| 0.083195
| 0.083195
| 0
| 0
| 0
| 0
| 0
| 0.009223
| 0.223926
| 978
| 30
| 99
| 32.6
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0.00409
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16
| false
| 0
| 0.16
| 0
| 0.44
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b44ab85799151e1020d7bb62b4190682ce5fa39
| 1,974
|
py
|
Python
|
src/entities/report/actions/consult.py
|
LuisArmando-TestCoder/ShareGraph
|
fa89d37c8fe522c526b903fe25bd1e22fd769425
|
[
"MIT"
] | null | null | null |
src/entities/report/actions/consult.py
|
LuisArmando-TestCoder/ShareGraph
|
fa89d37c8fe522c526b903fe25bd1e22fd769425
|
[
"MIT"
] | null | null | null |
src/entities/report/actions/consult.py
|
LuisArmando-TestCoder/ShareGraph
|
fa89d37c8fe522c526b903fe25bd1e22fd769425
|
[
"MIT"
] | null | null | null |
from utilities.getStore import getStore
filePath = "./entities/bill/store.json"
productFilePath = "./entities/product/store.json"
def getProduct(name):
for product in getStore(productFilePath):
if product["name"] == name:
return product
def getProductBillsAverage(name):
productPriceSummation = 0
productAmount = 0
for bill in getStore(filePath):
for sell in bill:
if sell["name"] == name:
productAmount += 1
productPriceSummation += sell["amount"] * getProduct(
name
)["price"]
return productPriceSummation / (productAmount if productAmount else 1)
def getSellsAverage(bills):
# print("The average of sells has being")
allProductsPricesSummation = 0
allSellsAmount = 0
for bill in bills:
allSellsAmount += 1
for sell in bill:
allProductsPricesSummation += sell["amount"] * getProduct(sell["name"])["price"]
print(f"The average of sells has being {allProductsPricesSummation / allSellsAmount}")
def getProductsAverage():
print("The average of sells for each product")
for product in getStore(productFilePath):
print(f"For {product['name']} the average sells are {getProductBillsAverage(product['name'])}")
def getHighestSellWithBill(bills):
maximumBillIndex = 0
maximumBill = 0
for billIndex in range(len(bills)):
billSummation = 0
for sell in bills[billIndex]:
billSummation += sell["amount"] * getProduct(
sell["name"]
)["price"]
if billSummation > maximumBill:
maximumBill = billSummation
maximumBillIndex = billIndex
print(f"The highest sell is {maximumBill}")
print(f"For the following bill {bills[maximumBillIndex]}")
def main():
bills = getStore(filePath)
getSellsAverage(bills)
getProductsAverage()
getHighestSellWithBill(bills)
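# Data-shape sketch (added; field names are inferred from the lookups above,
# the concrete values are invented):
#
#   ./entities/product/store.json -> [{"name": "apple", "price": 2}, ...]
#   ./entities/bill/store.json    -> [[{"name": "apple", "amount": 3}, ...], ...]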
| 28.608696
| 103
| 0.644377
| 191
| 1,974
| 6.659686
| 0.277487
| 0.012579
| 0.021226
| 0.040094
| 0.208333
| 0.132075
| 0.080189
| 0
| 0
| 0
| 0
| 0.00684
| 0.259372
| 1,974
| 68
| 104
| 29.029412
| 0.863201
| 0.019757
| 0
| 0.122449
| 0
| 0
| 0.19824
| 0.076605
| 0
| 0
| 0
| 0
| 0
| 1
| 0.122449
| false
| 0
| 0.020408
| 0
| 0.183673
| 0.102041
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b44b5778d0cc6b0adc1458cef3d5591585dd53d
| 1,826
|
py
|
Python
|
discordbot.py
|
kinotan/discordpy-startup
|
1505c4f78deff7f793de75985e669ee84a78a3f2
|
[
"MIT"
] | null | null | null |
discordbot.py
|
kinotan/discordpy-startup
|
1505c4f78deff7f793de75985e669ee84a78a3f2
|
[
"MIT"
] | null | null | null |
discordbot.py
|
kinotan/discordpy-startup
|
1505c4f78deff7f793de75985e669ee84a78a3f2
|
[
"MIT"
] | null | null | null |
# import discord.py
from asyncio import sleep
import discord
client = discord.Client()
# Bot login handling
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
await client.change_presence(game=discord.Game(name='!delchat *'))
# Bot command handling
@client.event
async def on_message(message):
# Ignore messages sent by the bot itself
if client.user != message.author:
# Delete command
if message.content.startswith("!delchat "):
# Role check
if discord.utils.get(message.author.roles, name="admin"):
# Store the message content
delcmd = message.content
# Split the input message into a list
delcmd_ = delcmd.split()
# Convert the argument to an int
delcmd_int = int(delcmd_[1])
# Number of words in the input message
delcmd_c = len(delcmd_)
if delcmd_c == 2 and delcmd_int <= 50 and delcmd_int > 1:
# Fetch the messages to delete
msgs = [msg async for msg in client.logs_from(message.channel, limit=(delcmd_int+1))]
await client.delete_messages(msgs)
delmsg = await client.send_message(message.channel, '削除が完了しました')
await sleep(5)
await client.delete_message(delmsg)
else:
# Send an error message
delmsg = await client.send_message(message.channel, "コマンドが間違っています。[!delchat *] *:2~50")
await sleep(5)
await client.delete_message(delmsg)
else:
# Send an error message
delmsg = await client.send_message(message.channel, "admin権限がありません。")
await sleep(5)
await client.delete_message(delmsg)
client.run("***")
| 37.265306
| 107
| 0.545455
| 179
| 1,826
| 5.452514
| 0.385475
| 0.090164
| 0.069672
| 0.064549
| 0.332992
| 0.289959
| 0.289959
| 0.246926
| 0.204918
| 0.204918
| 0
| 0.010213
| 0.356517
| 1,826
| 48
| 108
| 38.041667
| 0.820426
| 0.083242
| 0
| 0.294118
| 0
| 0
| 0.060205
| 0.013245
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.058824
| 0.117647
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b465725a6717037599028f9aa649996198118b6
| 268
|
py
|
Python
|
exec-shell.py
|
zqsheng/snippet
|
cb14300fc62c616d48e6552ad93c6d33b5e8c9a1
|
[
"Apache-2.0"
] | 1
|
2018-09-10T11:31:33.000Z
|
2018-09-10T11:31:33.000Z
|
exec-shell.py
|
zqsheng/snippet
|
cb14300fc62c616d48e6552ad93c6d33b5e8c9a1
|
[
"Apache-2.0"
] | null | null | null |
exec-shell.py
|
zqsheng/snippet
|
cb14300fc62c616d48e6552ad93c6d33b5e8c9a1
|
[
"Apache-2.0"
] | null | null | null |
import os
import time
exec_count = 100
cmds = []
cmds.append("tar -cPzf /opt/web.tar.gz /opt/web/ /opt/soft")
cmds.append("rm -f /opt/web.tar.gz")
for i in range(exec_count):
for cmd in cmds:
if os.system(cmd) != 0:
break
time.sleep(1)
| 24.363636
| 60
| 0.604478
| 47
| 268
| 3.404255
| 0.574468
| 0.1125
| 0.1125
| 0.1375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02451
| 0.238806
| 268
| 11
| 61
| 24.363636
| 0.759804
| 0
| 0
| 0
| 0
| 0
| 0.245353
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.181818
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b48d527c36dcd783a13d6a5609545147bc8c89c
| 45,165
|
py
|
Python
|
platform/hwconf_data/zgm13/PythonSnippet/ExporterModel.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | null | null | null |
platform/hwconf_data/zgm13/PythonSnippet/ExporterModel.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | 1
|
2020-08-25T02:36:22.000Z
|
2020-08-25T02:36:22.000Z
|
platform/hwconf_data/zgm13/PythonSnippet/ExporterModel.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | 1
|
2020-08-25T01:56:04.000Z
|
2020-08-25T01:56:04.000Z
|
from . import types
from . import dep
from . import RuntimeModel
from . import Metadata
class Property(object):
def __init__(self, name, description, namespace='', visible=False, readonly=False, define_name=None, long_description=None):
'''
:param name: Name of the property (string)
:param type: PropertyType describing the type of property
:param visible: Boolean, whether the property is visible in the UI or not
'''
self.name = name
self.type = type  # note: __init__ takes no 'type' argument, so this stores Python's built-in type
self.dependencies = []
self.generation = []
self.visible = visible
self.readonly = readonly
self.category=''
self.subcategory=''
self.namespace = namespace
self.id = '.'.join((str(namespace).upper(), self.name))
self.label = description
self.description = long_description
self.defaultvalue = ''
self.transient = False
self.parent = None
self.mode = None
self.define_name = define_name if define_name else name
self.is_advanced = False
self.allowedconflicts = []
self.generate_if_hidden = True
def set_parent_module(self, mod):
self.parent = mod
# Set default category to name of module
if not self.category:
self.category = self.parent.name
def set_namespace(self, namespace):
'''
Sets the namespace on a property
:param namespace:
:return:
'''
self.namespace = namespace
self.id = '.'.join((str(namespace).upper(), self.name, 'PROP'))
def set_visibility(self, visible):
self.visible = visible
def set_readonly(self, readonly):
self.readonly = readonly
def add_dependency(self, dependency):
self.dependencies.append(dependency)
def get_dependencies(self):
return self.dependencies
def generateXML(self):
'''
Generate the Studio XML for this property
:return: etree.Element containing the Studio XML describing the property
'''
print("Not able to gen XML from base property!")
print(self.name)
print(self.type)
return None
class StringProperty(Property):
'''
Property which can take on a string value
'''
def __init__(self, name, description, namespace='', visible=False, readonly=False, define_name=None, long_description=None):
Property.__init__(self, name, description, namespace=namespace, visible=visible, readonly=readonly, define_name=define_name, long_description=long_description)
self.id = '.'.join((str(namespace).upper(), self.name, 'STRING'))
def set_namespace(self, namespace):
self.id = '.'.join((str(namespace).upper(), self.name, 'STRING'))
class ArrayProperty(Property):
'''
Property which can take on an array value
'''
def __init__(self, name, description, namespace='', visible=False, readonly=False, define_name=None, long_description=None):
Property.__init__(self, name, description, namespace=namespace, visible=visible, readonly=readonly, define_name=define_name, long_description=long_description)
self.id = '.'.join((str(namespace).upper(), self.name, 'ARRAY'))
def set_namespace(self, namespace):
self.id = '.'.join((str(namespace).upper(), self.name, 'ARRAY'))
class IntegerProperty(Property):
'''
Property which can take on integer values
'''
def __init__(self, name, description, min, max, default, namespace='', visible=False, readonly=False, define_name=None, long_description=None):
Property.__init__(self, name, description, namespace=namespace, visible=visible, readonly=readonly, define_name=define_name, long_description=long_description)
self.min = int(min)
self.max = int(max)
self.defaultvalue = int(default)
self.id = '.'.join((str(namespace).upper(), self.name, 'INT'))
self.format = None
def set_namespace(self, namespace):
self.id = '.'.join((str(namespace).upper(), self.name, 'INT'))
def set_format(self, format):
self.format = format
class Enum:
'''
Container class for an item inside of an EnumProperty
'''
def __init__(self, value, index, define_value=None, visible=True):
self.value = value
self.visible = visible
self.index = index
if define_value is not None:
self.define_value = define_value
else:
self.define_value = value
class EnumProperty(Property):
'''
Property allowing a selection from a list of options
'''
def __init__(self, name, description, namespace='', visible=False, readonly=False, define_name=None, long_description=None):
Property.__init__(self, name, description, namespace=namespace, visible=visible, readonly=readonly, define_name=define_name, long_description=long_description)
self.values = {}
self.id = '.'.join((str(namespace).upper(), self.name, 'ENUM'))
def set_namespace(self, namespace):
self.id = '.'.join((str(namespace).upper(), self.name, 'ENUM'))
def add_enum(self, value, define_value=None, visible=True):
'''
Add an option to the selection list
:param value: String value for the option (visible in UI)
:param visible: Whether or not this option will be visible in the UI
:param define_value: Name which will be generated as #def value
:return: None
'''
self.values[len(self.values.keys())] = Enum(value, len(self.values.keys()), define_value=define_value, visible=visible)
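# Usage sketch (added; the property and define names are illustrative only):
#
#   clock = EnumProperty("clock_source", "Clock source", namespace="TIMER0", visible=True)
#   clock.add_enum("LFXO", define_value="LFXO_SEL")
#   clock.add_enum("HFXO", define_value="HFXO_SEL")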
class ModeProperty(EnumProperty):
def __init__(self, name, description, namespace='', visible=False, readonly=False, define_name=None, long_description=None, hide_properties=True):
Property.__init__(self, name, description, namespace=namespace, visible=visible, readonly=readonly, define_name=define_name, long_description=long_description)
self.values = {}
self.id = '.'.join((str(namespace).upper(), self.name, 'ENUM'))
self.hide_properties = hide_properties
class BoolProperty(EnumProperty):
'''
Property allowing you to select a binary setting
'''
def __init__(self, name, description, namespace='', visible=False, readonly=False, define_name=None, long_description=None):
EnumProperty.__init__(self, name, description, namespace=namespace, visible=visible, readonly=readonly, define_name=define_name, long_description=long_description)
self.id = '.'.join((str(namespace).upper(), self.name, 'BOOL'))
self.add_enum('False', define_value="0")
self.add_enum('True', define_value="1")
def set_namespace(self, namespace):
self.id = '.'.join((str(namespace).upper(), self.name, 'BOOL'))
def set_default_to_false(self):
self.defaultvalue = 'False'
def set_default_to_true(self):
self.defaultvalue = 'True'
class CheckboxProperty(Property):
'''
Property allowing you to select a binary setting using a checkbox
'''
def __init__(self, name, description, namespace='', visible=False, readonly=False, define_name=None):
Property.__init__(self, name, description, namespace=namespace, visible=visible, readonly=readonly, define_name=define_name)
self.id = '.'.join((str(namespace).upper(), self.name, 'CHECKBOX'))
def set_namespace(self, namespace):
self.id = '.'.join((str(namespace).upper(), self.name, 'CHECKBOX'))
class ModuleProperty(EnumProperty):
'''
Property allowing you to select a peripheral available on the current chip
'''
def __init__(self, name, description, namespace='', visible=False, readonly=False, define_name=None, long_description=None):
EnumProperty.__init__(self, name, description, namespace=namespace, visible=visible, readonly=readonly, define_name=define_name, long_description=long_description)
self.id = '.'.join((str(namespace).upper(), self.name, 'MOD'))
self.allowedModules = []
self.inherit_options = False
self.define_value_prefix = ''
self.owned_mode = None
self.define_name_postfix = ''
def set_namespace(self, namespace):
self.id = '.'.join((str(namespace).upper(), self.name, 'MOD'))
def add_allowed_module(self, module_namespace):
'''
Adds a module 'namespace' to the allowed modules for this property
:param module_namespace: regular expression for which modules can be selected by this property
:return: None
'''
self.allowedModules.append(module_namespace)
def mask_with_module_list(self, module_name_list):
'''
Updates the list of allowed modules for this property by comparing a list with the property's allowed modules.
:param module_name_list: list of module names available on this part
:return:
'''
self.values = {}
self.add_enum('None')
for mod_name in module_name_list:
for allowed_mod in self.allowedModules:
if mod_name.rstrip('0123456789') == allowed_mod:
define_value = mod_name
self.add_enum(mod_name, define_value=define_value)
class PinProperty(EnumProperty):
'''
Property allowing you to select any GPIO pin available
'''
def __init__(self, name, description, namespace='', visible=False, readonly=False, define_name=None, disabled_label=None, long_description=None):
EnumProperty.__init__(self, name, description, namespace=namespace, visible=visible, readonly=readonly, define_name=define_name, long_description=long_description)
self.referenced_module = None
self.referenced_route = None
self.em4 = False
self.id = '.'.join((str(namespace).upper(), self.name, 'PIN'))
if not disabled_label:
disabled_label = 'Disabled'
self.disabled_label = disabled_label
self.add_enum(disabled_label, define_value="Disabled")
def set_namespace(self, namespace):
self.id = '.'.join((str(namespace).upper(), self.name, 'PIN'))
def mask_with_pin_list(self, pin_list):
'''
Updates the available enum values with the values from pin_list
:param pin_list: list of pin names available on the part
:return: None
'''
self.values={}
self.add_enum(self.disabled_label, define_value="Disabled")
for pin in pin_list:
self.add_enum(pin)
def set_reference_route(self, route):
self.referenced_route = route
def set_reference_module(self, module):
self.referenced_module = module
def set_reference(self, module, route):
self.referenced_module = module
self.referenced_route = route
class PRSChannelProperty(EnumProperty):
"""
Property allowing you to select PRS channel available from the PRS module
"""
def __init__(self, name, description, channel_count, custom_name="", namespace='', visible=False, readonly=False, define_name=None, long_description=None, gpio=True):
EnumProperty.__init__(self, name, description, namespace=namespace, visible=visible, readonly=readonly, define_name=define_name, long_description=long_description)
self.add_enum("Disabled")
self.channel_count = channel_count
self.custom_name = custom_name
self.gpio = gpio
for i in range(channel_count):
self.add_enum("CH" + str(i), define_value=str(i))
class AportBusProperty(EnumProperty):
"""
APORT bus select
"""
def __init__(self, name, description, signal=None, define_name_prefix=None, define_value_prefix=None, namespace='', visible=True, readonly=False, define_name=None):
EnumProperty.__init__(self, name, description, namespace=namespace, visible=visible, readonly=readonly, define_name=define_name)
self.signal = signal
self.define_name_prefix = define_name_prefix
self.define_value_prefix = define_value_prefix
self.extra_enums = []
self.bus_props = {}
def add_extra_enum(self, value, define_value=None):
self.extra_enums.append((value, define_value))
def mask_with_bus_list(self, bus_list, superset=False):
'''
Updates the list of allowed buses for this property.
:param bus_list: list of buses available on this part
:return:
'''
self.values = {}
# Find existing referenced bus property
bus_property_prefix = "{}_".format(self.define_name_prefix.lower())
for bus_name in bus_list:
aportname = busname_to_aportname(self.parent.name, bus_name)
# Add bus to bus dropdown
self.add_enum("APORT bus {}".format(bus_name), define_value="APORT{}".format(aportname))
# Add channel dropdown for bus
bus_property_name = bus_property_prefix + bus_name
bus_prop = self.bus_props.get(bus_property_name)
if not bus_prop:
bus_prop = PinProperty(bus_property_name, "Channel on APORT bus {} ({})".format(bus_name, self.signal), define_name=self.define_name_prefix, visible=False)
bus_prop.set_reference(self.parent.name, "{}_{}".format(self.signal, bus_name))
bus_prop.category = self.category
bus_prop.subcategory = self.subcategory
self.parent.add_property(bus_prop)
self.bus_props[bus_property_name] = bus_prop
else:
bus_prop.set_visibility(False)
# Add extra values to bus dropdown
for value, define_value in self.extra_enums:
self.add_enum(value, define_value=define_value)
class AportScanProperty(EnumProperty):
scan_props = {}
"""
APORT scan select
"""
def __init__(self, name, description, define_name_prefix=None, define_value_prefix=None, namespace='', visible=True, readonly=False, define_name=None):
EnumProperty.__init__(self, name, description, namespace=namespace, visible=visible, readonly=readonly, define_name=define_name)
self.define_name_prefix = define_name_prefix
self.define_value_prefix = define_value_prefix
self.scan_mask = None
self.start = 0
self.end = 0
def attach_to_scan_mask(self, scan_mask_property):
self.scan_mask = scan_mask_property
def set_range(self, start, end):
self.start = start
self.end = end
def mask_with_bus_list(self, bus_list, superset=False):
'''
Updates the list of allowed buses for this property.
:param bus_list: list of buses available on this part
:return:
'''
self.values = {}
if not self.scan_props.get(self.parent.name):
self.scan_props[self.parent.name] = {}
bus_channels = {}
aports = {}
updated_scan_props = []
# print(bus_list)
for signal, buses in bus_list.items():
for bus_name, routes in buses.items():
aport = busname_to_aportname(self.parent.name, bus_name)[:-1]
bus_name = bus_name[:-1]
aports[bus_name] = aport
if bus_name not in bus_channels:
bus_channels[bus_name] = set()
bus_channels[bus_name] = bus_channels[bus_name] | set(routes)
for name, route_prop in self.scan_props[self.parent.name].items():
# Hide props by default
route_prop.set_visibility(False)
for bus, routes in bus_channels.items():
channels_available = [False, False, False, False]
for route in sorted(routes, key=lambda r: r.number):
channels_available[int(route.number / 8)] = True
for i in range(4):
start = i * 8
end = (i + 1) * 8 - 1
if channels_available[i]:
self.add_enum("APORT bus {} channel {}-{}".format(bus, start, end), "APORT{}CH{}TO{}".format(aports[bus], start, end))
else:
self.add_enum("APORT bus {} channel {}-{} (no pins available)".format(bus, start, end), "APORT{}CH{}TO{}".format(aports[bus], start, end))
if superset:
for route in sorted(routes, key=lambda r: r.number):
route_prop_name = "{}_{}_ch{}".format(self.name.lower().rsplit('_', 1)[0], bus, route.number)
route_prop = self.scan_props[self.parent.name].get(route_prop_name)
if not route_prop:
route_prop = CheckboxProperty(route_prop_name, "Enable channel {}".format(route.number), namespace=self.parent.namespace, visible=False)
channel_range_start = int(route.number / 8) * 8
channel_range_end = channel_range_start + 7
route_prop.category = self.category
route_prop.subcategory = "APORT bus {} channel {}-{}".format(bus, channel_range_start, channel_range_end)
self.scan_props[self.parent.name][route_prop_name] = route_prop
self.parent.add_property(route_prop)
else:
for route in sorted(routes, key=lambda r: r.number):
route_prop_name = "{}_{}_ch{}".format(self.name.lower().rsplit('_', 1)[0], bus, route.number)
route_prop = self.scan_props[self.parent.name].get(route_prop_name)
route_prop.label = "Enable channel {} ({})".format(route.number, route.padName)
route_prop.set_visibility(True)
updated_scan_props.append(route_prop)
if not superset:
return updated_scan_props
class AportScanMaskProperty(IntegerProperty):
"""
APORT scan mask
"""
def __init__(self, name, description, min, max, default, namespace='', visible=False, readonly=False, define_name=None):
IntegerProperty.__init__(self, name, description, min, max, default, namespace, visible, readonly, define_name)
self.channel_selectors = []
self.other_scan_masks = []
self.set_format("0x{:08X}UL")
self.channel_start = 0
def add_channel_selector(self, channel_selector):
self.channel_selectors.append(channel_selector)
class AportBondedMaskProperty(IntegerProperty):
"""
APORT bonded mask
"""
def __init__(self, name, description, min, max, default, namespace='', visible=False, readonly=False, define_name=None):
IntegerProperty.__init__(self, name, description, min, max, default, namespace, visible, readonly, define_name)
self.set_format("0x{:08X}UL")
self.channel_start = 0
self.aport = "1"
self.input_props = []
def mask_with_bus_list(self, bus_list, superset=False):
'''
Updates the list of allowed buses for this property.
:param bus_list: list of buses available on this part
:return:
'''
updated_inputs = []
bus_channels = {}
if not superset:
for signal, buses in bus_list.items():
for bus_name, routes in buses.items():
bus_name = bus_name[:-1]
if bus_name not in bus_channels:
bus_channels[bus_name] = set()
bus_channels[bus_name] = bus_channels[bus_name] | set(routes)
for route in sorted(bus_channels[aportname_to_busname(self.parent.name, self.aport)], key=lambda r: r.number):
route_prop = self.input_props[int(route.number) % 32]
route_prop.label = "Enable channel {} ({})".format(route.number, route.padName)
route_prop.set_visibility(True)
updated_inputs.append(route_prop)
return updated_inputs
class StudioModule(object):
"""docstring for StudioModule"""
def __init__(self, basename, modules):
super(StudioModule, self).__init__()
self.basename = basename
self.modules = {}
for m in modules:
# Allow both lists of frameworks and single framework to be specified
if isinstance(m.frameworks,list):
for framework in m.frameworks:
self.modules[framework.value] = m.name
else:
self.modules[m.frameworks.value] = m.name
def getModuleId(self, framework):
print ("%s: %s" % (self.basename, self.modules.keys()))
if framework not in self.modules:
return None
return "%s.%s" % (self.basename, self.modules[framework])
def __str__(self):
return self.basename
class Module(object):
'''
Class for describing a HALConfig module or device peripheral. A module is basically a collection of properties.
'''
def __init__(self, name, core=False, visible=False, namespace=None):
# Name is a required argument
self.name = name
self.displayname = name
# namespace defaults to module base (i.e. module without the instance number)
if namespace:
self.namespace = namespace
else:
self.namespace = name.rstrip('0123456789')
# No description by default
self.description = ""
# Core signifies a module contributed by the die
self.core = core
# Visible controls whether the module shows up in the UI
self.visible = visible
# List of properties on this module
self.properties = []
# Category is the category where to put the module on the UI. Default for core is 'Core'.
self.category = ' Peripherals' if core else " HAL"
# Define generated with the module being active (selected) or not
self.enabled_define = None
# Compatibility of module
self.compatibility = dep.Dependency()
# Association with an on-chip peripheral
self.peripheral = None
# Studio module specifier
self.studio_module = None
# By default, module has no custom name property
self.has_custom_name = False
self.model = None
self.family = None
# Contribute 'standard' properties for every module, allowing SDKs to take control in a hwconf doc
# (even though they shouldn't)
inuse = BoolProperty('usedbysdk', 'SDK is taking control over this module', visible=False)
self.add_property(inuse)
hidden = BoolProperty('hiddenbysdk', 'SDK is hiding this module', visible=False)
self.add_property(hidden)
hidden = BoolProperty('showadvanced', 'Show advanced options', visible=False)
self.add_property(hidden)
forceenable = BoolProperty('forceenable', 'Forcefully enabled in model', visible=False)
self.add_property(forceenable)
owner = StringProperty('owner', 'Owned by', visible=True, readonly=True)
owner.transient = True
self.add_property(owner)
if self.core and (self.namespace != self.name):
# Add custom name property if this is a parameterized core module (e.g. USARTn, TIMERn...)
customname = StringProperty('customname', 'Custom name', visible=True, readonly=False)
self.add_property(customname)
self.has_custom_name = True
def __str__(self):
if self.studio_module:
return str(self.studio_module)
return "none"
def add_property(self, prop):
'''
Add a property to this module
:type prop: Property
:param prop: property to add
:return: None
'''
# Regular list append for now
# TODO: change to property merge on properties with same ID
prop.set_namespace(self.namespace)
prop.set_parent_module(self)
self.properties.append(prop)
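# Usage sketch (added; the module and property names are invented for illustration):
#
#   mod = Module("USART0", core=True, visible=True)
#   baud = IntegerProperty("baudrate", "Baud rate", 300, 4000000, 115200, visible=True)
#   mod.add_property(baud)   # namespace and parent are set by add_property()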
def load_halconfig_model(self, available_module_names_list, family=None):
'''
Load a HAL config model
:param model: a HAL config model
:param family: a halconfig_dependency Family object describing for which family this module is loaded or
str containing family name
:return: None
'''
if not family:
raise ValueError("Family is not set")
if isinstance(family, str):
self.family = dep.Family(family_str=family)
else:
self.family = family
self.family.available_mods = available_module_names_list
if hasattr(self.model, 'compatibility'):
self.compatibility = self.model.compatibility
if hasattr(self.model, "peripheral"):
self.peripheral = self.model.peripheral
if hasattr(self.model, "category"):
self.category = self.model.category
if hasattr(self.model, "displayname"):
self.displayname = self.model.displayname
if hasattr(self.model, "description"):
self.description = self.model.description
if hasattr(self.model, "studio_module"):
self.studio_module = StudioModule(self.model.studio_module["basename"], \
self.model.studio_module["modules"])
if hasattr(self.model, 'modes'):
mode_prop = ModeProperty(self.model.modes["define"], "mode", visible=True, hide_properties=self.model.modes.get('hide_properties', True))
for val in self.model.modes["values"]:
if isinstance(val, types.EnumValue):
if val.dependency:
if val.dependency.applies_to(family=self.family):
mode_prop.add_enum(val.display_name, define_value=val.define_value)
else:
mode_prop.add_enum(val.display_name, define_value=val.define_value)
else:
mode_prop.add_enum(val)
self.add_property(mode_prop)
if hasattr(self.model, 'enable'):
self.enabled_define = self.model.enable["define"]
for prop, options in self.model.options.items():
current_opt_set = None
# If one property has several option elements, iterate to find which option element has the correct dependency
if isinstance(options, list):
for opt in options:
# Skip documentation option
if opt.get("documentation"):
continue
if opt.get("dependency"):
if opt.get("dependency").applies_to_family(self.family):
if opt.get("dependency").applies_to_module(self.name):
current_opt_set = opt
break
else:
if options.get("dependency"):
if options.get("dependency").applies_to_family(self.family):
if options.get("dependency").applies_to_module(self.name):
current_opt_set = options
else:
current_opt_set = options
if current_opt_set is not None:
self._load_halconfig_property(prop, current_opt_set, self.family, self.model)
self.post_load()
def _load_halconfig_property(self, prop, opts, family, model):
"""
:param prop: a HAL config property
:param opts: dictionary containing a set of options for current prop
:param family: a halconfig_dependency Family object describing for which family this module is loaded
:return: None
"""
prop_obj = None
extra_properties = []
if opts['type'] == 'enable':
self.enabled_define = prop
elif opts['type'] == 'boolean':
prop_obj = BoolProperty(prop, opts['description'], visible=True)
elif opts['type'] == 'integer':
prop_obj = IntegerProperty(prop, opts['description'], opts['min'], opts['max'], 0, visible=True)
elif isinstance(opts['type'], str) and 'int' in opts['type'] and '_t' in opts['type']:
prop_obj = IntegerProperty(prop, opts['description'], opts['min'], opts['max'], 0, visible=True)
elif opts['type'] == 'string':
prop_obj = StringProperty(prop, opts['description'], visible=True)
elif opts['type'] == 'array':
prop_obj = ArrayProperty(prop, opts['description'], visible=True)
elif opts['type'] == 'enum':
if opts['values']:
prop_obj = EnumProperty(prop, opts['description'], visible=True)
for val in opts['values']:
if isinstance(val, types.EnumValue):
if val.dependency:
if val.dependency.applies_to_family(family=family):
prop_obj.add_enum(val.display_name, define_value=val.define_value)
else:
prop_obj.add_enum(val.display_name, define_value=val.define_value)
else:
prop_obj.add_enum(val)
elif isinstance(opts['type'], types.Pin):
prop_obj = PinProperty(prop, opts['description'], visible=True, disabled_label=opts['type'].disabled_label)
if opts['type'].signal:
# Pin is connected to a PORTIO signal
prop_obj.set_reference(self.name, opts['type'].signal)
if opts['type'].em4:
prop_obj.em4 = True
elif isinstance(opts['type'], types.Peripheral):
prop_obj = ModuleProperty(prop, opts['description'], visible=True)
for filter in opts['type'].filter:
prop_obj.add_allowed_module(filter)
prop_obj.inherit_options = opts['type'].inherit_options
prop_obj.define_value_prefix = opts['type'].define_value_prefix
prop_obj.define_name_postfix = opts['type'].define_name_postfix
if hasattr(opts['type'], 'mode'):
prop_obj.owned_mode = opts['type'].mode
elif isinstance(opts['type'], types.PinArray):
prop_obj = IntegerProperty(opts['type'].count_define, opts['description'], opts['type'].min, opts['type'].max, opts['type'].default, visible=True)
init_string = ""
for i in range(opts['type'].min, opts['type'].max):
visible = True if i < opts['type'].default else False
item_property = PinProperty(opts['type'].item_define.replace("%n", str(i)), opts['type'].item_description.replace("%n", str(i)), visible=visible)
if opts.get('allowedconflicts') is not None:
item_property.allowedconflicts = opts['allowedconflicts']
if visible:
init_string += ("{{ {0}, {1} }}, ".format(opts['type'].item_port_define.replace("%n", str(i)),
opts['type'].item_pin_define.replace("%n", str(i))))
extra_properties.append(item_property)
if init_string:
# Strip last comma space from default value
init_string = init_string[:-2]
init_property = ArrayProperty(opts['type'].init_define, "{} init".format(prop), visible=False)
init_property.defaultvalue = init_string
init_property.transient = True
extra_properties.append(init_property)
elif isinstance(opts['type'], types.PRSChannelLocation):
prs_chan_count = Metadata.get_prs_chan_with_gpio_count(family.get_name())
prop_obj = PRSChannelProperty(opts['type'].define, opts['description'], prs_chan_count,
custom_name=opts['type'].custom_name, gpio=opts['type'].gpio, visible=True)
if dep.Dependency(platform=dep.Platform.SERIES0).applies_to_family(family):
# Make PRS dropdown readonly on Series 0, since changing it will affect unrelated modules that
# also use PRS. Users will have to use PORTIO view to select PRS location.
readonly = True
else:
readonly = False
if opts['type'].gpio:
disabled_property = StringProperty(
"prs_disabled_chn_{}_pin".format(opts['type'].custom_name if opts['type'].custom_name else ""),
"PRS channel output pin",
visible=True, readonly=True, long_description="No PRS channel selected")
if opts.get('category') is not None:
disabled_property.category = opts['category']
if opts.get('subcategory') is not None:
disabled_property.subcategory = opts['subcategory']
extra_properties.append(disabled_property)
for i in range(prs_chan_count):
item_property = PinProperty(opts['type'].name + str(i),
opts['type'].output_description.replace("%n", str(i)),
visible=False,
readonly=readonly,
define_name=opts['type'].name)
if dep.Dependency(platform=dep.Platform.SERIES2).applies_to_family(family):
item_property.set_reference("PRS", "ASYNCH" + str(i))
else:
item_property.set_reference("PRS", "CH" + str(i))
if opts.get('category') is not None:
item_property.category = opts['category']
if opts.get('subcategory') is not None:
item_property.subcategory = opts['subcategory']
extra_properties.append(item_property)
elif isinstance(opts['type'], types.AportSingleChannel):
obj = opts['type']
prop_obj = AportBusProperty(obj.define, opts['description'], signal=obj.signal, define_name_prefix=obj.define_name_prefix, define_value_prefix=obj.define_value_prefix)
for val in obj.extra_values:
if isinstance(val, types.EnumValue):
if val.dependency:
if val.dependency.applies_to_family(family=family):
prop_obj.add_extra_enum(val.display_name, define_value=val.define_value)
else:
prop_obj.add_extra_enum(val.display_name, define_value=val.define_value)
else:
prop_obj.add_extra_enum(val)
elif isinstance(opts['type'], types.AportScanMode):
obj = opts['type']
prop_obj = AportScanMaskProperty(prop, opts['description'], 0, 0xFFFFFFFF, 0, visible=True, readonly=True)
prop_obj.channel_start = obj.channel_start
define_prefix = prop.rsplit('_', 1)[0]
range_start = int(obj.channel_start / 8)
for i in range(range_start, range_start + 4):
start = i * 8
end = (i + 1) * 8 - 1
input_number = "{}TO{}".format(start, end)
input_name = "{}_INPUT{}".format(define_prefix, input_number)
input_prop = AportScanProperty(input_name,
"Input {} to {}".format(start, end),
define_value_prefix=obj.define_value_prefix.replace("%n", input_number))
if opts.get('mode') is not None:
input_prop.mode = opts['mode']
input_prop.set_range(start, end)
if opts.get('subcategory') is not None:
input_prop.subcategory = opts['subcategory']
if opts.get('category') is not None:
input_prop.category = opts['category']
input_prop.attach_to_scan_mask(prop_obj)
prop_obj.add_channel_selector(input_prop)
extra_properties.append(input_prop)
for i in range(obj.channel_start, obj.channel_start + 32):
pin_prop = PinProperty("{}_INPUT{}".format(define_prefix, i), "Input {}".format(i), visible=True, readonly=True)
pin_prop.category = opts['category'] + " Pinout"
pin_prop.transient = True
extra_properties.append(pin_prop)
for p in self.properties:
if isinstance(p, AportScanMaskProperty):
prop_obj.other_scan_masks.append(p)
p.other_scan_masks.append(prop_obj)
elif isinstance(opts['type'], types.AportBondedMode):
obj = opts['type']
prop_obj = AportBondedMaskProperty(prop, opts['description'], 0, 0xFFFFFFFF, 0, visible=True, readonly=True)
prop_obj.channel_start = obj.channel_start
prop_obj.aport = obj.aport
define_prefix = prop.rsplit('_', 1)[0]
for i in range(obj.channel_start, obj.channel_start + 32):
input_prop_name = "{}_{}_ch{}".format(prop_obj.name.lower().rsplit('_', 1)[0], aportname_to_busname(self.name, obj.aport), i % 32)
input_prop = CheckboxProperty(input_prop_name, "Enable channel {}".format(i),
namespace=self.namespace, visible=False)
input_prop.category = opts['category']
input_prop.subcategory = opts['subcategory']
extra_properties.append(input_prop)
prop_obj.input_props.append(input_prop)
pin_prop = PinProperty("{}_INPUT{}".format(define_prefix, i), "Input {}".format(i), visible=True, readonly=True)
pin_prop.category = opts['category'] + " Pinout"
pin_prop.transient = True
extra_properties.append(pin_prop)
else:
print("ERROR: unknown property type {} in HAL config model for {}".format(opts['type'], model.name))
if prop_obj is not None:
if opts.get('mode') is not None:
prop_obj.mode = opts['mode']
if opts.get('generate_if_hidden') is not None:
prop_obj.generate_if_hidden = opts['generate_if_hidden']
# Hiding properties that don't belong to the default mode
mode_prop = next((prop for prop in self.get_properties() if isinstance(prop, ModeProperty)), None)
if mode_prop and mode_prop.hide_properties:
if hasattr(prop_obj, 'mode'):
if isinstance(prop_obj.mode, list):
prop_obj.set_visibility(True if mode_prop.values[0].define_value in prop_obj.mode else False)
elif prop_obj.mode:
prop_obj.set_visibility(True if prop_obj.mode == mode_prop.values[0].define_value else False)
# If _model specifically states visibility, this overrides hiding by default mode
if opts.get("visible") is not None:
prop_obj.set_visibility(opts['visible'])
if opts.get("advanced", False):
# Hide advanced properties by default
prop_obj.is_advanced = opts.get("advanced", False)
prop_obj.set_visibility(False)
if opts.get("readonly") is not None:
prop_obj.set_readonly(opts['readonly'])
if opts.get('defaultValue') is not None:
prop_obj.defaultvalue = opts['defaultValue']
if opts.get('overrideDefaultValue') is not None:
f = family.get_name().lower()
for override_for, value in opts.get('overrideDefaultValue').items():
if f.startswith(override_for.lower()):
prop_obj.defaultvalue = value
if opts.get('longdescription') is not None:
prop_obj.description = opts['longdescription']
elif opts.get("default") is not None:
prop_obj.defaultvalue = opts['default']
if opts.get('subcategory') is not None:
prop_obj.subcategory = opts['subcategory']
if opts.get('category') is not None:
prop_obj.category = opts['category']
if opts.get('allowedconflicts') is not None:
prop_obj.allowedconflicts = opts['allowedconflicts']
self.add_property(prop_obj)
for property in extra_properties:
self.add_property(property)
def get_property(self, name):
"""
Look up property on this module
:param name: Regular expression needing to match the name of the property
:return: Property if found, None elsewhere
"""
return next((x for x in self.properties if name == x.name), None)
def get_properties(self):
'''
:return: Collection of properties in this module
'''
return self.properties
def activate_runtime(self, state):
# Install default hooks
for prop in self.properties:
if isinstance(prop, ModuleProperty):
if prop.inherit_options:
RuntimeModel.set_change_handler(prop, RuntimeModel.owning_module_property_callback, on_enable=True)
if isinstance(prop, ModeProperty):
RuntimeModel.set_change_handler(prop, RuntimeModel.module_mode_callback)
if isinstance(prop, PinProperty):
if prop.referenced_route is None:
RuntimeModel.set_change_handler(prop, RuntimeModel.pin_selection_callback)
else:
RuntimeModel.configure_route_handler(prop, state)
if isinstance(prop, AportBusProperty):
RuntimeModel.configure_aport_single_route_handler(prop, state)
if isinstance(prop, AportScanMaskProperty):
RuntimeModel.configure_aport_scan(prop, state)
if isinstance(prop, AportBondedMaskProperty):
RuntimeModel.configure_aport_bonded(prop, state)
if prop.name == "owner":
RuntimeModel.set_change_handler(prop, RuntimeModel.owner_changed_callback)
if prop.name == "usedbysdk":
RuntimeModel.set_change_handler(prop, RuntimeModel.module_usedbysdk_callback)
if prop.name == "hiddenbysdk":
RuntimeModel.set_change_handler(prop, RuntimeModel.module_hiddenbysdk_callback)
if prop.name == "showadvanced":
RuntimeModel.set_change_handler(prop, RuntimeModel.module_showadvanced_callback)
if isinstance(prop, PRSChannelProperty):
RuntimeModel.set_change_handler(prop, RuntimeModel.prs_channel_changed_callback, on_enable=True)
RuntimeModel.set_enable_handler(self, RuntimeModel.module_enabled_callback)
# Install user hooks
self.set_runtime_hooks()
def set_runtime_hooks(self):
"""
To be overridden by the implementing HAL Config module
:return: None
"""
pass
def post_load(self):
"""
To be overridden by the implementing HAL Config module
:return: None
"""
pass
def get_property(mod, property_name):
"""
Get a property model object by searching for property name
:param mod: module on which to look for the property
:param property_name: name of the property
:return: ExporterModel.Property (or superclass) if found, None else.
"""
if mod is None:
return None
prop = mod.get_property(property_name)
return prop
def override_module(module_list, old, new):
"""
Override a module in the module_list with another instance
:param old:
:param new:
:return:
"""
if old.name != new.name:
print("ERROR: Not replacing module with same module")
return
for k,v in enumerate(module_list):
if v == old:
module_list[k] = new
def mask_peripheral_selectors_with_module_list(module_list, module_names):
for module_name, module in module_list.items():
for property in module.properties:
if isinstance(property, ModuleProperty):
property.mask_with_module_list(list(module_names))
def busname_to_aportname(module_name, busname):
if 'IDAC' in module_name:
idx = 1
elif len(busname) > 2:
idx = 0
else:
idx = ord(busname[0].upper()) - 64
aportname = "{}{}".format(idx, busname[-1])
return aportname
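# Behaviour note (added): for non-IDAC modules, two-letter bus names map by
# alphabet position ("AY" -> "1Y", "BX" -> "2X"), longer names map to index 0,
# and any module whose name contains 'IDAC' always uses index 1.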
def aportname_to_busname(module_name, aportname):
if len(aportname) == 2:
diff = aportname[1]
aportname = aportname[0]
else:
diff = ''
if 'IDAC' in module_name:
busname = 'C'
elif aportname == '0':
busname = module_name
else:
busname = chr(ord(aportname) + 16)
return "{}{}".format(busname, diff)
def clear():
AportScanProperty.scan_props = {}
| 44.497537
| 179
| 0.613949
| 5,201
| 45,165
| 5.138819
| 0.080946
| 0.017959
| 0.01347
| 0.024956
| 0.444607
| 0.391814
| 0.342612
| 0.302915
| 0.286976
| 0.27968
| 0
| 0.003121
| 0.28356
| 45,165
| 1,014
| 180
| 44.54142
| 0.822857
| 0.112499
| 0
| 0.289474
| 0
| 0
| 0.056353
| 0.000589
| 0
| 0
| 0.000512
| 0.000986
| 0
| 1
| 0.097953
| false
| 0.002924
| 0.005848
| 0.002924
| 0.154971
| 0.008772
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b4ddf5eeb83ed879035c41d407475a7baf89592
| 6,320
|
py
|
Python
|
benchmark/HIGGS/explore/contour_nll.py
|
victor-estrade/SystGradDescent
|
822e7094290301ec47a99433381a8d6406798aff
|
[
"MIT"
] | 2
|
2019-03-20T09:05:02.000Z
|
2019-03-20T15:23:44.000Z
|
benchmark/HIGGS/explore/contour_nll.py
|
victor-estrade/SystGradDescent
|
822e7094290301ec47a99433381a8d6406798aff
|
[
"MIT"
] | null | null | null |
benchmark/HIGGS/explore/contour_nll.py
|
victor-estrade/SystGradDescent
|
822e7094290301ec47a99433381a8d6406798aff
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import logging
import datetime
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from config import SAVING_DIR
from config import SEED
from visual import set_plot_config
set_plot_config()
from utils.log import set_logger
from utils.log import flush
from utils.log import print_line
from utils.evaluation import evaluate_minuit
from problem.higgs import HiggsConfigTesOnly as Config
from problem.higgs import get_minimizer
from problem.higgs import get_minimizer_no_nuisance
from problem.higgs import get_generators_torch
from problem.higgs import HiggsNLL as NLLComputer
from ..common import N_BINS
def do_iter(config, model, i_iter, valid_generator, test_generator, root_dir, n_bins=N_BINS):
logger = logging.getLogger()
directory = os.path.join(root_dir, model.name, f"iter_{i_iter}")
os.makedirs(directory, exist_ok=True)
logger.info(f"saving dir = {directory}")
logger.info('Generate testing data')
X_test, y_test, w_test = test_generator.generate(*config.TRUE, n_samples=config.N_TESTING_SAMPLES, no_grad=True)
logger.info('Set up NLL computer')
compute_summaries = model.summary_computer(n_bins=n_bins)
compute_nll = NLLComputer(compute_summaries, valid_generator, X_test, w_test, config=config)
basic_check(compute_nll, config)
basic_contourplot(compute_nll, config, directory)
# MINIMIZE NLL
logger.info('Prepare minuit minimizer')
minimizer = get_minimizer(compute_nll, config.CALIBRATED, config.CALIBRATED_ERROR)
some_dict = evaluate_minuit(minimizer, config.TRUE, directory, suffix="")
# FOCUSED contour plot
nll_func = lambda mu, tes : compute_nll(tes, config.TRUE.jes, config.TRUE.les, mu)
x = minimizer.values[3]
y = minimizer.values[0]
x_err = minimizer.errors[3]
y_err = minimizer.errors[0]
focused_contour(x, y, x_err, y_err, nll_func, directory, xlabel="mu", ylabel='tes')
nll_func = lambda mu, jes : compute_nll(config.TRUE.tes, jes, config.TRUE.les, mu)
x = minimizer.values[3]
y = minimizer.values[1]
x_err = minimizer.errors[3]
y_err = minimizer.errors[1]
focused_contour(x, y, x_err, y_err, nll_func, directory, xlabel="mu", ylabel='jes')
nll_func = lambda mu, les : compute_nll(config.TRUE.tes, config.TRUE.jes, les, mu)
x = minimizer.values[3]
y = minimizer.values[2]
x_err = minimizer.errors[3]
y_err = minimizer.errors[2]
focused_contour(x, y, x_err, y_err, nll_func, directory, xlabel="mu", ylabel='les')
def basic_check(compute_nll, config):
logger = logging.getLogger()
nll = compute_nll(*config.CALIBRATED)
logger.info(f"Calib nll = {nll}")
nll = compute_nll(*config.TRUE)
logger.info(f"TRUE nll = {nll}")
def basic_contourplot(compute_nll, config, directory):
logger = logging.getLogger()
ARRAY_SIZE = 10
# MESH NLL
logger.info(f"basic mu-tes contour plot...")
mu_array = np.linspace(0.5, 1.5, ARRAY_SIZE)
tes_array = np.linspace(0.95, 1.05, ARRAY_SIZE)
mu_mesh, tes_mesh = np.meshgrid(mu_array, tes_array)
nll_func = lambda mu, tes : compute_nll(tes, config.TRUE.jes, config.TRUE.les, mu)
nll_mesh = np.array([nll_func(mu, tes) for mu, tes in zip(mu_mesh.ravel(), tes_mesh.ravel())]).reshape(mu_mesh.shape)
plot_contour(mu_mesh, tes_mesh, nll_mesh, directory, xlabel="mu", ylabel="tes")
logger.info(f"basic mu-jes contour plot...")
jes_array = np.linspace(0.95, 1.05, ARRAY_SIZE)
mu_mesh, jes_mesh = np.meshgrid(mu_array, jes_array)
nll_func = lambda mu, jes : compute_nll(config.TRUE.tes, jes, config.TRUE.les, mu)
nll_mesh = np.array([nll_func(mu, jes) for mu, jes in zip(mu_mesh.ravel(), jes_mesh.ravel())]).reshape(mu_mesh.shape)
plot_contour(mu_mesh, jes_mesh, nll_mesh, directory, xlabel="mu", ylabel="jes")
logger.info(f"basic mu-les contour plot...")
les_array = np.linspace(0.95, 1.05, ARRAY_SIZE)
mu_mesh, les_mesh = np.meshgrid(mu_array, les_array)
nll_func = lambda mu, les : compute_nll(config.TRUE.tes, config.TRUE.jes, les, mu)
nll_mesh = np.array([nll_func(mu, les) for mu, les in zip(mu_mesh.ravel(), les_mesh.ravel())]).reshape(mu_mesh.shape)
plot_contour(mu_mesh, les_mesh, nll_mesh, directory, xlabel="mu", ylabel="les")
logger.info(f"basic tes-jes contour plot...")
tes_mesh, jes_mesh = np.meshgrid(tes_array, jes_array)
nll_func = lambda tes, jes : compute_nll(tes, jes, config.TRUE.les, config.TRUE.mu)
nll_mesh = np.array([nll_func(tes, jes) for tes, jes in zip(tes_mesh.ravel(), jes_mesh.ravel())]).reshape(tes_mesh.shape)
plot_contour(tes_mesh, jes_mesh, nll_mesh, directory, xlabel="tes", ylabel="jes")
def plot_contour(x, y, z, directory, xlabel="mu", ylabel="tes"):
logger = logging.getLogger()
fig, ax = plt.subplots()
CS = ax.contour(x, y, z)
ax.clabel(CS, inline=1, fontsize=10)
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
now = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S\n")
fig.suptitle(now)
fname = f"{xlabel}-{ylabel}_contour_plot.png"
path = os.path.join(directory, fname)
plt.savefig(path)
plt.clf()
plt.close(fig)
logger.info(f"saved at {path}")
def focused_contour(x, y, x_err, y_err, nll_func, directory, xlabel="mu", ylabel='tes'):
logger = logging.getLogger()
ARRAY_SIZE = 10
logger.info(f"focused {xlabel}-{ylabel} contour plot...")
x_array = np.linspace(x-3*x_err, x+3*x_err, ARRAY_SIZE)
y_array = np.linspace(y-3*y_err, y+3*y_err, ARRAY_SIZE)
x_mesh, y_mesh = np.meshgrid(x_array, y_array)
z_mesh = np.array([nll_func(x, y) for x, y in zip(x_mesh.ravel(), y_mesh.ravel())]).reshape(x_mesh.shape)
fig, ax = plt.subplots()
CS = ax.contour(x_mesh, y_mesh, z_mesh)
ax.clabel(CS, inline=1, fontsize=10)
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
now = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S\n")
fig.suptitle(now)
fname = f"{xlabel}-{ylabel}_focused_contour_plot.png"
path = os.path.join(directory, fname)
plt.savefig(path)
plt.clf()
plt.close(fig)
logger.info(f"saved at {path}")
| 39.012346
| 125
| 0.708861
| 1,012
| 6,320
| 4.222332
| 0.151186
| 0.039785
| 0.041189
| 0.043061
| 0.566113
| 0.494734
| 0.428739
| 0.39106
| 0.361573
| 0.321086
| 0
| 0.009211
| 0.158228
| 6,320
| 161
| 126
| 39.254658
| 0.793985
| 0.008861
| 0
| 0.322835
| 0
| 0
| 0.07637
| 0.012143
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03937
| false
| 0
| 0.188976
| 0
| 0.228346
| 0.015748
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b5611952b114a3d2cf44eadfe8d22e693d8c643
| 682
|
py
|
Python
|
python_fundamentals/Multiple_Sum_Average/index.py
|
justnclrk/Python
|
0922961cbd94694a69ae8132a5c33baf552d8d89
|
[
"MIT"
] | null | null | null |
python_fundamentals/Multiple_Sum_Average/index.py
|
justnclrk/Python
|
0922961cbd94694a69ae8132a5c33baf552d8d89
|
[
"MIT"
] | 8
|
2020-06-06T01:02:06.000Z
|
2022-03-12T00:24:13.000Z
|
python_fundamentals/Multiple_Sum_Average/index.py
|
justnclrk/Python
|
0922961cbd94694a69ae8132a5c33baf552d8d89
|
[
"MIT"
] | null | null | null |
# Multiples -- Part I - Write code that prints all the odd numbers from 1 to 1000. Use the for loop and don't use a list to do this exercise
for i in range(1, 1000, 2):
print(i)
# Multiples -- Part II - Create another program that prints all the multiples of 5 from 5 to 1,000,000
for m in range(5, 1000000, 5):
print(m)
# Sum List -- Create a program that prints the sum of all the values in the list: a = [1, 2, 5, 10, 255, 3]
a = [1, 2, 5, 10, 255, 3]
b = sum(a)
print(b)
# Average List -- Create a program that prints the average of the values in the list: c = [1, 2, 5, 10, 255, 3]
c = [1, 2, 5, 10, 255, 3]
dSum = sum(c)
eLen = len(c)
fAvg = (dSum / eLen)
print(fAvg)
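# Editor's sketch (not part of the original exercise): the same average can be
# computed with the standard library's statistics.mean, shown here for comparison.
from statistics import mean
print(mean(c))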
| 40.117647
| 140
| 0.64956
| 141
| 682
| 3.141844
| 0.361702
| 0.090293
| 0.027088
| 0.045147
| 0.311512
| 0.230248
| 0.230248
| 0
| 0
| 0
| 0
| 0.123574
| 0.228739
| 682
| 16
| 141
| 42.625
| 0.718631
| 0.667155
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b5646cef1fca290360a2f8a03244f3cf60a9b62
| 2,817
|
py
|
Python
|
examples/gen9_valset_test.py
|
mgesteiro/pyubx2
|
02fd8fa2863b88ed2d746b5800717a1b6b213181
|
[
"BSD-3-Clause"
] | null | null | null |
examples/gen9_valset_test.py
|
mgesteiro/pyubx2
|
02fd8fa2863b88ed2d746b5800717a1b6b213181
|
[
"BSD-3-Clause"
] | null | null | null |
examples/gen9_valset_test.py
|
mgesteiro/pyubx2
|
02fd8fa2863b88ed2d746b5800717a1b6b213181
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
"""
Demo example to test CFG-VALSET ublox message - generation 9
@author: mgesteiro
"""
import sys
import time
from serial import Serial, SerialException, SerialTimeoutException
from pyubx2 import (
UBXMessage,
GET,
SET,
VALSET_RAM,
UBXMessageError,
UBXTypeError,
UBXParseError,
)
def message_valsetuart1baudrate_set(baudrate):
"""
Function to generate a CFG-VALSET CFG-UART1-BAUDRATE set UBX message
"""
# https://www.u-blox.com/en/docs/UBX-18010854#page=86&zoom=auto,-74,499
# CFG-UART1-BAUDRATE Key = 0x40520001
return UBXMessage(
"CFG",
"CFG-VALSET",
SET,
payload=b"\x00"
+ VALSET_RAM # version
+ int(0).to_bytes(2, byteorder="little", signed=False) # layers
+ 0x40520001 .to_bytes(4, byteorder="little", signed=False) # reserved0
+ baudrate.to_bytes(4, byteorder="little", signed=False), # key # value
)
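# Editor's aside (not part of the original example): the payload above follows the
# CFG-VALSET layout of version (1 byte), layers (1 byte), reserved (2 bytes), then
# the key/value pair. The EXPECTED frame printed in __main__ shows the layers byte
# as 0x01 (RAM), so an equivalent payload for 230400 baud can be packed with the
# standard library:
#
#     import struct
#     struct.pack("<BBHII", 0x00, 0x01, 0x0000, 0x40520001, 230400)
#
# which reproduces the 12 payload bytes inside that expected frame.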
def message_valsetuart1baudrate_response():
"""
Function to generate a ACK-ACK-ACK UBX message
"""
# https://www.u-blox.com/en/docs/UBX-18010854#page=52&zoom=auto,-74,379
return UBXMessage("ACK", "ACK-ACK", GET, clsID=0x06, msgID=0x8A)
if __name__ == "__main__":
PORTNAME = "/dev/tty.usbserial-A50285BI"
BAUDRATE = 230400
try:
print("\nBuilding CFG-UART1-BAUDRATE VALSET message:")
msg = message_valsetuart1baudrate_set(BAUDRATE)
print(f" GENERATED: {msg.serialize().hex()}")
print(
" EXPECTED: b562068a0c00000100000100524000840300b7ef"
+ " (Note: valid for 230400 baudrate)"
)
print(f" {msg}\n")
print(f"This demo will now set your module's UART1 to {BAUDRATE} (only in RAM)")
try:
input("press <ENTER> to continue, CTRL-C to abort!\n")
except KeyboardInterrupt:
print("\nExecution aborted.\n")
sys.exit(0)
sport = Serial(PORTNAME, BAUDRATE, timeout=2)
time.sleep(0.250) # stabilize
print(
f"Sending set message to {PORTNAME} at {BAUDRATE} "
+ "(edit the code to change these values)\n"
)
sport.flushInput()
sport.write(msg.serialize())
print("Receiving response ...")
raw = sport.read(512)
START = raw.find(b"\xB5\x62")
data = raw[START : START + 10] # expected ACK
msg = message_valsetuart1baudrate_response()
print(f" RECEIVED: {data.hex()}")
print(f" EXPECTED: {msg.serialize().hex()}")
print(f" {UBXMessage.parse(data)}\n")
except (
UBXMessageError,
UBXTypeError,
UBXParseError,
SerialException,
SerialTimeoutException,
) as err:
print(f"Something broke 💥🤷♂️: {err}\n")
| 28.17
| 88
| 0.606674
| 320
| 2,817
| 5.290625
| 0.484375
| 0.028352
| 0.028352
| 0.046072
| 0.095688
| 0.095688
| 0.095688
| 0.055523
| 0.055523
| 0.055523
| 0
| 0.065796
| 0.266241
| 2,817
| 99
| 89
| 28.454545
| 0.750847
| 0.160454
| 0
| 0.179104
| 0
| 0
| 0.271706
| 0.060043
| 0
| 0
| 0.007775
| 0
| 0
| 1
| 0.029851
| false
| 0
| 0.059701
| 0
| 0.119403
| 0.179104
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b57edd76cfedc441b5ed69fe2a9fd78c4dbd2d2
| 3,996
|
py
|
Python
|
main.py
|
saswatsamal/Snake-Game
|
2c0f427fd6001f09d26a4586ce55453af706c355
|
[
"CC0-1.0"
] | 2
|
2021-04-25T07:34:14.000Z
|
2021-04-30T15:24:55.000Z
|
main.py
|
saswatsamal/Snake-Game
|
2c0f427fd6001f09d26a4586ce55453af706c355
|
[
"CC0-1.0"
] | null | null | null |
main.py
|
saswatsamal/Snake-Game
|
2c0f427fd6001f09d26a4586ce55453af706c355
|
[
"CC0-1.0"
] | null | null | null |
import pygame
import time
import sys, random
pygame.init()
yellow = (255, 255, 102)
green = (0, 255, 0)
black = (0,0,0)
width = 1280
height = 720
gameDisplay = pygame.display.set_mode((width, height))
pygame.display.set_caption('Snake Game By Saswat Samal')
clock = pygame.time.Clock()
snake_block = 10
snake_speed = 15
font_style = pygame.font.SysFont("ubuntu", 25)
score_font = pygame.font.SysFont("ubuntu", 20)
def main_menu():
while 1:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
if event.type == pygame.MOUSEBUTTONDOWN:
gameLoop()
gameDisplay.fill(black)
main_menu_message = font_style.render('Press anywhere to start the game' , True , (255,255,255))
font_pos = main_menu_message.get_rect(center=(width//2, height//2))
gameDisplay.blit(main_menu_message , font_pos)
pygame.display.update()
def gameScore(score):
value = score_font.render("Your Score: " + str(score), True, green)
gameDisplay.blit(value, [width/2, 0])
def our_snake(snake_block, snake_list):
for x in snake_list:
pygame.draw.rect(gameDisplay, green, [x[0], x[1], snake_block, snake_block])
def message(msg, color):
mesg = font_style.render(msg, True, color)
gameDisplay.blit(mesg, [width / 6, height / 3])
def gameLoop():
game_over = False
game_close = False
x1 = width / 2
y1 = height / 2
x1_change = 0
y1_change = 0
snake_List = []
Length_of_snake = 1
foodx = round(random.randrange(0, width - snake_block) / 10.0) * 10.0
foody = round(random.randrange(0, height - snake_block) / 10.0) * 10.0
while not game_over:
while game_close == True:
gameDisplay.fill(black)
message("Game Over! Press P to Play Again and Press Q to Quit the game. ", green)
gameScore(Length_of_snake - 1)
pygame.display.update()
for event in pygame.event.get():
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_q:
game_over = True
game_close = False
if event.key == pygame.K_p:
gameLoop()
for event in pygame.event.get():
if event.type == pygame.QUIT:
game_over = True
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_LEFT:
x1_change = -snake_block
y1_change = 0
elif event.key == pygame.K_RIGHT:
x1_change = snake_block
y1_change = 0
elif event.key == pygame.K_UP:
y1_change = -snake_block
x1_change = 0
elif event.key == pygame.K_DOWN:
y1_change = snake_block
x1_change = 0
if x1 >= width or x1 < 0 or y1 >= height or y1 < 0:
game_close = True
x1 += x1_change
y1 += y1_change
gameDisplay.fill(black)
pygame.draw.rect(gameDisplay, yellow, [foodx, foody, snake_block, snake_block])
snake_Head = []
snake_Head.append(x1)
snake_Head.append(y1)
snake_List.append(snake_Head)
if len(snake_List) > Length_of_snake:
del snake_List[0]
for x in snake_List[:-1]:
if x == snake_Head:
game_close = True
our_snake(snake_block, snake_List)
gameScore(Length_of_snake - 1)
pygame.display.update()
if x1 == foodx and y1 == foody:
foodx = round(random.randrange(0, width - snake_block) / 10.0) * 10.0
foody = round(random.randrange(0, height - snake_block) / 10.0) * 10.0
Length_of_snake += 1
clock.tick(snake_speed)
pygame.quit()
quit()
main_menu()
| 27.369863
| 104
| 0.56006
| 511
| 3,996
| 4.209393
| 0.221135
| 0.069735
| 0.039052
| 0.041841
| 0.362622
| 0.320316
| 0.295212
| 0.261274
| 0.222222
| 0.222222
| 0
| 0.044461
| 0.335836
| 3,996
| 146
| 105
| 27.369863
| 0.766014
| 0
| 0
| 0.336538
| 0
| 0
| 0.036277
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048077
| false
| 0
| 0.028846
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b59cb1bcbcd0c6d58e12de2aac812a57e139151
| 918
|
py
|
Python
|
SoftMax_Regression.py
|
chunish/tfboy-is-on-the-way
|
7cd4c1f7c0c1dd94189377ee0751f2c232a1e98c
|
[
"Apache-2.0"
] | null | null | null |
SoftMax_Regression.py
|
chunish/tfboy-is-on-the-way
|
7cd4c1f7c0c1dd94189377ee0751f2c232a1e98c
|
[
"Apache-2.0"
] | null | null | null |
SoftMax_Regression.py
|
chunish/tfboy-is-on-the-way
|
7cd4c1f7c0c1dd94189377ee0751f2c232a1e98c
|
[
"Apache-2.0"
] | null | null | null |
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot = True)
sess = tf.InteractiveSession()
x = tf.placeholder(tf.float32, [None, 784])
W = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
y = tf.nn.softmax(tf.matmul(x, W) + b)
y_ = tf.placeholder(tf.float32, [None, 10]) # ground-truth probabilities
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices = [1]))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
tf.global_variables_initializer().run()
for i in range(10000):
batch_xs, batch_ys = mnist.train.next_batch(100)
train_step.run({x: batch_xs, y_: batch_ys})
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print(accuracy.eval({x: mnist.test.images, y_: mnist.test.labels}))
| 31.655172
| 87
| 0.734205
| 149
| 918
| 4.33557
| 0.483221
| 0.041796
| 0.04644
| 0.068111
| 0.080495
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037713
| 0.104575
| 918
| 28
| 88
| 32.785714
| 0.748175
| 0.004357
| 0
| 0
| 0
| 0
| 0.012061
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0.055556
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b5fd841b1005516ab298b5be16fb1dd41b071b3
| 3,190
|
py
|
Python
|
taehyoungram/images/views.py
|
TaeHyoungKwon/taehyoungram
|
055c9effdaa718d60e7627196754ea6b48dded20
|
[
"MIT"
] | null | null | null |
taehyoungram/images/views.py
|
TaeHyoungKwon/taehyoungram
|
055c9effdaa718d60e7627196754ea6b48dded20
|
[
"MIT"
] | 7
|
2020-02-12T01:23:48.000Z
|
2022-03-11T23:26:02.000Z
|
taehyoungram/images/views.py
|
TaeHyoungKwon/taehyoungram
|
055c9effdaa718d60e7627196754ea6b48dded20
|
[
"MIT"
] | null | null | null |
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from .models import Image, Comment, Like
from .serializers import ImageSerializer, CommentSerializer, LikeSerializer
class Feed(APIView):
def get(self, request, format=None):
user = request.user
following_users = user.following.all()
image_list = []
for following_user in following_users:
user_images = following_user.images.all()[:2]
for image in user_images:
image_list.append(image)
sorted_list = sorted(image_list, key=lambda image:image.created_at, reverse=True)
serializer = ImageSerializer(sorted_list, many=True)
return Response(serializer.data)
class LikeImage(APIView):
def post(self, request, image_id, format=None):
try:
found_image = Image.objects.get(id=image_id)
except Image.DoesNotExist:
return Response(status=status.HTTP_404_NOT_FOUND)
try:
pre_existing_like = Like.objects.get(
creator=request.user,
image=found_image
)
return Response(status=status.HTTP_304_NOT_MODIFIED)
except Like.DoesNotExist:
new_like = Like.objects.create(
creator=request.user,
image=found_image
)
new_like.save()
return Response(status=status.HTTP_201_CREATED)
class UnLikeImage(APIView):
def delete(self, request, image_id, format=None):
user = request.user
try:
found_image = Image.objects.get(id=image_id)
except:
return Response(status=status.HTTP_404_NOT_FOUND)
try:
pre_existing_like = Like.objects.get(
creator=user,
image=found_image
)
pre_existing_like.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
except Like.DoesNotExist:
return Response(status=status.HTTP_304_NOT_MODIFIED)
class CommentOnImage(APIView):
def post(self, request, image_id, format=None):
user = request.user
try:
found_image = Image.objects.get(id=image_id)
except Image.DoesNotExist:
return Response(status=status.HTTP_404_NOT_FOUND)
serializer = CommentSerializer(data=request.data)
if serializer.is_valid():
serializer.save(creator=user, image=found_image)
return Response(data=serializer.data, status=status.HTTP_201_CREATED)
else:
return Response(data=serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class Comment(APIView):
def delete(self, request, comment_id, format=None):
user = request.user
try:
comment = Comment.objects.get(id=comment_id, creator=user)
comment.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
except Comment.DoesNotExist:
return Response(status=status.HTTP_404_NOT_FOUND)
| 27.73913
| 89
| 0.626646
| 354
| 3,190
| 5.449153
| 0.228814
| 0.087092
| 0.091239
| 0.121306
| 0.519959
| 0.414723
| 0.35718
| 0.35718
| 0.31156
| 0.255054
| 0
| 0.015138
| 0.295925
| 3,190
| 114
| 90
| 27.982456
| 0.843722
| 0
| 0
| 0.426667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.066667
| 0
| 0.36
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b65eb4040ecf11e53140f9d3ec6fb5084fff907
| 6,298
|
py
|
Python
|
src/utilities/download_bc.py
|
geoschem/integrated_methane_inversion
|
0615e3b76c111beadaf0d7fb5b9fa99aa782f403
|
[
"MIT"
] | null | null | null |
src/utilities/download_bc.py
|
geoschem/integrated_methane_inversion
|
0615e3b76c111beadaf0d7fb5b9fa99aa782f403
|
[
"MIT"
] | 3
|
2022-02-14T20:42:35.000Z
|
2022-03-29T18:11:40.000Z
|
src/utilities/download_bc.py
|
geoschem/integrated_methane_inversion
|
0615e3b76c111beadaf0d7fb5b9fa99aa782f403
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""
Description:
------------
This Python script (assumes Python3) downloads boundary conditions
files from AWS S3 to a target directory for the requested date range.
Remarks:
--------
(1) Jiawei Zhuang found that it is much faster to issue aws s3 cp
commands from a bash script than a Python script. Therefore,
in this routine we create a bash script with all of the
download commands that will be executed by the main routine.
"""
# Imports
import os
import sys
import subprocess
# Exit with error if we are not using Python3
assert sys.version_info.major >= 3, "ERROR: Python 3 is required to run download_bc.py!"
# Define global variables
DATA_DOWNLOAD_SCRIPT = "./auto_generated_download_script.sh"
def list_missing_files(start_date, end_date, destination):
"""
Creates list of BC files in date range that do not already
exist at destination.
Args:
-----
start_date : str
Initial date of simulation.
end_date : str
Final date of simulation.
destination : str
Target directory for downloaded files
"""
missing_files = []
start_str = str(start_date)
start_year = start_str[:4]
start_month = start_str[4:6]
start_day = start_str[6:8]
end_str = str(end_date)
end_year = end_str[:4]
end_month = end_str[4:6]
end_day = end_str[6:8]
month_days = [31, [28, 29], 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
file_prefix = "GEOSChem.BoundaryConditions."
file_suffix = "_0000z.nc4"
for year in range(int(start_year), int(end_year) + 1):
# skip years with definite no data
if year < 2018:
print(
"Skipping BC data download for ", str(year), ": no data from this year"
)
continue
init_month = 1
final_month = 12
if year == int(start_year):
# only get desired months from incomplete years
init_month = int(start_month)
if year == int(end_year):
final_month = int(end_month)
for month in range(init_month, final_month + 1):
# skip months with definite no data
if year == 2018 and month < 4:
print(
"Skipping BC data download for ",
str(year),
"/0",
str(month),
": no data from this month",
)
continue
# add 0 to month string if necessary
month_prefix = "0" if month < 10 else ""
init_day = 1
final_day = month_days[month - 1]
# leap day
if month == 2:
if year % 4 == 0:
final_day = final_day[1]
else:
final_day = final_day[0]
if month == int(start_month) and year == int(start_year):
# only get desired days from incomplete months
init_day = int(start_day)
if month == int(end_month) and year == int(end_year):
final_day = int(end_day)
for day in range(init_day, final_day + 1):
# add 0 to day string if necessary
day_prefix = "0" if day < 10 else ""
# check if file for this day already exists
file_name = (
file_prefix
+ str(year)
+ month_prefix
+ str(month)
+ day_prefix
+ str(day)
+ file_suffix
)
# add file to download list if needed
if not os.path.exists(destination + "/" + file_name):
missing_files.append(file_name)
return missing_files
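# For example (editor's illustration, dates given as YYYYMMDD as in main() below):
#   list_missing_files(20200501, 20200531, "/home/ubuntu/ExtData/BoundaryConditions")
# returns the GEOSChem.BoundaryConditions.*_0000z.nc4 file names for May 2020
# that are not already present in the destination directory.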
def create_download_script(paths, destination):
"""
Creates a data download script to obtain missing files
Args:
-----
paths : dict
Output of function list_missing_files.
"""
# Create the data download script
with open(DATA_DOWNLOAD_SCRIPT, "w") as f:
# Write shebang line to script
print("#!/bin/bash\n", file=f)
print("# This script was generated by download_bc.py\n", file=f)
cmd_prefix = "aws s3 cp --only-show-errors --request-payer=requester "
remote_root = "s3://imi-boundary-conditions/"
# make destination if needed
if not os.path.exists(destination):
os.mkdir(destination)
# Write download commands for only the missing data files
for path in paths:
cmd = cmd_prefix + remote_root + path + " " + destination
print(cmd, file=f)
print(file=f)
# Close file and make it executable
f.close()
os.chmod(DATA_DOWNLOAD_SCRIPT, 0o755)
def download_the_data(start_date, end_date, destination):
"""
Downloads required boundary conditions files from AWS.
Args:
-----
start_date : str
Initial date of simulation.
end_date : str
Final date of simulation.
destination : str
Target directory for downloaded files
"""
# Get a list of missing data paths
paths = list_missing_files(start_date, end_date, destination)
# Create script to download missing files from AWS S3
create_download_script(paths, destination)
# Run the data download script and return the status
# Remove the file afterwards
status = subprocess.call(DATA_DOWNLOAD_SCRIPT)
os.remove(DATA_DOWNLOAD_SCRIPT)
# Raise an exception if the data was not successfully downloaded
if status != 0:
err_msg = "Error downloading data from AWS!"
raise Exception(err_msg)
def main():
"""
Main program. Gets command-line arguments and calls function
download_the_data to initiate a data-downloading process.
Calling sequence:
-----------------
./download_bc.py start_date end_date destination
Example call:
-------------
./download_bc.py 20200501 20200531 /home/ubuntu/ExtData/BoundaryConditions
"""
download_the_data(sys.argv[1], sys.argv[2], sys.argv[3])
if __name__ == "__main__":
main()
| 30.872549
| 88
| 0.580343
| 790
| 6,298
| 4.473418
| 0.258228
| 0.043577
| 0.040747
| 0.01811
| 0.233729
| 0.164686
| 0.164686
| 0.131862
| 0.066214
| 0.066214
| 0
| 0.025549
| 0.335027
| 6,298
| 203
| 89
| 31.024631
| 0.81829
| 0.355668
| 0
| 0.043478
| 0
| 0
| 0.109699
| 0.030342
| 0
| 0
| 0
| 0
| 0.01087
| 1
| 0.043478
| false
| 0
| 0.032609
| 0
| 0.086957
| 0.065217
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b6add180192d528e3ed133e29c757a81886beb8
| 483
|
py
|
Python
|
NoteBooks/Curso de Python/Python/Paradigmas/Object Oriented Programming/Modelando Objetos_2.py
|
Alejandro-sin/Learning_Notebooks
|
161d6bed4c7b1d171b45f61c0cc6fa91e9894aad
|
[
"MIT"
] | 1
|
2021-02-26T13:12:22.000Z
|
2021-02-26T13:12:22.000Z
|
NoteBooks/Curso de Python/Python/Paradigmas/Object Oriented Programming/Modelando Objetos_2.py
|
Alejandro-sin/Learning_Notebooks
|
161d6bed4c7b1d171b45f61c0cc6fa91e9894aad
|
[
"MIT"
] | null | null | null |
NoteBooks/Curso de Python/Python/Paradigmas/Object Oriented Programming/Modelando Objetos_2.py
|
Alejandro-sin/Learning_Notebooks
|
161d6bed4c7b1d171b45f61c0cc6fa91e9894aad
|
[
"MIT"
] | null | null | null |
"""
Ejercicio para operación entre currencies
"""
""" Representación del currency"""
class Curency:
def __init__(self, name, symbol, factor):
self.name = name
self.symbol = symbol
self.factor = factor
# It is not very clear to me what this function is used for; what exactly is it meant to display?
# def __repr__(self):
# info = self.name
# info2 = self.symbol
# return info, info2
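# Editor's sketch of a typical __repr__: it should return a single, unambiguous
# string describing the instance (this is what the REPL and print() fall back to),
# rather than a tuple as in the draft above.
# def __repr__(self):
#     return f"Curency(name={self.name!r}, symbol={self.symbol!r}, factor={self.factor!r})"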
euro = Curency("Euro","EU","3.2")
print(euro)
| 17.888889
| 84
| 0.621118
| 60
| 483
| 4.866667
| 0.65
| 0.082192
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011299
| 0.267081
| 483
| 26
| 85
| 18.576923
| 0.813559
| 0.459627
| 0
| 0
| 0
| 0
| 0.042453
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0
| 0
| 0.285714
| 0.142857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b6f1aec2b3a7aa82fa7792516bb55e9223b7c08
| 1,063
|
py
|
Python
|
bot.py
|
federicosapienza/InboxNotionTelegramBot
|
031d5e78cd352dfb692b93f3e0b421695f1dc18e
|
[
"MIT"
] | null | null | null |
bot.py
|
federicosapienza/InboxNotionTelegramBot
|
031d5e78cd352dfb692b93f3e0b421695f1dc18e
|
[
"MIT"
] | null | null | null |
bot.py
|
federicosapienza/InboxNotionTelegramBot
|
031d5e78cd352dfb692b93f3e0b421695f1dc18e
|
[
"MIT"
] | null | null | null |
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters, ConversationHandler
import logging
from utils import TELEGRAM_TOKEN
from handlers import start, ask_new_url, get_url, get_description, cancel
from handlers import URL_URL, URL_DESCRIPTION
logging.basicConfig(format='%(levelname)s - %(message)s', level=logging.DEBUG)
logger = logging.getLogger(__name__)
updater = None
def start_bot():
global updater
updater = Updater(TELEGRAM_TOKEN, use_context=True)
dispatcher = updater.dispatcher
dispatcher.add_handler(CommandHandler('start', start))
conversation_url_handler = ConversationHandler(
entry_points=[CommandHandler('url', ask_new_url)],
states={
URL_URL: [MessageHandler(Filters.text, get_url)],
URL_DESCRIPTION: [MessageHandler(Filters.text, get_description)],
},
fallbacks=[MessageHandler(Filters.command, cancel)]
)
dispatcher.add_handler(conversation_url_handler)
updater.start_polling(timeout=30)
updater.idle()
start_bot()
| 25.926829
| 94
| 0.739417
| 118
| 1,063
| 6.415254
| 0.432203
| 0.110964
| 0.047556
| 0.073976
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002257
| 0.16651
| 1,063
| 40
| 95
| 26.575
| 0.852144
| 0
| 0
| 0
| 0
| 0
| 0.032926
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0.2
| 0
| 0.24
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b6fc4e98137fcd105847298b470d6ad64f71618
| 841
|
py
|
Python
|
examples/face.py
|
birkenfeld/python-gr
|
1d6cd36616a73c8e569b8348869e6e30f3830ec4
|
[
"RSA-MD"
] | null | null | null |
examples/face.py
|
birkenfeld/python-gr
|
1d6cd36616a73c8e569b8348869e6e30f3830ec4
|
[
"RSA-MD"
] | null | null | null |
examples/face.py
|
birkenfeld/python-gr
|
1d6cd36616a73c8e569b8348869e6e30f3830ec4
|
[
"RSA-MD"
] | null | null | null |
#!/usr/bin/env python
"""
Simple surface plot example
"""
from gr import *
from math import *
x = [-2 + i * 0.5 for i in range(0, 29)]
y = [-7 + i * 0.5 for i in range(0, 29)]
z = list(range(0, 841))
for i in range(0, 29):
for j in range(0, 29):
r1 = sqrt((x[j] - 5)**2 + y[i]**2)
r2 = sqrt((x[j] + 5)**2 + y[i]**2)
z[i * 29 - 1 + j] = (exp(cos(r1)) + exp(cos(r2)) - 0.9) * 25
setcharheight(24.0/500)
settextalign(TEXT_HALIGN_CENTER, TEXT_VALIGN_TOP)
textext(0.5, 0.9, "Surface Example")
(tbx, tby) = inqtextext(0.5, 0.9, "Surface Example")
fillarea(tbx, tby)
setwindow(-2, 12, -7, 7)
setspace(-80, 200, 45, 70)
setcharheight(14.0/500)
axes3d(1, 0, 20, -2, -7, -80, 2, 0, 2, -0.01)
axes3d(0, 1, 0, 12, -7, -80, 0, 2, 0, 0.01)
titles3d("X-Axis", "Y-Axis", "Z-Axis")
surface(x, y, z, 3)
surface(x, y, z, 1)
updatews()
| 22.72973
| 64
| 0.567182
| 171
| 841
| 2.766082
| 0.374269
| 0.063425
| 0.067653
| 0.084567
| 0.224101
| 0.224101
| 0.118393
| 0.118393
| 0.071882
| 0
| 0
| 0.156805
| 0.196195
| 841
| 36
| 65
| 23.361111
| 0.542899
| 0.057075
| 0
| 0
| 0
| 0
| 0.061147
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b714a892a0b336d54d129baf723bfd26bcf8c4a
| 1,495
|
py
|
Python
|
app/core.py
|
antmicro/raw-image-data-previewer
|
1fc14848a27ce628047cf3e473a9f30f83c9892d
|
[
"Apache-2.0"
] | 5
|
2021-06-08T15:37:23.000Z
|
2021-06-10T15:41:21.000Z
|
app/core.py
|
antmicro/raw-image-data-previewer
|
1fc14848a27ce628047cf3e473a9f30f83c9892d
|
[
"Apache-2.0"
] | 37
|
2021-03-12T12:48:56.000Z
|
2021-12-09T11:41:05.000Z
|
app/core.py
|
antmicro/raw-image-data-previewer
|
1fc14848a27ce628047cf3e473a9f30f83c9892d
|
[
"Apache-2.0"
] | 9
|
2021-03-22T14:03:37.000Z
|
2021-12-31T07:22:04.000Z
|
"""Main functionalities."""
from .image.image import (Image, RawDataContainer)
from .image.color_format import AVAILABLE_FORMATS
from .parser.factory import ParserFactory
import cv2 as cv
import os
def load_image(file_path, color_format, width):
try:
image = Image.from_file(file_path)
parser = ParserFactory.create_object(
determine_color_format(color_format))
except Exception as e:
print(type(e).__name__, e)
raise  # re-raise: without a loaded image and parser, the parse call below would fail with a NameError
image = parser.parse(image.data_buffer,
determine_color_format(color_format), width)
return image
def get_displayable(image):
if image.color_format is None:
raise Exception("Image should be already parsed!")
parser = ParserFactory.create_object(image.color_format)
return parser.get_displayable(image)
def determine_color_format(format_string):
if format_string in AVAILABLE_FORMATS.keys():
return AVAILABLE_FORMATS[format_string]
else:
raise NotImplementedError(
"Provided string is not name of supported format.")
def save_image_as_file(image, file_path):
directory = file_path.replace('\\', '/')
if directory.rfind('/') == -1:
directory = './'
else:
directory = directory[:directory.rfind("/")]
if not os.path.isdir(directory):
os.makedirs(directory)
try:
cv.imwrite(file_path, cv.cvtColor(image, cv.COLOR_RGB2BGR))
except Exception as e:
print(type(e).__name__, e)
| 26.22807
| 69
| 0.681605
| 182
| 1,495
| 5.368132
| 0.357143
| 0.101331
| 0.04913
| 0.06346
| 0.131013
| 0.067554
| 0.067554
| 0.067554
| 0.067554
| 0
| 0
| 0.002571
| 0.219398
| 1,495
| 56
| 70
| 26.696429
| 0.834619
| 0.014047
| 0
| 0.210526
| 0
| 0
| 0.058583
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0
| 0.131579
| 0
| 0.315789
| 0.052632
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b7544498643883f50311519a373ed59f4faa469
| 3,478
|
py
|
Python
|
app/urls.py
|
etihadprime/etihadwebclass
|
3b46d9068afeb0806198ef08fe26849ab9a09bb9
|
[
"Apache-2.0"
] | null | null | null |
app/urls.py
|
etihadprime/etihadwebclass
|
3b46d9068afeb0806198ef08fe26849ab9a09bb9
|
[
"Apache-2.0"
] | 6
|
2021-03-19T03:55:20.000Z
|
2021-09-22T19:06:06.000Z
|
app/urls.py
|
etihadprime/etihadwebclass
|
3b46d9068afeb0806198ef08fe26849ab9a09bb9
|
[
"Apache-2.0"
] | null | null | null |
from django.urls import path
from .views import teacherregister,studentregister,login_view,logout
from . import views
from .views import (
ClassroomCreateView,ClassroomListView,ClassroomDetailView,ClassroomUpdateView,ClassroomDeleteView,
SubjectCreateView,SubjectListView,SubjectDetailView,SubjectUpdateView,SubjectDeleteView,
ClassMemberCreateView,ClassMemberListView,ClassMemberDetailView,ClassMemberUpdateView,ClassMemberDeleteView,
TimetableCreateView,TimetableListView,TimetableDetailView,TimetableUpdateView,TimetableDeleteView,CrudView,chatroom
)
urlpatterns = [
path('', views.index, name='index'),
path('health', views.health, name='health'),
path('404', views.handler404, name='404'),
path('500', views.handler500, name='500'),
path('signup/teacher', teacherregister,name='register-teacher'),
path('signup/student', studentregister,name='register-student'),
path('accounts/login/', login_view, name='login'),
path('logout/', logout,name='logout'),
#Classroom
path('classroom/new', ClassroomCreateView.as_view(),name='classroom-create'),
path('classroom_list', ClassroomListView.as_view(),name='classroom-list'),
path('classroom/<str:pk>/', ClassroomDetailView.as_view(),name='classroom-detail'),
path('classroom/<str:pk>/update', ClassroomUpdateView.as_view(),name='classroom-update'),
path('classroom/<str:pk>/delete', ClassroomDeleteView.as_view(),name='classroom-delete'),
#path('Classroom/<int:pk>/image',ChildImageUpdateView.as_view(),name='Classroom-image'),
#Subject
path('subject/new', SubjectCreateView.as_view(),name='subject-create'),
path('subject_list', SubjectListView.as_view(),name='subject-list'),
path('subject/<int:pk>/', SubjectDetailView.as_view(),name='subject-detail'),
path('subject/<int:pk>/update', SubjectUpdateView.as_view(),name='subject-update'),
path('subject/<int:pk>/delete', SubjectDeleteView.as_view(),name='subject-delete'),
# Class Members
path('classmember/new', ClassMemberCreateView.as_view(),name='classmember-create'),
path('classmember_list', ClassMemberListView.as_view(),name='classmember-list'),
path('classmember/<str:pk>/', ClassMemberDetailView.as_view(),name='classmember-detail'),
path('classmember/<str:pk>/update', ClassMemberUpdateView.as_view(),name='classmember-update'),
path('classmember/<str:pk>/delete', ClassMemberDeleteView.as_view(),name='classmember-delete'),
# TimeTable
path('timetable/new', TimetableCreateView.as_view(),name='timetable-create'),
path('timetable_list', TimetableListView.as_view(),name='timetable-list'),
path('timetable/<int:pk>/', TimetableDetailView.as_view(),name='timetable-detail'),
path('timetable/<int:pk>/update', TimetableUpdateView.as_view(),name='timetable-update'),
path('timetable/<int:pk>/delete', TimetableDeleteView.as_view(),name='timetable-delete'),
# chatroom
path('chat/new',chatroom,name='chatroom'),
path('crud/',CrudView.as_view(), name='crud_ajax'),
]
| 70.979592
| 115
| 0.648074
| 322
| 3,478
| 6.909938
| 0.208075
| 0.082697
| 0.098876
| 0.051236
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006494
| 0.20299
| 3,478
| 49
| 116
| 70.979592
| 0.796176
| 0.039103
| 0
| 0
| 0
| 0
| 0.25412
| 0.066227
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.097561
| 0
| 0.097561
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b76dabace6084b6df07b8d27c9db12c437ca835
| 44,634
|
py
|
Python
|
qaboard/qa.py
|
Samsung/qaboard
|
a2290f33da2bbd87cacf95822e1c85376083dfa1
|
[
"Apache-2.0"
] | 51
|
2019-12-02T07:25:58.000Z
|
2022-03-23T13:27:11.000Z
|
qaboard/qa.py
|
Samsung/qaboard
|
a2290f33da2bbd87cacf95822e1c85376083dfa1
|
[
"Apache-2.0"
] | 25
|
2020-01-20T16:13:49.000Z
|
2022-02-19T17:07:38.000Z
|
qaboard/qa.py
|
Samsung/qaboard
|
a2290f33da2bbd87cacf95822e1c85376083dfa1
|
[
"Apache-2.0"
] | 15
|
2020-01-17T21:21:17.000Z
|
2022-02-23T10:13:48.000Z
|
#!/usr/bin/env python
"""
CLI tool that runs various tasks related to QA.
"""
import os
import time
from pathlib import Path
import sys
import traceback
import json
import yaml
import uuid
import datetime
import click
from .run import RunContext
from .runners import runners, Job, JobGroup
from .runners.lsf import LsfPriority
from .conventions import batch_dir, batch_dir, make_batch_dir, make_batch_conf_dir, make_hash
from .conventions import serialize_config, deserialize_config, get_settings
from .utils import PathType, entrypoint_module, load_tuning_search
from .utils import save_outputs_manifest, total_storage
from .utils import redirect_std_streams
from .utils import getenvs
from .api import url_to_dir, print_url
from .api import get_outputs, notify_qa_database, serialize_paths
from .iterators import iter_inputs, iter_parameters
from .config import config_has_error, ignore_config_errors
from .config import project, project_root, subproject, config
from .config import default_batches_files, get_default_database, default_batch_label, default_platform
from .config import get_default_configuration, default_input_type
from .config import commit_id, outputs_commit, artifacts_commit, root_qatools, artifacts_commit_root, outputs_commit_root
from .config import user, is_ci, on_windows
@click.group()
@click.pass_context
@click.option('--platform', default=default_platform)
@click.option('--configuration', '--config', '-c', 'configurations', multiple=True, help="Will be passed to the run function")
@click.option('--label', '-l', default=default_batch_label, help="Gives tuning experiments a name.")
@click.option('--tuning', default=None, help="Extra parameters for tuning (JSON)")
@click.option('--tuning-filepath', type=PathType(), default=None, help="File with extra parameters for tuning")
@click.option('--dryrun', is_flag=True, help="Only show the commands that would be executed")
@click.option('--share', is_flag=True, help="Show outputs in QA-Board, doesn't just save them locally.")
@click.option('--database', type=PathType(), help="Input database location")
@click.option('--type', 'input_type', default=default_input_type, help="How we define inputs")
@click.option('--offline', is_flag=True, help="Do not notify QA-Board about run statuses.")
def qa(ctx, platform, configurations, label, tuning, tuning_filepath, dryrun, share, database, input_type, offline):
"""Entrypoint to running your algo, launching batchs..."""
# We want all paths to be relative to top-most qaboard.yaml
# it should be located at the root of the git repository
if config_has_error and not ignore_config_errors:
click.secho('Please fix the error(s) above in qaboard.yaml', fg='red', err=True, bold=True)
exit(1)
# Click passes `ctx.obj` to downstream commands, we can use it as a scratchpad
# http://click.pocoo.org/6/complex/
ctx.obj = {}
will_show_help = '-h' in sys.argv or '--help' in sys.argv
noop_command = 'get' in sys.argv or 'init' in sys.argv
if root_qatools and root_qatools != Path().resolve() and not will_show_help and not noop_command:
ctx.obj['previous_cwd'] = os.getcwd()
click.echo(click.style("Working directory changed to: ", fg='blue') + click.style(str(root_qatools), fg='blue', bold=True), err=True)
os.chdir(root_qatools)
# We want open permissions on outputs and artifacts
# it makes collaboration among multiple users / automated tools so much easier...
os.umask(0)
ctx.obj['project'] = project
ctx.obj['project_root'] = project_root
ctx.obj['subproject'] = subproject
ctx.obj['HOST'] = os.environ.get('HOST', os.environ.get('HOSTNAME'))
ctx.obj['user'] = user
ctx.obj['dryrun'] = dryrun
ctx.obj['share'] = share
ctx.obj['offline'] = offline
ctx.obj['outputs_commit'] = outputs_commit
ctx.obj['artifacts_commit'] = artifacts_commit
# Note: to support multiple databases per project,
# either use / as database, or somehow we need to hash the db in the output path.
ctx.obj['raw_batch_label'] = label
ctx.obj['batch_label'] = label if not share else f"@{user}| {label}"
ctx.obj['platform'] = platform
ctx.obj['input_type'] = input_type
ctx.obj['inputs_settings'] = get_settings(input_type, config)
ctx.obj['database'] = database if database else get_default_database(ctx.obj['inputs_settings'])
# configuration singular is for backward compatibility to a time where there was a single str config
ctx.obj['configuration'] = ':'.join(configurations) if configurations else get_default_configuration(ctx.obj['inputs_settings'])
# we should refactor the str configuration away completely, and do a much simpler parsing, like
# deserialize_config = lambda configurations: [maybe_json_loads(c) for c in configurations]
ctx.obj['configurations'] = deserialize_config(ctx.obj['configuration'])
ctx.obj['extra_parameters'] = {}
if tuning:
ctx.obj['extra_parameters'] = json.loads(tuning)
elif tuning_filepath:
ctx.obj['tuning_filepath'] = tuning_filepath
with tuning_filepath.open('r') as f:
if tuning_filepath.suffix == '.yaml':
ctx.obj['extra_parameters'] = yaml.load(f, Loader=yaml.SafeLoader)
elif tuning_filepath.suffix == '.cde':
from cde import Config
ctx.obj['extra_parameters'] = Config.loads(f.read()).asdict()
else:
ctx.obj['extra_parameters'] = json.load(f)
# batch runs will override this since batches may have different configurations
ctx.obj['batch_conf_dir'] = make_batch_conf_dir(outputs_commit, ctx.obj['batch_label'], platform, ctx.obj['configurations'], ctx.obj['extra_parameters'], share)
ctx.obj['batch_dir'] = make_batch_dir(outputs_commit, ctx.obj['batch_label'], platform, ctx.obj['configurations'], ctx.obj['extra_parameters'], share)
# For convenience, we allow users to change environment variables using {ENV: {VAR: value}}
# in configurations or tuning parameters
environment_variables = {}
for c in ctx.obj['configurations']:
if not isinstance(c, dict): continue
if 'ENV' in c: environment_variables.update(c['ENV'])
if 'ENV' in ctx.obj['extra_parameters']:
environment_variables.update(ctx.obj['extra_parameters']['ENV'])
os.environ.update(environment_variables)
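# For instance (editor's illustration, variable name chosen arbitrarily), a call like
#   qa --tuning '{"ENV": {"OMP_NUM_THREADS": "1"}}' run -i some/input
# exports OMP_NUM_THREADS=1 before the entrypoint's run() is invoked.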
# we manage stripping ansi color codes ourselves since we redirect std streams
# to both the original stream and a log file
ctx.color = True
# colors in log files will be interpreted in the UIs
ctx.obj['color'] = is_ci or share
@qa.command()
@click.option('-i', '--input', 'input_path', type=PathType(), help='Path of the input/recording/test we should work on, relative to the database directory.')
@click.option('-o', '--output', 'output_path', type=PathType(), default=None, help="Custom output directory path. If not provided, defaults to ctx.obj['batch_conf_dir'] / input_path.with_suffix('')")
@click.argument('variable')
@click.pass_context
def get(ctx, input_path, output_path, variable):
"""Prints the value of the requested variable. Mostly useful for debug."""
try:
output_directory = ctx.obj['batch_conf_dir'] / input_path.with_suffix('') if not output_path else output_path
except:
pass
from .config import outputs_commit, commit_branch, artifacts_branch_root
# backward compatibility
if variable == "branch_ci_dir":
variable = "artifacts_branch_root"
if variable == "commit_ci_dir":
variable = "outputs_commit"
locals().update(globals())
locals().update(ctx.obj)
if variable in locals():
print(locals().get(variable))
else:
click.secho(f"Could not find {variable}", err=True, fg='red')
exit(1)
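# Example (editor's illustration): `qa get batch_dir` prints the resolved batch output
# directory, and legacy names such as `qa get commit_ci_dir` resolve to outputs_commit
# via the backward-compatibility mapping above.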
@qa.command(context_settings=dict(
ignore_unknown_options=True,
allow_interspersed_args=False,
))
@click.pass_context
@click.option('-i', '--input', 'input_path', required=True, type=PathType(), help='Path of the input/recording/test we should work on, relative to the database directory.')
@click.option('-o', '--output', 'output_path', type=PathType(), default=None, help="Custom output directory path. If not provided, defaults to ctx.obj['batch_conf_dir'] / input_path.with_suffix('')")
@click.option('--keep-previous', is_flag=True, help="Don't clean previous outputs before the run.")
@click.option('--no-postprocess', is_flag=True, help="Don't do the postprocessing.")
@click.option('--save-manifests-in-database', is_flag=True, help="Save the input and outputs manifests in the database.")
@click.argument('forwarded_args', nargs=-1, type=click.UNPROCESSED)
def run(ctx, input_path, output_path, keep_previous, no_postprocess, forwarded_args, save_manifests_in_database):
"""
Runs over a given input/recording/test and computes various success metrics and outputs.
"""
run_context = RunContext.from_click_run_context(ctx, config)
# Usually we want to remove any files already present in the output directory.
# It avoids issues with remaining state... This said,
# In some cases users want to debug long, multi-stepped runs, for which they have their own caching
if not keep_previous:
import shutil
shutil.rmtree(run_context.output_dir, ignore_errors=True)
run_context.output_dir.mkdir(parents=True, exist_ok=True)
with (run_context.output_dir / 'run.json').open('w') as f:
json.dump({
# run_context.database is always made absolute; here we keep it relative if it was given as relative
"database": str(ctx.obj["database"]),
"input_path": str(run_context.rel_input_path),
"input_type": run_context.type,
"configurations": run_context.configurations,
"extra_parameters": run_context.extra_parameters,
"platform": run_context.platform,
}, f, sort_keys=True, indent=2, separators=(',', ': '))
# Without this, we can only log runs from `qa batch`, on linux, via LSF
# this redirect is not 100% perfect, we don't get stdout from C calls
# if not 'LSB_JOBID' in os.environ: # When using LSF, we usually already have incremental logs
with redirect_std_streams(run_context.output_dir / 'log.txt', color=ctx.obj['color']):
# Help reproduce qa runs with something copy-pastable in the logs
if is_ci:
from shlex import quote
click.secho(' '.join(['qa', *map(quote, sys.argv[1:])]), fg='cyan', bold=True)
click.echo(click.style("Outputs: ", fg='cyan') + click.style(str(run_context.output_dir), fg='cyan', bold=True), err=True)
print_url(ctx)
if not ctx.obj['offline']:
notify_qa_database(**ctx.obj, is_pending=True, is_running=True)
start = time.time()
cwd = os.getcwd()
try:
runtime_metrics = entrypoint_module(config).run(run_context)
except Exception as e:
exc_type, exc_value, exc_traceback = sys.exc_info()
click.secho(f'[ERROR] Your `run` function raised an exception: {e}', fg='red', bold=True)
try:
exc_type, exc_value, exc_traceback = sys.exc_info()
click.secho(''.join(traceback.format_exception(exc_type, exc_value, exc_traceback)), fg='red')
except Exception as e: # debug strange stale file errors, ideally remove this...
print(f"ERROR: {e}")
runtime_metrics = {'is_failed': True}
if not runtime_metrics:
click.secho('[WARNING] Your `run` function should return a dict with a least {"is_failed": False}', fg='yellow')
runtime_metrics = {"is_failed": False}
if not isinstance(runtime_metrics, dict):
click.secho(f'[ERROR] Your `run` function did not return a dict, but {runtime_metrics}', fg='red', bold=True)
runtime_metrics = {'is_failed': True}
runtime_metrics['compute_time'] = time.time() - start
# avoid issues if code in run() changes cwd
if os.getcwd() != cwd:
os.chdir(cwd)
metrics = postprocess_(runtime_metrics, run_context, skip=no_postprocess or runtime_metrics['is_failed'], save_manifests_in_database=save_manifests_in_database)
if not metrics:
metrics = runtime_metrics
if metrics['is_failed']:
click.secho('[ERROR] The run has failed.', fg='red', err=True)
click.secho(str(metrics), fg='red', bold=True)
exit(1)
else:
click.secho(str(metrics), fg='green')
def postprocess_(runtime_metrics, run_context, skip=False, save_manifests_in_database=False):
"""Computes computes various success metrics and outputs."""
from .utils import file_info
try:
if not skip:
try:
entrypoint_postprocess = entrypoint_module(config).postprocess
except:
metrics = runtime_metrics
else:
metrics = entrypoint_postprocess(runtime_metrics, run_context)
else:
metrics = runtime_metrics
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
# TODO: in case of import error because postprocess was not defined, just ignore it...?
# TODO: we should provide a default postprocess function, that reads metrics.json and returns {**previous, **runtime_metrics}
exc_type, exc_value, exc_traceback = sys.exc_info()
click.secho(f'[ERROR] Your `postprocess` function raised an exception:', fg='red', bold=True)
click.secho(''.join(traceback.format_exception(exc_type, exc_value, exc_traceback)), fg='red')
metrics = {**runtime_metrics, 'is_failed': True}
if 'is_failed' not in metrics:
click.secho("[Warning] The result of the `postprocess` function misses a key `is_failed` (bool)", fg='yellow')
metrics['is_failed'] = False
if (run_context.output_dir / 'metrics.json').exists():
with (run_context.output_dir / 'metrics.json').open('r') as f:
previous_metrics = json.load(f)
metrics = {
**previous_metrics,
**metrics,
}
with (run_context.output_dir / 'metrics.json').open('w') as f:
json.dump(metrics, f, sort_keys=True, indent=2, separators=(',', ': '))
# To help identify if input files change, we compute and save some metadata.
if is_ci or save_manifests_in_database:
manifest_inputs = run_context.obj.get('manifest-inputs', [run_context.input_path])
input_files = {}
for manifest_input in manifest_inputs:
manifest_input = Path(manifest_input)
if manifest_input.is_dir():
for idx, path in enumerate(manifest_input.rglob('*')):
if idx >= 200:
break
if not path.is_file():
continue
input_files[path.as_posix()] = file_info(path, config=config)
elif manifest_input.is_file():
input_files.update({manifest_input.as_posix(): file_info(manifest_input, config=config)})
with (run_context.output_dir / 'manifest.inputs.json').open('w') as f:
json.dump(input_files, f, indent=2)
outputs_manifest = save_outputs_manifest(run_context.output_dir, config=config)
output_data = {
'storage': total_storage(outputs_manifest),
}
if save_manifests_in_database:
if run_context.input_path.is_file():
click.secho('WARNING: saving the manifests in the database is only implemented for inputs that are *folders*.', fg='yellow', err=True)
else:
from .utils import copy
copy(run_context.output_dir / 'manifest.inputs.json', run_context.input_path / 'manifest.inputs.json')
copy(run_context.output_dir / 'manifest.outputs.json', run_context.input_path / 'manifest.outputs.json')
if not run_context.obj.get('offline') and not run_context.obj.get('dryrun'):
notify_qa_database(**run_context.obj, metrics=metrics, data=output_data, is_pending=False, is_running=False)
return metrics
@qa.command(context_settings=dict(
ignore_unknown_options=True,
))
@click.pass_context
@click.option('-i', '--input', 'input_path', required=True, type=PathType(), help='Path of the input/recording/test we should work on, relative to the database directory.')
@click.option('-o', '--output', 'output_path', type=PathType(), default=None, help="Custom output directory path. If not provided, defaults to ctx.obj['batch_conf_dir'] / input_path.with_suffix('')")
@click.argument('forwarded_args', nargs=-1, type=click.UNPROCESSED)
def postprocess(ctx, input_path, output_path, forwarded_args):
"""Run only the post-processing, assuming results already exist."""
run_context = RunContext.from_click_run_context(ctx, config)
with redirect_std_streams(run_context.output_dir / 'log.txt', color=ctx.obj['color']):
click.echo(click.style("Outputs: ", fg='cyan') + click.style(str(run_context.output_dir), fg='cyan', bold=True), err=True)
print_url(ctx)
metrics = postprocess_({}, run_context)
if metrics['is_failed']:
click.secho('[ERROR] The run has failed.', fg='red', err=True, bold=True)
click.secho(str(metrics), fg='red')
else:
click.secho(str(metrics), fg='green')
@qa.command(context_settings=dict(
ignore_unknown_options=True,
))
@click.pass_context
@click.option('-i', '--input', 'input_path', required=True, type=PathType(), help='Path of the input/recording/test we should work on, relative to the database directory.')
@click.option('-o', '--output', 'output_path', type=PathType(), default=None, help="Custom output directory path. If not provided, defaults to ctx.obj['batch_conf_dir'] / input_path.with_suffix('')")
def sync(ctx, input_path, output_path):
"""Updates the database metrics using metrics.json"""
run_context = RunContext.from_click_run_context(ctx, config)
if (run_context.output_dir / 'metrics.json').exists():
with (run_context.output_dir / 'metrics.json').open('r') as f:
metrics = json.load(f)
notify_qa_database(**ctx.obj, metrics=metrics, is_pending=False, is_running=False)
click.secho(str(metrics), fg='green')
@qa.command(context_settings=dict(
ignore_unknown_options=True,
))
@click.pass_context
@click.option('--output-id', 'output_id', help="Custom output directory path. If not provided, defaults to ctx.obj['batch_conf_dir'] / input_path.with_suffix('')")
def wait(ctx, output_id):
from .api import get_output
while True:
output = get_output(output_id)
click.secho("...waiting")
if output["is_pending"]:
time.sleep(5)
continue
break
exit(0 if not output["is_failed"] else 1)
runners_config = config.get('runners', {})
if 'default' in runners_config:
default_runner = runners_config['default']
else:
task_runners = [r for r in runners_config if r not in ['default', 'local']]
default_runner = task_runners[0] if task_runners else 'local'
lsf_config = config['lsf'] if 'lsf' in config else config.get('runners', {}).get('lsf', {})
if 'lsf' in config:
default_runner = 'lsf'
if default_runner == 'lsf' and os.name=='nt':
default_runner = 'local'
local_config = config.get('runners', {}).get('local', {})
@qa.command(context_settings=dict(
ignore_unknown_options=True,
))
@click.option('--batch', '-b', 'batches', multiple=True, help="We run over all inputs+configs+database in those batches")
@click.option('--batches-file', 'batches_files', type=PathType(), default=default_batches_files, multiple=True, help="YAML files listing batches of inputs+configs+database.")
@click.option('--tuning-search', 'tuning_search_dict', help='string containing JSON describing the tuning parameters to explore')
@click.option('--tuning-search-file', type=PathType(), default=None, help='tuning file describing the tuning parameters to explore')
@click.option('--no-wait', is_flag=True, help="If true, returns as soon as the jobs are sent, otherwise waits for completion.")
@click.option('--list', 'list_contexts', is_flag=True, help="Print as JSON details about each run we would do.")
@click.option('--list-output-dirs', is_flag=True, help="Only print the prefixes for the results of each batch we run on.")
@click.option('--list-inputs', is_flag=True, help="Print to stdout a JSON with a list of the inputs we would call qa run on.")
@click.option('--runner', default=default_runner, help="Run the runs locally or using a task queue like Celery, LSF...")
@click.option('--local-concurrency', default=os.environ.get('QA_BATCH_CONCURRENCY', local_config.get('concurrency')), type=int, help="joblib's n_jobs: 0=unlimited, 2=2 at a time, -1=#cpu-1")
@click.option('--lsf-threads', default=lsf_config.get('threads', 0), type=int, help="restrict number of lsf threads to use. 0=no restriction")
@click.option('--lsf-memory', default=lsf_config.get('memory', 0), help="restrict memory (MB) to use. 0=no restriction")
@click.option('--lsf-queue', default=lsf_config.get('queue'), help="LSF queue (-q)")
@click.option('--lsf-fast-queue', default=lsf_config.get('fast_queue', lsf_config.get('queue')), help="Fast LSF queue, for interactive jobs")
@click.option('--lsf-resources', default=lsf_config.get('resources', None), help="LSF resources restrictions (-R)")
@click.option('--lsf-priority', default=lsf_config.get('priority', 0), type=int, help="LSF priority (-sp)")
@click.option('--action-on-existing', default=config.get('outputs', {}).get('action_on_existing', "run"), help="When there are already finished successful runs, whether to do run / postprocess (only) / sync (re-use results) / skip")
@click.option('--action-on-pending', default=config.get('outputs', {}).get('action_on_pending', "wait"), help="When there are already pending runs, whether to do wait (then run) / sync (use those runs' results) / skip (don't run) / continue (run as usual, can cause races)")
@click.option('--prefix-outputs-path', type=PathType(), default=None, help='Custom prefix for the outputs; they will be at $prefix/$output_path')
@click.argument('forwarded_args', nargs=-1, type=click.UNPROCESSED)
@click.pass_context
def batch(ctx, batches, batches_files, tuning_search_dict, tuning_search_file, no_wait, list_contexts, list_output_dirs, list_inputs, runner, local_concurrency, lsf_threads, lsf_memory, lsf_queue, lsf_fast_queue, lsf_resources, lsf_priority, action_on_existing, action_on_pending, prefix_outputs_path, forwarded_args):
"""Run on all the inputs/tests/recordings in a given batch using the LSF cluster."""
if not batches_files:
click.secho(f'WARNING: Could not find how to identify input tests.', fg='red', err=True, bold=True)
click.secho(f'Consider adding to qaboard.yaml something like:\n```\ninputs:\n batches: batches.yaml\n```', fg='red', err=True)
click.secho(f'Where batches.yaml is formatted like in http://qa-docs/docs/batches-running-on-multiple-inputs', fg='red', err=True)
return
if not batches:
if not len(forwarded_args):
click.secho(f'ERROR: you must provide a batch', fg='red', err=True, bold=True)
click.secho(f'Use either `qa batch BATCH`, or `qa batch --batch BATCH_1 --batch BATCH_2`', fg='red', err=True)
exit(1)
single_batch, *forwarded_args = forwarded_args
batches = [single_batch]
print_url(ctx)
existing_outputs = get_outputs(ctx.obj)
command_id = str(uuid.uuid4()) # unique IDs for triggered runs makes it easier to wait/cancel them
os.environ['QA_BATCH']= 'true' # triggered runs will be less verbose than with just `qa run`
os.environ['QA_BATCHES_FILES'] = json.dumps([str(b) for b in batches_files])
dryrun = ctx.obj['dryrun'] or list_output_dirs or list_inputs or list_contexts
should_notify_qa_database = (is_ci or ctx.obj['share']) and not (dryrun or ctx.obj['offline'])
if should_notify_qa_database:
command_data = {
"command_created_at_datetime": datetime.datetime.utcnow().isoformat(),
"argv": sys.argv,
"runner": runner,
**ctx.obj,
}
job_url = getenvs(('BUILD_URL', 'CI_JOB_URL', 'CIRCLE_BUILD_URL', 'TRAVIS_BUILD_WEB_URL')) # jenkins, gitlabCI, circleCI, travisCI
if job_url:
command_data['job_url'] = job_url
if not os.environ.get('QA_BATCH_COMMAND_HIDE_LOGS'):
notify_qa_database(object_type='batch', command={command_id: command_data}, **ctx.obj)
tuning_search, filetype = load_tuning_search(tuning_search_dict, tuning_search_file)
default_runner_options = {
"type": runner,
"command_id": command_id,
}
# Each runner should add what it cares about...
# TODO: Having --runner-X prefixes makes it all a mess, but still the help text is useful
# TODO: It would be nice to generate the CLI help depending on the runner that's chosen, then we could use
if runner == 'lsf':
default_runner_options.update({
"project": lsf_config.get('project', str(project) if project else "qaboard"),
"max_threads": lsf_threads,
"max_memory": lsf_memory,
'resources': lsf_resources,
"queue": lsf_queue,
"fast_queue": lsf_fast_queue,
"user": ctx.obj['user'],
})
if runner == "local":
default_runner_options["concurrency"] = local_concurrency
if runner == 'local' or runner == 'celery':
default_runner_options["cwd"] = ctx.obj['previous_cwd'] if 'previous_cwd' in ctx.obj else os.getcwd()
jobs = JobGroup(job_options=default_runner_options)
inputs_iter = iter_inputs(batches, batches_files, ctx.obj['database'], ctx.obj['configurations'], ctx.obj['platform'], default_runner_options, config, ctx.obj['inputs_settings'])
for run_context in inputs_iter:
input_configuration_str = serialize_config(run_context.configurations)
for tuning_file, tuning_hash, tuning_params in iter_parameters(tuning_search, filetype=filetype, extra_parameters=ctx.obj['extra_parameters']):
if not prefix_outputs_path:
batch_conf_dir = make_batch_conf_dir(
outputs_commit,
ctx.obj["batch_label"],
run_context.platform,
run_context.configurations,
tuning_params,
ctx.obj['share']
)
else:
batch_conf_dir = outputs_commit / prefix_outputs_path
if tuning_file:
batch_conf_dir = batch_conf_dir / Path(tuning_file).stem
from qaboard.conventions import slugify_hash
input_dir = run_context.rel_input_path.with_suffix('')
if len(input_dir.as_posix()) > 90:
input_dir = Path(slugify_hash(input_dir.as_posix(), maxlength=90))
run_context.output_dir = batch_conf_dir / input_dir
if forwarded_args:
run_forwarded_args = [a for a in forwarded_args if a not in ("--keep-previous", "--no-postprocess", "--save-manifests-in-database")]
if run_forwarded_args:
run_context.extra_parameters = {"forwarded_args": run_forwarded_args, **tuning_params}
else:
run_context.extra_parameters = tuning_params
else:
run_context.extra_parameters = tuning_params
if list_output_dirs:
print(run_context.output_dir)
break
if list_inputs:
print(run_context.input_path)
break
matching_existing_outputs = [o for o in existing_outputs.values() if url_to_dir(o['output_dir_url']) == run_context.output_dir]
matching_existing_output = matching_existing_outputs[0] if matching_existing_outputs else None # at most 1, guaranteed by database constraints
is_pending = matching_existing_output['is_pending'] if matching_existing_output else False
is_failed = matching_existing_output['is_failed'] if matching_existing_output else run_context.is_failed()
ran_before = True if matching_existing_output else run_context.ran()
should_run = not is_pending and (action_on_existing=='run' or is_failed or not ran_before)
if not should_run and action_on_existing=='skip':
continue
if is_pending and action_on_pending == 'skip':
continue
if not forwarded_args:
forwarded_args_cli = None
else:
if not on_windows:
# FIXME: we assume no single quotes...
forwarded_args_cli = ' '.join(f"'{a}'" for a in forwarded_args)
else:
from .compat import escaped_for_cli
forwarded_args_cli = ' '.join(escaped_for_cli(a) for a in forwarded_args)
if input_configuration_str == get_default_configuration(ctx.obj['inputs_settings']):
configuration_cli = None
else:
# We can't use --config, or "-c A -c B", until we ensure all clients updated to a version supporting it
if not on_windows:
configuration = input_configuration_str.replace("'", "'\"'\"'") # support single-quotes
configuration_cli = f"--configuration '{configuration}'"
else:
from .compat import escaped_for_cli
configuration_cli = f'--configuration {escaped_for_cli(input_configuration_str)}'
# We could serialize properly the run_context/runner_options, and e.g. call "qa --pickled-cli" and use the CLI command below just for logs...
args = [
f"qa",
f'--share' if ctx.obj["share"] else None,
f'--offline' if ctx.obj['offline'] else None,
f'--label "{ctx.obj["raw_batch_label"]}"' if ctx.obj["raw_batch_label"] != default_batch_label else None,
f'--platform "{run_context.platform}"' if run_context.platform != default_platform else None, # TODO: make it customizable in batches
f'--type "{run_context.type}"' if run_context.type != default_input_type else None,
f'--database "{run_context.database.as_posix()}"' if run_context.database != get_default_database(ctx.obj['inputs_settings']) else None,
configuration_cli,
f'--tuning-filepath "{tuning_file}"' if tuning_params else None,
'run' if should_run else action_on_existing,
f'--input "{run_context.rel_input_path}"',
f'--output "{run_context.output_dir}"' if prefix_outputs_path else None,
forwarded_args_cli if forwarded_args_cli else None,
]
command = ' '.join([arg for arg in args if arg is not None])
click.secho(command, fg='cyan', err=True)
click.secho(f" {run_context.output_dir if run_context.output_dir.is_absolute else run_context.output_dir.relative_to(subproject)}", fg='blue', err=True)
import re
if 'QA_TESTING' in os.environ:
# we want to make sure we test the current code
command = re.sub('^qa', 'python -m qaboard', command)
if str(subproject) != '.':
command = f"cd {subproject} && {command}"
run_context.command = command
run_context.job_options['command_id'] = command_id
job = Job(run_context)
if should_notify_qa_database and not is_pending:
# TODO: accumulate and send all at once to avoid 100s of requests?
db_output = notify_qa_database(**{
**ctx.obj,
**run_context.obj, # for now we don't want to worry about backward compatibility, and input_path being abs vs relative...
"is_pending": True,
})
if db_output: # Note: the ID is already in the matching job above
job.id = db_output["id"]
if is_pending:
wait_command = f"qa wait --output-id {matching_existing_output['id']}"
if action_on_pending=="sync":
job.id = matching_existing_output['id']
job.run_context.command = wait_command
elif action_on_pending=="wait":
job.run_context.command = f"{wait_command} || {job.run_context.command}"
else:
assert action_on_pending=="continue"
jobs.append(job)
if list_contexts:
print(json.dumps([serialize_paths(j.run_context.asdict()) for j in jobs], indent=2))
return
if not dryrun:
is_failed = jobs.start(
blocking=not no_wait,
qa_context=ctx.obj,
)
from .gitlab import gitlab_token, update_gitlab_status
if gitlab_token and jobs and is_ci and 'QABOARD_TUNING' not in os.environ:
update_gitlab_status(commit_id, 'failed' if is_failed else 'success', ctx.obj["batch_label"], f"{len(jobs)} results")
if is_failed and not no_wait:
del os.environ['QA_BATCH'] # restore verbosity
print_url(ctx, status="failure")
exit(1)
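# A few hedged usage sketches for `qa batch`, built only from the options declared
# above (batch names, file names and values below are hypothetical):
#   qa batch my-batch
#   qa batch --batch nightly --batch smoke --runner local --local-concurrency 4
#   qa batch --tuning-search-file tuning.yaml --no-wait my-batch
#   qa batch --list --batches-file qa/batches.yaml my-batch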
@qa.command()
# Do we want this? we could simply use groups not defined in qatools.yaml:artifacts as paths
@click.option('--file', '-f', 'files', multiple=True, help="Save specific files instead of artifacts indicated by yaml file")
@click.option('--exclude', 'excluded_groups', multiple=True, help="Exclude specific artifact groups")
# Do we use this? yes in the API, but let's deprecate and remove for other uses...
@click.option('--out', '-o', 'artifacts_path', default='', help="Destination prefix under which to save artifacts when specific files are given with --file")
@click.argument('groups', nargs=-1, type=click.UNPROCESSED, default=None)
@click.pass_context
def save_artifacts(ctx, files, excluded_groups, artifacts_path, groups):
"""Save the results at a standard location"""
import filecmp
from .config import is_in_git_repo, qatools_config_paths
from .utils import copy, file_info
from .compat import cased_path
click.secho(f"Saving artifacts in: {artifacts_commit}", bold=True, underline=True)
artifacts = {}
if files:
artifacts = {f"__{f}": {"glob": f} for f in files}
else:
if 'artifacts' not in config:
config['artifacts'] = {}
# We support both qaboard.yaml and qatools.yaml for backward compatibility with SIRC's projects
# Default artifacts
config['artifacts']['__qaboard.yaml'] = {"glob": ['qaboard.yaml', 'qatools.yaml']}
config['artifacts']['__qatools'] = {"glob": ['qatools/*', 'qa/*']}
# Handle sub-projects
config['artifacts']['__sub-qaboard.yaml'] = {"glob": [str(p.relative_to(root_qatools).parent / 'qaboard.yaml') for p in qatools_config_paths]}
config['artifacts']['__sub-qatools.yaml'] = {"glob": [str(p.relative_to(root_qatools).parent / 'qatools.yaml') for p in qatools_config_paths]}
config['artifacts']['__metrics.yaml'] = {"glob": config.get('outputs', {}).get('metrics')}
config['artifacts']['__batches.yaml'] = {"glob": default_batches_files}
config['artifacts']['__envrc'] = {"glob": ['.envrc', '**/*.envrc']}
if groups:
if excluded_groups:
groups = [g for g in groups if g not in excluded_groups]
artifacts = {g: config['artifacts'][g] for g in groups if g in config['artifacts'].keys()}
else:
artifacts = config['artifacts']
if 'QA_VERBOSE_VERBOSE' in os.environ: print(artifacts)
if not is_in_git_repo:
click.secho(
"You are not in a git repository, maybe in an artifacts folder. `save_artifacts` is unavailable.",
fg='yellow', dim=True)
exit(1)
for artifact_name, artifact_config in artifacts.items():
click.secho(f'Saving artifacts: {artifact_name}', bold=True)
manifest_path = artifacts_commit / 'manifests' / f'{artifact_name}.json'
manifest_path.parent.mkdir(parents=True, exist_ok=True)
if manifest_path.exists():
with manifest_path.open() as f:
try:
manifest = json.load(f)
except Exception:
manifest = {}
else:
manifest = {}
nb_files = 0
globs = artifact_config.get('glob')
if not isinstance(globs, list):
globs = [globs]
for g in globs:
if not g: continue
for path in Path('.').glob(g):
path = cased_path(path)
if not path.is_file():
continue
if artifacts_path:
destination = artifacts_commit_root / artifacts_path / path
else:
destination = artifacts_commit_root / path
if 'QA_VERBOSE_VERBOSE' in os.environ: print(destination)
if destination.exists() and filecmp.cmp(str(path), str(destination), shallow=True):
# when working on subprojects, the artifact might be copied already,
# but manifests are saved per-subproject
if path.as_posix() not in manifest:
manifest[path.as_posix()] = file_info(path, config=config)
continue
if 'QA_VERBOSE' in os.environ or ctx.obj['dryrun']:
click.secho(str(path), dim=True)
if not ctx.obj['dryrun']:
  copy(path, destination)
  nb_files += 1
manifest[path.as_posix()] = file_info(path, config=config)
if not ctx.obj['dryrun']:
with manifest_path.open('w') as f:
json.dump(manifest, f)
if nb_files > 0:
click.secho(f"{nb_files} files copied")
if os.name == "nt" and not ctx.obj['dryrun']:
# [Samsung-SIRC specific]
print("... Fixing linux file permissions")
try:
# Windows does not set file permissions correctly on the shared storage,
# it does not respect umask 0: files are not world-writable.
# Trying to each_file.chmod(0o777) does not work either
# The only option is to make the call from linux.
# We could save a list of paths and chmod them with their parent directories...
# but to make things faster to code, we just "ssh linux chmod everything"
# from qaboard.compat import windows_to_linux_path
# # We can assume SSH to be present on Windows10
# ssh = f"ssh -i \\\\networkdrive\\home\\{user}\\.ssh\\id_rsa -oStrictHostKeyChecking=no"
# chmod = f'{ssh} {user}@{user}-srv \'chmod -R 777 "{windows_to_linux_path(artifacts_commit)}"\''
# print(chmod)
# os.system(chmod)
pass
except Exception as e:
print(f'WARNING: {e}')
# if the commit was deleted, this notification will mark it as good again
notify_qa_database(object_type='commit', **ctx.obj)
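# For reference, a minimal sketch of the `artifacts` section this command reads from
# qaboard.yaml: each group maps to a `glob` that may be a single pattern or a list
# (the group names and patterns below are hypothetical):
#   artifacts:
#     binaries:
#       glob: 'build/bin/*'
#     reports:
#       glob: ['*.html', 'logs/**/*.txt']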
@qa.command()
@click.pass_context
@click.option('--batch', '-b', 'batches', required=True, multiple=True, help="Only check bit-accuracy for this batch of inputs+configs+database.")
@click.option('--batches-file', 'batches_files', type=PathType(), default=default_batches_files, multiple=True, help="YAML file listing batches of inputs+config+database selected from the database.")
def check_bit_accuracy_manifest(ctx, batches, batches_files):
"""
Checks that the results in the current output directory
are bit-accurate versus the manifests stored in the database.
"""
from .bit_accuracy import is_bit_accurate
commit_dir = outputs_commit if is_ci else Path()
all_bit_accurate = True
nb_compared = 0
for run_context in iter_inputs(batches, batches_files, ctx.obj['database'], ctx.obj['configurations'], default_platform, {}, config, ctx.obj['inputs_settings']):
nb_compared += 1
if run_context.input_path.is_file():
click.secho('ERROR: check_bit_accuracy_manifest only works for inputs that are folders', fg='red', err=True)
# otherwise the manifest is at
# * input_path.parent / 'manifest.json' in the database
# * input_path.with_suffix('') / 'manifest.json' in the results
# # reference_output_directory = run_context.input_path if run_context.input_path.is_folder() else run_context.input_path.parent
exit(1)
batch_conf_dir = make_batch_conf_dir(Path(), ctx.obj['batch_label'], ctx.obj["platform"], run_context.configurations, ctx.obj['extra_parameters'], ctx.obj['share'])
input_is_bit_accurate = is_bit_accurate(commit_dir / batch_conf_dir, run_context.database, [run_context.rel_input_path])
all_bit_accurate = all_bit_accurate and input_is_bit_accurate
if not all_bit_accurate:
click.secho("\nError: you are not bit-accurate versus the manifest.", fg='red', underline=True, bold=True)
click.secho("Reminder: the manifest lists the expected inputs/outputs for each test. It acts as an explicit gatekeeper against changes", fg='red', dim=True)
if not run_context.database.is_absolute():
click.secho("If that's what you wanted, update and commit all manifests.", fg='red')
# click.secho("If that's what you wanted, update all manifests using:", fg='red')
# click.secho("$ qa batch * --save-manifests-in-database", fg='red')
# click.secho("$ git add # your changes", fg='red')
# click.secho("$ git commit # now retry your CI", fg='red')
else:
click.secho("To update the manifests for all tests, run:", fg='red')
click.secho("$ qa batch --save-manifests --batch *", fg='red')
exit(1)
if not nb_compared:
click.secho("\nWARNING: Nothing was compared! It's not likely to be what you expected...", fg='yellow', underline=True, bold=True)
@qa.command()
@click.pass_context
@click.option(
"--reference",
default=config.get('project', {}).get('reference_branch', 'master'),
help="Branch, tag or commit used as reference."
)
@click.option('--batch', '-b', 'batches', multiple=True, help="Only check bit-accuracy for those batches of inputs+configs+database.")
@click.option('--batches-file', 'batches_files', type=PathType(), default=default_batches_files, multiple=True, help="YAML file listing batches of inputs+config+database selected from the database.")
@click.option('--reference-platform', help="Compare against a difference platform.")
def check_bit_accuracy(ctx, reference, batches, batches_files, reference_platform):
"""
Checks the bit accuracy of the results in the current output directory
versus the latest commit on the reference branch.
"""
from .config import is_in_git_repo, commit_branch, is_ci, outputs_project_root, repo_root
from .bit_accuracy import is_bit_accurate
from .gitlab import lastest_successful_ci_commit
from .conventions import get_commit_dirs
from .git import latest_commit, git_show, git_parents
if not is_in_git_repo:
click.secho("You are not in a git repository, maybe in an artifacts folder. `check_bit_accuracy` is unavailable.", fg='yellow', dim=True)
exit(1)
if is_ci and commit_branch == reference:
click.secho(f'We are on branch {reference}', fg='cyan', bold=True, err=True)
click.secho(f"Comparing bit-accuracy against this commit's ({commit_id[:8]}) parents.", fg='cyan', bold=True, err=True)
# It will work until we try to rebase merge requests.
# We really should use Gitlab's API (or our database) to ask about previous pipelines on the branch
reference_commits = git_parents(commit_id)
else:
click.secho(f'Comparing bit-accuracy versus the latest remote commit of {reference}', fg='cyan', bold=True, err=True)
reference_commits = [latest_commit(reference)]
click.secho(f"{commit_id[:8]} versus {reference_commits}.", fg='cyan', err=True)
# This is where the new results are located
commit_dir = outputs_commit_root if is_ci else Path()
if not batches:
output_directories = list(p.parent.relative_to(commit_dir) for p in (commit_dir / subproject / 'output').rglob('manifest.outputs.json'))
else:
output_directories = []
for run_context in iter_inputs(batches, batches_files, ctx.obj['database'], ctx.obj['configurations'], default_platform, {}, config, ctx.obj['inputs_settings']):
batch_conf_dir = make_batch_conf_dir(subproject, ctx.obj['batch_label'], ctx.obj["platform"], run_context.configurations, ctx.obj["extra_parameters"], ctx.obj['share'])
input_path = run_context.input_path.relative_to(run_context.database)
output_directory = batch_conf_dir / input_path.with_suffix('')
output_directories.append(output_directory)
for reference_commit in reference_commits:
# if the reference commit is pending or failed, we wait or maybe pick a parent
reference_commit = lastest_successful_ci_commit(reference_commit)
click.secho(f'Current directory : {commit_dir}', fg='cyan', bold=True, err=True)
reference_rootproject_ci_dir = outputs_project_root / get_commit_dirs(reference_commit, repo_root)
click.secho(f"Reference directory: {reference_rootproject_ci_dir}", fg='cyan', bold=True, err=True)
all_bit_accurate = True
for o in output_directories:
all_bit_accurate = is_bit_accurate(commit_dir, reference_rootproject_ci_dir, [o], reference_platform) and all_bit_accurate
if not all_bit_accurate:
click.secho(f"\nERROR: results are not bit-accurate to {reference_commits}.", bg='red', bold=True)
if is_ci:
click.secho(f"\nTo investigate, go to", fg='red', underline=True)
for reference_commit in reference_commits:
click.secho(f"https://qa/{project.as_posix()}/commit/{commit_id}?reference={reference_commit}&selected_views=bit_accuracy", fg='red')
exit(1)
from .optimize import optimize
qa.add_command(optimize)
# TODO: split more...
# from .bit_accuracy import check_bit_accuracy, check_bit_accuracy_manifest
# qa.add_command(check_bit_accuracy)
# qa.add_command(check_bit_accuracy_manifest)
@qa.command()
@click.pass_context
def init(ctx):
"""Provide a sample qaboard.yaml configuration."""
from .init import qa_init
qa_init(ctx)
def main():
from .compat import ensure_cli_backward_compatibility
ensure_cli_backward_compatibility()
qa(obj={}, auto_envvar_prefix='QA')
if __name__ == '__main__':
main()
| 50.720455
| 318
| 0.703992
| 6,337
| 44,634
| 4.772921
| 0.117721
| 0.019639
| 0.012167
| 0.014448
| 0.32077
| 0.260762
| 0.2277
| 0.184818
| 0.165113
| 0.136646
| 0
| 0.001866
| 0.171663
| 44,634
| 879
| 319
| 50.778157
| 0.816212
| 0.139759
| 0
| 0.251852
| 0
| 0.035556
| 0.252898
| 0.026664
| 0
| 0
| 0
| 0.002275
| 0.001481
| 1
| 0.019259
| false
| 0.020741
| 0.078519
| 0
| 0.102222
| 0.022222
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b77f58f441974f14bdaad4bde4687feee866e3a
| 5,838
|
py
|
Python
|
20210220_simulation_sample/data_handler.py
|
3x3x3/Presentations
|
3c31b136ed4d9214bb3730fa41a4a575da38edc9
|
[
"MIT"
] | null | null | null |
20210220_simulation_sample/data_handler.py
|
3x3x3/Presentations
|
3c31b136ed4d9214bb3730fa41a4a575da38edc9
|
[
"MIT"
] | null | null | null |
20210220_simulation_sample/data_handler.py
|
3x3x3/Presentations
|
3c31b136ed4d9214bb3730fa41a4a575da38edc9
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import threading
import time
import global_def as gd
from db_reader import DbReaderDef, DbReaer
from queue import Queue, Empty
class DataHandlerThd(threading.Thread):
def __init__(self, req_queue: Queue, rcv_queue: Queue, db_host: str, db_port: int, db_user: str, db_pw: str, db_name: str, db_char_set: str = 'utf8'):
threading.Thread.__init__(self)
self._db_host = db_host
self._db_port = db_port
self._db_user = db_user
self._db_pw = db_pw
self._db_name = db_name
self._db_char_set = db_char_set
self._req_queue = req_queue
self._rcv_queue = rcv_queue
self.is_run = False
def _send_err_msg(self, msg: str) -> None:
self._rcv_queue.put({
gd.KEY_NM_EVT: gd.EVT_TYPE_ERR,
gd.KEY_NM_MSG: msg
})
def _read_db(self, req: dict) -> bool:
req_date = int(req.get(gd.KEY_NM_DATE, 0))
tbl_infos = req.get(gd.KEY_NM_TBL_INFOS, None)
if 19900101 > req_date or 30000101 < req_date:
self._send_err_msg('Invalid Date')
return False
if list != type(tbl_infos) or 0 == len(tbl_infos):
self._send_err_msg('Invalid Table Infos1')
return False
db_readers = []
for reader_idx, tbl_info in enumerate(tbl_infos):
tbl_nm = tbl_info.get(gd.KEY_NM_TBL_NM, None)
col_nms = tbl_info.get(gd.KEY_NM_COL_NMS, [])
if tbl_nm is None or 0 == len(col_nms):
self._send_err_msg('Invalid Table Infos2')
return False
db_reader = DbReaer(reader_idx, req_date, tbl_nm, col_nms, self._db_host, self._db_port, self._db_user, self._db_pw, self._db_name, self._db_char_set)
db_readers.append(db_reader)
for db_reader in db_readers:
db_reader.read_thd.start()
is_st_read = False
is_error = False
while not is_st_read:
for db_reader in db_readers:
thd_state: int = db_reader.get_thd_state()
if DbReaderDef.STATE_ERROR == thd_state:
is_st_read = True
is_error = True
break
elif DbReaderDef.STATE_READY == thd_state:
break
else:
is_st_read = True
time.sleep(0.5)
if is_error:
for db_reader in db_readers:
db_reader.set_stop_thd()
time.sleep(1)
self._send_err_msg('Error in DbReaderThd1')
return False
# Read the first row from each reader
empty_reader_idxs = []
for reader_idx, db_reader in enumerate(db_readers):
if not db_reader.read_next_data():
empty_reader_idxs.append(reader_idx)
# Remove the readers that had no data; delete from the end so earlier indices stay valid
for reader_idx in reversed(empty_reader_idxs):
  del db_readers[reader_idx]
reader_cnt = len(db_readers)
fin_readers = []
while 0 < reader_cnt:
min_rtime_idx = -1
min_rtime = 9999999999999
find_min_ts = False
is_exist_fin_readers = False
for idx, db_reader in enumerate(db_readers):
row: list = db_reader.last_data
# When the reader's last row is empty
if row is None:
thd_state = db_reader.get_thd_state()
if DbReaderDef.STATE_WORKING == thd_state:
time.sleep(0.5)
db_reader.read_next_data()
find_min_ts = False
break
elif DbReaderDef.STATE_FINISHED == thd_state:
fin_readers.append(idx)
is_exist_fin_readers = True
continue
elif DbReaderDef.STATE_ERROR == thd_state:
self._send_err_msg('Error in DbReaderThd2')
fin_readers.append(idx)
is_exist_fin_readers = True
continue
pk_rtime = row[0]
if min_rtime > pk_rtime:
min_rtime = pk_rtime
min_rtime_idx = idx
find_min_ts = True
# If we found the oldest (minimum rtime) row
if find_min_ts:
target_reader: DbReaer = db_readers[min_rtime_idx]
self._rcv_queue.put({
gd.KEY_NM_EVT: gd.EVT_TYPE_READ_DB,
gd.KEY_NM_IDX: target_reader.reader_idx,
gd.KEY_NM_DATA: target_reader.last_data
})
target_reader.read_next_data()
# If any readers have finished
if is_exist_fin_readers:
fin_readers.sort(reverse=True)
for fin_reader_idx in fin_readers:
del db_readers[fin_reader_idx]
reader_cnt = len(db_readers)
fin_readers.clear()
self._rcv_queue.put({
gd.KEY_NM_EVT: gd.EVT_TYPE_FIN
})
return True
def run(self):
self.is_run = True
while self.is_run:
try:
req = self._req_queue.get(True, 1)
evt_type = req.get(gd.KEY_NM_EVT)
if gd.EVT_TYPE_READ_DB == evt_type:
print(f'Read DB Start!, data: {req}')
self._read_db(req)
print(f'Read DB End!, data: {req}')
elif gd.EVT_TYPE_FIN == evt_type:
break
except Empty:
pass
except Exception as e:
self.is_run = False
break
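# A minimal usage sketch for DataHandlerThd, assuming global_def exposes the key and
# event constants referenced above (host, credentials, table and column names below
# are hypothetical):
#
#   from queue import Queue
#   req_queue, rcv_queue = Queue(), Queue()
#   thd = DataHandlerThd(req_queue, rcv_queue, 'localhost', 3306, 'user', 'pw', 'market_db')
#   thd.start()
#   req_queue.put({
#       gd.KEY_NM_EVT: gd.EVT_TYPE_READ_DB,
#       gd.KEY_NM_DATE: 20210219,
#       gd.KEY_NM_TBL_INFOS: [{gd.KEY_NM_TBL_NM: 'ticks', gd.KEY_NM_COL_NMS: ['rtime', 'price']}],
#   })
#   while True:
#       evt = rcv_queue.get()
#       if evt[gd.KEY_NM_EVT] in (gd.EVT_TYPE_FIN, gd.EVT_TYPE_ERR):
#           break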
| 32.797753
| 162
| 0.52381
| 725
| 5,838
| 3.838621
| 0.194483
| 0.043119
| 0.027668
| 0.017966
| 0.348904
| 0.252964
| 0.185411
| 0.148042
| 0.099892
| 0.071146
| 0
| 0.013576
| 0.406989
| 5,838
| 177
| 163
| 32.983051
| 0.790295
| 0.017814
| 0
| 0.255639
| 0
| 0
| 0.026192
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030075
| false
| 0.007519
| 0.037594
| 0
| 0.112782
| 0.015038
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b7a04ca06d8701872be7f11c6588abbce31dce4
| 16,294
|
py
|
Python
|
hypothesis/_settings.py
|
EnjoyLifeFund/macHighSierra-py36-pkgs
|
5668b5785296b314ea1321057420bcd077dba9ea
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
hypothesis/_settings.py
|
EnjoyLifeFund/macHighSierra-py36-pkgs
|
5668b5785296b314ea1321057420bcd077dba9ea
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
hypothesis/_settings.py
|
EnjoyLifeFund/macHighSierra-py36-pkgs
|
5668b5785296b314ea1321057420bcd077dba9ea
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
# coding=utf-8
#
# This file is part of Hypothesis (https://github.com/DRMacIver/hypothesis)
#
# Most of this work is copyright (C) 2013-2015 David R. MacIver
# ([email protected]), but it contains contributions by others. See
# https://github.com/DRMacIver/hypothesis/blob/master/CONTRIBUTING.rst for a
# full list of people who may hold copyright, and consult the git log if you
# need to determine who owns an individual contribution.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
#
# END HEADER
"""A module controlling settings for Hypothesis to use in falsification.
Either an explicit settings object can be used or the default object on
this module can be modified.
"""
from __future__ import division, print_function, absolute_import
import os
import inspect
import warnings
import threading
from collections import namedtuple
from hypothesis.errors import InvalidArgument, HypothesisDeprecationWarning
from hypothesis.configuration import hypothesis_home_dir
from hypothesis.utils.conventions import not_set
from hypothesis.utils.dynamicvariables import DynamicVariable
__all__ = [
'settings',
]
all_settings = {}
_db_cache = {}
class SettingsProperty(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, type=None):
if obj is None:
return self
else:
try:
return obj.__dict__[self.name]
except KeyError:
raise AttributeError(self.name)
def __set__(self, obj, value):
obj.__dict__[self.name] = value
def __delete__(self, obj):
try:
del obj.__dict__[self.name]
except KeyError:
raise AttributeError(self.name)
@property
def __doc__(self):
return '\n'.join((
all_settings[self.name].description,
'default value: %r' % (getattr(settings.default, self.name),)
))
default_variable = DynamicVariable(None)
class SettingsMeta(type):
def __init__(self, *args, **kwargs):
super(SettingsMeta, self).__init__(*args, **kwargs)
@property
def default(self):
return default_variable.value
@default.setter
def default(self, value):
if default_variable.value is not None:
raise AttributeError('Cannot assign settings.default')
self._assign_default_internal(value)
def _assign_default_internal(self, value):
default_variable.value = value
class settings(SettingsMeta('settings', (object,), {})):
"""A settings object controls a variety of parameters that are used in
falsification. These may control both the falsification strategy and the
details of the data that is generated.
Default values are picked up from the settings.default object and
changes made there will be picked up in newly created settings.
"""
_WHITELISTED_REAL_PROPERTIES = [
'_database', '_construction_complete', 'storage'
]
__definitions_are_locked = False
_profiles = {}
def __getattr__(self, name):
if name in all_settings:
d = all_settings[name].default
if inspect.isfunction(d):
d = d()
return d
else:
raise AttributeError('settings has no attribute %s' % (name,))
def __init__(
self,
parent=None,
**kwargs
):
self._construction_complete = False
self._database = kwargs.pop('database', not_set)
explicit_kwargs = list(kwargs)
defaults = parent or settings.default
if defaults is not None:
for setting in all_settings.values():
if kwargs.get(setting.name, not_set) is not_set:
kwargs[setting.name] = getattr(defaults, setting.name)
if self._database is not_set:
self._database = defaults.database
for name, value in kwargs.items():
if name not in all_settings:
raise InvalidArgument(
'Invalid argument %s' % (name,))
setattr(self, name, value)
self.storage = threading.local()
self._construction_complete = True
for k in explicit_kwargs:
deprecation = all_settings[k].deprecation
if deprecation:
note_deprecation(deprecation, self)
def defaults_stack(self):
try:
return self.storage.defaults_stack
except AttributeError:
self.storage.defaults_stack = []
return self.storage.defaults_stack
def __call__(self, test):
test._hypothesis_internal_use_settings = self
return test
@classmethod
def define_setting(
cls, name, description, default, options=None, deprecation=None,
):
"""Add a new setting.
- name is the name of the property that will be used to access the
setting. This must be a valid python identifier.
- description will appear in the property's docstring
- default is the default value. This may be a zero argument
function in which case it is evaluated and its result is stored
the first time it is accessed on any given settings object.
"""
if settings.__definitions_are_locked:
from hypothesis.errors import InvalidState
raise InvalidState(
'Settings have been locked and may no longer be defined.'
)
if options is not None:
options = tuple(options)
if default not in options:
raise InvalidArgument(
'Default value %r is not in options %r' % (
default, options
)
)
all_settings[name] = Setting(
name, description.strip(), default, options, deprecation)
setattr(settings, name, SettingsProperty(name))
@classmethod
def lock_further_definitions(cls):
settings.__definitions_are_locked = True
def __setattr__(self, name, value):
if name in settings._WHITELISTED_REAL_PROPERTIES:
return object.__setattr__(self, name, value)
elif name == 'database':
if self._construction_complete:
raise AttributeError(
'Settings objects are immutable and may not be assigned to'
' after construction.'
)
else:
return object.__setattr__(self, '_database', value)
elif name in all_settings:
if self._construction_complete:
raise AttributeError(
'Settings objects are immutable and may not be assigned to'
' after construction.'
)
else:
setting = all_settings[name]
if (
setting.options is not None and
value not in setting.options
):
raise InvalidArgument(
'Invalid %s, %r. Valid options: %r' % (
name, value, setting.options
)
)
return object.__setattr__(self, name, value)
else:
raise AttributeError('No such setting %s' % (name,))
def __repr__(self):
bits = []
for name in all_settings:
value = getattr(self, name)
bits.append('%s=%r' % (name, value))
bits.sort()
return 'settings(%s)' % ', '.join(bits)
@property
def database(self):
"""An ExampleDatabase instance to use for storage of examples. May be
None.
If this was explicitly set at settings instantiation then that
value will be used (even if it was None). If not and the
database_file setting is not None this will be lazily loaded as
an SQLite backed ExampleDatabase using that file the first time
this property is accessed on a particular thread.
"""
try:
if self._database is not_set and self.database_file is not None:
from hypothesis.database import ExampleDatabase
from hypothesis.database.backend import SQLiteBackend
if self.database_file not in _db_cache:
_db_cache[self.database_file] = (
ExampleDatabase(
backend=SQLiteBackend(self.database_file)))
return _db_cache[self.database_file]
if self._database is not_set:
self._database = None
return self._database
except AttributeError:
import traceback
traceback.print_exc()
assert False
def __enter__(self):
default_context_manager = default_variable.with_value(self)
self.defaults_stack().append(default_context_manager)
default_context_manager.__enter__()
return self
def __exit__(self, *args, **kwargs):
default_context_manager = self.defaults_stack().pop()
return default_context_manager.__exit__(*args, **kwargs)
@staticmethod
def register_profile(name, settings):
"""registers a collection of values to be used as a settings profile.
These settings can be loaded in by name. Enable different defaults for
different settings.
- settings is a settings object
"""
settings._profiles[name] = settings
@staticmethod
def get_profile(name):
"""Return the profile with the given name.
- name is a string representing the name of the profile
to load
An InvalidArgument exception will be thrown if the
profile does not exist
"""
try:
return settings._profiles[name]
except KeyError:
raise InvalidArgument(
"Profile '{0}' has not been registered".format(
name
)
)
@staticmethod
def load_profile(name):
"""Loads in the settings defined in the profile provided If the profile
does not exist an InvalidArgument will be thrown.
Any setting not defined in the profile will be the library
defined default for that setting
"""
settings._assign_default_internal(settings.get_profile(name))
Setting = namedtuple(
'Setting', (
'name', 'description', 'default', 'options', 'deprecation'))
settings.define_setting(
'min_satisfying_examples',
default=5,
description="""
Raise Unsatisfiable for any tests which do not produce at least this many
values that pass all assume() calls and which have not exhaustively covered the
search space.
"""
)
settings.define_setting(
'max_examples',
default=200,
description="""
Once this many satisfying examples have been considered without finding any
counter-example, falsification will terminate.
"""
)
settings.define_setting(
'max_iterations',
default=1000,
description="""
Once this many iterations of the example loop have run, including ones which
failed to satisfy assumptions and ones which produced duplicates, falsification
will terminate.
"""
)
settings.define_setting(
'max_shrinks',
default=500,
description="""
Once this many successful shrinks have been performed, Hypothesis will assume
something has gone a bit wrong and give up rather than continuing to try to
shrink the example.
"""
)
settings.define_setting(
'timeout',
default=60,
description="""
Once this many seconds have passed, falsify will terminate even
if it has not found many examples. This is a soft rather than a hard
limit - Hypothesis won't e.g. interrupt execution of the called
function to stop it. If this value is <= 0 then no timeout will be
applied.
"""
)
settings.define_setting(
'derandomize',
default=False,
description="""
If this is True then hypothesis will run in deterministic mode
where each falsification uses a random number generator that is seeded
based on the hypothesis to falsify, which will be consistent across
multiple runs. This has the advantage that it will eliminate any
randomness from your tests, which may be preferable for some situations.
It does have the disadvantage of making your tests less likely to
find novel breakages.
"""
)
settings.define_setting(
'strict',
default=os.getenv('HYPOTHESIS_STRICT_MODE') == 'true',
description="""
If set to True, anything that would cause Hypothesis to issue a warning will
instead raise an error. Note that new warnings may be added at any time, so
running with strict set to True means that new Hypothesis releases may validly
break your code.
You can enable this setting temporarily by setting the HYPOTHESIS_STRICT_MODE
environment variable to the string 'true'.
"""
)
settings.define_setting(
'database_file',
default=lambda: (
os.getenv('HYPOTHESIS_DATABASE_FILE') or
os.path.join(hypothesis_home_dir(), 'examples.db')
),
description="""
database: An instance of hypothesis.database.ExampleDatabase that will be
used to save examples to and load previous examples from. May be None
in which case no storage will be used.
"""
)
class Verbosity(object):
def __repr__(self):
return 'Verbosity.%s' % (self.name,)
def __init__(self, name, level):
self.name = name
self.level = level
def __eq__(self, other):
return isinstance(other, Verbosity) and (
self.level == other.level
)
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return self.level
def __lt__(self, other):
return self.level < other.level
def __le__(self, other):
return self.level <= other.level
def __gt__(self, other):
return self.level > other.level
def __ge__(self, other):
return self.level >= other.level
@classmethod
def by_name(cls, key):
result = getattr(cls, key, None)
if isinstance(result, Verbosity):
return result
raise InvalidArgument('No such verbosity level %r' % (key,))
Verbosity.quiet = Verbosity('quiet', 0)
Verbosity.normal = Verbosity('normal', 1)
Verbosity.verbose = Verbosity('verbose', 2)
Verbosity.debug = Verbosity('debug', 3)
Verbosity.all = [
Verbosity.quiet, Verbosity.normal, Verbosity.verbose, Verbosity.debug
]
ENVIRONMENT_VERBOSITY_OVERRIDE = os.getenv('HYPOTHESIS_VERBOSITY_LEVEL')
if ENVIRONMENT_VERBOSITY_OVERRIDE:
DEFAULT_VERBOSITY = Verbosity.by_name(ENVIRONMENT_VERBOSITY_OVERRIDE)
else:
DEFAULT_VERBOSITY = Verbosity.normal
settings.define_setting(
'verbosity',
options=Verbosity.all,
default=DEFAULT_VERBOSITY,
description='Control the verbosity level of Hypothesis messages',
)
settings.define_setting(
name='stateful_step_count',
default=50,
description="""
Number of steps to run a stateful program for before giving up on it breaking.
"""
)
settings.define_setting(
'perform_health_check',
default=True,
description=u"""
If set to True, Hypothesis will run a preliminary health check before
attempting to actually execute your test.
"""
)
settings.lock_further_definitions()
settings.register_profile('default', settings())
settings.load_profile('default')
assert settings.default is not None
def note_deprecation(message, s=None):
# If *either* self or the current default are non-strict
# then this should not be an error. This is to handle e.g. the case
# where defining a new setting while non-strict updates a
# profile which is strict. This should not be an error, but
# using the profile here would cause it to be one.
if s is None:
s = settings.default
assert s is not None
strict = settings.default.strict and s.strict
verbosity = s.verbosity
warning = HypothesisDeprecationWarning(message)
if strict:
raise warning
elif verbosity > Verbosity.quiet:
warnings.warn(warning, stacklevel=3)
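# A minimal usage sketch for the profile API and context manager defined above
# (the profile name and values are hypothetical):
#
#   settings.register_profile('ci', settings(max_examples=1000, timeout=120))
#   settings.load_profile('ci')
#   with settings(max_examples=10):
#       ...  # code here sees the temporarily overridden defaults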
| 31.334615
| 79
| 0.650669
| 1,949
| 16,294
| 5.283222
| 0.238584
| 0.013985
| 0.022434
| 0.009226
| 0.098378
| 0.07643
| 0.063417
| 0.050403
| 0.033019
| 0.033019
| 0
| 0.002965
| 0.275562
| 16,294
| 519
| 80
| 31.39499
| 0.869366
| 0.167178
| 0
| 0.228883
| 0
| 0
| 0.228496
| 0.013117
| 0
| 0
| 0
| 0
| 0.008174
| 1
| 0.092643
| false
| 0.00545
| 0.038147
| 0.027248
| 0.220708
| 0.00545
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b7c945d6b1d560f6d85d5ab876aed99787d4072
| 1,989
|
py
|
Python
|
code/MergeTrack/print_max_reid_distance.py
|
MTonyM/PReMVOS
|
3d01f0c6156628083a4c8441b4b57622c500e04e
|
[
"MIT"
] | 140
|
2018-10-25T11:58:34.000Z
|
2022-01-18T15:29:38.000Z
|
code/MergeTrack/print_max_reid_distance.py
|
MTonyM/PReMVOS
|
3d01f0c6156628083a4c8441b4b57622c500e04e
|
[
"MIT"
] | 18
|
2018-11-21T04:48:03.000Z
|
2020-09-14T09:30:56.000Z
|
code/MergeTrack/print_max_reid_distance.py
|
MTonyM/PReMVOS
|
3d01f0c6156628083a4c8441b4b57622c500e04e
|
[
"MIT"
] | 32
|
2018-10-25T11:58:57.000Z
|
2021-12-27T06:13:45.000Z
|
import glob
from numpy.linalg import norm
import numpy as np
from copy import deepcopy as copy
from MergeTrack.merge_functions import read_ann,read_props
from MergeTrack.ReID_net_functions import ReID_net_init, add_ReID
input_images = "DAVIS/val17/"
input_proposals = "DAVIS/ReID_props/"
first_frame_anns = "DAVIS/val17-ff/"
output_images = "DAVIS/final_results/"
output_proposals = "DAVIS/final_props/"
ReID_net = ReID_net_init()
dataset_max_distances = []
for video_fn in sorted(glob.glob(input_images+"*/")):
video_proposals = []
templates = []
for image_fn in sorted(glob.glob(video_fn+"*")):
ann_fn = image_fn.replace(input_images,first_frame_anns).replace('.jpg','.png')
if glob.glob(ann_fn):
new_templates = read_ann(ann_fn)
new_templates = add_ReID(new_templates, image_fn, ReID_net)
# import json
# ff_fn = image_fn.replace(input_images, "DAVIS/ff_test/").replace('.jpg', '.json')
# with open(ff_fn, "r") as f:
# new_templates = json.load(f)
# for id, templ in enumerate(new_templates):
# templ['ReID'] = np.array(templ['ReID'])
# templ['id'] = id
templates = templates + new_templates
prop_fn = image_fn.replace(input_images,input_proposals).replace('.jpg','.json')
proposals = read_props(prop_fn)
video_proposals.append(proposals)
ReIDs = [[prop['ReID'] for prop in props] for props in video_proposals]
template_ReIDs = [templ['ReID'] for templ in templates]
all_reid_distances = [np.array([[norm(c_reid - gt_reid) for c_reid in curr] for gt_reid in template_ReIDs]) for curr in ReIDs]
all_reid_distances_no_inf = copy(all_reid_distances)
for mat in all_reid_distances_no_inf:
mat[np.isinf(mat)] = 0
max_distances = np.array([mat.max(axis=1) if mat.shape[1]>0 else np.zeros((mat.shape[0])) for mat in all_reid_distances_no_inf]).max(axis=0)
print(max_distances)
dataset_max_distances.append(max_distances.max())
print(np.array(dataset_max_distances).max())
| 38.25
| 142
| 0.723479
| 307
| 1,989
| 4.407166
| 0.250814
| 0.053215
| 0.059128
| 0.035477
| 0.144863
| 0.102735
| 0.042868
| 0.042868
| 0
| 0
| 0
| 0.005914
| 0.149824
| 1,989
| 52
| 143
| 38.25
| 0.794205
| 0.128708
| 0
| 0
| 0
| 0
| 0.063731
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.171429
| 0
| 0.171429
| 0.057143
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b7e597bab0f3442569b2c0f944ee9a51ebdc5c8
| 5,004
|
py
|
Python
|
tests/unit/html/test_search_page.py
|
tttgm/basketball_reference_web_scraper
|
2dbd9d7bacbcfee17f08bcf8629bd7d50893761d
|
[
"MIT"
] | 325
|
2015-10-27T03:15:49.000Z
|
2022-03-16T06:49:12.000Z
|
tests/unit/html/test_search_page.py
|
tttgm/basketball_reference_web_scraper
|
2dbd9d7bacbcfee17f08bcf8629bd7d50893761d
|
[
"MIT"
] | 173
|
2018-10-16T04:11:05.000Z
|
2022-03-29T17:52:08.000Z
|
tests/unit/html/test_search_page.py
|
tttgm/basketball_reference_web_scraper
|
2dbd9d7bacbcfee17f08bcf8629bd7d50893761d
|
[
"MIT"
] | 97
|
2016-04-09T19:11:28.000Z
|
2022-03-21T09:57:50.000Z
|
from unittest import TestCase
from unittest.mock import patch, MagicMock, PropertyMock
from basketball_reference_web_scraper.html import SearchPage, PlayerSearchResult
class TestSearchPage(TestCase):
def test_nba_aba_baa_players_content_query(self):
self.assertEqual(
SearchPage(html=MagicMock()).nba_aba_baa_players_content_query,
'//div[@id="searches"]/div[@id="players"]',
)
@patch.object(SearchPage, 'nba_aba_baa_players_content_query', new_callable=PropertyMock)
def test_nba_aba_baa_players_pagination_links_query(self, mocked_query):
mocked_query.return_value = "some query"
self.assertEqual(
SearchPage(html=MagicMock()).nba_aba_baa_players_pagination_links_query,
'some query/div[@class="search-pagination"]/a',
)
@patch.object(SearchPage, 'nba_aba_baa_players_content_query', new_callable=PropertyMock)
def test_nba_aba_baa_player_search_items_query(self, mocked_query):
mocked_query.return_value = "some query"
self.assertEqual(
SearchPage(html=MagicMock()).nba_aba_baa_player_search_items_query,
'some query/div[@class="search-item"]',
)
@patch.object(SearchPage, 'nba_aba_baa_players_pagination_links_query', new_callable=PropertyMock)
def test_nba_aba_baa_players_pagination_links(self, mocked_query):
mocked_query.return_value = "some query"
html = MagicMock()
links = [MagicMock(return_value="some"), MagicMock(return_value="links")]
html.xpath = MagicMock(return_value=links)
self.assertEqual(
SearchPage(html=html).nba_aba_baa_players_pagination_links,
links,
)
html.xpath.asset_called_once_with("some query")
@patch.object(SearchPage, 'nba_aba_baa_players_pagination_links', new_callable=PropertyMock)
def test_nba_aba_baa_players_pagination_url_is_none_when_no_pagination_links(self, mocked_links):
mocked_links.return_value = []
self.assertIsNone(SearchPage(html=MagicMock()).nba_aba_baa_players_pagination_url)
@patch.object(SearchPage, 'nba_aba_baa_players_pagination_links', new_callable=PropertyMock)
def test_nba_aba_baa_players_pagination_url_is_first_link_href_attrib_when_single_link_is_not_at_end_of_results(
self,
mocked_links
):
link = MagicMock()
link.text_content = MagicMock(return_value="jaebaebae")
link.attrib = MagicMock()
link.attrib.__getitem__ = MagicMock(return_value="some text content")
mocked_links.return_value = [link]
self.assertEqual(
SearchPage(html=MagicMock()).nba_aba_baa_players_pagination_url,
"some text content",
)
link.attrib.__getitem__.assert_called_once_with("href")
@patch.object(SearchPage, 'nba_aba_baa_players_pagination_links', new_callable=PropertyMock)
def test_nba_aba_baa_players_pagination_url_is_none_when_single_link_is_at_end_of_results(
self,
mocked_links
):
link = MagicMock()
link.text_content = MagicMock(return_value="Previous 100 Results")
mocked_links.return_value = [link]
self.assertIsNone(SearchPage(html=MagicMock()).nba_aba_baa_players_pagination_url)
link.text_content.assert_called_once_with()
@patch.object(SearchPage, 'nba_aba_baa_players_pagination_links', new_callable=PropertyMock)
def test_nba_aba_baa_players_pagination_url_is_second_link_href_attrib_when_multiple_links(
self,
mocked_links
):
first_link = MagicMock()
first_link.attrib = MagicMock()
first_link.attrib.__getitem__ = MagicMock(return_value="some text content")
second_link = MagicMock()
second_link.attrib = MagicMock()
second_link.attrib.__getitem__ = MagicMock(return_value="some other text content")
mocked_links.return_value = [first_link, second_link]
self.assertEqual(
SearchPage(html=MagicMock()).nba_aba_baa_players_pagination_url,
"some other text content",
)
second_link.attrib.__getitem__.assert_called_once_with("href")
@patch.object(SearchPage, 'nba_aba_baa_player_search_items_query', new_callable=PropertyMock)
def test_nba_aba_baa_players(self, mocked_query):
mocked_query.return_value = "some query"
first_result = MagicMock(name="first html result")
second_result = MagicMock(name="second html result")
third_result = MagicMock(name="third html result")
html = MagicMock()
html.xpath = MagicMock(return_value=[first_result, second_result, third_result])
self.assertEqual(
SearchPage(html=html).nba_aba_baa_players,
[
PlayerSearchResult(html=first_result),
PlayerSearchResult(html=second_result),
PlayerSearchResult(html=third_result),
]
)
| 42.40678
| 116
| 0.711631
| 590
| 5,004
| 5.567797
| 0.130508
| 0.047489
| 0.071233
| 0.112024
| 0.700761
| 0.683105
| 0.625571
| 0.592998
| 0.592998
| 0.471233
| 0
| 0.000751
| 0.202038
| 5,004
| 117
| 117
| 42.769231
| 0.821938
| 0
| 0
| 0.375
| 0
| 0
| 0.130695
| 0.079736
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0.09375
| false
| 0
| 0.03125
| 0
| 0.135417
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b7fad07fb9954bb150ff9b9a3fc6a0e8f2cf560
| 19,891
|
py
|
Python
|
cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/WindGustFromAlgorithm.py
|
srcarter3/awips2
|
37f31f5e88516b9fd576eaa49d43bfb762e1d174
|
[
"Apache-2.0"
] | null | null | null |
cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/WindGustFromAlgorithm.py
|
srcarter3/awips2
|
37f31f5e88516b9fd576eaa49d43bfb762e1d174
|
[
"Apache-2.0"
] | null | null | null |
cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/WindGustFromAlgorithm.py
|
srcarter3/awips2
|
37f31f5e88516b9fd576eaa49d43bfb762e1d174
|
[
"Apache-2.0"
] | 1
|
2021-10-30T00:03:05.000Z
|
2021-10-30T00:03:05.000Z
|
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# ----------------------------------------------------------------------------
# This software is in the public domain, furnished "as is", without technical
# support, and with no warranty, express or implied, as to its usefulness for
# any purpose.
#
# New_WindGust_Tool
#
# Authors: Tom Mazza NWS Charleston, WV Created: 04/25/03
# Matthew H. Belk NWS Taunton, MA Last Modified: 06/16/03
# Mathewson FSL Modified: 3/30/04
# -change in model names to OB3 names
#----------------------------------------------------------------------------
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 02/10/2016 5283 nabowle Remove NGM support.
# ----------------------------------------------------------------------------
##
# This is an absolute override file, indicating that a higher priority version
# of the file will completely replace a lower priority version of the file.
##
ToolType = "numeric"
WeatherElementEdited = "WindGust"
from numpy import *
# without this, the builtin max() is used
from numpy import max
import LogStream
# You can screen the elements for which your tool will appear by using
# a ScreenList. For example:
#ScreenList = ["MixHgt","WindGust", "TransWind"]
# Set up variables to be solicited from the user:
VariableList = [
("Momentum algorithm:", "RUC", "radio", ["RUC", "Power"]),
("Use BL Winds:", "No", "radio", ["Yes", "No"]),
("Model:", "NAM12", "radio",
["GFS80", "NAM12", "gfsLR", "RAP40"])
]
#Set up Class
import SmartScript
## For available commands, see SmartScript
toolName = 'WindGustFromAlgorithm'
class Tool (SmartScript.SmartScript):
def __init__(self, dbss):
SmartScript.SmartScript.__init__(self, dbss)
# Define your site ID
self._SITEID = "BOX"
# Required Method: Execute
# Called once for each grid
# Fill in the arguments you want to use -- WeatherElement1, WeatherElement2...
def execute(self, Wind, MixHgt, Topo, GridTimeRange):
"Determines WindGust using one of two algorithms, one from the RUC or a power relationship. This tool assumes your mixing height has already been adjusted for your surface temperatures."
sounding = self.makeNumericSounding(self._model, "wind",
self._modelCube, GridTimeRange,
noDataError=0)
########################################################################
# If we don't have a model sounding at this point in time, or the
# size of the grids do not match
if sounding is None: # or sounding[0].shape != Topo.shape:
LogStream.logProblem(toolName, ': cannot obtain a Wind sounding')
return None # leaves current WindGust grid alone
########################################################################
# If we made it this far, split up the sounding into its component
# cubes of height and wind
(gh_Cube, wind_Cube) = sounding
if gh_Cube is None:
LogStream.logProblem(toolName, 'gh_Cube is None')
return None
if wind_Cube is None:
LogStream.logProblem(toolName, 'wind_Cube is None')
return None
########################################################################
# Convert topography from feet to meters
self._topo = self.ftToM(Topo)
########################################################################
# Initialize a cube to hold BL wind grids
bl_WindCube = {}
########################################################################
# Cycle through all the BL levels we have for this model
for lvl in self._blCube:
####################################################################
# Initialize BL wind grid for this level
grid = None
####################################################################
# If this is the NAM40/20 model
if self._model.find('NAM40') != -1:
################################################################
# Get BL winds from other NAM40/NAM20 file
tempModel = self._model.replace('NAM40', 'NAM20')
################################################################
# Try to get model BL winds for this time
grid = self.getGrids(tempModel, "wind", lvl, GridTimeRange,
noDataError=0)
####################################################################
# Otherwise
else:
################################################################
# Try to get model BL winds for this time
grid = self.getGrids(self._model, "Wind", lvl, GridTimeRange,
noDataError=0)
####################################################################
# Add this grid to the BL wind cube - if it is valid
if grid is not None:
################################################################
# Store the wind speeds at this BL level
bl_WindCube[lvl] = grid[0]
####################################################################
# Otherwise
else:
################################################################
# Store a placeholder
bl_WindCube[lvl] = None
########################################################################
# Convert mixing height from ft ASL to m ASL
mixHgt_m = self.ftToM(MixHgt)
########################################################################
# Make a 3D mask where the model sounding level is ABOVE the ground,
# but below the Mixing Height
self._mixedLayer = (gh_Cube >= self._topo) & (gh_Cube <= mixHgt_m)
########################################################################
# Method to compute WindGust using a version of the RUC technique
# adapted by Matthew H. Belk (BOX).
########################################################################
# Initialize WindGust using current 10m Wind speeds - (mag, dir)
WindGust = Wind[0]
########################################################################
# Move vertically through the model BL cube
for lvl in self._blCube:
####################################################################
# Make a mask where this BL surface is at or below the MixHgt
blMask = MixHgt <= self._blHgt[lvl]
####################################################################
# If there are any points in the mixed layer at this surface, and
# there actually is a wind grid
if any(blMask) and bl_WindCube[lvl] is not None:
################################################################
# Get wind magnitude at current level - remember model winds
# are in m/s and need to be in kts for comparison
curMag = self.mpsToKt(bl_WindCube[lvl])
################################################################
# Compute difference between wind at this level and SFC wind
# where points are in the mixed layer
deltaSpd = curMag - Wind[0]
################################################################
# Get the depth of the mixed layer to this point (m AGL)
deltaZ = self._blHgt[lvl]
################################################################
                # Scale the wind speed difference by a height-dependent
                # factor, 1 - min(deltaZ / 2000, 0.5), which ranges from
                # 1.0 just above the ground down to 0.5 at 1000 m AGL and
                # higher
delta = max(1.0 - deltaZ/2000.0, 0.5)
################################################################
                # Employ the power relationship if selected. It works with how
                # much lower than one the linear factor is: that difference is
                # 0 just above the surface and grows to 0.5 at 1000 m or more
                # above the surface. The power relationship doubles the
                # difference, squares it, then halves it again (i.e. maps it
                # to 2 * difference ** 2), so 0 stays 0, 0.5 stays 0.5, but
                # 0.25 becomes 0.125. Subtracting the result from one gives a
                # new, equal or larger factor by which to multiply the
                # potential wind gust, so the gust potential decreases more
                # slowly at first with height and more rapidly later on,
                # reaching the same factor at 1000 m and more above the
                # surface. The resulting wind gust is always equal to or
                # greater than using the RUC algorithm straight up.
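                # Worked example (illustrative only): with the linear factor
                # delta = 0.75 (deltaZ = 500 m), the power form gives
                # 1 - (2 * 0.25)**2 / 2 = 0.875, so the gust is reduced less
                # aggressively than the straight 0.75 factor would reduce it.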
if self._algorithm == 'Power':
delta = 1 - (pow((2 * (1 - delta)), 2)) / 2
################################################################
# Adjust wind speed difference by chosen coefficient
deltaSpd *= delta
gustV = Wind[0] + deltaSpd
################################################################
# Make a mask where this WindGust is > current WindGust
newGust = gustV > WindGust
################################################################
# Assign new WindGust where new WindGust is greater and the
# surface is still within the mixed layer
WindGustMask = newGust & blMask
WindGust[WindGustMask] = gustV[WindGustMask]
########################################################################
# Move vertically through the model cube
for i in xrange(gh_Cube.shape[0]):
####################################################################
# If there are any points in the mixed layer at this surface
            if self._mixedLayer[i].any():
################################################################
# Get wind magnitude at current level - remember model winds
# are in m/s and need to be in kts for comparison
curMag = self.mpsToKt(wind_Cube[0][i])
################################################################
# Compute difference between wind at this level and SFC wind
# where points are in the mixed layer
deltaSpd = curMag - Wind[0]
################################################################
# Get the depth of the mixed layer to this point (m AGL)
deltaZ = gh_Cube[i] - self._topo
################################################################
                # Scale the wind speed difference by a height-dependent
                # factor, 1 - min(deltaZ / 2000, 0.5), which ranges from
                # 1.0 just above the ground down to 0.5 at 1000 m AGL and
                # higher
                delta = max(1.0 - deltaZ/2000.0, 0.5)
################################################################
                # Employ the power relationship if selected - see the detailed
                # explanation and worked example in the boundary layer loop
                # above; the same adjustment is applied here using the depth
                # of the mixed layer at this model surface.
if self._algorithm == 'Power':
delta = 1 - (pow((2 * (1 - delta)), 2)) / 2
################################################################
# Adjust wind speed difference by chosen coefficient
deltaSpd *= delta
gustV = Wind[0] + deltaSpd
################################################################
# Make a mask where this WindGust is > current WindGust
newGust = gustV > WindGust
################################################################
# Assign new WindGust where new WindGust is greater and the
# surface is still within the mixed layer
WindGustMask = newGust & self._mixedLayer[i]
WindGust[WindGustMask] = gustV[WindGustMask]
########################################################################
# Return the computed WindGust
return WindGust
# Optional Methods
# These methods can have the additional argument:
# ToolTimeRange -- selected time range over which we are running the tool
def preProcessTool(self, varDict):
# Called once at beginning of Tool
# Cannot have WeatherElement or Grid arguments
########################################################################
# Get site ID
        try:
            siteID = self.mutableID().siteID()
        except Exception:
            siteID = self._SITEID
########################################################################
# Get name of chosen model - and fix it up so we can use it later on.
# This will grab the latest version of the chosen model from the D2D
# netCDF files.
self._model = "%s_D2D_%s" % (siteID, varDict["Model:"])
########################################################################
# Get chosen algorithm
self._algorithm = varDict["Momentum algorithm:"]
########################################################################
# Get answer if we should use BL winds
useBLwinds = varDict["Use BL Winds:"]
########################################################################
# Initialize a list of model levels
self._modelCube = []
########################################################################
# Determine model levels available for each model
if self._model.find( 'GFS80') != -1 or \
self._model.find( 'GFS') != -1:
self._modelCube = ["MB850", "MB700", "MB500", "MB400", "MB300"]
self._blCube = []
elif self._model.find( 'NAM12') != -1:
self._modelCube = ["MB1000", "MB950", "MB900", "MB850", "MB800",
"MB750", "MB700", "MB650", "MB600", "MB550",
"MB500", "MB450", "MB400", "MB350"]
self._blCube = ["BL030", "BL03060", "BL6090", "BL90120", "BL12015"]
elif self._model.find( 'NAM40') != -1 or \
self._model.find( 'NAM20') != -1:
self._modelCube = ["MB975", "MB950", "MB925", "MB900", "MB875",
"MB850", "MB825", "MB800", "MB775", "MB750",
"MB725", "MB700", "MB675", "MB650", "MB625",
"MB600", "MB550", "MB500", "MB450", "MB400",
"MB350", "MB300"]
self._blCube = ["BL030", "BL03060", "BL6090", "BL90120", "BL120150"]
elif self._model.find( 'gfsLR') != -1:
self._modelCube = ["MB1000", "MB850", "MB700", "MB500", "MB300"]
self._blCube = []
elif self._model.find( 'RAP40') != -1:
self._modelCube = ["MB1000", "MB950", "MB900", "MB850", "MB800",
"MB750", "MB700", "MB650", "MB600", "MB550",
"MB500", "MB450", "MB400", "MB350", "MB300"]
self._blCube = ["BL030", "BL6090", "BL15018"]
########################################################################
# If we should not use the BL winds
        if useBLwinds == 'No':
####################################################################
# Reset the levels in the BL cube so we don't do anything
self._blCube = []
########################################################################
# Determine height of all possible BL levels available for each model.
# If level is not at a fixed height AGL, use the hydrostatic equation.
# Assume the density of the air is 1 kg/m3 and gravity is 9.80 m/s^2.
# The height will be in m AGL at the center of the layer. Remember
# there are 100 Pa per 1 mb.
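        # Illustrative check of that conversion: BL030 is the 0-30 mb AGL
        # layer, so its center is 15 mb above the ground;
        # 15 mb * 100 Pa/mb / (1 kg/m^3 * 9.8 m/s^2) is roughly 153 m AGL,
        # which is the 'BL030' value stored below.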
self._blHgt = {'BL030' : (15.0 * 100.0/ 9.8),
'BL3060' : (45.0 * 100.0 / 9.8),
'BL03060' : (45.0 * 100.0 / 9.8),
'BL6090' : (75.0 * 100.0 / 9.8),
'BL90120' : (105.0 * 100.0 / 9.8),
'BL12015' : (135.0 * 100.0 / 9.8),
'BL120150': (135.0 * 100.0 / 9.8),
'BL15018' : (165.0 * 100.0 / 9.8),
'FH1829' : 1829.0,
'FH2743' : 2743.0,
'FH3658' : 3658.0
}
LogStream.logDebug(toolName, ': preProcessTool complete.')
| 48.045894
| 195
| 0.42934
| 1,904
| 19,891
| 4.448004
| 0.26208
| 0.003306
| 0.01228
| 0.005668
| 0.459558
| 0.414571
| 0.388003
| 0.380446
| 0.380446
| 0.380446
| 0
| 0.045883
| 0.32286
| 19,891
| 413
| 196
| 48.162228
| 0.582894
| 0.394902
| 0
| 0.259843
| 0
| 0.007874
| 0.114477
| 0.002428
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023622
| false
| 0
| 0.031496
| 0
| 0.094488
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b7fd5f816b4e255d1e40adf591dc8b3e21efaa2
| 2,291
|
py
|
Python
|
CH04_Iterators_and_Generators/4.4.Implementing_the_iterator_protocol.py
|
Chang-Liu-TAMU/Python-Cookbook-reading
|
7b974c32f77b4b3d7cfeed30d1671081057c566f
|
[
"MIT"
] | null | null | null |
CH04_Iterators_and_Generators/4.4.Implementing_the_iterator_protocol.py
|
Chang-Liu-TAMU/Python-Cookbook-reading
|
7b974c32f77b4b3d7cfeed30d1671081057c566f
|
[
"MIT"
] | null | null | null |
CH04_Iterators_and_Generators/4.4.Implementing_the_iterator_protocol.py
|
Chang-Liu-TAMU/Python-Cookbook-reading
|
7b974c32f77b4b3d7cfeed30d1671081057c566f
|
[
"MIT"
] | null | null | null |
# @Time: 2022/4/12 20:50
# @Author: chang liu
# @Email: [email protected]
# @File:4.4.Implementing_the_iterator_protocol.py
################ clean version #########################
# class Node:
# def __init__(self, val):
# self._value = val
# self._children = []
#
# def __repr__(self):
# return "Node({!r})".format(self._value)
#
# def add_child(self, node):
# self._children.append(node)
#
# def __iter__(self):
# return iter(self._children)
#
# def depth_first(self):
# yield self
# for c in self:
# yield from c.depth_first()
############# some messy version ####################
class Node:
def __init__(self, value):
self._value = value
self._children = []
def __repr__(self):
return "Node({!r})".format(self._value)
def add_child(self, node):
self._children.append(node)
def __iter__(self):
return iter(self._children)
# def iter(self):
# return iter(self._children)
def depth_first(self):
return DepthFirstIterator(self)
# def __iter__(self):
# return DepthFirstIterator(self)
class DepthFirstIterator:
'''
DFS traversal
'''
def __init__(self, start_node):
self._node = start_node
self._children_iter = None
self._child_iter = None
def __iter__(self):
return self
def __next__(self):
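        # Three cases: (1) first call - create an iterator over the direct
        # children and emit the start node itself; (2) a child's depth-first
        # iterator is active - keep draining it until it raises StopIteration;
        # (3) otherwise advance to the next child and recurse. StopIteration
        # from the exhausted children iterator ends the whole traversal.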
        if self._children_iter is None:
self._children_iter = iter(self._node)
# self._children_iter = self._node.iter()
return self._node
elif self._child_iter:
try:
following = next(self._child_iter)
return following
except StopIteration:
self._child_iter = None
return next(self)
else:
self._child_iter = next(self._children_iter).depth_first()
return next(self)
# return next(self._child_iter)
root = Node(0)
left = Node(1)
right = Node(2)
left.add_child(Node(3))
left.add_child(Node(4))
right.add_child(Node(5))
right.add_child(Node(6))
root.add_child(left)
root.add_child(right)
for i in root.depth_first():
print(i)
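# Expected output (depth-first order):
# Node(0), Node(1), Node(3), Node(4), Node(2), Node(5), Node(6)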
# for i in root:
# print(i)
| 22.91
| 70
| 0.572676
| 274
| 2,291
| 4.434307
| 0.255474
| 0.118519
| 0.064198
| 0.069959
| 0.341564
| 0.302058
| 0.257613
| 0.257613
| 0.257613
| 0.257613
| 0
| 0.01218
| 0.283282
| 2,291
| 100
| 71
| 22.91
| 0.727771
| 0.338717
| 0
| 0.136364
| 0
| 0
| 0.007158
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0
| 0
| 0.090909
| 0.409091
| 0.022727
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b800dc76b871db39c746e292171f32b25ee44ff
| 29,762
|
py
|
Python
|
FGPVAE_model.py
|
metodj/FGP-VAE
|
607559ab465b29878f10a5d95b8e3c6ec8d94e0c
|
[
"MIT"
] | 3
|
2021-01-27T14:06:01.000Z
|
2021-09-09T12:10:34.000Z
|
FGPVAE_model.py
|
metodj/FGP-VAE
|
607559ab465b29878f10a5d95b8e3c6ec8d94e0c
|
[
"MIT"
] | null | null | null |
FGPVAE_model.py
|
metodj/FGP-VAE
|
607559ab465b29878f10a5d95b8e3c6ec8d94e0c
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
import tensorflow_probability as tfp
import numpy as np
import pickle
import random
from utils import gauss_cross_entropy
tfk = tfp.math.psd_kernels
def _add_diagonal_jitter(matrix, jitter=1e-6):
return tf.linalg.set_diag(matrix, tf.linalg.diag_part(matrix) + jitter)
class FGP:
dtype = np.float64
def __init__(self, init_amplitude, init_length_scale, GP_joint, L_w,
object_vectors_init=None, object_prior_corr=False, K_obj_normalize=False):
"""
GP class for FGPVAE.
:param init_amplitude:
:param init_length_scale:
:param GP_joint:
:param L_w: number of local latent channels
        :param object_vectors_init: initialization for object vectors (GP-LVM)
:param object_prior_corr: whether or not correlated object priors are used
:param K_obj_normalize: whether or not to normalize object kernel (linear kernel)
"""
self.object_prior_corr = object_prior_corr
self.K_obj_normalize = K_obj_normalize
if GP_joint:
self.amplitude = tf.Variable(initial_value=init_amplitude,
name="GP_amplitude", trainable=True, dtype=self.dtype)
self.length_scale = tf.Variable(initial_value=init_length_scale,
name="GP_length_scale", trainable=True, dtype=self.dtype)
else:
self.amplitude = tf.constant(init_amplitude, dtype=self.dtype)
self.length_scale = tf.constant(init_length_scale, dtype=self.dtype)
# kernels
self.kernel_local = tfk.ExpSinSquared(amplitude=self.amplitude, length_scale=self.length_scale, period=2*np.pi)
self.kernel_global = tfk.Linear()
# GP-LVM, object vectors
if object_vectors_init is not None:
self.object_vectors = tf.Variable(initial_value=object_vectors_init,
name="GP_LVM_object_vectors",
dtype=self.dtype)
else:
self.object_vectors = None
# number of local (views/angles) channels
self.L_w = L_w
def build_1d_gp_local(self, X, Y, varY, X_test):
"""
Fits GP for local latent channels.
Takes input-output dataset and returns post mean, var, marginal lhood.
This is standard GP regression with heteroscedastic noise.
:param X: inputs tensor (batch, npoints)
:param Y: outputs tensor (batch, npoints)
        :param varY: per-point noise variances of Y (batch, npoints)
:param X_test: (batch, ns) input points to compute post mean + var
Returns:
p_m: (batch, ns) post mean at X_test
p_v: (batch, ns) post var at X_test
logZ: (batch) marginal lhood of each dataset in batch
"""
# Prepare all constants
batch = tf.shape(X)[0]
n = tf.shape(X)[1]
ns = tf.shape(X_test)[1]
# K_x + \sigma_x^*
        K = self.kernel_local.matrix(tf.expand_dims(X, 2), tf.expand_dims(X, 2))  # (batch, n, n)
K = K + tf.matrix_diag(varY) # (batch, n, n)
chol_K = tf.linalg.cholesky(K) # (batch, n, n)
# lhood term 1/3
lhood_pi_term = tf.cast(n, dtype=self.dtype) * np.log(2 * np.pi)
# lhood term 2/3
lhood_logdet_term = 2 * tf.reduce_sum(tf.log(tf.matrix_diag_part(chol_K)), 1) # (batch)
# lhood term 3/3
Y = tf.expand_dims(Y, 2)
iKY = tf.cholesky_solve(chol_K, Y) # (batch, n, 1)
lh_quad_term = tf.matmul(tf.transpose(Y, (0,2,1)), iKY) # (batch, 1, 1)
lh_quad_term = tf.reshape(lh_quad_term, [batch])
# log P(Y|X) = -1/2 * ( n log(2 pi) + Y inv(K+noise) Y + log det(K+noise))
gp_lhood = -0.5 * (lhood_pi_term + lh_quad_term + lhood_logdet_term)
# Compute posterior mean and variances
Ks = self.kernel_local.matrix(tf.expand_dims(X, 2), tf.expand_dims(X_test, 2)) # (batch, n, ns)
Ks_t = tf.transpose(Ks, (0, 2, 1)) # (batch, ns, n)
# posterior mean
p_m = tf.matmul(Ks_t, iKY)
p_m = tf.reshape(p_m, (batch, ns))
# posterior variance
iK_Ks = tf.cholesky_solve(chol_K, Ks) # (batch, n, ns)
Ks_iK_Ks = tf.reduce_sum(Ks * iK_Ks, axis=1) # (batch, ns)
p_v = 1 - Ks_iK_Ks # (batch, ns)
p_v = tf.reshape(p_v, (batch, ns))
return p_m, p_v, gp_lhood, K
def build_1d_gp_global(self, means, vars):
"""
Fits GP for global latent channels.
        :param means: encoder means (batch, npoints)
        :param vars: encoder vars (batch, npoints)
Returns:
p_m: (batch) posterior means
p_v: (batch) post vars
logZ: (batch) product of Gaussians terms
"""
n = tf.shape(means)[1]
sigma_squared_bar = 1 / (tf.reduce_sum(tf.math.reciprocal_no_nan(vars), axis=1) + 1)
mu_bar = sigma_squared_bar * tf.reduce_sum(means * tf.math.reciprocal_no_nan(vars), axis=1)
lhood = tf.log(tf.sqrt(sigma_squared_bar)) + 0.5*tf.math.reciprocal_no_nan(sigma_squared_bar)*mu_bar**2 - \
0.5*tf.cast(n, dtype=self.dtype)*tf.log(2.0*tf.cast(np.pi, dtype=self.dtype)) - \
tf.reduce_sum(tf.log(tf.sqrt(vars)), axis=1) - 0.5*tf.reduce_sum(tf.math.reciprocal_no_nan(vars)*means**2)
return mu_bar, sigma_squared_bar, lhood
@staticmethod
def preprocess_1d_gp_global_correlated_object_priors(means, vars):
"""
Product of Gaussians for each global latent channel. See 2.9 in FGPVAE.tex
N = nr. of digits
N_t = nr. of angles for digit t
:param means: (N, N_t)
:param vars: (N, N_t)
Returns:
bar_means: \Bar{\mu} (1, N,)
bar_vars: \Bar{\sigma}^2 (1, N,)
C_tilde: \Tilde{C} (1, N,)
"""
N_t = tf.shape(means)[1]
N_t = tf.cast(N_t, dtype=tf.float64)
alpha = tf.reduce_sum(tf.math.reciprocal_no_nan(vars), axis=1)
beta = tf.reduce_sum(means / vars, axis=1)
        bar_means = tf.expand_dims(beta / alpha, 0)  # expand_dims to make it compatible with batching later on
        bar_vars = tf.expand_dims(1 / alpha, 0)  # expand_dims to make it compatible with batching later on
# C_1 = (2.0 * np.pi)**(-0.5 * N_t) * tf.reduce_prod(vars**(-0.5), axis=1)
C_1 = (2.0 * np.pi) ** (-0.5 * N_t) * tf.reduce_prod(tf.sqrt(tf.math.reciprocal_no_nan(vars)), axis=1)
C_2 = tf.exp(-0.5*tf.reduce_sum(means**2/vars, axis=1))
C_3 = tf.exp(0.5*beta**2 / alpha)
C_4 = tf.sqrt(2*np.pi/alpha)
        C_tilde = tf.expand_dims(C_1*C_2*C_3*C_4, 0)  # expand_dims to make it compatible with batching later on
# C_tilde = tf.clip_by_value(C_tilde, 1e-90, 100)
bar_vars = tf.clip_by_value(bar_vars, 1e-3, 100)
return bar_means, bar_vars, C_tilde
def kernel_matrix_correlated_object_priors(self, x, y):
"""
Computes object kernel matrix in case correlated object priors are used.
See 2.9 in FGPVAE.tex
:param x: (1, N, 10)
:param y: (1, N, 10)
        Normalization of the object kernel matrix (linear kernel, between -1 and 1) is controlled by self.K_obj_normalize.
:return: object kernel matrix (1, N, N)
"""
# unpack auxiliary data
if self.object_vectors is None:
            x_object, y_object = x[:, :, 2:], y[:, :, 2:]
else:
x_object = tf.gather(self.object_vectors, tf.cast(x[:, :, 0], dtype=tf.int64))
y_object = tf.gather(self.object_vectors, tf.cast(y[:, :, 0], dtype=tf.int64))
# compute kernel matrix
object_matrix = self.kernel_global.matrix(x_object, y_object)
if self.K_obj_normalize: # normalize object matrix
obj_norm = 1 / tf.matmul(tf.math.reduce_euclidean_norm(x_object, axis=2, keepdims=True),
tf.transpose(tf.math.reduce_euclidean_norm(y_object, axis=2, keepdims=True),
perm=[0, 2, 1]))
object_matrix = object_matrix * obj_norm
return object_matrix
def X_matrix(self, x):
"""
        Computes the X matrix. This function is needed (instead of working directly with X) to support joint
        optimization of the GP-LVM object vectors.
        :param x: (1, N, 10)
        Normalization of the object vectors (so that every object vector has norm 1) is controlled by self.K_obj_normalize.
:return:
"""
# unpack auxiliary data
if self.object_vectors is None:
x_object = x[:, :, 2:]
else:
x_object = tf.gather(self.object_vectors, tf.cast(x[:, :, 0], dtype=tf.int64))
if self.K_obj_normalize:
x_object = x_object / tf.math.reduce_euclidean_norm(x_object, axis=2, keepdims=True)
return x_object
def build_1d_gp_global_correlated_object_priors(self, X, Y, varY, X_test, C_tilde, omit_C_tilde,
bayesian_reg_view, EPSILON=1e-6):
"""
See 2.9 in FGPVAE.tex
        Since the direct kernel-matrix implementation leads to numerical issues, we also support fitting the
        global GP via its Bayesian linear regression view.
:param X: auxiliary data, train points of GP (1, N, 10)
:param Y: encoded and processed means for train points (1, N)
:param varY: encoded and processed vars for train points (1, N)
:param X_test: auxiliary data, test points of GP (1, N_s, 10)
:param C_tilde: (1, N)
:param omit_C_tilde: omit C_tilde from derivation and modify cross-entropy term instead
:param bayesian_reg_view: whether or not to use Bayesian regression view to fit global GP.
:param EPSILON: for numerical stability in log()
:return:
"""
if bayesian_reg_view:
p = 8 # dimension of object vectors
N = tf.shape(X)[1]
# get (and normalize) X and X_test
X = self.X_matrix(X) # (1, N, p)
X_T = tf.transpose(X, (0, 2, 1)) # (1, p, N)
X_test = self.X_matrix(X_test) # (1, N_s, p)
X_test_T = tf.transpose(X_test, (0, 2, 1)) # (1, p, N_s)
# posterior params
A = tf.matmul(X_T, tf.matmul(tf.linalg.diag(tf.math.reciprocal_no_nan(varY)), X)) + \
tf.expand_dims(tf.eye(p, dtype=tf.float64), 0) # (1, p, p)
A_inv = tf.linalg.inv(_add_diagonal_jitter(A)) # (1, p, p)
w_bar = tf.linalg.matvec(A_inv, tf.linalg.matvec(X_T, tf.math.reciprocal_no_nan(varY) * Y)) # (1, p)
p_m = tf.linalg.matvec(X_test, w_bar) # (1, N)
p_v = tf.linalg.diag_part(tf.matmul(X_test, tf.matmul(A_inv, X_test_T))) # (1, N)
p_v = tf.clip_by_value(p_v, 1e-6, 100)
# log GPML (marginal likelihood)
lhood_pi_term = tf.cast(N, dtype=tf.float64) * np.log(2 * np.pi) # ()
mid_mat = tf.linalg.diag(varY) - tf.matmul(X, tf.matmul(A_inv, X_T)) # (1, N, N)
Y_tilde = tf.math.reciprocal_no_nan(varY) * Y # (1, N)
lhood_quad_term = tf.reduce_sum(Y_tilde * tf.linalg.matvec(mid_mat, Y_tilde), axis=1) # (1, )
A_chol = tf.linalg.cholesky(_add_diagonal_jitter(A)) # (1, p, p)
lhood_logdet_term = tf.reduce_sum(tf.math.log(tf.math.sqrt(varY)), axis=1) + \
2 * tf.reduce_sum(tf.log(tf.matrix_diag_part(A_chol)), axis=1) # (1, )
gp_lhood = -0.5 * (lhood_pi_term + lhood_quad_term + lhood_logdet_term) # (1, )
# add C_tilde terms
if not omit_C_tilde:
gp_lhood = gp_lhood + tf.reduce_sum(tf.log(C_tilde + EPSILON)) # (1, )
else:
# Prepare all constants
batch = tf.shape(X)[0]
n = tf.shape(X)[1]
ns = tf.shape(X_test)[1]
# K_x + \sigma_x^*
            K = self.kernel_matrix_correlated_object_priors(X, X)  # (batch, n, n)
K = K + tf.matrix_diag(varY) # (batch, n, n)
chol_K = tf.linalg.cholesky(K) # (batch, n, n)
# no cholesky_solve implementation
# inv_K = tf.linalg.inv(_add_diagonal_jitter(K, 1e-2))
# lhood term 1/3
lhood_pi_term = tf.cast(n, dtype=self.dtype) * np.log(2 * np.pi)
# lhood term 2/3
lhood_logdet_term = 2 * tf.reduce_sum(tf.log(tf.matrix_diag_part(chol_K)), 1) # (batch)
# lhood term 3/3
Y = tf.expand_dims(Y, 2) # (batch, n, 1)
iKY = tf.cholesky_solve(_add_diagonal_jitter(chol_K), Y) # (batch, n, 1)
lh_quad_term = tf.matmul(tf.transpose(Y, (0, 2, 1)), iKY) # (batch, 1, 1)
lh_quad_term = tf.reshape(lh_quad_term, [batch])
# no cholesky_solve implementation
# iKY = tf.linalg.matvec(inv_K, Y)
# lh_quad_term = tf.matmul(iKY, tf.transpose(Y, (1, 0))) # (batch, 1, 1)
# lh_quad_term = tf.reshape(lh_quad_term, [batch])
# log P(Y|X) = -1/2 * ( n log(2 pi) + Y inv(K+noise) Y + log det(K+noise))
gp_lhood = -0.5 * (lhood_pi_term + lh_quad_term + lhood_logdet_term)
# add C_tilde terms
if not omit_C_tilde:
gp_lhood = gp_lhood + tf.reduce_sum(tf.log(C_tilde + EPSILON))
# Compute posterior mean and variances
Ks = self.kernel_matrix_correlated_object_priors(X, X_test) # (batch, n, ns)
Ks_t = tf.transpose(Ks, (0, 2, 1)) # (batch, ns, n)
# posterior mean
p_m = tf.matmul(Ks_t, iKY)
# no cholesky_solve implementation
# p_m = tf.matmul(Ks_t, tf.expand_dims(iKY, 2))
p_m = tf.reshape(p_m, (batch, ns))
# posterior variance
iK_Ks = tf.cholesky_solve(_add_diagonal_jitter(chol_K), Ks) # (batch, n, ns)
Ks_iK_Ks = tf.reduce_sum(Ks * iK_Ks, axis=1) # (batch, ns)
# no cholesky_solve implementation
# Ks_iK_Ks = 1 - tf.linalg.diag_part(tf.matmul(Ks, tf.matmul(inv_K, Ks)))
p_v = 1 - Ks_iK_Ks # (batch, ns)
p_v = tf.reshape(p_v, (batch, ns))
p_v = tf.clip_by_value(p_v, 1e-6, 100)
# drop first axis
p_m = tf.squeeze(p_m)
p_v = tf.squeeze(p_v)
gp_lhood = tf.squeeze(gp_lhood)
return p_m, p_v, gp_lhood
def forward_pass_FGPVAE_rotated_mnist(data_batch, beta, vae, GP, N_t, clipping_qs,
bayes_reg_view, omit_C_tilde, C_ma, lagrange_mult, alpha,
kappa, GECO=False):
"""
:param data_batch:
:param beta:
:param vae:
:param GP:
:param N_t:
:param clipping_qs:
    :param bayes_reg_view: whether or not to use the Bayesian regression view for the linear kernel in global channels
:param omit_C_tilde: omit C_tilde from derivation and modify cross-entropy term instead
:param C_ma: average constraint from t-1 step (GECO)
:param lagrange_mult: lambda from t-1 step (GECO)
:param kappa: reconstruction level parameter for GECO
:param alpha: moving average parameter for GECO
:param GECO: whether or not to use GECO algorithm for training
:return:
"""
images, aux_data = data_batch
aux_data = tf.reshape(aux_data, (-1, N_t, 10))
L = vae.L
L_w = GP.L_w
w = tf.shape(images)[1]
h = tf.shape(images)[2]
K = tf.cast(w, dtype=tf.float64) * tf.cast(h, dtype=tf.float64)
b = tf.cast(tf.shape(images)[0], dtype=tf.float64) # batch_size
# ENCODER NETWORK
qnet_mu, qnet_var = vae.encode(images)
qnet_mu = tf.reshape(qnet_mu, (-1, N_t, L))
qnet_var = tf.reshape(qnet_var, (-1, N_t, L))
# clipping of VAE posterior variance
if clipping_qs:
qnet_var = tf.clip_by_value(qnet_var, 1e-3, 100)
# GP
p_m, p_v, lhoods_local, lhoods_global = [], [], [], []
for i in range(L_w): # fit local GPs
p_m_i, p_v_i, lhood_i, K_local = GP.build_1d_gp_local(X=aux_data[:, :, 1], Y=qnet_mu[:, :, i],
varY=qnet_var[:, :, i], X_test=aux_data[:, :, 1])
p_m.append(p_m_i)
p_v.append(p_v_i)
lhoods_local.append(lhood_i)
ce_global_arr = []
for i in range(L_w, L): # fit global GPs
if GP.object_prior_corr:
object_aux_data_filtered = tf.transpose(aux_data[:, ::N_t, :], perm=[1, 0, 2])
bar_means, bar_vars, C_tilde = GP.preprocess_1d_gp_global_correlated_object_priors(qnet_mu[:, :, i],
qnet_var[:, :, i])
p_m_i, p_v_i, lhood_i = GP.build_1d_gp_global_correlated_object_priors(object_aux_data_filtered,
bar_means,
bar_vars,
object_aux_data_filtered,
C_tilde,
bayesian_reg_view=bayes_reg_view,
omit_C_tilde=omit_C_tilde)
if omit_C_tilde:
ce_global_i = gauss_cross_entropy(p_m_i, p_v_i, bar_means, bar_vars)
ce_global_arr.append(ce_global_i)
else:
p_m_i, p_v_i, lhood_i = GP.build_1d_gp_global(means=qnet_mu[:, :, i], vars=qnet_var[:, :, i])
# repeat p_m_i and p_v_i N_t times, since those are shared across all images within one object dataset D_t
p_m_i = tf.tile(tf.expand_dims(p_m_i, 1), [1, N_t])
p_v_i = tf.tile(tf.expand_dims(p_v_i, 1), [1, N_t])
p_m.append(p_m_i)
p_v.append(p_v_i)
lhoods_global.append(lhood_i)
p_m = tf.stack(p_m, axis=2)
p_v = tf.stack(p_v, axis=2)
if GP.object_prior_corr:
# for local channels sum over latent channels and over digits' datasets
# for global channels we only sum over latent channels (as there is only one global GP per channel)
lhoods = tf.reduce_sum(lhoods_local, axis=(0, 1)) + tf.reduce_sum(lhoods_global, axis=0)
# CE (cross-entropy)
if omit_C_tilde:
ce_global = tf.reduce_sum(ce_global_arr)
ce_local = gauss_cross_entropy(p_m[:, :, :L_w], p_v[:, :, :L_w], qnet_mu[:, :, :L_w], qnet_var[:, :, :L_w])
ce_local = tf.reduce_sum(ce_local, (0, 1, 2)) # sum also over digits' datasets
ce_term = ce_global + ce_local
else:
ce_term = gauss_cross_entropy(p_m, p_v, qnet_mu, qnet_var)
ce_term = tf.reduce_sum(ce_term, (0, 1, 2)) # sum also over digits' datasets
# KL part
elbo_kl_part = lhoods - ce_term
else:
lhoods = lhoods_global + lhoods_local
lhoods = tf.reduce_sum(lhoods, axis=0)
# CE (cross-entropy)
ce_term = gauss_cross_entropy(p_m, p_v, qnet_mu, qnet_var)
ce_term = tf.reduce_sum(ce_term, (1, 2))
# KL part
elbo_kl_part = lhoods - ce_term
# SAMPLE
epsilon = tf.random.normal(shape=tf.shape(p_m), dtype=tf.float64)
latent_samples = p_m + epsilon * tf.sqrt(p_v)
# DECODER NETWORK (Gaussian observational likelihood, MSE)
recon_images = vae.decode(tf.reshape(latent_samples, (-1, L)))
if GP.object_prior_corr:
if GECO:
recon_loss = tf.reduce_sum((tf.reshape(images, (-1, N_t, w, h)) - tf.reshape(recon_images,
(-1, N_t, w, h))) ** 2,
axis=[2, 3])
recon_loss = tf.reduce_sum(recon_loss/K - kappa**2)
C_ma = alpha * C_ma + (1 - alpha) * recon_loss / b
# elbo = - (1/L) * KL_term + lagrange_mult * C_ma
# elbo = - (1/b) * KL_term + lagrange_mult * C_ma
# elbo = - KL_term + lagrange_mult * C_ma
elbo = - elbo_kl_part + lagrange_mult * (recon_loss / b + tf.stop_gradient(C_ma - recon_loss / b))
lagrange_mult = lagrange_mult * tf.exp(C_ma)
else:
recon_loss = tf.reduce_sum((tf.reshape(images, (-1, N_t, w, h)) - tf.reshape(recon_images,
(-1, N_t, w, h))) ** 2,
axis=[1, 2, 3])
recon_loss = tf.reduce_sum(recon_loss) / K
elbo = -recon_loss + (beta / L) * elbo_kl_part
else:
if GECO:
recon_loss = tf.reduce_mean((tf.reshape(images, (-1, N_t, w, h)) - tf.reshape(recon_images,
(-1, N_t, w, h))) ** 2,
axis=[2, 3])
N_t = tf.cast(N_t, dtype=tf.float64)
C_ma = alpha * C_ma + (1 - alpha) * tf.reduce_mean(recon_loss - kappa ** 2)
recon_loss = tf.reduce_sum(recon_loss - kappa ** 2)
# elbo = - (1/L) * elbo_kl_part + lagrange_mult * C_ma
# elbo = - (1/b) * elbo_kl_part + lagrange_mult * C_ma
# elbo = - elbo_kl_part + lagrange_mult * C_ma
elbo = - elbo_kl_part + lagrange_mult * (recon_loss / N_t + tf.stop_gradient(C_ma - recon_loss / N_t))
lagrange_mult = lagrange_mult * tf.exp(C_ma)
else:
recon_loss = tf.reduce_sum((tf.reshape(images, (-1, N_t, w, h)) - tf.reshape(recon_images,
(-1, N_t, w, h))) ** 2,
axis=[1, 2, 3])
# ELBO
# beta plays role of sigma_gaussian_decoder here (\lambda(\sigma_y) in Casale paper)
# K and L are not part of ELBO. They are used in loss objective to account for the fact that magnitudes of
# reconstruction and KL terms depend on number of pixels (K) and number of latent GPs used (L), respectively
recon_loss = recon_loss / K
elbo = -recon_loss + (beta/L) * elbo_kl_part
# average across object datasets
elbo = tf.reduce_sum(elbo)
elbo_kl_part = tf.reduce_sum(elbo_kl_part)
recon_loss = tf.reduce_sum(recon_loss)
return elbo, recon_loss, elbo_kl_part, p_m, p_v, qnet_mu, qnet_var, recon_images, latent_samples, C_ma, lagrange_mult
def predict_FGPVAE_rotated_mnist(test_images, test_aux_data, train_images, train_aux_data, vae, GP,
bayes_reg_view, omit_C_tilde, N_t=15, clipping_qs=False):
"""
Get FGPVAE predictions for rotated MNIST test data.
:param test_data_batch:
:param train_images:
:param train_aux_data:
:param vae:
:param GP:
:param N_t:
:param clipping_qs:
:return:
"""
L = vae.L
L_w = GP.L_w
w = tf.shape(train_images)[1]
h = tf.shape(train_images)[2]
train_aux_data = tf.reshape(train_aux_data, (-1, N_t, 10))
test_aux_data = tf.expand_dims(test_aux_data, 1)
# encode train images
qnet_mu, qnet_var = vae.encode(train_images)
qnet_mu = tf.reshape(qnet_mu, (-1, N_t, L))
qnet_var = tf.reshape(qnet_var, (-1, N_t, L))
# clipping of VAE posterior variance
if clipping_qs:
qnet_var = tf.clip_by_value(qnet_var, 1e-3, 100)
# GP, get latent embeddings for test images
p_m, p_v = [], []
for i in range(L_w): # fit local GPs
p_m_i, p_v_i, _ , _= GP.build_1d_gp_local(X=train_aux_data[:, :, 1], Y=qnet_mu[:, :, i],
varY=qnet_var[:, :, i], X_test=test_aux_data[:, :, 1])
p_m.append(p_m_i)
p_v.append(p_v_i)
for i in range(L_w, L): # fit global GPs
if GP.object_prior_corr:
object_aux_data_filtered = tf.transpose(train_aux_data[:, ::N_t, :], perm=[1, 0, 2])
bar_means, bar_vars, C_tilde = GP.preprocess_1d_gp_global_correlated_object_priors(qnet_mu[:, :, i],
qnet_var[:, :, i])
p_m_i, p_v_i, _ = GP.build_1d_gp_global_correlated_object_priors(object_aux_data_filtered,
bar_means,
bar_vars,
object_aux_data_filtered,
C_tilde,
omit_C_tilde=omit_C_tilde,
bayesian_reg_view=bayes_reg_view)
else:
p_m_i, p_v_i, _ = GP.build_1d_gp_global(means=qnet_mu[:, :, i], vars=qnet_var[:, :, i])
p_m.append(tf.expand_dims(p_m_i, 1))
p_v.append(tf.expand_dims(p_v_i, 1))
p_m = tf.stack(p_m, axis=2)
p_v = tf.stack(p_v, axis=2)
# SAMPLE
epsilon = tf.random.normal(shape=tf.shape(p_m), dtype=tf.float64)
latent_samples = p_m + epsilon * tf.sqrt(p_v)
# decode, calculate error (Gaussian observational likelihood, MSE)
recon_images = vae.decode(tf.reshape(latent_samples, (-1, L)))
recon_loss = tf.reduce_mean((test_images - recon_images) ** 2)
return recon_images, recon_loss
def extrapolate_experiment_eval_data(mnist_path, digit, N_t, pred_angle_id=7, nr_angles=16):
"""
Prepare validation dataset for the extrapolate experiment.
:param mnist_path:
:param digit:
:param N_t: how many angles do we observe for each image in test set
:param pred_angle_id: which angle to leave out for prediction
:param nr_angles: size of object dataset
:return:
"""
eval_data_dict = pickle.load(open(mnist_path + 'eval_data{}_not_shuffled.p'.format(digit), 'rb'))
eval_images, eval_aux_data = eval_data_dict["images"], eval_data_dict["aux_data"]
pred_angle_mask = [pred_angle_id + i * nr_angles for i in range(int(len(eval_aux_data) / nr_angles))]
not_pred_angle_mask = [i for i in range(len(eval_images)) if i not in pred_angle_mask]
observed_images = eval_images[not_pred_angle_mask]
observed_aux_data = eval_aux_data[not_pred_angle_mask]
# randomly drop some observed angles
if N_t < 15:
digit_mask = [True]*N_t + [False]*(15-N_t)
mask = [random.sample(digit_mask, len(digit_mask)) for _ in range(int(len(eval_aux_data)/nr_angles))]
flatten = lambda l: [item for sublist in l for item in sublist]
mask = flatten(mask)
observed_images = observed_images[mask]
observed_aux_data = observed_aux_data[mask]
test_images = eval_images[pred_angle_mask]
test_aux_data = eval_aux_data[pred_angle_mask]
return observed_images, observed_aux_data, test_images, test_aux_data
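# Hypothetical usage sketch (the path and digit below are illustrative, not
# taken from the original experiments):
#   obs_imgs, obs_aux, test_imgs, test_aux = extrapolate_experiment_eval_data(
#       mnist_path='data/rotated_mnist/', digit=3, N_t=10)
# This holds out angle 7 of every 16-angle object dataset for prediction and
# randomly keeps 10 of the remaining 15 observed angles.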
def latent_samples_FGPVAE(train_images, train_aux_data, vae, GP, N_t, clipping_qs=False):
"""
Get latent samples for training data. For t-SNE plots :)
:param train_images:
:param train_aux_data:
:param vae:
:param GP:
:param clipping_qs:
:return:
"""
train_aux_data = tf.reshape(train_aux_data, (-1, N_t, 10))
L = vae.L
L_w = GP.L_w
# ENCODER NETWORK
qnet_mu, qnet_var = vae.encode(train_images)
qnet_mu = tf.reshape(qnet_mu, (-1, N_t, L))
qnet_var = tf.reshape(qnet_var, (-1, N_t, L))
# clipping of VAE posterior variance
if clipping_qs:
qnet_var = tf.clip_by_value(qnet_var, 1e-3, 100)
# GP
p_m, p_v = [], []
for i in range(L_w): # fit local GPs
p_m_i, p_v_i, _, _ = GP.build_1d_gp_local(X=train_aux_data[:, :, 1], Y=qnet_mu[:, :, i],
varY=qnet_var[:, :, i], X_test=train_aux_data[:, :, 1])
p_m.append(p_m_i)
p_v.append(p_v_i)
for i in range(L_w, L): # fit global GPs
p_m_i, p_v_i, lhood_i = GP.build_1d_gp_global(means=qnet_mu[:, :, i], vars=qnet_var[:, :, i])
# repeat p_m_i and p_v_i N_t times, since those are shared across all images within one object dataset D_t
p_m_i = tf.tile(tf.expand_dims(p_m_i, 1), [1, N_t])
p_v_i = tf.tile(tf.expand_dims(p_v_i, 1), [1, N_t])
p_m.append(p_m_i)
p_v.append(p_v_i)
p_m = tf.stack(p_m, axis=2)
p_v = tf.stack(p_v, axis=2)
# SAMPLE
epsilon = tf.random.normal(shape=tf.shape(p_m), dtype=tf.float64)
latent_samples = p_m + epsilon * tf.sqrt(p_v)
return latent_samples
| 42.456491
| 123
| 0.556145
| 4,318
| 29,762
| 3.562297
| 0.091014
| 0.007801
| 0.022884
| 0.003641
| 0.584319
| 0.520869
| 0.475166
| 0.436354
| 0.390131
| 0.38012
| 0
| 0.019712
| 0.335226
| 29,762
| 700
| 124
| 42.517143
| 0.757746
| 0.247799
| 0
| 0.47076
| 0
| 0
| 0.004329
| 0.00226
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035088
| false
| 0.002924
| 0.017544
| 0.002924
| 0.090643
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b849b209996da99ee667a5b45419939d4653d3a
| 9,495
|
py
|
Python
|
tests/test_protocols/test_generator.py
|
cyenyxe/agents-aea
|
c2aec9127028ae13def3f69fbc80d35400de1565
|
[
"Apache-2.0"
] | null | null | null |
tests/test_protocols/test_generator.py
|
cyenyxe/agents-aea
|
c2aec9127028ae13def3f69fbc80d35400de1565
|
[
"Apache-2.0"
] | 1
|
2020-02-21T14:28:13.000Z
|
2020-03-05T14:53:53.000Z
|
tests/test_protocols/test_generator.py
|
cyenyxe/agents-aea
|
c2aec9127028ae13def3f69fbc80d35400de1565
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018-2019 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""This module contains the tests for the protocol generator."""
import inspect
import os
import shutil
import tempfile
import yaml
from aea.configurations.base import ProtocolSpecification
from aea.configurations.loader import ConfigLoader
from aea.protocols.generator import ProtocolGenerator
CUR_PATH = os.path.dirname(inspect.getfile(inspect.currentframe())) # type: ignore
class TestGenerateProtocol:
"""Test that the generating a protocol works correctly in correct preconditions."""
@classmethod
def setup_class(cls):
"""Set the test up."""
# Specification
cls.protocol_name = "two_party_negotiation"
cls.specification_file_name = "spec.yaml"
correct_specification = {
"name": cls.protocol_name,
"author": "fetchai",
"version": "0.1.0",
"license": "Apache-2.0",
"description": "A protocol for negotiation over a fixed set of resources involving two parties.",
"speech_acts": {
"cfp": {"query": "DataModel"},
"propose": {"query": "DataModel", "price": "float"},
"accept": {},
"decline": {},
"match_accept": {},
},
}
# Dump the config
cls.cwd = os.getcwd()
# os.mkdir(os.path.join(CUR_PATH, "temp"))
cls.t = tempfile.mkdtemp()
os.chdir(cls.t)
# cls.path_to_specification = os.path.join(".", cls.specification_file_name)
cls.path_to_specification = os.path.join(cls.t, cls.specification_file_name)
yaml.safe_dump(correct_specification, open(cls.path_to_specification, "w"))
# Load the config
cls.config_loader = ConfigLoader(
"protocol-specification_schema.json", ProtocolSpecification
)
cls.protocol_specification = cls.config_loader.load(
open(cls.path_to_specification)
)
# Generate the protocol
cls.protocol_generator = ProtocolGenerator(cls.protocol_specification, cls.t)
cls.protocol_generator.generate()
# Add as module
# dotted_path = "packages.fetchai.protocols." + cls.protocol_name
# import pdb;pdb.set_trace()
# module_object = load_module(dotted_path, Path(os.path.join(cls.t, cls.protocol_name)))
# import_module(dotted_path, module_object)
# sys.modules[dotted_path] = module_object
# def test_exit_code_equal_to_0(self):
# """Test that the exit code is equal to 0."""
# from packages.fetchai.protocols.two_party_negotiation.message import TwoPartyNegotiationMessage
# # from two_party_negotiation.serialization import TwoPartyNegotiationSerializer
# # from two_party_negotiation.message import DataModel
# assert 0 == 0
@classmethod
def teardown_class(cls):
"""Tear the test down."""
os.chdir(cls.cwd)
try:
shutil.rmtree(cls.t)
# os.remove(os.path.join(cls.t, cls.protocol_name))
except (OSError, IOError):
pass
# class TestCases(TestCase):
# """Test class for the light protocol generator."""
#
# def test_all_custom_data_types(self):
# """Test all custom data types."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "all_custom.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
# test_protocol_template.load()
# test_protocol_generator = ProtocolGenerator(test_protocol_template, 'tests')
# test_protocol_generator.generate()
#
# from two_party_negotiation_protocol.message import TwoPartyNegotiationMessage
# from two_party_negotiation_protocol.serialization import TwoPartyNegotiationSerializer
# from two_party_negotiation_protocol.message import DataModel
# from two_party_negotiation_protocol.message import Signature
#
# data_model = DataModel()
# signature = Signature()
# content_list = [data_model, signature]
#
# message = TwoPartyNegotiationMessage(message_id=5, target=4, performative="propose", contents=content_list)
# print(str.format("message is {}", message))
# message.check_consistency()
# serialized_message = TwoPartyNegotiationSerializer().encode(msg=message)
# print(str.format("serialized message is {}", serialized_message))
# deserialised_message = TwoPartyNegotiationSerializer().decode(obj=serialized_message)
# print(str.format("deserialized message is {}", deserialised_message))
#
# assert message == deserialised_message, "Failure"
#
# def test_correct_functionality(self):
# """End to end test of functionality."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "correct_spec.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
# test_protocol_template.load()
# test_protocol_generator = ProtocolGenerator(test_protocol_template, 'tests')
# test_protocol_generator.generate()
#
# from two_party_negotiation_protocol.message import TwoPartyNegotiationMessage
# from two_party_negotiation_protocol.serialization import TwoPartyNegotiationSerializer
# from two_party_negotiation_protocol.message import DataModel
#
# data_model = DataModel()
# content_list = [data_model, 10.5]
#
# message = TwoPartyNegotiationMessage(message_id=5, target=4, performative="propose", contents=content_list)
# print(str.format("message is {}", message))
# message.check_consistency()
# serialized_message = TwoPartyNegotiationSerializer().encode(msg=message)
# print(str.format("serialized message is {}", serialized_message))
# deserialised_message = TwoPartyNegotiationSerializer().decode(obj=serialized_message)
# print(str.format("deserialized message is {}", deserialised_message))
#
# assert message == deserialised_message, "Failure"
#
# def test_missing_name(self):
# """Test missing name handling."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "missing_name.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
#
# self.assertRaises(ProtocolSpecificationParseError, test_protocol_template.load)
#
# def test_missing_description(self):
# """Test missing description handling."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "missing_description.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
#
# assert test_protocol_template.load(), "Failure"
#
# def test_missing_speech_acts(self):
# """Test missing speech acts handling."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "missing_speech_acts.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
#
# self.assertRaises(ProtocolSpecificationParseError, test_protocol_template.load)
#
# def test_extra_fields(self):
# """Test extra fields handling."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "extra_fields.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
#
# assert test_protocol_template.load(), "Failure"
#
# def test_one_document(self):
# """Test one document handling."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "one_document.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
#
# self.assertRaises(ProtocolSpecificationParseError, test_protocol_template.load)
#
# def test_wrong_speech_act_type_sequence_performatives(self):
# """Test wrong speech act handling."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "wrong_speech_act_type_sequence_performatives.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
#
# self.assertRaises(ProtocolSpecificationParseError, test_protocol_template.load)
#
# def test_wrong_speech_act_type_dictionary_contents(self):
# """Test wrong speech act dictionary contents handling."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "wrong_speech_act_type_dictionary_contents.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
#
# self.assertRaises(ProtocolSpecificationParseError, test_protocol_template.load)
| 45.214286
| 128
| 0.685519
| 1,011
| 9,495
| 6.18002
| 0.22453
| 0.080666
| 0.06402
| 0.083547
| 0.601312
| 0.571063
| 0.557939
| 0.529609
| 0.509123
| 0.509123
| 0
| 0.003816
| 0.199579
| 9,495
| 209
| 129
| 45.430622
| 0.818289
| 0.758189
| 0
| 0.041667
| 0
| 0
| 0.133615
| 0.025785
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0.020833
| 0.166667
| 0
| 0.229167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b86ef7acd08f81f39f9fde4c5d2779a3995da3e
| 6,981
|
py
|
Python
|
tabfkioskgoogledrive/MyGDTest3.py
|
isalan06/myflaskapiserver
|
2922f62c9b9ede2b6cba2db774e924b226a120f7
|
[
"MIT"
] | null | null | null |
tabfkioskgoogledrive/MyGDTest3.py
|
isalan06/myflaskapiserver
|
2922f62c9b9ede2b6cba2db774e924b226a120f7
|
[
"MIT"
] | null | null | null |
tabfkioskgoogledrive/MyGDTest3.py
|
isalan06/myflaskapiserver
|
2922f62c9b9ede2b6cba2db774e924b226a120f7
|
[
"MIT"
] | null | null | null |
import os.path
import os
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from datetime import datetime
# If modifying these scopes, delete the file token.json.
SCOPES = ['https://www.googleapis.com/auth/drive']
def main():
"""Shows basic usage of the Drive v3 API.
Prints the names and ids of the first 10 files the user has access to.
"""
creds = None
# The file token.json stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token.json'):
creds = Credentials.from_authorized_user_file('token.json', SCOPES)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
print("Refresh Creds")
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
'client_secrets.json', SCOPES)
creds = flow.run_local_server(port=0)
# Save the credentials for the next run
with open('token.json', 'w') as token:
token.write(creds.to_json())
service = build('drive', 'v3', credentials=creds)
# Call the Drive v3 API
results = service.files().list(
q="mimeType = 'application/vnd.google-apps.folder' and '0ALNhV0hP-QYDUk9PVA' in parents",
pageSize=100, fields="nextPageToken, files(id, name, parents)").execute()
items = results.get('files', [])
pic_id = ''
if not items:
print('No files found.')
else:
print('1st Files:')
for item in items:
if item['name']=='KIOSK Picture':
pic_id = item['id']
print(u'{0} ({1}) - {2}'.format(item['name'], item['id'], item['parents']))
#print(pic_id)
# Check Machine ID
q_str = "mimeType = 'application/vnd.google-apps.folder' and '" + str(pic_id) +"' in parents"
#print(q_str)
results = service.files().list(
q=q_str, #"mimeType = 'application/vnd.google-apps.folder' and '" + str(pic_id) +"' in parents",
pageSize=10, fields="nextPageToken, files(id, name, parents)").execute()
items = results.get('files', [])
bHasBaseFolder = False
sMachineID = 'Test_MachineID'
sMachineID_ID = ''
if not items:
print('No files found.')
else:
print('2nd Files:')
for item in items:
if item['name']==sMachineID:
bHasBaseFolder = True
sMachineID_ID = item['id']
print(u'{0} ({1}) - {2}'.format(item['name'], item['id'], item['parents']))
    if not bHasBaseFolder:
file_metadata = {
'name': sMachineID,
'mimeType': 'application/vnd.google-apps.folder',
'parents': [str(pic_id)]
}
file = service.files().create(body=file_metadata,
fields='id').execute()
sMachineID_ID = str(file.get('id'))
print('Folder ID: %s' % file.get('id'))
#print(sMachineID_ID)
# Check Date Folder
sTodayDateString = datetime.now().strftime("%Y%m%d")
sTodayDate_ID = ''
bHasBaseFolder = False
q_str = "mimeType = 'application/vnd.google-apps.folder' and '" + str(sMachineID_ID) +"' in parents"
results = service.files().list(
q=q_str,
pageSize=10, fields="nextPageToken, files(id, name, parents)").execute()
items = results.get('files', [])
if not items:
print('No files found.')
else:
        print('3rd Files:')
for item in items:
if item['name']==sTodayDateString:
bHasBaseFolder = True
sTodayDate_ID = item['id']
print(u'{0} ({1}) - {2}'.format(item['name'], item['id'], item['parents']))
    if not bHasBaseFolder:
file_metadata = {
'name': sTodayDateString,
'mimeType': 'application/vnd.google-apps.folder',
'parents': [str(sMachineID_ID)]
}
file = service.files().create(body=file_metadata,
fields='id').execute()
sTodayDate_ID = str(file.get('id'))
print('Folder ID: %s' % file.get('id'))
#Check Test Location
    sTestLocation = '我是測試考場(真的是測試用)'  # "I am a test exam venue (really just for testing)"
sTestLocation_ID = ''
bHasBaseFolder = False
q_str = "mimeType = 'application/vnd.google-apps.folder' and '" + str(sTodayDate_ID) +"' in parents"
results = service.files().list(
q=q_str,
pageSize=10, fields="nextPageToken, files(id, name, parents)").execute()
items = results.get('files', [])
if not items:
print('No files found.')
else:
        print('4th Files:')
for item in items:
if item['name']==sTestLocation:
bHasBaseFolder = True
sTestLocation_ID = item['id']
print(u'{0} ({1}) - {2}'.format(item['name'], item['id'], item['parents']))
    if not bHasBaseFolder:
file_metadata = {
'name': sTestLocation,
'mimeType': 'application/vnd.google-apps.folder',
'parents': [str(sTodayDate_ID)]
}
file = service.files().create(body=file_metadata,
fields='id').execute()
sTestLocation_ID = str(file.get('id'))
print('Folder ID: %s' % file.get('id'))
sTestLocation_ID = CreateGoogleDriveFolder(service, sTestLocation, sTodayDate_ID)
print('Check Function')
print(sTestLocation_ID)
def CreateGoogleDriveFolder(service, titlestring, folderid):
returnfolderid=''
bHasFolder = False
q_str = "mimeType = 'application/vnd.google-apps.folder' and '" + str(folderid) +"' in parents"
results = service.files().list(
q=q_str,
pageSize=10, fields="nextPageToken, files(id, name, parents)").execute()
items = results.get('files', [])
if not items:
print('No files found.')
else:
for item in items:
if item['name']==titlestring:
bHasFolder = True
returnfolderid = item['id']
print(u'{0} ({1}) - {2}'.format(item['name'], item['id'], item['parents']))
    if not bHasFolder:
try:
file_metadata = {
'name': titlestring,
'mimeType': 'application/vnd.google-apps.folder',
'parents': [str(folderid)]
}
file = service.files().create(body=file_metadata,
fields='id').execute()
returnfolderid = str(file.get('id'))
print('Folder ID: %s' % file.get('id'))
except Exception as ex:
print(ex)
return returnfolderid
if __name__ == '__main__':
main()
| 37.331551
| 104
| 0.578284
| 792
| 6,981
| 5.018939
| 0.207071
| 0.019371
| 0.055346
| 0.07044
| 0.52956
| 0.523522
| 0.523522
| 0.501132
| 0.423648
| 0.423648
| 0
| 0.007974
| 0.281478
| 6,981
| 187
| 105
| 37.331551
| 0.78449
| 0.091391
| 0
| 0.480263
| 0
| 0.006579
| 0.207885
| 0.053357
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013158
| false
| 0
| 0.046053
| 0
| 0.065789
| 0.144737
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b88bb3938cbed6bd9ddf6e52090c0d588399179
| 2,631
|
py
|
Python
|
clustering/conditional_probs.py
|
griffij/QuakeRates
|
70069bb271a1987e72fcbdf3aa0c0a8a79591580
|
[
"Apache-2.0"
] | null | null | null |
clustering/conditional_probs.py
|
griffij/QuakeRates
|
70069bb271a1987e72fcbdf3aa0c0a8a79591580
|
[
"Apache-2.0"
] | null | null | null |
clustering/conditional_probs.py
|
griffij/QuakeRates
|
70069bb271a1987e72fcbdf3aa0c0a8a79591580
|
[
"Apache-2.0"
] | null | null | null |
"""Calculate conditional probability of a short interevent
time being followed by another short interevent time, compared
with the unconditional probability.
This is used to test whether fault records have memory
"""
import os, sys
from glob import glob
import numpy as np
import matplotlib.pyplot as plt
from QuakeRates.dataman.parse_params import parse_param_file, \
get_event_sets
# Define parameter files
filepath = '../params'
param_file_list = glob(os.path.join(filepath, '*.txt'))
n_samples = 500 # Number of Monte Carlo samples of the eq chronologies
half_n = int(n_samples/2)
plot_dir = './plots_conditional_probs'
if not os.path.exists(plot_dir):
os.makedirs(plot_dir)
# Define subset to take
#faulting_styles = ['Reverse']
#faulting_styles = ['Normal']
#faulting_styles = ['Strike_slip']
faulting_styles = ['all']
tectonic_regions = ['all']
#tectonic_regions = ['Plate_boundary_master', 'Plate_boundary_network']
min_number_events = 10
names, event_sets, event_certainties, num_events = \
get_event_sets(param_file_list, tectonic_regions,
faulting_styles, min_number_events)
# Now loop over paleo-earthquake records
for i, event_set in enumerate(event_sets):
# Generate some chronologies
event_set.gen_chronologies(n_samples, observation_end=2019, min_separation=1)
print(num_events[i])
event_set.calculate_cov() # Calculate interevent times and mean as part of this
# Lists to store results
uncond_probs = []
cond_probs = []
for j, sample in enumerate(event_set.interevent_times.T):
num_less_mean = len(np.argwhere(sample < event_set.means[j]))
uncond_prob_less_mean = num_less_mean/event_set.num_events
count_short = 0
for k, ie_time in enumerate(sample):
if k==0:
ie_time_0 = ie_time
else:
                # compare against the mean of this sampled chronology (index j)
                if ie_time < event_set.means[j] and \
                        ie_time_0 < event_set.means[j]:
count_short += 1
ie_time_0 = ie_time
cond_prob_less_mean = count_short/num_less_mean
uncond_probs.append(uncond_prob_less_mean)
cond_probs.append(cond_prob_less_mean)
print(uncond_probs)
print(cond_probs)
uncond_probs = np.array(uncond_probs)
cond_probs = np.array(cond_probs)
probs_ratio = cond_probs/uncond_probs
print(probs_ratio)
plt.clf()
plt.hist(probs_ratio, bins = 10, facecolor='0.6',
edgecolor='0.2', density=True)
figname = 'conditional_prob_ratio_histogram_%s.png' % names[i]
fig_filename = os.path.join(plot_dir, figname)
plt.savefig(fig_filename)
| 36.541667
| 84
| 0.707336
| 373
| 2,631
| 4.697051
| 0.412869
| 0.03653
| 0.027397
| 0.022831
| 0.01484
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010984
| 0.204105
| 2,631
| 71
| 85
| 37.056338
| 0.825692
| 0.232231
| 0
| 0.038462
| 0
| 0
| 0.044933
| 0.031952
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.096154
| 0
| 0.096154
| 0.076923
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b8e4f10e68bbf6b6e9801bf943ec3cb8b4d1bf7
| 3,120
|
py
|
Python
|
src/dynamic_programming/basic_scripts/value_iteration.py
|
johannesharmse/move_37_course
|
a2060129cbc6fb651113aa18f1a6ea2673845182
|
[
"MIT"
] | 1
|
2019-03-13T06:29:54.000Z
|
2019-03-13T06:29:54.000Z
|
src/dynamic_programming/basic_scripts/value_iteration.py
|
johannesharmse/move_37_course
|
a2060129cbc6fb651113aa18f1a6ea2673845182
|
[
"MIT"
] | null | null | null |
src/dynamic_programming/basic_scripts/value_iteration.py
|
johannesharmse/move_37_course
|
a2060129cbc6fb651113aa18f1a6ea2673845182
|
[
"MIT"
] | null | null | null |
# From The School of AI's Move 37 Course https://www.theschool.ai/courses/move-37-course/
# Coding demo by Colin Skow
# Forked from https://github.com/lazyprogrammer/machine_learning_examples/tree/master/rl
# Credit goes to LazyProgrammer
from __future__ import print_function, division
from builtins import range
# Note: you may need to update your version of future
# sudo pip install -U future
import numpy as np
from grid_world import standard_grid
from utils import print_values, print_policy
# SMALL_ENOUGH is referred to by the mathematical symbol theta in equations
SMALL_ENOUGH = 1e-3
GAMMA = 0.9
ALL_POSSIBLE_ACTIONS = ('U', 'D', 'L', 'R')
def best_action_value(grid, V, s):
# finds the highest value action (max_a) from state s, returns the action and value
best_a = None
best_value = float('-inf')
grid.set_state(s)
# loop through all possible actions to find the best current action
for a in ALL_POSSIBLE_ACTIONS:
    transitions = grid.get_transition_probs(a)
    expected_v = 0
    expected_r = 0
    for (prob, r, state_prime) in transitions:
expected_r += prob * r
expected_v += prob * V[state_prime]
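        # One-step lookahead: expected immediate reward plus the discounted expected value of the successor state.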
v = expected_r + GAMMA * expected_v
if v > best_value:
best_value = v
best_a = a
return best_a, best_value
def calculate_values(grid):
# initialize V(s)
V = {}
states = grid.all_states()
for s in states:
V[s] = 0
# repeat until convergence
# V[s] = max[a]{ sum[s',r] { p(s',r|s,a)[r + gamma*V[s']] } }
while True:
# biggest_change is referred to by the mathematical symbol delta in equations
biggest_change = 0
for s in grid.non_terminal_states():
old_v = V[s]
_, new_v = best_action_value(grid, V, s)
V[s] = new_v
biggest_change = max(biggest_change, np.abs(old_v - new_v))
if biggest_change < SMALL_ENOUGH:
break
return V
def initialize_random_policy():
# policy is a lookup table for state -> action
# we'll randomly choose an action and update as we learn
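    # Note: relies on the module-level 'grid' created in the __main__ block below.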
policy = {}
for s in grid.non_terminal_states():
policy[s] = np.random.choice(ALL_POSSIBLE_ACTIONS)
return policy
def calculate_greedy_policy(grid, V):
policy = initialize_random_policy()
# find a policy that leads to optimal value function
for s in policy.keys():
grid.set_state(s)
# loop through all possible actions to find the best current action
best_a, _ = best_action_value(grid, V, s)
policy[s] = best_a
return policy
if __name__ == '__main__':
    # this grid gives you a reward (step_cost) of -0.5 for every non-terminal state
# we want to see if this will encourage finding a shorter path to the goal
grid = standard_grid(obey_prob=0.8, step_cost=-0.5)
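    # obey_prob=0.8 presumably makes transitions stochastic: the chosen action is followed only 80% of the time.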
# print rewards
print("rewards:")
print_values(grid.rewards, grid)
# calculate accurate values for each square
V = calculate_values(grid)
# calculate the optimum policy based on our values
policy = calculate_greedy_policy(grid, V)
# our goal here is to verify that we get the same answer as with policy iteration
print("values:")
print_values(V, grid)
print("policy:")
print_policy(policy, grid)
| 31.515152
| 89
| 0.709936
| 501
| 3,120
| 4.243513
| 0.351297
| 0.008467
| 0.042333
| 0.026811
| 0.176388
| 0.151929
| 0.122295
| 0.06397
| 0.06397
| 0.06397
| 0
| 0.007235
| 0.202564
| 3,120
| 98
| 90
| 31.836735
| 0.847267
| 0.400321
| 0
| 0.098361
| 0
| 0
| 0.020574
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065574
| false
| 0
| 0.081967
| 0
| 0.213115
| 0.131148
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b8f66af4fc844e8c289287b2a2bc4ba119f529e
| 19,238
|
py
|
Python
|
photoplace/addons/CSVImport/GTKcsvimport.py
|
jriguera/photoplace
|
93674ef8531d0e5b8f26de9ba568ed8e115b27e1
|
[
"Apache-2.0"
] | 10
|
2015-02-20T19:01:19.000Z
|
2021-12-13T23:07:19.000Z
|
photoplace/addons/CSVImport/GTKcsvimport.py
|
jriguera/photoplace
|
93674ef8531d0e5b8f26de9ba568ed8e115b27e1
|
[
"Apache-2.0"
] | 1
|
2020-06-16T13:23:05.000Z
|
2021-02-13T14:14:57.000Z
|
photoplace/addons/CSVImport/GTKcsvimport.py
|
jriguera/photoplace
|
93674ef8531d0e5b8f26de9ba568ed8e115b27e1
|
[
"Apache-2.0"
] | 4
|
2017-03-28T23:06:14.000Z
|
2019-09-25T07:59:36.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GTKcsvimport.py
#
# Copyright 2010-2015 Jose Riguera Lopez <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Parse a CSV to add variables or geolocate photos. GTK User Interface.
"""
__program__ = "photoplace.csvimport"
__author__ = "Jose Riguera Lopez <[email protected]>"
__version__ = "0.1.2"
__date__ = "Dec 2014"
__license__ = "Apache 2.0"
__copyright__ ="(c) Jose Riguera"
import os.path
import csv
import sys
import codecs
import warnings
import gettext
import locale
warnings.filterwarnings('ignore', module='gtk')
try:
import pygtk
pygtk.require("2.0")
import gtk
import gobject
except Exception as e:
warnings.resetwarnings()
print("Warning: %s" % str(e))
print("You don't have the PyGTK 2.0 module installed")
raise
warnings.resetwarnings()
from csvimport import *
# I18N gettext support
__GETTEXT_DOMAIN__ = __program__
__PACKAGE_DIR__ = os.path.abspath(os.path.dirname(__file__))
__LOCALE_DIR__ = os.path.join(__PACKAGE_DIR__, u"locale")
try:
if not os.path.isdir(__LOCALE_DIR__):
print ("Error: Cannot locate default locale dir: '%s'." % (__LOCALE_DIR__))
__LOCALE_DIR__ = None
locale.setlocale(locale.LC_ALL,"")
#gettext.bindtextdomain(__GETTEXT_DOMAIN__, __LOCALE_DIR__)
t = gettext.translation(__GETTEXT_DOMAIN__, __LOCALE_DIR__, fallback=False)
_ = t.ugettext
except Exception as e:
print ("Error setting up the translations: %s" % (str(e)))
_ = lambda s: unicode(s)
class GTKCSVImport(object):
def __init__(self, plugin, gui, userfacade, logger):
object.__init__(self)
self.plugin = gtk.VBox(False)
self.logger = logger
self.options = None
self.userfacade = userfacade
# 1st line
hbox = gtk.HBox(False)
label_name = gtk.Label()
align = gtk.Alignment(0.01, 0.5, 0, 0)
label_name.set_markup(_("CSV file:"))
align.add(label_name)
hbox.pack_start(align, False, False, 5)
self.button_addfile = gtk.Button()
image = gtk.Image()
image.set_from_stock(gtk.STOCK_ADD, gtk.ICON_SIZE_BUTTON)
self.button_addfile.set_image(image)
self.button_addfile.set_tooltip_text(_("Select a CSV file to load photo's variables"))
self.button_addfile.set_label(_("Select file"))
self.button_addfile.connect('clicked', self._load_csv)
align = gtk.Alignment(0.01, 0.5, 0, 0)
align.add(self.button_addfile)
hbox.pack_start(align, False, False, 5)
self.plugin.pack_start(hbox, False, False, 5)
# 3rd line
hbox_headers = gtk.HBox(False)
label_headers = gtk.Label()
label_headers.set_markup(_("Headers:"))
hbox_headers.pack_start(label_headers, False, False, 5)
self.entry_headers = gtk.Entry(max=256)
self.entry_headers.connect('focus-out-event', self._out_entry)
self.entry_headers.connect('activate', self._set_entry)
self.entry_headers.set_tooltip_text(_("List of column headers of the CSV file. Each header will be a variable for each photo"))
self.entry_headers.set_sensitive(False)
hbox_headers.pack_start(self.entry_headers, True, True, 2)
label_headerid = gtk.Label()
label_headerid.set_markup(_("where photo name is:"))
hbox_headers.pack_start(label_headerid, False, False, 0)
self.cb_headerid = gtk.combo_box_new_text()
self.cb_headerid.connect("changed", self._combo_id)
self.cb_headerid.set_tooltip_text(_("Name of the column to match with each photo file name. It must be one of the Headers"))
self.cb_headerid.set_sensitive(False)
self.cb_headerid.append_text(' ')
hbox_headers.pack_start(self.cb_headerid, False, False, 5)
self.plugin.pack_start(hbox_headers, False, False, 5)
        # 4th line
self.checkbutton_geolocate = gtk.CheckButton(_("Geolocate photos using CSV headers"))
        self.checkbutton_geolocate.set_tooltip_text(_("If it is active, it will assign the following headers to each photo. It will geotag the photos using those headers, but warning: GPX data will take precedence!"))
self.checkbutton_geolocate.connect('toggled', self._geolocate)
self.checkbutton_geolocate.set_sensitive(False)
# Headers Variables
self.frame = gtk.Frame()
self.frame.set_label_widget(self.checkbutton_geolocate)
table = gtk.Table(2, 4, True)
label_lat = gtk.Label()
label_lat.set_markup(_("Latitude:"))
align = gtk.Alignment(1.00, 0.5, 0, 0)
align.add(label_lat)
table.attach(align, 0, 1, 0, 1, gtk.FILL)
self.cb_lat = gtk.combo_box_new_text()
self.cb_lat.connect("changed", self._combo_geolocate, CSVImport_CONFKEY_HEADER_LAT)
self.cb_lat.set_tooltip_text(_("Latitude header name"))
table.attach(self.cb_lat, 1, 2, 0, 1)
label_lon = gtk.Label()
label_lon.set_markup(_("Longitude:"))
align = gtk.Alignment(1.00, 0.5, 0, 0)
align.add(label_lon)
table.attach(align, 2, 3, 0, 1, gtk.FILL)
self.cb_lon = gtk.combo_box_new_text()
self.cb_lon.connect("changed", self._combo_geolocate, CSVImport_CONFKEY_HEADER_LON)
self.cb_lon.set_tooltip_text(_("Longitude header name"))
table.attach(self.cb_lon, 3, 4, 0, 1)
label_date = gtk.Label()
label_date.set_markup(_("Time-Date:"))
align = gtk.Alignment(1.00, 0.5, 0, 0)
align.add(label_date)
table.attach(align, 0, 1, 1, 2)
self.cb_date = gtk.combo_box_new_text()
self.cb_date.connect("changed", self._combo_geolocate, CSVImport_CONFKEY_HEADER_DATE)
table.attach(self.cb_date, 1, 2, 1, 2)
label_ele = gtk.Label()
label_ele.set_markup(_("Elevation:"))
align = gtk.Alignment(1.00, 0.5, 0, 0)
align.add(label_ele)
table.attach(align, 2, 3, 1, 2)
self.cb_ele = gtk.combo_box_new_text()
self.cb_ele.connect("changed", self._combo_geolocate, CSVImport_CONFKEY_HEADER_ELE)
self.cb_ele.set_tooltip_text(_("Elevation header name"))
table.attach(self.cb_ele, 3, 4, 1, 2)
table.set_border_width(20)
table.set_row_spacings(5)
self.frame.add(table)
self.frame.set_border_width(5)
self.frame.set_sensitive(False)
self.plugin.pack_start(self.frame, False, False, 5)
# Button
self.button_process = gtk.Button()
self.button_process.set_label(_("Process"))
image = gtk.Image()
image.set_from_stock(gtk.STOCK_EXECUTE, gtk.ICON_SIZE_BUTTON)
self.button_process.set_image(image)
self.button_process.connect('clicked', self.process)
align = gtk.Alignment(0.50, 0.5, 0.1, 0)
align.add(self.button_process)
self.plugin.pack_start(align, False, False, 0)
self.button_process.set_sensitive(False)
# Attributes
self.rootplugin = plugin
self.rootgui = gui
self.window = gui.builder.get_object("window")
self.events = True
def _load_csv(self, widget):
dialog = gtk.FileChooserDialog(title=_("Select CSV file ..."),
parent=self.window, action=gtk.FILE_CHOOSER_ACTION_OPEN,
buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK))
ffilter = gtk.FileFilter()
ffilter.set_name(_("Comma Separated Values (CSV)"))
ffilter.add_pattern("*.csv")
dialog.add_filter(ffilter)
ffilter = gtk.FileFilter()
ffilter.set_name(_("All files"))
ffilter.add_pattern("*")
dialog.add_filter(ffilter)
filename = None
if dialog.run() == gtk.RESPONSE_OK:
filename = dialog.get_filename()
dialog.destroy()
self._set_csv(filename)
def _set_csv(self, filename):
if filename != None and os.path.isfile(filename):
if not isinstance(filename, unicode):
try:
filename = unicode(filename, 'UTF-8')
except:
pass
shortfilename = " " + os.path.basename(filename) + " "
if len(shortfilename) > 150:
shortfilename = shortfilename[0:150] + " ... "
image = self.button_addfile.get_image()
image.clear()
self.button_addfile.set_label(shortfilename)
self.frame.set_sensitive(True)
self.checkbutton_geolocate.set_sensitive(True)
self.cb_headerid.set_sensitive(True)
self.entry_headers.set_sensitive(True)
self.button_process.set_sensitive(True)
self._init_csv(filename)
self._geolocate()
else:
self.reset()
def _init_csv(self, filename):
dgettext = dict()
dgettext['file'] = filename
try:
fd = open(filename, 'rb')
except Exception as e:
dgettext['error'] = str(e)
msg = _("Cannot read file '%(file)s': %(error)s")
self.logger.error(msg % dgettext)
            self.rootgui.show_dialog(_("Error"), msg, _('Please check file permissions'))
else:
dialect = 'excel'
headers = self.options[CSVImport_CONFKEY_HEADERS]
delimiter = self.options[CSVImport_CONFKEY_DELIMITER]
quotechar = self.options[CSVImport_CONFKEY_QUOTECHAR]
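            # If no delimiter/quote character is configured, sniff the CSV dialect from the first 10 KB of the file.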
if not delimiter or not quotechar:
dialect = csv.Sniffer().sniff(fd.read(10240))
delimiter = dialect.delimiter
quotechar = dialect.quotechar
fd.seek(0)
else:
dgettext['delimiter'] = delimiter
dgettext['quotechar'] = quotechar
has_header = csv.Sniffer().has_header(fd.read(10240))
fd.seek(0)
headers_defined = False
if headers:
headers_defined = True
else:
reader = csv.DictReader(fd, dialect=dialect, delimiter=delimiter, quotechar=quotechar)
if has_header:
reader.next()
headers = reader.fieldnames
headers_defined = True
self.options[CSVImport_CONFKEY_FILENAME] = filename
if not headers_defined:
msg = _("File has no headers")
tip = _('You have to define the name of the headers.')
self.rootgui.show_dialog(_("Warning"), msg, tip, gtk.MESSAGE_WARNING)
else:
self.entry_headers.set_text(', '.join(headers))
try:
index = headers.index(self.options[CSVImport_CONFKEY_HEADER_ID])
except:
index = 0
self._set_combo(self.cb_headerid, headers, CSVImport_CONFKEY_HEADER_ID, index)
self.rootplugin.update_headers(headers)
fd.close()
def process(self, widget=None):
filename = self.options[CSVImport_CONFKEY_FILENAME]
if filename != None:
self.rootplugin.init_csv(filename)
counter = self.rootplugin.process_csv(self.userfacade.state.geophotos)
self.rootplugin.logger.info(_("%d photos processed with CSV data.") % counter)
self.rootplugin.end_csv()
self.rootgui.reload_treeviewgeophotos()
def _geolocate(self, widget=None):
self.events = False
value = self.checkbutton_geolocate.get_active()
self.cb_date.set_sensitive(value)
self.cb_ele.set_sensitive(value)
self.cb_lon.set_sensitive(value)
self.cb_lat.set_sensitive(value)
self.options[CSVImport_CONFKEY_GEOLOCATE] = value
if not value:
self.options[CSVImport_CONFKEY_HEADER_LAT] = ''
self.options[CSVImport_CONFKEY_HEADER_LON] = ''
self.options[CSVImport_CONFKEY_HEADER_ELE] = ''
self.options[CSVImport_CONFKEY_HEADER_DATE] = ''
self._empty_combo(self.cb_date)
self._empty_combo(self.cb_ele)
self._empty_combo(self.cb_lon)
self._empty_combo(self.cb_lat)
else:
headers = [" "] + self.options[CSVImport_CONFKEY_HEADERS]
try:
headers.remove(self.cb_headerid.get_active_text())
except:
pass
self._set_combo(self.cb_date, headers)
self._set_combo(self.cb_ele, headers)
self._set_combo(self.cb_lon, headers)
self._set_combo(self.cb_lat, headers)
counter = 0
for i in headers:
item = i.lower()
if i == self.options[CSVImport_CONFKEY_HEADER_LAT]:
self.cb_lat.set_active(counter)
elif i == self.options[CSVImport_CONFKEY_HEADER_LON]:
self.cb_lon.set_active(counter)
elif i == self.options[CSVImport_CONFKEY_HEADER_ELE]:
self.cb_ele.set_active(counter)
elif i == self.options[CSVImport_CONFKEY_HEADER_DATE]:
self.cb_date.set_active(counter)
elif 'lat' in item:
self.cb_lat.set_active(counter)
self.options[CSVImport_CONFKEY_HEADER_LAT] = i
elif 'lon' in item:
self.cb_lon.set_active(counter)
self.options[CSVImport_CONFKEY_HEADER_LON] = i
elif 'ele' in item:
self.cb_ele.set_active(counter)
self.options[CSVImport_CONFKEY_HEADER_ELE] = i
elif 'date' in item or 'time' in item:
self.cb_date.set_active(counter)
self.options[CSVImport_CONFKEY_HEADER_DATE] = i
counter += 1
self.events = True
def _out_entry(self, widget, e):
widget.set_text(', '.join(self.options[CSVImport_CONFKEY_HEADERS]))
return False
def _set_entry(self, widget):
value = widget.get_text()
items = []
try:
char = None
for c in [',', ';', '|', '#']:
if c in value:
char = c
break
else:
raise Exception
for item in value.split(char):
items.append(item.strip())
if len(items) < 2:
raise Exception
except:
msg = _("Cannot set headers")
tip = _("Please, define the name of the headers to be used as variables.")
self.rootgui.show_dialog(_("Error"), msg, tip)
else:
try:
index = items.index(self.options[CSVImport_CONFKEY_HEADER_ID])
except:
index = 0
self._set_combo(self.cb_headerid, items, CSVImport_CONFKEY_HEADER_ID, index)
self.rootplugin.update_headers(items)
self._geolocate()
def _set_combo(self, cb, items=[], key=None, active=None):
self.events = False
cb.get_model().clear()
for item in items:
cb.append_text(item)
if active != None:
self.options[key] = items[active]
cb.set_active(active)
self.events = True
def _empty_combo(self, cb):
cb.get_model().clear()
def _combo_geolocate(self, widget, key):
if self.events:
header = widget.get_active_text()
if header in self.options[CSVImport_CONFKEY_HEADERS]:
self.options[key] = header
else:
self.options[key] = ''
def _activate_combo(self, cb, key, value, no):
counter = 0
for row in cb.get_model():
if value == row[0]:
if row[0] == no:
cb.set_active(0)
self.options[key] = ''
else:
cb.set_active(counter)
self.options[key] = row[0]
break
counter += 1
def _combo_id(self, widget):
if self.events:
header = widget.get_active_text()
self.options[CSVImport_CONFKEY_HEADER_ID] = header
header_lat = self.cb_lat.get_active_text()
header_lon = self.cb_lon.get_active_text()
header_ele = self.cb_ele.get_active_text()
header_date = self.cb_date.get_active_text()
self._geolocate()
self._activate_combo(self.cb_lat, CSVImport_CONFKEY_HEADER_LAT, header_lat, header)
self._activate_combo(self.cb_lon, CSVImport_CONFKEY_HEADER_LON, header_lon, header)
self._activate_combo(self.cb_ele, CSVImport_CONFKEY_HEADER_ELE, header_ele, header)
self._activate_combo(self.cb_date, CSVImport_CONFKEY_HEADER_DATE, header_date, header)
def show(self, widget=None, options=None):
if widget:
widget.add(self.plugin)
if options:
self.setup(options)
self.plugin.show_all()
def hide(self, reset=False):
self.plugin.hide_all()
if reset:
self.reset()
def reset(self):
self.button_process.set_sensitive(False)
self.checkbutton_geolocate.set_sensitive(False)
self.frame.set_sensitive(False)
self._empty_combo(self.cb_headerid)
self.cb_headerid.set_sensitive(False)
self.options[CSVImport_CONFKEY_HEADER_ID] = ''
self.entry_headers.set_sensitive(False)
self.entry_headers.set_text('')
image = self.button_addfile.get_image()
image.set_from_stock(gtk.STOCK_ADD, gtk.ICON_SIZE_BUTTON)
self.button_addfile.set_image(image)
self.button_addfile.set_label(_("Select file"))
self.checkbutton_geolocate.set_active(False)
self.options[CSVImport_CONFKEY_FILENAME] = ''
self.rootplugin.update_headers()
self.userfacade.state.photovariables = self.rootplugin.photovariables_old
self._geolocate()
self.rootgui.reload_treeviewgeophotos()
self.events = True
def setup(self, options):
self.options = options
self.cb_date.set_tooltip_text(_("Date header name. Format should be: ") + self.options[CSVImport_CONFKEY_DATE_PARSE])
if options[CSVImport_CONFKEY_GEOLOCATE]:
self.checkbutton_geolocate.set_active(True)
filename = options[CSVImport_CONFKEY_FILENAME]
if filename:
self._set_csv(filename)
#self.entry_headers.set_text(', '.join(options[CSVImport_CONFKEY_HEADERS]))
#EOF
| 39.995842
| 218
| 0.615501
| 2,342
| 19,238
| 4.790777
| 0.153715
| 0.031551
| 0.061497
| 0.064973
| 0.379144
| 0.292068
| 0.183244
| 0.145989
| 0.077718
| 0.059358
| 0
| 0.011798
| 0.281838
| 19,238
| 480
| 219
| 40.079167
| 0.800304
| 0.049641
| 0
| 0.26799
| 0
| 0.002481
| 0.074862
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042184
| false
| 0.004963
| 0.129032
| 0
| 0.176179
| 0.009926
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b8fcf8f0fe4212ea52ae11e77f6cd66ebb3437f
| 9,024
|
py
|
Python
|
src/opt_utils.py
|
mateuszz0000/POSA
|
1295065251dd22c89d923fbff7d8bf4c53339d95
|
[
"CNRI-Python",
"Xnet",
"Info-ZIP",
"X11"
] | 71
|
2021-05-02T21:40:29.000Z
|
2022-03-30T03:52:01.000Z
|
src/opt_utils.py
|
mateuszz0000/POSA
|
1295065251dd22c89d923fbff7d8bf4c53339d95
|
[
"CNRI-Python",
"Xnet",
"Info-ZIP",
"X11"
] | 4
|
2021-06-18T06:31:29.000Z
|
2021-12-07T07:29:21.000Z
|
src/opt_utils.py
|
mateuszz0000/POSA
|
1295065251dd22c89d923fbff7d8bf4c53339d95
|
[
"CNRI-Python",
"Xnet",
"Info-ZIP",
"X11"
] | 10
|
2021-05-08T08:16:31.000Z
|
2022-02-17T04:40:30.000Z
|
# -*- coding: utf-8 -*-
# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2020 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
#
# Contact: [email protected]
import torch
import torch.nn.functional as F
import numpy as np
import torchgeometry as tgm
from src import misc_utils, eulerangles
from tqdm import tqdm
def compute_afford_loss(vertices=None, scene_data=None, gen_batch=None, pen_w=0.0, no_obj_classes=None,
use_semantics=False,
semantics_w=0.0, **kwargs):
contact_ids = gen_batch[:, :, 0] > 0.5
x = misc_utils.read_sdf(vertices, scene_data['sdf'],
scene_data['grid_dim'], scene_data['grid_min'], scene_data['grid_max'],
mode="bilinear").squeeze()
batch_size = vertices.shape[0]
device = vertices.device
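    # Contact loss: squared SDF values at contact-labelled vertices, pulling them onto the scene surface (SDF == 0).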
contact_loss = torch.sum(x[contact_ids.flatten()] ** 2)
pen_loss = torch.tensor(0.0)
if pen_w > 0:
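        # Penetration loss: non-contact vertices with a negative SDF value, i.e. vertices inside scene geometry.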
mask = x.lt(0).flatten().int() + (~contact_ids.flatten()).int()
x_neg = torch.abs(x[mask == 2])
if len(x_neg) == 0:
pen_loss = torch.tensor(0.0)
else:
pen_loss = pen_w * x_neg.sum()
semantics_loss = torch.tensor(0.0)
if use_semantics:
# Read semantics
x_semantics = misc_utils.read_sdf(vertices, scene_data['semantics'],
scene_data['grid_dim'], scene_data['grid_min'],
scene_data['grid_max'], mode="bilinear").squeeze()
x_semantics = contact_ids.flatten().float() * x_semantics.unsqueeze(0)
x_semantics = torch.zeros(x_semantics.shape[0], x_semantics.shape[1], no_obj_classes, device=device).scatter_(
-1, x_semantics.unsqueeze(-1).type(torch.long), 1.)
# Compute loss
targets = gen_batch[:, :, 1:].argmax(dim=-1).type(torch.long).reshape(batch_size, -1)
semantics_loss = semantics_w * F.cross_entropy(x_semantics.permute(0, 2, 1), targets,
reduction='sum')
return contact_loss, pen_loss, semantics_loss
def eval_init_points(init_pos=None, init_ang=None, vertices=None, scene_data=None, gen_batch=None, **kwargs):
with torch.no_grad():
losses = []
init_pos_batches = init_pos.split(1)
for i in tqdm(range(len(init_pos_batches))):
curr_init_pos = init_pos_batches[i]
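            # Yaw-only rotation: zero roll/pitch plus the candidate yaw angle, converted to a rotation matrix.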
rot_aa = torch.cat((torch.zeros((1, 2), device=vertices.device), init_ang[i].reshape(1, 1)), dim=1)
rot_mat = tgm.angle_axis_to_rotation_matrix(rot_aa.reshape(-1, 3))[:, :3, :3]
curr_vertices = torch.bmm(rot_mat, vertices.permute(0, 2, 1)).permute(0, 2, 1)
curr_vertices = curr_vertices + curr_init_pos
contact_loss, pen_loss, semantics_loss = compute_afford_loss(vertices=curr_vertices, scene_data=scene_data,
gen_batch=gen_batch, **kwargs)
loss = contact_loss + pen_loss + semantics_loss
losses.append(loss.item())
        # Sort initial positions and orientations from best to worst
losses = np.array(losses)
ids = np.argsort(losses)
losses = losses[ids]
init_pos = init_pos[ids]
init_ang = init_ang[ids]
return losses, init_pos, init_ang
def init_points_culling(init_pos=None, vertices=None, scene_data=None, gen_batch=None, max_init_points=50, **kwargs):
init_ang = []
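    # Candidate yaw angles spaced 90 degrees apart; the first one is nudged to 1e-9 instead of 0, presumably to avoid a degenerate zero-angle axis-angle conversion.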
angles = torch.arange(0, 2 * np.pi, np.pi / 2, device=vertices.device)
angles[0] = 1e-9
for ang in angles:
init_ang.append(ang * torch.ones(init_pos.shape[0], 1, device=vertices.device))
init_ang = torch.cat(init_ang).to(init_pos.device)
init_pos = init_pos.repeat(angles.shape[0], 1, 1)
# Shuffle
rnd_ids = np.random.choice(init_pos.shape[0], init_pos.shape[0], replace=False)
init_pos = init_pos[rnd_ids, :]
init_ang = init_ang[rnd_ids, :]
losses, init_pos, init_ang = eval_init_points(init_pos=init_pos, init_ang=init_ang,
vertices=vertices.unsqueeze(0),
scene_data=scene_data, gen_batch=gen_batch, **kwargs)
# Select only a subset from initial points for optimization
if init_pos.shape[0] > max_init_points:
init_pos = init_pos[:max_init_points]
init_ang = init_ang[:max_init_points]
return init_pos, init_ang
class opt_wrapper(object):
def __init__(self, vertices=None, vertices_can=None, pelvis=None, scene_data=None,
down_sample_fn=None, down_sample_fn2=None,
device=None, dtype=None, pen_w=None, use_semantics=None, no_obj_classes=None, nv=None, optimizer=None,
gen_batch=None, body_model=None, opt_pose=False,
semantics_w=None, init_body_pose=None, pose_w=None, **kwargs):
self.optimizer = optimizer
self.vertices = vertices
self.vertices_can = vertices_can
self.pelvis = pelvis
self.scene_data = scene_data
self.down_sample_fn = down_sample_fn
self.down_sample_fn2 = down_sample_fn2
self.device = device
self.dtype = dtype
self.pen_w = pen_w
self.pose_w = pose_w
self.semantics_w = semantics_w
self.use_semantics = use_semantics
self.no_obj_classes = no_obj_classes
self.nv = nv
self.gen_batch = gen_batch
self.opt_pose = opt_pose
self.body_model = body_model
self.init_body_pose = init_body_pose
self.R_smpl2scene = torch.tensor(eulerangles.euler2mat(np.pi / 2, 0, 0, 'sxyz'), dtype=dtype, device=device)
def compute_vertices(self, t_free, y_ang, vertices=None, down_sample=True):
curr_batch_size = self.vertices.shape[0]
rot_aa = torch.cat((torch.zeros((curr_batch_size, 2), device=self.device), y_ang), dim=1)
rot_mat = tgm.angle_axis_to_rotation_matrix(rot_aa.reshape(-1, 3))[:, :3, :3]
if self.opt_pose:
body_model_output = self.body_model(return_verts=True)
pelvis = body_model_output.joints[:, 0, :].reshape(1, 3)
vertices_local = body_model_output.vertices.squeeze()
vertices_local = torch.matmul(self.R_smpl2scene, (vertices_local - pelvis).t()).t()
vertices_local.unsqueeze_(0)
if down_sample:
vertices_local = self.down_sample_fn.forward(vertices_local.permute(0, 2, 1))
vertices_local = self.down_sample_fn2.forward(vertices_local).permute(0, 2, 1)
vertices_local = torch.bmm(rot_mat, vertices_local.permute(0, 2, 1)).permute(0, 2, 1)
vertices_local += t_free
else:
# very important to make a local copy, so that you don't change the original variable
if vertices is None:
vertices_local = torch.bmm(rot_mat, self.vertices.permute(0, 2, 1)).permute(0, 2, 1)
else:
vertices_local = torch.bmm(rot_mat, vertices.permute(0, 2, 1)).permute(0, 2, 1)
vertices_local += t_free
return vertices_local, rot_mat
def compute_loss(self, t_free, y_ang):
pose_loss = torch.tensor(0.0)
if self.opt_pose:
pose_loss = self.pose_w * F.mse_loss(self.body_model.body_pose, self.init_body_pose)
vertices_local, rot_mat = self.compute_vertices(t_free, y_ang)
contact_loss, pen_loss, semantic_loss = compute_afford_loss(vertices=vertices_local, scene_data=self.scene_data,
gen_batch=self.gen_batch, pen_w=self.pen_w,
no_obj_classes=self.no_obj_classes,
use_semantics=self.use_semantics,
semantics_w=self.semantics_w)
return contact_loss, pen_loss, pose_loss, semantic_loss
def create_fitting_closure(self, t_free, y_ang):
def fitting_func():
self.optimizer.zero_grad()
recon_loss, pen_loss, pose_loss, semantic_loss = self.compute_loss(t_free, y_ang)
loss_total = recon_loss + pen_loss + pose_loss + semantic_loss
loss_total.backward(retain_graph=True)
return loss_total
return fitting_func
| 47
| 120
| 0.625332
| 1,240
| 9,024
| 4.281452
| 0.195161
| 0.034281
| 0.018648
| 0.02072
| 0.304012
| 0.246374
| 0.190055
| 0.165191
| 0.14466
| 0.092673
| 0
| 0.017694
| 0.273493
| 9,024
| 191
| 121
| 47.246073
| 0.791946
| 0.098515
| 0
| 0.078014
| 0
| 0
| 0.010229
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.056738
| false
| 0
| 0.042553
| 0
| 0.156028
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b95d82a263834a4e169c435b74dfded71be2e85
| 5,538
|
py
|
Python
|
siemstress/trigger.py
|
dogoncouch/siemstress
|
be7f60bb0228a886d48deb4f46309be7fb8aa0af
|
[
"MIT"
] | 28
|
2017-08-14T12:41:56.000Z
|
2022-02-18T01:18:11.000Z
|
siemstress/trigger.py
|
dogoncouch/siemstress
|
be7f60bb0228a886d48deb4f46309be7fb8aa0af
|
[
"MIT"
] | 1
|
2017-08-23T10:47:16.000Z
|
2017-08-24T18:52:48.000Z
|
siemstress/trigger.py
|
dogoncouch/siemstress
|
be7f60bb0228a886d48deb4f46309be7fb8aa0af
|
[
"MIT"
] | 6
|
2018-01-07T11:42:18.000Z
|
2020-06-08T00:04:57.000Z
|
#!/usr/bin/env python
#_MIT License
#_
#_Copyright (c) 2017 Dan Persons ([email protected])
#_
#_Permission is hereby granted, free of charge, to any person obtaining a copy
#_of this software and associated documentation files (the "Software"), to deal
#_in the Software without restriction, including without limitation the rights
#_to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#_copies of the Software, and to permit persons to whom the Software is
#_furnished to do so, subject to the following conditions:
#_
#_The above copyright notice and this permission notice shall be included in all
#_copies or substantial portions of the Software.
#_
#_THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#_IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#_FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#_AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#_LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#_OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#_SOFTWARE.
import time
from time import strftime
from time import sleep
from time import daylight
from time import timezone
from time import altzone
from random import randrange
from datetime import datetime
import MySQLdb as mdb
import json
import threading
import os
from sys import exit
import siemstress.manage
#import signal
class SiemTrigger:
def __init__(self, db, rule):
"""Initialize trigger object"""
self.db = db
self.rule = rule
self.tzone = None
def watch_rule(self):
"""Watch a trigger rule"""
# Set time zone:
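        # Build a +/-HHMM UTC-offset string from time.altzone / time.timezone (seconds offset from UTC).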
if daylight:
self.tzone = \
str(int(float(altzone) / 60 // 60)).rjust(2,
'0') + \
str(int(float(altzone) / 60 % 60)).ljust(2, '0')
else:
self.tzone = \
str(int(float(timezone) / 60 // 60)).rjust(2,
'0') + \
str(int(float(timezone) / 60 % 60)).ljust(2, '0')
if not '-' in self.tzone:
self.tzone = '+' + self.tzone
while True:
# Check the rule:
self.check_rule()
# Wait until the next interval
sleep(int(self.rule['time_int']) * 60)
def check_rule(self):
"""Check a trigger rule"""
# To Do: Add date_stamp_utc/int logic
if not self.tzone:
# Set time zone:
if time.localtime().tm_isdst:
self.tzone = \
str(int(float(altzone) / 60 // 60)).rjust(2,
'0') + \
str(int(float(altzone) / 60 % 60)).ljust(2, '0')
else:
self.tzone = \
str(int(float(timezone) / 60 // 60)).rjust(2,
'0') + \
str(int(float(timezone) / 60 % 60)).ljust(2, '0')
if not '-' in self.tzone:
self.tzone = '+' + self.tzone
# Query the database:
con = mdb.connect(self.db['host'], self.db['user'],
self.db['password'], self.db['database'])
with con:
cur = con.cursor()
cur.execute(self.rule['sql_query'])
rows = cur.fetchall()
cur.close()
con.close()
# Evaluate the results:
if len(rows) > int(self.rule['event_limit']):
idtags = json.dumps([int(row[0]) for row in rows])
datestamp = datetime.now().strftime('%Y%m%d%H%M%S')
datestamputc = datetime.utcnow().strftime('%Y%m%d%H%M%S')
magnitude = (((len(rows) // 2) // \
(self.rule['event_limit'] + 1) // 2) + 5) * \
( 7 - self.rule['severity'])
outstatement = 'INSERT INTO ' + \
self.rule['out_table'] + \
'(date_stamp, date_stamp_utc, t_zone, ' + \
'source_rule, severity, source_table, event_limit, ' + \
'event_count, magnitude, time_int, message, source_ids) ' + \
'VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)'
# Send an event to the database:
con = mdb.connect(self.db['host'], self.db['user'],
self.db['password'], self.db['database'])
with con:
cur = con.cursor()
cur.execute(outstatement, (datestamp, datestamputc,
self.tzone,
self.rule['rule_name'], self.rule['severity'],
self.rule['source_table'],
self.rule['event_limit'], len(rows), magnitude,
self.rule['time_int'], self.rule['message'],
idtags))
cur.close()
con.close()
def start_rule(db, rule, oneshot):
"""Initialize trigger object and start watching"""
# Make sure the table exists:
siemstress.manage.create_ruleevent_table(rule['out_table'])
sentry = SiemTrigger(db, rule)
if oneshot:
sentry.check_rule()
elif int(rule['time_int']) == 0:
pass
else:
# Before starting, sleep randomly up to rule interval to stagger
# database use:
sleep(randrange(0, int(rule['time_int']) * 60))
sentry.watch_rule()
| 34.397516
| 81
| 0.548754
| 668
| 5,538
| 4.462575
| 0.338323
| 0.034888
| 0.010064
| 0.012076
| 0.19792
| 0.19792
| 0.19792
| 0.188527
| 0.188527
| 0.188527
| 0
| 0.017354
| 0.334056
| 5,538
| 160
| 82
| 34.6125
| 0.790944
| 0.275731
| 0
| 0.376344
| 0
| 0.010753
| 0.108415
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043011
| false
| 0.032258
| 0.150538
| 0
| 0.204301
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b9aca9719a2480581a602385b8fda1e00bcfadc
| 3,040
|
py
|
Python
|
ooobuild/lo/util/time_with_timezone.py
|
Amourspirit/ooo_uno_tmpl
|
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
|
[
"Apache-2.0"
] | null | null | null |
ooobuild/lo/util/time_with_timezone.py
|
Amourspirit/ooo_uno_tmpl
|
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
|
[
"Apache-2.0"
] | null | null | null |
ooobuild/lo/util/time_with_timezone.py
|
Amourspirit/ooo_uno_tmpl
|
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Struct Class
# this is a auto generated file generated by Cheetah
# Namespace: com.sun.star.util
# Libre Office Version: 7.3
from ooo.oenv.env_const import UNO_NONE
import typing
from .time import Time as Time_604e0855
class TimeWithTimezone(object):
"""
Struct Class
represents a combined time value with time zone.
**since**
LibreOffice 4.1
See Also:
`API TimeWithTimezone <https://api.libreoffice.org/docs/idl/ref/structcom_1_1sun_1_1star_1_1util_1_1TimeWithTimezone.html>`_
"""
__ooo_ns__: str = 'com.sun.star.util'
__ooo_full_ns__: str = 'com.sun.star.util.TimeWithTimezone'
__ooo_type_name__: str = 'struct'
typeName: str = 'com.sun.star.util.TimeWithTimezone'
"""Literal Constant ``com.sun.star.util.TimeWithTimezone``"""
def __init__(self, TimeInTZ: typing.Optional[Time_604e0855] = UNO_NONE, Timezone: typing.Optional[int] = 0) -> None:
"""
Constructor
Arguments:
TimeInTZ (Time, optional): TimeInTZ value.
Timezone (int, optional): Timezone value.
"""
super().__init__()
if isinstance(TimeInTZ, TimeWithTimezone):
oth: TimeWithTimezone = TimeInTZ
self.TimeInTZ = oth.TimeInTZ
self.Timezone = oth.Timezone
return
kargs = {
"TimeInTZ": TimeInTZ,
"Timezone": Timezone,
}
if kargs["TimeInTZ"] is UNO_NONE:
kargs["TimeInTZ"] = None
self._init(**kargs)
def _init(self, **kwargs) -> None:
self._time_in_tz = kwargs["TimeInTZ"]
self._timezone = kwargs["Timezone"]
@property
def TimeInTZ(self) -> Time_604e0855:
"""
the time (in TimeZone)
"""
return self._time_in_tz
@TimeInTZ.setter
def TimeInTZ(self, value: Time_604e0855) -> None:
self._time_in_tz = value
@property
def Timezone(self) -> int:
"""
contains the time zone, as signed offset in minutes from UTC, that is east of UTC, that is the amount of minutes that should be added to UTC time to obtain the time in that timezone.
To obtain UTC time from TimeInTZ, you need to subtract TimeZone minutes.
"""
return self._timezone
@Timezone.setter
def Timezone(self, value: int) -> None:
self._timezone = value
__all__ = ['TimeWithTimezone']
| 30.4
| 190
| 0.650329
| 382
| 3,040
| 5.018325
| 0.408377
| 0.031299
| 0.026082
| 0.036515
| 0.077726
| 0.045383
| 0
| 0
| 0
| 0
| 0
| 0.022085
| 0.255263
| 3,040
| 99
| 191
| 30.707071
| 0.824647
| 0.436842
| 0
| 0.052632
| 0
| 0
| 0.104801
| 0.045977
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157895
| false
| 0
| 0.078947
| 0
| 0.447368
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
4b9cdb57c833e7e628efc0c75d61d7090e29a276
| 393
|
py
|
Python
|
exercicios/Lista6/Q5.py
|
AlexandrePeBrito/CursoUdemyPython
|
3de58cb30c9f333b32078309847179ff3f9d7e22
|
[
"MIT"
] | null | null | null |
exercicios/Lista6/Q5.py
|
AlexandrePeBrito/CursoUdemyPython
|
3de58cb30c9f333b32078309847179ff3f9d7e22
|
[
"MIT"
] | null | null | null |
exercicios/Lista6/Q5.py
|
AlexandrePeBrito/CursoUdemyPython
|
3de58cb30c9f333b32078309847179ff3f9d7e22
|
[
"MIT"
] | null | null | null |
""" 5. Faça um programa que receba do usuário um arquivo texto e um caracter. Mostre na tela
quantas vezes aquele caractere ocorre dentro do arquivo.
"""
arquivo=open('CursoUdemyPython/exercicios/Lista6/arq.txt')
texto=arquivo.read()
carac=input('Informe um caractere: ')
ca=0
for c in texto:
if(c == carac):
ca+=1
arquivo.close()
print(f"Foi identificado {ca} deste caractere")
| 28.071429
| 92
| 0.725191
| 60
| 393
| 4.75
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012085
| 0.157761
| 393
| 13
| 93
| 30.230769
| 0.848943
| 0.368957
| 0
| 0
| 0
| 0
| 0.422594
| 0.175732
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|