hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | 
qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
890a5161d6eed2959007d6f815eb9b7dd35c2414 | 2,541 | py | Python | src/main.py | chanleoc/kbc_demo | 9138de9083d92f5c8bab1dfc42d3dde50544920d | [
"MIT"
] | null | null | null | src/main.py | chanleoc/kbc_demo | 9138de9083d92f5c8bab1dfc42d3dde50544920d | [
"MIT"
] | null | null | null | src/main.py | chanleoc/kbc_demo | 9138de9083d92f5c8bab1dfc42d3dde50544920d | [
"MIT"
] | 1 | 2019-02-01T19:37:30.000Z | 2019-02-01T19:37:30.000Z | __author__ = 'Leo Chan'
__credits__ = 'Keboola 2019'
__project__ = 'kbc_demo'
"""
Python 3 environment
"""
#import pip
#pip.main(['install', '--disable-pip-version-check', '--no-cache-dir', 'logging_gelf'])
import sys
import os
import logging
import csv
import json
import pandas as pd
import logging_gelf.formatters
import logging_gelf.handlers
from keboola import docker
### Environment setup
abspath = os.path.abspath(__file__)
script_path = os.path.dirname(abspath)
os.chdir(script_path)
### Logging
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(levelname)s - %(message)s',
datefmt="%Y-%m-%d %H:%M:%S")
"""
logger = logging.getLogger()
logging_gelf_handler = logging_gelf.handlers.GELFTCPSocketHandler(
host=os.getenv('KBC_LOGGER_ADDR'),
port=int(os.getenv('KBC_LOGGER_PORT'))
)
logging_gelf_handler.setFormatter(logging_gelf.formatters.GELFFormatter(null_character=True))
logger.addHandler(logging_gelf_handler)
# removes the initial stdout logging
logger.removeHandler(logger.handlers[0])
"""
### Access the supplied rules
cfg = docker.Config('/data/')
params = cfg.get_parameters()
#data_table = cfg.get_parameters()["data_table"]
### Get proper list of tables
cfg = docker.Config('/data/')
in_tables = cfg.get_input_tables()
out_tables = cfg.get_expected_output_tables()
logging.info("IN tables mapped: "+str(in_tables))
logging.info("OUT tables mapped: "+str(out_tables))
### destination to fetch and output files
DEFAULT_FILE_INPUT = "/data/in/tables/"
DEFAULT_FILE_DESTINATION = "/data/out/tables/"
def get_tables(in_tables):
"""
Evaluate input and output table names.
Only taking the first one into consideration!
"""
### input file
table = in_tables[0]
in_name = table["full_path"]
in_destination = table["destination"]
logging.info("Data table: " + str(in_name))
logging.info("Input table source: " + str(in_destination))
return in_name
def get_output_tables(out_tables):
"""
Evaluate output table names.
Only taking the first one into consideration!
"""
    ### output file
    table = out_tables[0]
    out_name = table["full_path"]
    out_source = table["source"]
    logging.info("Output table: " + str(out_name))
    logging.info("Output table source: " + str(out_source))
    return out_name
def main():
"""
Main execution script.
"""
print('demo 2')
print('demo 3')
print('demo4')
return
if __name__ == "__main__":
main()
logging.info("Done.")
| 23.1 | 93 | 0.69815 | 335 | 2,541 | 5.059701 | 0.364179 | 0.051917 | 0.031858 | 0.020059 | 0.264307 | 0.234808 | 0.234808 | 0.234808 | 0.234808 | 0.234808 | 0 | 0.005169 | 0.162534 | 2,541 | 109 | 94 | 23.311927 | 0.791353 | 0.19205 | 0 | 0.2 | 0 | 0 | 0.219528 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.06 | false | 0 | 0.18 | 0 | 0.3 | 0.06 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
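The get_tables()/get_output_tables() helpers in the file above index into the mapping lists returned by Keboola's docker.Config object. A minimal sketch of the shapes they expect, inferred from the dictionary keys the code reads; the paths and names below are illustrative, not from the original component:

# Hypothetical mapping entries matching the keys read by get_tables()
# and get_output_tables() above; values are made up for illustration.
in_tables = [{"full_path": "/data/in/tables/source.csv",
              "destination": "source.csv"}]
out_tables = [{"full_path": "/data/out/tables/result.csv",
               "source": "result.csv"}]
assert in_tables[0]["full_path"].startswith("/data/in/tables/")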
890c561bf0aec0a3506eddd281bfa30bb906a86e | 92 | py | Python | greek_app/apps.py | HCDigitalScholarship/GreekPal | 15a99b1dae971edef0cc90be064fb7cb291c317b | [
"MIT"
] | null | null | null | greek_app/apps.py | HCDigitalScholarship/GreekPal | 15a99b1dae971edef0cc90be064fb7cb291c317b | [
"MIT"
] | 28 | 2019-10-08T05:36:39.000Z | 2022-01-13T02:54:15.000Z | greek_app/apps.py | HCDigitalScholarship/GreekPal | 15a99b1dae971edef0cc90be064fb7cb291c317b | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class GreekAppConfig(AppConfig):
name = 'greek_app'
| 15.333333 | 33 | 0.76087 | 11 | 92 | 6.272727 | 0.909091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.163043 | 92 | 5 | 34 | 18.4 | 0.896104 | 0 | 0 | 0 | 0 | 0 | 0.097826 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 4 |
890cb45c8ba1a1a9867fe4cb3e4acf45b6533679 | 5,504 | py | Python | peac_pkg/contourer.py | emcramer/peac | 74a3b7c5885d84a0b6e1dfadd887d08aa3967866 | [
"MIT"
] | null | null | null | peac_pkg/contourer.py | emcramer/peac | 74a3b7c5885d84a0b6e1dfadd887d08aa3967866 | [
"MIT"
] | null | null | null | peac_pkg/contourer.py | emcramer/peac | 74a3b7c5885d84a0b6e1dfadd887d08aa3967866 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Mon Jul 8 10:01:34 2019
@author: ecramer
"""
import numpy as np
from scipy import interpolate
from skimage.feature import peak_local_max
class Contourer():
""" TODO: Full writeup of class documentation here.
Steps:
1. generate the contours for each factor
2. find the peak coordinates and values
3. transform the peak coordinates into the manifold embedding's space
"""
def __init__(self):
self._interp_method = 'linear'
self._resolution = 50 # default resolution
self._peak_distance = np.floor(self._resolution/10.0).astype(np.uint8)
self._peak_threshold = 0.5
self._factor_names = [] # list to contain the factors of each column
# storage structures for the results
self.contours_ = {}
self.peak_values_ = {}
self.peak_coors_ = {}
self.transformed_peaks_ = {}
self.all_transformed_peaks_ = []
self.peak_names_ = []
def _check_input_dims(self, X, Y):
return (X.shape[1] == 2) and (len(Y.shape) > 0)
def fit(self, X, Y, **kwargs):
"""
X = the manifold embeddings for a 2D space as a numpy array. Each column must be a
Y = a pandas dataframe with the values for a series of predictors/factors
kwargs = extra parameters to feed to the contouring and peak finding algorithms
"""
if self._check_input_dims(X, Y):
self.X_ = X
self.Y_ = Y
# get the names of columns if Y is a pandas dataframe, otherwise assign numbers
if hasattr(Y, 'columns'):
self._factor_names = Y.columns
elif Y.ndim > 1:
self._factor_names = np.arange(Y.shape[1])
else:
self._factor_names = [0]
# unpack the dictionary to populate the fields in the class
for key, value in kwargs.items():
setattr(self, key, value)
# run the algorithms
self._gen_contours()
self._find_peaks()
self._transform_peaks()
return self
else:
print('Please double check input for correct dimensions. See documentation for details.')
return False
def _gen_contour(self, x1, x2, z):
"""
Generates a contour from the manifold embeddings and factor levels
"""
x_lin = np.linspace(min(x1), max(x1), self._resolution)
y_lin = np.linspace(min(x2), max(x2), self._resolution)
# create a grid of points
x_grid, y_grid = np.meshgrid(x_lin, y_lin)
z_grid = interpolate.griddata((x1, x2), z, (x_grid, y_grid), method=self._interp_method)
        return x_grid, y_grid, z_grid
def _gen_contours(self):
"""
Step 1
Generate the contours for each factor in Y
"""
# check to see if the number of factors to contour is > 1, otherwise
if self.Y_.ndim < 2:
z = np.asarray(self.Y_)
# get the values of the manifold embedding
x1 = self.X_[:, 0]
x2 = self.X_[:, 1]
x1g, x2g, zg = self._gen_contour(x1, x2, z)
self.contours_[0] = np.nan_to_num(zg)
else:
col = 0
while col < self.Y_.shape[self.Y_.ndim-1]:
z = np.asarray(self.Y_)[:, col]
# get the values of the manifold embedding
x1 = self.X_[:, 0]
x2 = self.X_[:, 1]
x1g, x2g, zg = self._gen_contour(x1, x2, z)
self.contours_[col] = np.nan_to_num(zg) # zero out the non-contoured points in the 2D space
col += 1 # go to the next column
def _find_peaks(self):
"""
Step 2
Find the local peaks in each contour.
"""
# find the peaks for each contour
for key, contour in self.contours_.items():
# find the peaks such that they are not within _peak_distance 'pixels' of each other
# and the peaks are above the _peak_threshold
peaks = peak_local_max(contour,
min_distance=self._peak_distance,
threshold_rel=self._peak_threshold)
self.peak_coors_[key] = peaks
# get the value of each peak found
self.peak_values_[key] = [contour[i[0], i[1]] for i in peaks]
def _transform_peaks(self):
"""
Step 3
Transform the peaks into the same space as the manifold embedding
"""
x = np.arange(0, self._resolution+1, 1)
x = np.interp(x, (x.min(), x.max()), (self.X_[:, 0].min(), self.X_[:, 0].max()))
y = np.arange(0, self._resolution+1, 1)
y = np.interp(y, (y.min(), y.max()), (self.X_[:, 1].min(), self.X_[:, 1].max()))
xx, yy = np.meshgrid(x, y)
for key in self.peak_coors_.keys():
self.transformed_peaks_[key] = np.column_stack(([x[a[0]] for a in self.peak_coors_[key]],
[y[a[1]] for a in self.peak_coors_[key]]))
self.all_transformed_peaks_ = np.concatenate(tuple(self.transformed_peaks_.values()))
self.peak_names_ = np.concatenate([[self._factor_names[k]]*len(v) for k, v in self.peak_coors_.items()])
| 39.597122 | 112 | 0.553779 | 729 | 5,504 | 3.997257 | 0.263374 | 0.038435 | 0.026767 | 0.02059 | 0.134523 | 0.115992 | 0.115992 | 0.061084 | 0.061084 | 0.061084 | 0 | 0.021607 | 0.344113 | 5,504 | 139 | 113 | 39.597122 | 0.785596 | 0.26508 | 0 | 0.115385 | 0 | 0 | 0.024282 | 0 | 0 | 0 | 0 | 0.007194 | 0 | 1 | 0.089744 | false | 0.012821 | 0.038462 | 0.012821 | 0.192308 | 0.012821 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
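A short usage sketch for the Contourer class above, following the shapes its fit() docstring describes: X is an (n, 2) manifold embedding and Y holds factor values. The synthetic data below is an assumption for illustration; it relies only on numpy, scipy and scikit-image, which the module already imports.

import numpy as np

# Synthetic 2-D embedding with one factor peaked near (0.5, 0.5).
rng = np.random.RandomState(0)
X = rng.rand(400, 2)
Y = np.exp(-((X[:, 0] - 0.5) ** 2 + (X[:, 1] - 0.5) ** 2) / 0.02)

c = Contourer().fit(X, Y)           # steps 1-3 all run inside fit()
print(c.peak_values_)               # peak heights per factor
print(c.all_transformed_peaks_)     # peak coordinates in embedding space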
890cd66cad388ad20cba21addbcdb0f32f1ebfc7 | 1,344 | py | Python | cert_core/cert_store/config.py | johnykkwan/cert-core | 40fdf04bdc255de1b36ca1f99fae10e6994858a1 | [
"MIT"
] | 13 | 2017-03-10T01:03:08.000Z | 2021-06-05T14:13:35.000Z | cert_core/cert_store/config.py | johnykkwan/cert-core | 40fdf04bdc255de1b36ca1f99fae10e6994858a1 | [
"MIT"
] | 2 | 2018-05-09T23:37:21.000Z | 2018-05-09T23:49:56.000Z | cert_core/cert_store/config.py | johnykkwan/cert-core | 40fdf04bdc255de1b36ca1f99fae10e6994858a1 | [
"MIT"
] | 14 | 2017-05-27T16:21:43.000Z | 2022-02-12T16:25:21.000Z | import os
import configargparse
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
def create_config():
p = configargparse.getArgumentParser(default_config_files=[os.path.join(BASE_DIR, 'conf_test.ini'),
os.path.join(BASE_DIR, 'conf_local.ini'),
os.path.join(BASE_DIR, 'conf.ini'),
'/etc/cert-issuer/conf.ini'])
p.add('-c', '--my-config', required=False, is_config_file=True, help='config file path')
p.add_argument('--mongodb_uri', default='mongodb://localhost:27017/test', type=str, env_var='MONGODB_URI',
help='Mongo connection string, including db containing certificates')
p.add_argument('--cert_store_type', type=str, help='type of key value store to use for Cert Store')
p.add_argument('--cert_store_path', type=str, help='path to file system Cert Store')
p.add_argument('--v1_aware', action='store_true', help='Whether to support v1 certs')
args, _ = p.parse_known_args()
return args
parsed_config = None
def get_config():
global parsed_config
if parsed_config:
return parsed_config
parsed_config = create_config()
return parsed_config
| 42 | 110 | 0.614583 | 171 | 1,344 | 4.602339 | 0.421053 | 0.045743 | 0.050826 | 0.053367 | 0.179161 | 0.087675 | 0.060991 | 0 | 0 | 0 | 0 | 0.007099 | 0.266369 | 1,344 | 31 | 111 | 43.354839 | 0.791075 | 0 | 0 | 0.086957 | 0 | 0 | 0.267857 | 0.040923 | 0 | 0 | 0 | 0 | 0 | 1 | 0.086957 | false | 0 | 0.086957 | 0 | 0.304348 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
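The module above caches the parsed namespace so that configargparse's shared parser is only built and parsed once per process. A quick usage sketch; the actual option values depend on the ini files and environment, so the example output is an assumption:

cfg = get_config()          # parses conf*.ini / env vars on the first call
cfg2 = get_config()         # returns the cached namespace, no re-parsing
assert cfg is cfg2
print(cfg.mongodb_uri)      # e.g. 'mongodb://localhost:27017/test'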
890e9894c6720002f79090ec3f6c0b3e8a97d456 | 3,053 | py | Python | tests/core/inst/jump/inst_jal.py | cornell-brg/lizard | 7f9a78a913e64b5cfdee3a26223539ad225bd6da | [
"BSD-3-Clause"
] | 50 | 2019-05-22T08:43:15.000Z | 2022-03-21T23:58:50.000Z | tests/core/inst/jump/inst_jal.py | cornell-brg/lizard | 7f9a78a913e64b5cfdee3a26223539ad225bd6da | [
"BSD-3-Clause"
] | 1 | 2019-07-27T18:51:52.000Z | 2019-08-02T01:20:22.000Z | tests/core/inst/jump/inst_jal.py | cornell-brg/lizard | 7f9a78a913e64b5cfdee3a26223539ad225bd6da | [
"BSD-3-Clause"
] | 11 | 2019-12-26T06:00:48.000Z | 2022-03-27T02:29:35.000Z | #=========================================================================
# jal
#=========================================================================
import random
from pymtl import *
from tests.context import lizard
from tests.core.inst_utils import *
#-------------------------------------------------------------------------
# gen_basic_test
#-------------------------------------------------------------------------
def gen_trivial_test():
return """
# Use r3 to track the control flow pattern
addi x3, x0, 0 # 0x0200
  jal x1, label_a # 0x0204
addi x3, x3, 0b01
label_a:
addi x3, x3, 0b10
# Only the second bit should be set if jump was taken
csrw proc2mngr, x1 > 0x0208
csrw proc2mngr, x3 > 0b10
"""
def gen_basic_test():
return """
# Use r3 to track the control flow pattern
addi x3, x0, 0 # 0x0200
#
nop # 0x0204
nop # 0x0208
nop # 0x020c
nop # 0x0210
nop # 0x0214
nop # 0x0218
nop # 0x021c
nop # 0x0220
#
jal x1, label_a # 0x0224
addi x3, x3, 0b01 # 0x0228
nop
nop
nop
nop
nop
nop
nop
nop
label_a:
addi x3, x3, 0b10
# Check the link address
csrw proc2mngr, x1 > 0x0228
# Only the second bit should be set if jump was taken
csrw proc2mngr, x3 > 0b10
"""
# ''' LAB TASK ''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
# Define additional directed and random test cases.
# '''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
# tests that x1 stalls/bypass are accounted for from JAL
def gen_dest_dep_test():
return """
# Use r3 to track the control flow pattern
addi x3, x0, 0 # 0x0200
jal x1, label_a # 0x0204
addi x3, x3, 0b01 # 0x0208
label_a:
# Check the link address
csrw proc2mngr, x1 > 0x0208
addi x3, x3, 0b10
# Only the second bit should be set if jump was taken
csrw proc2mngr, x3 > 0b10
"""
def gen_jal_multi_test():
return """
# Use r3 to track the control flow pattern
addi x3, x0, 0 # 0x0200
  jal x1, label_a # 0x0204
  jal x1, label_b # 0x0208
label_a:
addi x3, x3, 0b01
jal x1, label_c
label_b:
jal x1, label_a
addi x3, x3, 0b01
label_c:
addi x3, x3, 0b10
# Only the second bit should be set if jump was taken
csrw proc2mngr, x3 > 0b11
"""
def gen_bne_over_jal_test():
return """
# Use r3 to track the control flow pattern
addi x3, x0, 0 # 0x0200
beq x3, x0, label_c
jal x1, label_b # 0x0208
jal x1, label_a # 0x020c
label_a:
addi x3, x3, 0b01
jal x1, label_c
label_b:
jal x1, label_a
addi x3, x3, 0b01
label_c:
addi x3, x3, 0b10
# Only the second bit should be set if jump was taken
csrw proc2mngr, x3 > 0b10
"""
| 22.123188 | 74 | 0.480511 | 376 | 3,053 | 3.800532 | 0.223404 | 0.071379 | 0.06718 | 0.053884 | 0.671798 | 0.663401 | 0.624913 | 0.560532 | 0.560532 | 0.560532 | 0 | 0.118389 | 0.324926 | 3,053 | 137 | 75 | 22.284672 | 0.574964 | 0.183426 | 0 | 0.673684 | 0 | 0 | 0.865888 | 0 | 0 | 0 | 0.057994 | 0 | 0 | 1 | 0.052632 | true | 0 | 0.042105 | 0.052632 | 0.147368 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
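Every link-register check in the tests above relies on the same JAL property: rd receives the address of the instruction following the jump. A one-line sanity check of the arithmetic, using the addresses from gen_trivial_test's comments:

jal_pc = 0x0204                 # address of 'jal x1, label_a' in gen_trivial_test
assert jal_pc + 4 == 0x0208     # the value the test expects in x1 via proc2mngr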
890f2b51c60c3ae89e15389064fecd83f7930ef7 | 181 | py | Python | ecs/scratchpad/forms.py | programmierfabrik/ecs | 2389a19453e21b2ea4e40b272552bcbd42b926a9 | [
"Apache-2.0"
] | 9 | 2017-02-13T18:17:13.000Z | 2020-11-21T20:15:54.000Z | ecs/scratchpad/forms.py | programmierfabrik/ecs | 2389a19453e21b2ea4e40b272552bcbd42b926a9 | [
"Apache-2.0"
] | 2 | 2021-05-20T14:26:47.000Z | 2021-05-20T14:26:48.000Z | ecs/scratchpad/forms.py | programmierfabrik/ecs | 2389a19453e21b2ea4e40b272552bcbd42b926a9 | [
"Apache-2.0"
] | 4 | 2017-04-02T18:48:59.000Z | 2021-11-23T15:40:35.000Z | from django import forms
from ecs.scratchpad.models import ScratchPad
class ScratchPadForm(forms.ModelForm):
class Meta:
model = ScratchPad
fields = ('text',)
| 20.111111 | 44 | 0.701657 | 20 | 181 | 6.35 | 0.7 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.220994 | 181 | 8 | 45 | 22.625 | 0.900709 | 0 | 0 | 0 | 0 | 0 | 0.022099 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
891149c09a494b51da684081827ffe7afa2014f5 | 1,531 | py | Python | tests/test_canvas.py | 0xF4D3C0D3/ray-tracer-challenge-with-python | 4140f2d534b3af5d70a9aa12dbc411a5bae7b918 | [
"MIT"
] | null | null | null | tests/test_canvas.py | 0xF4D3C0D3/ray-tracer-challenge-with-python | 4140f2d534b3af5d70a9aa12dbc411a5bae7b918 | [
"MIT"
] | 3 | 2020-03-24T18:01:09.000Z | 2021-02-02T22:19:35.000Z | tests/test_canvas.py | 0xF4D3C0D3/ray-tracer-challenge-with-python | 4140f2d534b3af5d70a9aa12dbc411a5bae7b918 | [
"MIT"
] | 1 | 2020-08-07T22:49:48.000Z | 2020-08-07T22:49:48.000Z | import numpy as np
from src.canvas import Canvas
from src.grid import Color
def test_create_canvas():
c = Canvas(10, 20)
assert c.rows == 10
assert c.cols == 20
assert np.all(c == Color(0, 0, 0))
def test_writing_pixel_to_canvas():
c = Canvas(10, 20)
red = Color(1, 0, 0)
c[2, 3] = red
assert red == c[2, 3]
def test_constructing_ppm_header():
c = Canvas(5, 3)
ppm = c.to_ppm()
line_1_3 = "\n".join(ppm.splitlines()[0:3])
assert line_1_3 == ("P3\n" "5 3\n" "255")
def test_constructing_ppm_pixel_data():
c = Canvas(5, 3)
cs = Color([0, 0, 1], [0, 0.5, 0], [1.5, 0, -0.5])
c[(0, 1, 2), (4, 2, 0)] = cs
ppm = c.to_ppm()
lines_4_6 = "\n".join(ppm.splitlines()[3:6])
assert lines_4_6 == (
"255 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n"
"0 0 0 0 0 0 0 128 0 0 0 0 0 0 0\n"
"0 0 0 0 0 0 0 0 0 0 0 0 0 0 255"
)
def test_splitting_long_lines_in_ppm_files():
c = Canvas(10, 2)
c[:2, :10] = Color(1, 0.8, 0.6)
ppm = c.to_ppm()
lines_4_7 = "\n".join(ppm.splitlines()[3:7])
assert lines_4_7 == (
"255 204 153 255 204 153 255 204 153 255 204 153 255 204 153 255 204\n"
"153 255 204 153 255 204 153 255 204 153 255 204 153\n"
"255 204 153 255 204 153 255 204 153 255 204 153 255 204 153 255 204\n"
"153 255 204 153 255 204 153 255 204 153 255 204 153"
)
def test_ppm_files_are_terminated_by_a_newline_character():
c = Canvas(5, 3)
ppm = c.to_ppm()
assert ppm[-1] == "\n"
| 24.693548 | 79 | 0.573481 | 313 | 1,531 | 2.667732 | 0.182109 | 0.105389 | 0.125749 | 0.143713 | 0.435928 | 0.349701 | 0.313772 | 0.313772 | 0.270659 | 0.270659 | 0 | 0.285064 | 0.282822 | 1,531 | 61 | 80 | 25.098361 | 0.47541 | 0 | 0 | 0.25 | 0 | 0 | 0.234487 | 0 | 0 | 0 | 0 | 0 | 0.181818 | 1 | 0.136364 | false | 0 | 0.068182 | 0 | 0.204545 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
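The expected strings in the tests above pin down the PPM rules the Canvas must follow: a "P3" header with "cols rows" and a 255 maximum, pixel rows wrapped so no line exceeds 70 characters, and a terminating newline. A minimal hand-rolled sketch of that layout, independent of the Canvas class and for illustration only:

# Build a 5x3 all-black P3 image by hand, mirroring what to_ppm() emits.
cols, rows = 5, 3
header = "P3\n{} {}\n255".format(cols, rows)
pixel_row = " ".join(["0 0 0"] * cols)          # 5 black pixels per row
body = "\n".join([pixel_row] * rows)
ppm = header + "\n" + body + "\n"               # terminated by a newline
assert all(len(line) <= 70 for line in ppm.splitlines())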
891182fd38f282bf8f36b0168fcfb4e616539c2d | 273 | py | Python | setup.py | kiran-vemuri/ifmap-python-client | 06198cface6421c50c7352009b8370713a414db5 | [
"BSD-2-Clause-FreeBSD"
] | 2 | 2015-01-21T11:52:27.000Z | 2017-01-24T05:13:55.000Z | setup.py | Mirantis/ifmap-python-client | 9d3ba070ca6b60d67a947632d476fca55f31dc5b | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | setup.py | Mirantis/ifmap-python-client | 9d3ba070ca6b60d67a947632d476fca55f31dc5b | [
"BSD-2-Clause-FreeBSD"
] | 3 | 2015-03-11T07:51:30.000Z | 2016-10-12T06:08:08.000Z | #!/usr/bin/env python
import setuptools
setuptools.setup(
name='ifmap',
version='0.1',
description='IF-MAP Python Client Library',
url='https://github.com/ITI/ifmap-python-client',
packages=setuptools.find_packages(),
include_package_data=True,
)
| 21 | 53 | 0.699634 | 35 | 273 | 5.371429 | 0.8 | 0.12766 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008621 | 0.150183 | 273 | 12 | 54 | 22.75 | 0.801724 | 0.07326 | 0 | 0 | 0 | 0 | 0.309524 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.111111 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
891468b009c1b13ce530b24e7304fbda965e0a7e | 4,697 | py | Python | errvallist.py | stefantkeller/ErrorValues | ad81c45d12067d54f3418ce5ceaf8bd9e78e8054 | [
"MIT"
] | null | null | null | errvallist.py | stefantkeller/ErrorValues | ad81c45d12067d54f3418ce5ceaf8bd9e78e8054 | [
"MIT"
] | null | null | null | errvallist.py | stefantkeller/ErrorValues | ad81c45d12067d54f3418ce5ceaf8bd9e78e8054 | [
"MIT"
] | null | null | null | #! /usr/bin/python2.7
# -*- coding: utf-8 -*-
'''
Work with whole lists of errval's:
[v0+-e0, v1+-e1, ...]
'''
import numpy as np
from errval import *
class errvallist(list):
def __init__(self,vals=[],errs=0,printout='latex'):
if isinstance(vals,errvallist):
self.__errl = vals
elif isinstance(vals,(list,np.ndarray)) or isinstance(errs,(list,np.ndarray)):
if isinstance(vals,(list,np.ndarray)) and isinstance(errs,(list,np.ndarray)):
                if len(vals)==len(errs):
                    self.__errl = [errval(vals[j],errs[j],printout) for j in xrange(len(vals))]
                else:
                    raise ValueError, 'vals and errs must be of equal length: {0} != {1}'.format(len(vals),len(errs))
elif isinstance(vals,(list,np.ndarray)) and isinstance(errs,(int,float,long)):
# note: this also covers the case vals is a list of errval entries,
# in this case the errs are ignored and the result is a conversion of list to errvallist
self.__errl = [errval(v,errs,printout) for v in vals]
elif isinstance(vals,(int,float,long)) and isinstance(errs,(list,np.ndarray)):
self.__errl = [errval(vals,e,printout) for e in errs]
else:
raise ValueError, 'Cannot assign input data: {0}'.format(type(vals))
def __getitem__(self,key):
return self.__errl[key]
def __setitem__(self,key,value):
self.__errl[key]=value
def __getslice__(self,i,j):
# https://docs.python.org/2/reference/datamodel.html#object.__getslice__
# Deprecated since version 2.0
# but since I derive from list I have to ignore this deprecation...
return errvallist(self.__errl[i:j])
def __str__(self):
outp = '['
for evl in self.__errl:
outp += evl.__str__()+','
outp = outp[:-1]+']'
return outp
def __iter__(self):
# to make the errvallist iterable
# i.e. to make the 'in' possible in 'for err in errvallist:'
for err in self.__errl:
yield err
def __len__(self):
return len(self.__errl)
def __add__(self,other):
if isinstance(other,(errvallist,list)) and len(self)==len(other):
errvall = [self[j]+other[j] for j in xrange(len(self))]
elif isinstance(other,(int,float,long,errval)):
errvall = [s+other for s in self]
else:
raise TypeError, 'unsupported operand type(s) for +: errval with {0}'.format(type(other))
return errvallist(errvall)
def __radd__(self,other):
return self.__add__(other)
def __sub__(self,other):
if isinstance(other,(errvallist,list)) and len(self)==len(other):
errvall = [self[j]-other[j] for j in xrange(len(self))]
elif isinstance(other,(int,float,long,errval)):
errvall = [s-other for s in self]
else:
raise TypeError, 'unsupported operand type(s) for -: errval with {0}'.format(type(other))
return errvallist(errvall)
def __rsub__(self,other):
return -1*self.__sub__(other)
def __mul__(self,other):
if isinstance(other,(errvallist,list)) and len(self)==len(other):
errvall = [self[j]*other[j] for j in xrange(len(self))]
elif isinstance(other,(int,float,long,errval)):
errvall = [s*other for s in self]
else:
raise TypeError, 'unsupported operand type(s) for *: errval with {0}'.format(type(other))
return errvallist(errvall)
def __rmul__(self,other):
return self.__mul__(other)
def __div__(self,other):
if isinstance(other,(errvallist,list)) and len(self)==len(other):
errvall = [self[j]/other[j] for j in xrange(len(self))]
elif isinstance(other,(int,float,long,errval)):
errvall = [s/other for s in self]
else:
raise TypeError, 'unsupported operand type(s) for /: errval with {0}'.format(type(other))
return errvallist(errvall)
def __rdiv__(self,other):
return 1.0/self.__div__(other)
def append(self,value):
self.__errl.append(value)
def round(self,n=0):
# returns new instance
return errvallist([errv.round(n) for errv in self])
'''
Depending on the circumstances the code incorporating this class
may want to use different names for the following functions:
'''
def v(self): return np.array([ev.val() for ev in self])
def val(self): return self.v()
def vals(self): return self.v()
def values(self): return self.v()
def e(self): return np.array([ev.err() for ev in self])
def err(self): return self.e()
def errs(self): return self.e()
def errors(self): return self.e()
| 37.879032 | 104 | 0.607409 | 640 | 4,697 | 4.292188 | 0.226563 | 0.032035 | 0.028395 | 0.021842 | 0.502366 | 0.440116 | 0.413178 | 0.397525 | 0.36549 | 0.36549 | 0 | 0.00605 | 0.261018 | 4,697 | 123 | 105 | 38.186992 | 0.785364 | 0.10049 | 0 | 0.2 | 0 | 0 | 0.05922 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.023529 | null | null | 0.047059 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
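A usage sketch for errvallist (Python 2, matching the shebang above). It assumes errval takes (value, error) positionally, supports scalar arithmetic, and accepts an errval in its constructor, as the calls and comments inside the class suggest:

a = errvallist([1.0, 2.0, 3.0], [0.1, 0.2, 0.3])   # three values with errors
b = (a * 2.0) + 1.0                                # elementwise; errors propagate
print b.vals()    # numpy array of the values
print b.errs()    # numpy array of the errors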
8916b84699c1edcb6e4ab2a662ea82184162ff53 | 1,148 | py | Python | proyectofinal/quiz/models.py | Syferk/trabajofinal | 54f778fc8b6e21a845702b43524b88c98badd52d | [
"MIT"
] | 1 | 2021-09-05T22:34:26.000Z | 2021-09-05T22:34:26.000Z | proyectofinal/quiz/models.py | Syferk/trabajofinal | 54f778fc8b6e21a845702b43524b88c98badd52d | [
"MIT"
] | null | null | null | proyectofinal/quiz/models.py | Syferk/trabajofinal | 54f778fc8b6e21a845702b43524b88c98badd52d | [
"MIT"
] | 3 | 2021-09-05T23:30:15.000Z | 2021-09-07T02:25:26.000Z | from django.db import models
from user.models import Users
class Category(models.Model):
category_name = models.CharField(max_length=50)
question_available = models.PositiveIntegerField(default=0)
question_number = models.PositiveIntegerField()
def __str__(self):
return self.category_name
class Question(models.Model):
category=models.ForeignKey(Category,on_delete=models.CASCADE)
question=models.CharField(max_length=600)
option1=models.CharField(max_length=200)
option2=models.CharField(max_length=200)
option3=models.CharField(max_length=200)
option4=models.CharField(max_length=200)
cat=(('Option1','Option1'),('Option2','Option2'),('Option3','Option3'),('Option4','Option4'))
answer=models.CharField(max_length=200,choices=cat)
class Result(models.Model):
user = models.ForeignKey(Users,on_delete=models.CASCADE)
exam = models.ForeignKey(Category,on_delete=models.CASCADE)
marks = models.PositiveIntegerField()
date = models.DateTimeField(auto_now=True)
correct = models.PositiveIntegerField(default= 0)
total_questions = models.PositiveIntegerField(default= 0)
| 39.586207 | 97 | 0.762195 | 137 | 1,148 | 6.240876 | 0.357664 | 0.122807 | 0.147368 | 0.196491 | 0.263158 | 0.105263 | 0.105263 | 0 | 0 | 0 | 0 | 0.034551 | 0.117596 | 1,148 | 28 | 98 | 41 | 0.809477 | 0 | 0 | 0 | 0 | 0 | 0.048823 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.041667 | false | 0 | 0.083333 | 0.041667 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
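A hypothetical ORM sketch for the models above, as it might run inside a Django shell; the field values are made up and some_user stands for an existing Users instance:

cat = Category.objects.create(category_name="Math", question_number=10)
Question.objects.create(
    category=cat, question="What is 2 + 2?",
    option1="3", option2="4", option3="5", option4="6",
    answer="Option2")                      # must be one of the declared choices
Result.objects.create(user=some_user, exam=cat, marks=8, correct=8)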
89170f8e42f9d4e440f81502dbb93240f0f1b350 | 11,203 | py | Python | Data_Structures/Graph/unweighted_graph.py | D-Chase-H/PurePy-Data-Structures | 892b9666a80054f4524c090a7b442b125c372403 | [
"MIT"
] | null | null | null | Data_Structures/Graph/unweighted_graph.py | D-Chase-H/PurePy-Data-Structures | 892b9666a80054f4524c090a7b442b125c372403 | [
"MIT"
] | 1 | 2017-12-15T04:13:08.000Z | 2017-12-15T04:13:08.000Z | Data_Structures/Graph/unweighted_graph.py | D-Chase-H/PurePy-Data-Structures | 892b9666a80054f4524c090a7b442b125c372403 | [
"MIT"
] | null | null | null |
# Author: D-Chase-H
"""
License:
MIT License
Copyright (c) 2017 Dustin Chase Harmon
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
class Node(object):
"""docstring for Node."""
def __init__(self):
self.id_num = None
self.edges = set()
class Graph(object):
"""docstring for Graph."""
def __init__(self):
self.nodes = dict()
############################################################################
# Insertion Methods
############################################################################
def insert_node(self, id_num):
try:
self.nodes[id_num]
except KeyError:
new_node = Node()
new_node.id_num = id_num
self.nodes[id_num] = new_node
return
def undirected_insert_nodes_from_edge_pair(self, pair):
""" pair: list/tuple type == [x, y] """
node_1_id_num = pair[0]
self.insert_node(node_1_id_num)
node_1 = self.nodes[node_1_id_num]
node_2_id_num = pair[1]
self.insert_node(node_2_id_num)
node_2 = self.nodes[node_2_id_num]
node_1.edges.add(node_2)
node_2.edges.add(node_1)
return
def directed_insert_nodes_from_edge_pair(self, from_node, to_node):
"""
from_node = integer
to_node = integer
"""
node_1_id_num = from_node
self.insert_node(node_1_id_num)
node_1 = self.nodes[node_1_id_num]
node_2_id_num = to_node
self.insert_node(node_2_id_num)
node_2 = self.nodes[node_2_id_num]
node_1.edges.add(node_2)
return
############################################################################
# Disjoint-Set Methods
############################################################################
def arr_of_disjoint_sets(self):
def search_forest(curr_node, curr_subset):
if curr_node.id_num in curr_subset:
return
curr_subset.add(curr_node.id_num)
visited_nodes.add(curr_node.id_num)
for node in curr_node.edges:
search_forest(node, curr_subset)
return
arr = []
visited_nodes = set()
for n in self.nodes.values():
if n.id_num not in visited_nodes:
curr_subset = set()
search_forest(n, curr_subset)
if curr_subset:
arr.append(curr_subset)
return arr
############################################################################
# Depth-First-Search Methods
############################################################################
def depth_first_search_bool(self, start, end):
"""
start = integer
end = integer
Returns: Bool
"""
def dfs_check(curr_node):
nonlocal is_connected
nonlocal end_node
if is_connected is True:
return
if curr_node in visited_nodes:
return
visited_nodes.add(curr_node)
for node in curr_node.edges:
if node == end_node:
is_connected = True
return
else:
dfs_check(node)
start_node = self.nodes[start]
end_node = self.nodes[end]
if start_node == end_node:
return True
visited_nodes = set([])
is_connected = False
dfs_check(start_node)
return is_connected
def depth_first_search_all_paths(self, start, end):
from copy import copy
"""
start = integer
end = integer
Returns: Bool
"""
def dfs_find_path(curr_node, visited_nodes=set(), path=[]):
nonlocal all_paths
nonlocal end_node
if curr_node in visited_nodes:
return
visited_nodes.add(curr_node)
path.append(curr_node)
for node in curr_node.edges:
if node == end_node:
temp_path = tuple(copy(path) + [node])
all_paths.append(temp_path)
else:
temp_visited_nodes = copy(visited_nodes)
temp_path = copy(path)
dfs_find_path(node, temp_visited_nodes, temp_path)
start_node = self.nodes[start]
end_node = self.nodes[end]
if start_node == end_node:
return [[start_node]]
all_paths = []
dfs_find_path(start_node)
return all_paths
############################################################################
# Breadth-First-Search Methods
############################################################################
def breadth_first_search_bool(self, start, end):
"""
start = integer
end = integer
Returns: Bool
"""
def bfs_check(curr_edges):
nonlocal is_connected
nonlocal visited_nodes
nonlocal end_node
if end_node in curr_edges:
is_connected = True
return
new_edges = set()
for node in curr_edges:
visited_nodes.add(node)
for node_edge in node.edges:
if node_edge not in visited_nodes:
new_edges.add(node_edge)
if not new_edges:
return
else:
bfs_check(new_edges)
start_node = self.nodes[start]
end_node = self.nodes[end]
if start_node == end_node:
return True
visited_nodes = set([start_node])
curr_edges = set([edg for edg in start_node.edges])
is_connected = False
bfs_check(curr_edges)
return is_connected
def bfs_shortest_path(self, start, end):
"""
start = integer
end = integer
Returns: Bool
"""
def determine_path():
nonlocal curr_edges
nonlocal visited_nodes
nonlocal start_node
nonlocal end_node
nonlocal depth
paths = [[end_node]]
complete = False
tally = 0
for loop_num in range(depth):
remove_nodes = set()
new_paths = []
for index, p in enumerate(paths):
last_node = p[-1]
poss_paths = []
for node in last_node.edges:
# If we are on the last node, then skip any edge-node
# that is not the start node.
if loop_num == depth - 1:
if node != start_node:
continue
if node in visited_nodes:
temp = p + [node]
poss_paths.append(temp)
remove_nodes.add(node)
for sub_path in poss_paths:
new_paths.append(sub_path)
for node in remove_nodes:
visited_nodes.remove(node)
paths = new_paths
paths = tuple([tuple(reversed(p)) for p in paths])
return paths
def bfs_check(curr_edges):
nonlocal is_connected
nonlocal visited_nodes
nonlocal end_node
nonlocal depth
depth += 1
if end_node in curr_edges:
is_connected = True
return
new_edges = set()
for node in curr_edges:
visited_nodes.add(node)
for node_edge in node.edges:
if node_edge not in visited_nodes:
new_edges.add(node_edge)
if not new_edges:
return
else:
bfs_check(new_edges)
start_node = self.nodes[start]
end_node = self.nodes[end]
if start_node == end_node:
return [[start_node]]
depth = 0
visited_nodes = set([start_node])
curr_edges = set([edg for edg in start_node.edges])
is_connected = False
bfs_check(curr_edges)
paths = determine_path()
return paths
if __name__ == '__main__':
import sys
from random import randrange
print("START\n")
############################################################################
pairs = set()
while len(pairs) < 20:
num1 = randrange(20)
num2 = randrange(20)
if num1 == num2:
continue
new = (num1, num2)
pairs.add(new)
pairs = list(pairs)
g = Graph()
print(pairs, "\n")
for p in pairs:
g.undirected_insert_nodes_from_edge_pair(p)
d_set = g.arr_of_disjoint_sets()
print("Disjoint Set: ", d_set, "\n")
start = pairs[9][0]
end = pairs[4][1]
dfs = g.depth_first_search_bool(start, end)
print("DFS: Path from {} to {} is {}\n".format(start, end, dfs))
dfs_paths = g.depth_first_search_all_paths(start, end)
dfs_paths = [[node.id_num for node in path] for path in dfs_paths]
print("DFS: All paths from {} to {} are {}\n".format(start, end, dfs_paths))
bfs = g.breadth_first_search_bool(start, end)
print("BFS: Path from {} to {} is {}\n".format(start, end, bfs))
bfs_short = g.bfs_shortest_path(start, end)
bfs_short = [[j.id_num for j in i] for i in bfs_short]
print("BFS Shortest: Path from {} to {} is {}\n".format(start, end, bfs_short))
############################################################################
print("\nEND")
| 28.652174 | 84 | 0.496028 | 1,243 | 11,203 | 4.224457 | 0.170555 | 0.022853 | 0.013712 | 0.011426 | 0.403733 | 0.374595 | 0.336126 | 0.336126 | 0.322986 | 0.31156 | 0 | 0.00694 | 0.369722 | 11,203 | 390 | 85 | 28.725641 | 0.736723 | 0.136214 | 0 | 0.479452 | 0 | 0 | 0.021537 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.073059 | false | 0 | 0.013699 | 0 | 0.200913 | 0.03653 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
891932991aa3c2e3cf18663abf2e256d53e75925 | 3,106 | py | Python | dsatools/generator/_marple_sequence.py | diarmaidocualain/dsatools | 50b9259e2846b5fdd3dc52206967b0ee8d0144de | [
"MIT"
] | 31 | 2020-09-14T16:12:01.000Z | 2022-03-30T01:44:18.000Z | generator/_marple_sequence.py | Jimmy-INL/dsatools | 5c811838bb3fb8ae00195d5f68e451bd23b3448c | [
"MIT"
] | 4 | 2021-09-24T17:47:42.000Z | 2022-03-04T08:06:43.000Z | generator/_marple_sequence.py | Jimmy-INL/dsatools | 5c811838bb3fb8ae00195d5f68e451bd23b3448c | [
"MIT"
] | 11 | 2020-12-06T08:18:56.000Z | 2022-03-20T13:28:22.000Z |
import numpy as np
def marple_sequence():
'''
Specific signal-like sequence,
proposed by Marple S.L.
Returns
--------
* signal: 1d ndarray (64 samples).
Example
--------
import dsatools.generator
from dsatools.generator import callbacks
import dsatools.utilits as ut
signal = dsatools.generator.marple_sequence()
ut.probe(signal)
References
----------
[1] S.L. Marple,
Digital spectral analysis with applications.
     New York: Prentice-Hall, 1986.
[2a] Cokelaer et al, (2017),
'Spectrum': Spectral Analysis in Python,
Journal of Open Source Software, 2(18),
348, doi:10.21105/joss.00348
[2b] https://pyspectrum.readthedocs.io/en/latest/
[2c] https://github.com/cokelaer/spectrum
'''
marple_data = [
1.349839091+ 2.011167288j,
-2.117270231+ 0.817693591j,
-1.786421657- 1.291698933j,
1.162236333- 1.482598066j,
1.641072035+ 0.372950256j,
0.072213709+ 1.828492761j,
-1.564284801+ 0.824533045j,
-1.080565453- 1.869776845j,
0.927129090- 1.743406534j,
1.891979456+ 0.972347319j,
-0.105391249+ 1.602209687j,
-1.618367076+ 0.637513280j,
-0.945704579- 1.079569221j,
1.135566235- 1.692269921j,
1.855816245+ 0.986030221j,
-1.032083511+ 1.414613724j,
-1.571600199+ 0.089229003j,
-0.243143231- 1.444692016j,
0.838980973- 0.985756695j,
1.516003132+ 0.928058863j,
0.257979959+ 1.170676708j,
-2.057927608+ 0.343388647j,
-0.578682184- 1.441192508j,
1.584011555- 1.011150956j,
0.614114344+ 1.508176208j,
-0.710567117+ 1.130144477j,
-1.100205779- 0.584209621j,
0.150702029- 1.217450142j,
0.748856127- 0.804411888j,
0.795235813+ 1.114466429j,
-0.071512341+ 1.017092347j,
-1.732939839- 0.283070654j,
0.404945314- 0.781708360j,
1.293794155- 0.352723092j,
-0.119905084+ 0.905150294j,
-0.522588372+ 0.437393665j,
-0.974838495- 0.670074046j,
0.275279552- 0.509659231j,
0.854210198- 0.008278057j,
0.289598197+ 0.506233990j,
-0.283553183+ 0.250371397j,
-0.359602571- 0.135261074j,
0.102775671- 0.466086507j,
-0.009722650+ 0.030377999j,
0.185930878+ 0.808869600j,
-0.243692726- 0.200126961j,
-0.270986766- 0.460243553j,
0.399368525+ 0.249096692j,
-0.250714004- 0.362990230j,
0.419116348- 0.389185309j,
-0.050458215+ 0.702862442j,
-0.395043731+ 0.140808776j,
0.746575892- 0.126762003j,
-0.559076190+ 0.523169816j,
-0.344389260- 0.913451135j,
0.733228028- 0.006237417j,
-0.480273813+ 0.509469569j,
0.033316225+ 0.087501869j,
-0.321229130- 0.254548967j,
-0.063007891- 0.499800682j,
1.239739418- 0.013479125j,
0.083303742+ 0.673984587j,
-0.762731433+ 0.408971250j,
-0.895898521- 0.364855707j]
return np.asarray(marple_data) | 31.06 | 53 | 0.606246 | 359 | 3,106 | 5.236769 | 0.56546 | 0.027128 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.575372 | 0.265293 | 3,106 | 100 | 54 | 31.06 | 0.248028 | 0.227302 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.014706 | false | 0 | 0.014706 | 0 | 0.044118 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
89195d2c36779798584689308f4ae52e7d568dbf | 728 | py | Python | polls/views.py | robsonleal/django_enquete | 5dd2743d62ba82d5e63b6210b4893592677def80 | [
"MIT"
] | null | null | null | polls/views.py | robsonleal/django_enquete | 5dd2743d62ba82d5e63b6210b4893592677def80 | [
"MIT"
] | null | null | null | polls/views.py | robsonleal/django_enquete | 5dd2743d62ba82d5e63b6210b4893592677def80 | [
"MIT"
] | null | null | null | from django.views import generic
from django.utils import timezone
from .models import Question
class IndexView(generic.ListView):
template_name = 'polls/index.html'
context_object_name = 'latest_question_list'
def get_queryset(self):
""" Retorna as 5 últimas questões(não inclui publicações futuras) """
return Question.objects.filter(
pub_date__lte=timezone.now()
).order_by('-pub_date')[:5]
class DetailView(generic.DetailView):
model = Question
template_name = 'polls/detail.html'
class ResultsView(generic.DetailView):
model = Question
template_name = 'polls/results.html'
def vote(request, question_id):
... # same as above, no changes needed.
| 24.266667 | 77 | 0.706044 | 89 | 728 | 5.617978 | 0.629213 | 0.072 | 0.102 | 0.12 | 0.188 | 0.188 | 0.188 | 0 | 0 | 0 | 0 | 0.003407 | 0.193681 | 728 | 29 | 78 | 25.103448 | 0.848382 | 0.133242 | 0 | 0.111111 | 0 | 0 | 0.128205 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.166667 | 0 | 0.833333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
8919dcadf341d13761481e587ce0aa0f760975fb | 3,550 | py | Python | buildtest/tools/unittests.py | buildntest/buildtest | d371048631cdd33ae7bf66f795f5afed83491a90 | [
"MIT"
] | 29 | 2017-10-20T02:47:10.000Z | 2020-03-26T17:24:03.000Z | buildtest/tools/unittests.py | shahzebsiddiqui/testgen-HPC | e69d9334cf2939af4fca59e75f397b0b1edbbfaf | [
"MIT"
] | 219 | 2017-08-25T13:21:53.000Z | 2020-04-18T19:07:05.000Z | buildtest/tools/unittests.py | shahzebsiddiqui/BuildTest | 5a04641f37ba588c906112b3848249b241061a9c | [
"MIT"
] | 5 | 2017-08-24T11:20:30.000Z | 2020-02-21T04:28:40.000Z | import argparse
import os
import shutil
import sys
import coverage
import pytest
from buildtest.defaults import (
BUILDTEST_ROOT,
BUILDTEST_UNITTEST_ROOT,
BUILDTEST_USER_HOME,
VAR_DIR,
console,
)
from buildtest.utils.file import is_dir, resolve_path
def run_unit_tests(pytestopts=None, sourcefiles=None, enable_coverage=False):
"""Entry point for running buildtest unit tests. This method can be invoked via ``buildtest unittests`` or run
via command line as standalone program. The unit tests are run via `pytest <https://docs.pytest.org/>`_ and `coverage <https://coverage.readthedocs.io/en/6.2/>`_
for measuring coverage report. This method will report coverage results that can be viewable in html or json.
Args:
pytestopts (str): Specify options to pytest command.
sourcefiles (list): List of source files to run with pytest
enable_coverage (bool): Enable coverage when running regression test
"""
if not os.getenv("BUILDTEST_ROOT"):
sys.exit(
"Please check your buildtest installation by running 'source setup.sh'"
)
pytestopts = pytestopts.split() if pytestopts else []
sources = []
# if --sourcefiles specified we resolve path to each argument otherwise default to BUILDTEST_UNITTEST_ROOT which is root of test directory
sourcefiles = sourcefiles or [BUILDTEST_UNITTEST_ROOT]
for fpath in sourcefiles:
sources.append(resolve_path(fpath))
# need to remove any None types from list since resolve_path method can return None if path is invalid
sources = list(filter(None, sources))
pytest_cmd = pytestopts + sources
html_dir = os.path.join(BUILDTEST_ROOT, "htmlcov")
if is_dir(BUILDTEST_USER_HOME):
shutil.rmtree(BUILDTEST_USER_HOME)
if is_dir(VAR_DIR):
shutil.rmtree(VAR_DIR)
cov = coverage.Coverage(branch=True)
# run regression test with coverage if --coverage is specified
if enable_coverage:
cov.erase()
cov.start()
# run regression test
retcode = pytest.main(pytest_cmd)
# if there is a failure in pytest raise exit 1
if retcode == pytest.ExitCode.TESTS_FAILED:
sys.exit(1)
if enable_coverage:
cov.stop()
cov.html_report(title="buildtest unittests coverage report", directory=html_dir)
cov.json_report(outfile=os.path.join(BUILDTEST_ROOT, "coverage.json"))
cov.report(ignore_errors=True, skip_empty=True, sort="-cover", precision=2)
print("\n\n")
console.print("Writing coverage results to: ", html_dir)
coverage_file = os.path.join(html_dir, "index.html")
assert os.path.exists(coverage_file)
console.print("You can view coverage report by viewing file: ", coverage_file)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
prog="unittest",
description="Run buildtest unit tests",
)
parser.add_argument(
"-c",
"--coverage",
action="store_true",
help="Enable coverage when running regression test",
)
parser.add_argument("-p", "--pytestopts", type=str, help="Specify option to pytest")
parser.add_argument(
"-s",
"--sourcefiles",
type=str,
help="Specify path to file or directory when running regression test",
action="append",
)
args = parser.parse_args()
run_unit_tests(
pytestopts=args.pytestopts,
sourcefiles=args.sourcefiles,
enable_coverage=args.coverage,
)
| 32.568807 | 165 | 0.684225 | 454 | 3,550 | 5.211454 | 0.365639 | 0.04142 | 0.026627 | 0.031699 | 0.052409 | 0.032967 | 0 | 0 | 0 | 0 | 0 | 0.001818 | 0.225352 | 3,550 | 108 | 166 | 32.87037 | 0.858545 | 0.266197 | 0 | 0.054795 | 0 | 0 | 0.179267 | 0 | 0 | 0 | 0 | 0 | 0.013699 | 1 | 0.013699 | false | 0 | 0.109589 | 0 | 0.123288 | 0.041096 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
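Because of the argparse block at the bottom, the module above doubles as a standalone runner. Two equivalent ways to drive it; the test paths here are illustrative:

# As a library call:
run_unit_tests(pytestopts="-v", sourcefiles=["tests/cli"], enable_coverage=True)

# From the shell, using the module's own CLI:
#   python buildtest/tools/unittests.py -c -p "-v" -s tests/cli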
8919eadb500a2ec6192616456774d8da32531229 | 2,909 | py | Python | dashboard/__init__.py | VisionTale/StreamHelper-overlay | f50ea9ddfecf177db10c4ee99eac6880d362c3cc | [
"MIT"
] | null | null | null | dashboard/__init__.py | VisionTale/StreamHelper-overlay | f50ea9ddfecf177db10c4ee99eac6880d362c3cc | [
"MIT"
] | null | null | null | dashboard/__init__.py | VisionTale/StreamHelper-overlay | f50ea9ddfecf177db10c4ee99eac6880d362c3cc | [
"MIT"
] | null | null | null | from os.path import realpath, join, dirname
from flask import render_template, request, flash
from flask_login import login_required
from flask_wtf.csrf import validate_csrf
from wtforms.validators import ValidationError
from webapi.libs.network import is_up
from webapi.libs.text import camel_case
from .. import bp, name
from .forms import create_data_form, create_settings_form
from .caspar_connector import is_caspar_up
from .backend_connector import backend_request
@bp.route('/', methods=['POST', 'GET'])
@bp.route('/dashboard', methods=['POST', 'GET'])
@login_required
def dashboard():
"""
Create a dashboard page.
:return:
"""
from .. import config
# Create default config values
_init_server_config()
# Create the server settings form
settings = create_settings_form(config)
# Save config values if submitted
if settings.validate_on_submit():
_set_server_config(settings.server.data, settings.overlay_server.data)
# Read config values
server, port = _get_caspar_server_and_port()
if not is_up(server):
# Check if server is not reachable
flash(f"Server {server} is not reachable")
reachable = False
elif not is_caspar_up(server, port):
# Check if CasparCG server is not reachable
flash(f"CasparCG server on route {server} port {port} not reachable")
reachable = False
else:
reachable = True
# If a form was submitted, check the csrf token for security
validated = False
if request.form.get('csrf_token'):
try:
validate_csrf(request.form.get('csrf_token'))
validated = True
except ValidationError as e:
flash(f'ValidationError: {e}')
if reachable and validated:
backend_request(request.form.to_dict())
    # MAGIC: scan the definitions module for *_definition attributes to build the form values
from .. import definitions
defs = dict()
for e in dir(definitions):
if e.endswith('_definition'):
defs[e.replace('_definition', '')] = getattr(definitions, e)
form_list = list()
for e in defs:
# Create the form for the overlay and save it
form = create_data_form(defs[e], e)
form_list.append((camel_case(e, '_'), form))
return render_template('overlay_dashboard.html', settings=settings, forms=form_list)
def _init_server_config():
from .. import config
config.set_if_none(name, 'server', 'localhost:5250')
config.set_if_none(name, 'overlay_server', 'http://localhost:5000/overlay/')
def _set_server_config(server: str, overlay_server: str):
from .. import config
if '://' in server:
server = server.split('://')[1]
config.set(name, 'server', server)
config.set(name, 'overlay_server', overlay_server)
def _get_caspar_server_and_port() -> tuple:
from .. import config
return config.get(name, 'server').split(':')
| 29.989691 | 88 | 0.682709 | 382 | 2,909 | 5.013089 | 0.282723 | 0.031332 | 0.03342 | 0.031332 | 0.093995 | 0.027154 | 0 | 0 | 0 | 0 | 0 | 0.003939 | 0.214507 | 2,909 | 96 | 89 | 30.302083 | 0.834136 | 0.12616 | 0 | 0.098361 | 0 | 0 | 0.118536 | 0.008751 | 0 | 0 | 0 | 0 | 0 | 1 | 0.065574 | false | 0 | 0.262295 | 0 | 0.360656 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
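The loop over dir(definitions) above builds one form per module attribute whose name ends in `_definition`. A hypothetical definitions module matching that convention; the field specs are assumptions, since the real shape depends on what create_data_form expects:

# definitions.py (hypothetical)
lower_third_definition = {"title": "", "subtitle": ""}
scoreboard_definition = {"home_score": 0, "away_score": 0}
# dashboard() would render these as forms labelled LowerThird and Scoreboard
# (via camel_case(e, '_')) and POST their submitted data to the backend.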
891b38b7dc3a77153c98be492001da8cd2a61caf | 3,536 | py | Python | web/openerp/addons/base/tests/test_ir_attachment.py | diogocs1/comps | 63df07f6cf21c41e4527c06e2d0499f23f4322e7 | [
"Apache-2.0"
] | 1 | 2019-12-29T11:53:56.000Z | 2019-12-29T11:53:56.000Z | odoo/openerp/addons/base/tests/test_ir_attachment.py | tuanquanghpvn/odoo8-tutorial | 52d25f1ca5f233c431cb9d3b24b79c3b4fb5127e | [
"MIT"
] | null | null | null | odoo/openerp/addons/base/tests/test_ir_attachment.py | tuanquanghpvn/odoo8-tutorial | 52d25f1ca5f233c431cb9d3b24b79c3b4fb5127e | [
"MIT"
] | 3 | 2020-10-08T14:42:10.000Z | 2022-01-28T14:12:29.000Z | import hashlib
import os
import openerp
import openerp.tests.common
HASH_SPLIT = 2 # FIXME: testing implementations detail is not a good idea
class test_ir_attachment(openerp.tests.common.TransactionCase):
def setUp(self):
super(test_ir_attachment, self).setUp()
registry, cr, uid = self.registry, self.cr, self.uid
self.ira = registry('ir.attachment')
self.filestore = self.ira._filestore(cr, uid)
# Blob1
self.blob1 = 'blob1'
self.blob1_b64 = self.blob1.encode('base64')
blob1_hash = hashlib.sha1(self.blob1).hexdigest()
self.blob1_fname = blob1_hash[:HASH_SPLIT] + '/' + blob1_hash
# Blob2
blob2 = 'blob2'
self.blob2_b64 = blob2.encode('base64')
def test_01_store_in_db(self):
registry, cr, uid = self.registry, self.cr, self.uid
# force storing in database
registry('ir.config_parameter').set_param(cr, uid, 'ir_attachment.location', 'db')
# 'ir_attachment.location' is undefined test database storage
a1 = self.ira.create(cr, uid, {'name': 'a1', 'datas': self.blob1_b64})
a1_read = self.ira.read(cr, uid, [a1], ['datas'])
self.assertEqual(a1_read[0]['datas'], self.blob1_b64)
a1_db_datas = self.ira.browse(cr, uid, a1).db_datas
self.assertEqual(a1_db_datas, self.blob1_b64)
def test_02_store_on_disk(self):
registry, cr, uid = self.registry, self.cr, self.uid
a2 = self.ira.create(cr, uid, {'name': 'a2', 'datas': self.blob1_b64})
a2_store_fname = self.ira.browse(cr, uid, a2).store_fname
self.assertEqual(a2_store_fname, self.blob1_fname)
self.assertTrue(os.path.isfile(os.path.join(self.filestore, a2_store_fname)))
def test_03_no_duplication(self):
registry, cr, uid = self.registry, self.cr, self.uid
a2 = self.ira.create(cr, uid, {'name': 'a2', 'datas': self.blob1_b64})
a2_store_fname = self.ira.browse(cr, uid, a2).store_fname
a3 = self.ira.create(cr, uid, {'name': 'a3', 'datas': self.blob1_b64})
a3_store_fname = self.ira.browse(cr, uid, a3).store_fname
self.assertEqual(a3_store_fname, a2_store_fname)
def test_04_keep_file(self):
registry, cr, uid = self.registry, self.cr, self.uid
a2 = self.ira.create(cr, uid, {'name': 'a2', 'datas': self.blob1_b64})
a3 = self.ira.create(cr, uid, {'name': 'a3', 'datas': self.blob1_b64})
a2_store_fname = self.ira.browse(cr, uid, a2).store_fname
a2_fn = os.path.join(self.filestore, a2_store_fname)
self.ira.unlink(cr, uid, [a3])
self.assertTrue(os.path.isfile(a2_fn))
        # after a2 is deleted too, the file itself is removed (nothing references it anymore)
self.ira.unlink(cr, uid, [a2])
self.assertFalse(os.path.isfile(a2_fn))
def test_05_change_data_change_file(self):
registry, cr, uid = self.registry, self.cr, self.uid
a2 = self.ira.create(cr, uid, {'name': 'a2', 'datas': self.blob1_b64})
a2_store_fname = self.ira.browse(cr, uid, a2).store_fname
a2_fn = os.path.join(self.filestore, a2_store_fname)
self.assertTrue(os.path.isfile(a2_fn))
self.ira.write(cr, uid, [a2], {'datas': self.blob2_b64})
self.assertFalse(os.path.isfile(a2_fn))
new_a2_store_fname = self.ira.browse(cr, uid, a2).store_fname
self.assertNotEqual(a2_store_fname, new_a2_store_fname)
new_a2_fn = os.path.join(self.filestore, new_a2_store_fname)
self.assertTrue(os.path.isfile(new_a2_fn))
| 38.021505 | 90 | 0.64819 | 520 | 3,536 | 4.209615 | 0.171154 | 0.059388 | 0.098675 | 0.080402 | 0.579717 | 0.510279 | 0.486067 | 0.417085 | 0.373687 | 0.356327 | 0 | 0.043088 | 0.212387 | 3,536 | 92 | 91 | 38.434783 | 0.742908 | 0.050622 | 0 | 0.366667 | 0 | 0 | 0.051045 | 0.006567 | 0 | 0 | 0 | 0.01087 | 0.183333 | 1 | 0.1 | false | 0 | 0.066667 | 0 | 0.183333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
891b9abd04593024e436067023f0359758fe9dff | 13,412 | py | Python | UserCode/John/MergeBinaries.py | RunzZhang/SBCcode | e75b8e751cec5fb2c28950edef0c82f005caedcb | [
"MIT"
] | 4 | 2018-08-27T18:02:34.000Z | 2020-06-09T21:19:04.000Z | UserCode/John/MergeBinaries.py | RunzZhang/SBCcode | e75b8e751cec5fb2c28950edef0c82f005caedcb | [
"MIT"
] | null | null | null | UserCode/John/MergeBinaries.py | RunzZhang/SBCcode | e75b8e751cec5fb2c28950edef0c82f005caedcb | [
"MIT"
] | 4 | 2019-06-20T21:36:26.000Z | 2020-11-10T17:23:14.000Z | ## John Gresl
import os
from collections import defaultdict, OrderedDict
import time
from copy import deepcopy
import numpy as np
from SBCcode.DataHandling.ReadBinary import ReadBlock as RB
from SBCcode.DataHandling.WriteBinary import WriteBinaryNtupleFile as WB
from SBCcode.AnalysisModules.AnalyzeDytran import dytranAnalysis as da
from SBCcode.AnalysisModules.EventAnalysis import EventAnalysis as eva
from SBCcode.AnalysisModules.ImageAnalysis import BubbleFinder
from SBCcode.AnalysisModules.AcousticT0 import AcousticAnalysis as aa
from SBCcode.AnalysisModules.PMTComprehensiveModule import PMTcm as pmtpa
from SBCcode.AnalysisModules.PMTfastDAQalignment import PMTandFastDAQalignment as pmtfda
from SBCcode.AnalysisModules.PTData import main as ptd
from SBCcode.AnalysisModules.TimingAnalysis import TimingAnalysis as ta
from SBCcode.DataHandling.GetSBCEvent import GetEvent as get_event
from SBCcode.DataHandling.WriteBinary import WriteBinaryNtupleFile as wb
from SBCcode.UserCode.John.NewT0 import calculate_t0 as calculate_t0
def sort_runs(arr):
# Input:
# arr: An array of run_ids as strings. Should look like ["20170623_0", "20170623_5", etc...]
# Outputs: A natural-ish sorted version that puts the dates in order and the run numbers for each date in order
dates_only = []
runs_only = []
for run_id in arr:
dates_only.append(run_id.split("_")[0])
runs_only.append(run_id.split("_")[1])
run_dict = defaultdict(list)
for date, run in zip(dates_only, runs_only):
run_dict[date].append(run)
k = sorted(list(run_dict.keys()), key=int)
out_list = []
for date in k:
run_ids_d = sorted(run_dict[date], key=int)
for run_id in run_ids_d:
out_list.append(date+"_"+run_id)
return out_list
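# Hedged usage sketch:
#   sort_runs(["20170623_5", "20170601_2", "20170623_0"])
#   -> ["20170601_2", "20170623_0", "20170623_5"]
# Dates ascend first, then run numbers within each date (both compared as ints).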
def trim_runlist(arr, start=None, stop=None):
# Inputs:
# arr: An array of run_ids as strings. Should look like ["20170623_0", "20170623_5", etc...]
# start: Start run number. If this is not supplied, will start at the beginning
# stop: Stop run number. If this is not supplied, will continue to end
# Outputs: A sorted, trimmed runlist that goes from start to stop
arr = sort_runs(arr)
start = arr[0] if start == None else start
stop = arr[-1] if stop == None else stop
start_date = int(start.split("_")[0])
start_run_num = int(start.split("_")[1])
stop_date = int(stop.split("_")[0])
stop_run_num = int(stop.split("_")[1])
out = [ ]
for run in arr:
date = int(run.split("_")[0])
run_num = int(run.split("_")[1])
if start_date > date or date > stop_date:
continue
if (start_run_num > run_num and date == start_date) or (run_num > stop_run_num and date == stop_date):
continue
out.append(run)
return out
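# Hedged usage sketch: trim_runlist(runs, start="20170623_3", stop="20170701_0")
# keeps only the runs from 20170623_3 through 20170701_0, inclusive, in sorted order.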
def dictionary_append(d1, *args):
# Inputs:
# d1: Dictionary
# args: Any number of dictionaries
# Outputs: A dictionary with the same keys, but the values are the values from args appended to the values of d1
# Note: The keys in d1 and args MUST match, and all of the values MUST be lists.
d1 = deepcopy(d1)
if len(args) == 0:
raise TypeError("dictionary_append must be called with at least 2 arguments.")
for arg in args:
if type(arg) not in [dict, defaultdict, OrderedDict]:
raise TypeError("args must be dictionaries!")
for d2 in args:
if not set(d1.keys()) == set(d2.keys()):
raise KeyError("The keys for the two dictionaries must match! Mismatched keys = {}".\
format(set(d1.keys()).symmetric_difference(set(d2.keys()))))
for d2 in args: # This '2nd' for loop because we want to make sure all the keys are the same first.
for k,v in d2.items():
if type(v) not in [list, np.ndarray]:
raise ValueError("The values of the dictionary MUST be list or np.ndarray. Key {} has type(value)={}".\
format(k, type(v)))
try:
d1[k].extend(v) # <-- If we have python lists
except AttributeError:
print("DEBUG:", k, d1[k].shape, v.shape)
d1[k] = np.append(d1[k], v, axis=0) # <-- If we have numpy arrays
return d1
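# Hedged usage sketch:
#   dictionary_append({"ev": [1]}, {"ev": [2]}, {"ev": [3]})  ->  {"ev": [1, 2, 3]}
# List values are extended on a deep copy of d1; numpy array values fall back to
# np.append along axis 0.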
if __name__ == "__main__":
file_templates = [#"AcousticAnalysis_{runid}.bin",
#"DytranAnalysis_{runid}.bin",
#"EventAnalysis_{runid}.bin",
#"HistoryAnalysis_{runid}.bin",
#"ImageAnalysis_{runid}.bin",
#"PMTfastDAQalignment_{runid}.bin",
#"PMTpulseAnalysis_{runid}.bin",
"TimingAnalysis_{runid}.bin",
]
defaults = [#aa(None, None),
#da(None),
#eva(None),
#ptd(None),
#BubbleFinder(None, None, None, None, None, None),
#pmtfda(None),
#pmtpa(None),
ta(None, None, None),
#calculate_t0(None, None, None, None)
]
p_list = [(f, d) for f, d in zip(file_templates, defaults)]
recon_directory = "/pnfs/coupp/persistent/grid_output/SBC-17-T0Test3/output/"
output_directory = "/nashome/j/jgresl/"
runid_list = sort_runs([f for f in os.listdir(recon_directory) if os.path.isdir(os.path.join(recon_directory, f))])
runid_list = trim_runlist(runid_list, start="20170623_3")
bad_run_file = "BadRunsV6.npy"
remake_badruns = False
if remake_badruns:
print("Building bad run list.")
bad_runs = defaultdict()
for f_temp in file_templates:
bad_runs[f_temp] = set()
bad_runs["AcousticAnalysis_{runid}.bin"] = set()
for f_temp in file_templates:
if f_temp != "AcousticTEST_{runid}.bin":
for runid in runid_list:
if not os.path.isfile(os.path.join(recon_directory, runid, f_temp.format(runid=runid))):
print("\tSkipping {}. File not present."\
.format(os.path.join(recon_directory, runid, f_temp.format(runid=runid))))
bad_runs[f_temp].add(runid)
continue
try:
RB(os.path.join(recon_directory, runid, f_temp.format(runid=runid)), max_file_size=800)
except IndexError:
print("\tSkipping {}. File exists, but unable to read properly." \
.format(os.path.join(recon_directory, runid, f_temp.format(runid=runid))))
bad_runs[f_temp].add(runid)
except OSError:
print("\tSkipping {}. File above maximum file size. (Raise this in ReadBlock(...)" \
.format(os.path.join(recon_directory, runid, f_temp.format(runid=runid))))
bad_runs[f_temp].add(runid)
if f_temp == "AcousticTEST_{runid}.bin":
f_temp = "AcousticAnalysis_{runid}.bin"
for runid in runid_list:
recon_directory = "/pnfs/coupp/persistent/grid_output/SBC-17-T0Test2/output"
if not os.path.isfile(os.path.join(recon_directory, runid, f_temp.format(runid=runid))):
print("\tSkipping {}. File not present."\
.format(os.path.join(recon_directory, runid, f_temp.format(runid=runid))))
bad_runs[f_temp].add(runid)
continue
try:
RB(os.path.join(recon_directory, runid, f_temp.format(runid=runid)), max_file_size=800)
except IndexError:
print("\tSkipping {}. File exists, but unable to read properly." \
.format(os.path.join(recon_directory, runid, f_temp.format(runid=runid))))
bad_runs[f_temp].add(runid)
except OSError:
print("\tSkipping {}. File above maximum file size. (Raise this in ReadBlock(...)" \
.format(os.path.join(recon_directory, runid, f_temp.format(runid=runid))))
bad_runs[f_temp].add(runid)
print("----------")
print("-Bad Runs-")
print("----------")
for k,v in bad_runs.items():
print("{} has {} bad runs.".format(k, len(v)))
print("\tSaving bad runs to {}.".format(bad_run_file))
np.save(bad_run_file, bad_runs)
###############################################################################
###############################################################################
###############################################################################
###############################################################################
###############################################################################
do_merge = True
if do_merge:
print("Beginning to merge all files.")
big_out = []
tstart = time.time()
try:
            bad_runs = np.load(bad_run_file, allow_pickle=True).flat[0]  # .flat[0] unwraps the 0-d object array that np.save produces
        except Exception:
print("Couldn't find bad run file...")
bad_runs = defaultdict(list)
print("----------")
print("-Bad Runs-")
print("----------")
for k, v in bad_runs.items():
print("\t{} has {} bad runs.".format(k, len(v)))
bad_list_intersection = set.intersection(*bad_runs.values())
print("\t\tIntersection of bad events (failed for all analyses): {} items: {}".format(len(bad_list_intersection),
bad_list_intersection))
for f_temp, d_default in p_list:
if f_temp == "AcousticTEST_{runid}.bin":
recon_directory = "/pnfs/coupp/persistent/grid_output/SBC-17-T0Test/output/"
f_temp = "AcousticAnalysis_{runid}.bin"
t0 = time.time()
out = {}
first_real_run = True
print("\tStarting {}".format(f_temp))
n_to_process = len(runid_list) - len(bad_runs[f_temp])
n_processed = 0
for n, runid in enumerate(runid_list):
npev = np.array([int(runid.split("_")[1])], dtype=np.int32)
nprunid = np.int32(runid.split("_"))
print(nprunid)
if n%25 == 0: # Print a status message every 25 runs
print("\t\t{:.2f}% done with file {}".\
format(n_processed/n_to_process*100, f_temp))
if runid in bad_list_intersection: # If it failed for all analysis, skip it entirely.
continue
if runid in bad_runs[f_temp]:
# print("#####FAKE VALUES#####")
temp = deepcopy(d_default)
temp["runid"] = np.array([nprunid])
temp["ev"] = npev
if first_real_run:
out = temp
first_real_run = False
# big_out.append(out)
else:
# big_out.append(temp)
out = dictionary_append(out, temp)
continue
# print("\t\t\tRunID: {}".format(runid))
if first_real_run:
out = RB(os.path.join(recon_directory, runid, f_temp.format(runid=runid)), max_file_size=800)
# big_out.append(out)
first_real_run = False
else:
# print("******REAL VALUES*******")
d=RB(os.path.join(recon_directory, runid, f_temp.format(runid=runid)),
max_file_size=800)
# big_out.append(d)
try:
out = dictionary_append(out, d)
except:
print("Failed.#########")
if runid == "20170805_4":
for eee in range(12):
temp = deepcopy(d_default)
temp["runid"] = np.array([nprunid])
temp["ev"] = npev
out = dictionary_append(out, temp)
else:
temp = deepcopy(d_default)
temp["runid"] = np.array([nprunid])
temp["ev"] = npev
out = dictionary_append(out, temp)
n_processed += 1
t1 = time.time()
print("\tTook {:.2f} seconds to read input files for {}".format(t1 - t0, f_temp))
print("\tStarting to write output merged file.")
WB(os.path.join(output_directory, f_temp.format(runid="all")), out, rowdef=1)
t2 = time.time()
print("\tTook {:.2f} seconds to write merged file {}".format(t2 - t1, f_temp.format(runid="all")))
print("\tTook {:.2f} seconds for entire process to read and create {}". \
format(t2 - t0, f_temp.format(runid="all")))
tfinish = time.time()
| 49.308824 | 121 | 0.530122 | 1,542 | 13,412 | 4.457198 | 0.191959 | 0.025462 | 0.024007 | 0.034919 | 0.374363 | 0.332315 | 0.315001 | 0.292594 | 0.283282 | 0.262331 | 0 | 0.016357 | 0.329928 | 13,412 | 271 | 122 | 49.490775 | 0.748414 | 0.115419 | 0 | 0.351852 | 0 | 0 | 0.134898 | 0.030746 | 0 | 0 | 0 | 0 | 0 | 1 | 0.013889 | false | 0 | 0.083333 | 0 | 0.111111 | 0.12963 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
891bdc5511e0a29c4be2339f1dc933bccb992ce1 | 2,066 | py | Python | src/ndc/get_ndc.py | TEI-EAJ/auto_aozora_tei | 5535abef680a1e186f8a7dc6efc30a1dcf4efeec | [
"CC0-1.0"
] | 3 | 2019-02-12T13:28:22.000Z | 2021-07-25T20:58:07.000Z | src/ndc/get_ndc.py | TEI-EAJ/auto_aozora_tei | 5535abef680a1e186f8a7dc6efc30a1dcf4efeec | [
"CC0-1.0"
] | null | null | null | src/ndc/get_ndc.py | TEI-EAJ/auto_aozora_tei | 5535abef680a1e186f8a7dc6efc30a1dcf4efeec | [
"CC0-1.0"
] | 1 | 2019-02-12T22:04:00.000Z | 2019-02-12T22:04:00.000Z | import urllib.request
from bs4 import BeautifulSoup
import json
def extract(index):
url = "http://yozora.main.jp/"
html = urllib.request.urlopen(url)
    # parse the html with BeautifulSoup
soup = BeautifulSoup(html, "lxml")
a_list = soup.find_all(class_="navi")[index].find_all("a")
arr = []
for a in a_list:
id = a.get("href").split(".")[0]
name = a.text
print(name)
print("id\t" + id)
obj = {}
obj["name"] = name
obj["id"] = id
arr2 = []
obj["children"] = arr2
arr.append(obj)
url2 = "http://yozora.main.jp/" + id + ".html"
html2 = urllib.request.urlopen(url2)
        # parse the html with BeautifulSoup
soup2 = BeautifulSoup(html2, "lxml")
a_list2 = soup2.find(class_="navi").find_all("a")
for a2 in a_list2:
id2 = a2.get("href").split(".")[0]
print("id2\t" + id2)
obj2 = {}
obj2["name"] = a2.text
obj2["id"] = id2
arr3 = []
obj2["children"] = arr3
arr2.append(obj2)
url3 = "http://yozora.main.jp/" + id2 + ".html"
html3 = urllib.request.urlopen(url3)
            # parse the html with BeautifulSoup
soup3 = BeautifulSoup(html3, "lxml")
a_list3 = soup3.find(class_="navi").find_all("a")
for a3 in a_list3:
id3 = a3.get("href").split(".")[0]
id3 = id2.split("/")[0] + "/" + id3
print("id3\t" + id3)
obj3 = {}
obj3["name"] = a3.text
obj3["id"] = id3
obj3["value"] = 0
arr3.append(obj3)
return arr
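# Hedged note: extract() returns a three-level {"name", "id", "children"} tree;
# leaf nodes carry "value": 0, presumably so the JSON written below can feed a
# treemap-style visualization.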
result = {}
result["name"] = "all"
arr = []
result["children"] = arr
arr.append({
"name": "分野別トップ",
"children": extract(0)
})
arr.append({
"name": "児童書トップ",
"children": extract(1)
})
with open('data/ndc.json', 'w') as outfile:
json.dump(result, outfile, ensure_ascii=False, indent=4, sort_keys=True, separators=(',', ': '))
| 22.703297 | 100 | 0.494676 | 234 | 2,066 | 4.303419 | 0.34188 | 0.051639 | 0.041708 | 0.047666 | 0.047666 | 0.047666 | 0.047666 | 0 | 0 | 0 | 0 | 0.042972 | 0.335431 | 2,066 | 90 | 101 | 22.955556 | 0.690459 | 0.031462 | 0 | 0.098361 | 0 | 0 | 0.120681 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.016393 | false | 0 | 0.04918 | 0 | 0.081967 | 0.065574 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
891be27156d1066f41832049260bb47279ac1736 | 4,116 | py | Python | backend/server/task/views.py | munteanugabriel25/Javascript-Django-TodoList- | e3cb8d4a573dfbb84960839b7a01a24a195c7755 | [
"Unlicense"
] | null | null | null | backend/server/task/views.py | munteanugabriel25/Javascript-Django-TodoList- | e3cb8d4a573dfbb84960839b7a01a24a195c7755 | [
"Unlicense"
] | null | null | null | backend/server/task/views.py | munteanugabriel25/Javascript-Django-TodoList- | e3cb8d4a573dfbb84960839b7a01a24a195c7755 | [
"Unlicense"
] | null | null | null | from django.shortcuts import render
from rest_framework.views import APIView
from rest_framework import status
from .serializers import TaskListCreateSerializer, UserCreateSerializer, UserLoginSerializer, UserSerializer, TaskUpdateSerializer
from rest_framework.response import Response
from .models import Task
from rest_framework.views import csrf_exempt
from django.contrib.auth import login, authenticate
from rest_framework.authtoken.models import Token
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from rest_framework.permissions import IsAuthenticated
# Create your views here.
class ListCreateApiView(APIView):
permission_classes = [IsAuthenticated]
def get(self, request):
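        # The optional "period" query parameter narrows the queryset:
        # "week" -> next week's tasks, any other value -> today's, absent -> all.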
        period = request.GET.get("period", None)  # renamed from `filter` to avoid shadowing the builtin
        user = get_object_or_404(User, username=request.user)
        if period is not None:
            if period == 'week':
                query = Task.objects.next_week_tasks(user.id)
            else:
                query = Task.objects.today_tasks(user.id)
        else:
            query = Task.objects.all_tasks(user.id)
serializer = TaskListCreateSerializer(query, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
def post(self, request):
user = get_object_or_404(User,username=request.user)
serializer = TaskListCreateSerializer(data=request.data, partial=True)
if serializer.is_valid():
serializer.save(user=user)
return Response(serializer.data, status=status.HTTP_201_CREATED)
else:
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class UserRegisterApiView(APIView):
serializer_class = UserCreateSerializer
def post(self, request):
serializer = UserCreateSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
else:
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class UserLoginApiView(APIView):
def post(self, request):
serializer = UserLoginSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.validated_data["user"]
token, created = Token.objects.get_or_create(user=user)
user_serializer = UserSerializer(user, context={"request":request})
return Response(user_serializer.data, status=status.HTTP_200_OK)
class RetrieveDeleteUpdateApiView(APIView):
permission_classes = [IsAuthenticated]
def get(self, request, pk):
task_object = get_object_or_404(Task, pk=pk)
user = get_object_or_404(User, username=request.user)
if task_object.user == user:
serializer = TaskUpdateSerializer(task_object)
return Response(serializer.data, status=status.HTTP_200_OK)
else:
return Response({"status": "you don't have permissions for this task"}, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, pk):
task_object = get_object_or_404(Task, pk=pk)
user = get_object_or_404(User, username=request.user)
if task_object.user == user:
task_object.delete()
return Response({"status": "deleted"}, status=status.HTTP_200_OK)
else:
return Response({"status": "you don't have permissions for this task"}, status=status.HTTP_400_BAD_REQUEST)
def put(self, request, pk):
task_object = get_object_or_404(Task, pk=pk)
user = get_object_or_404(User, username=request.user)
serializer = TaskUpdateSerializer(task_object, data =request.data, partial=True)
if serializer.is_valid():
serializer.save()
return Response({"status": "updated"}, status=status.HTTP_200_OK)
else:
print(serializer.errors)
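            # NOTE (editor): the response below reuses the permissions message even
            # though this branch is a validation failure; returning serializer.errors
            # (as post() does) would be clearer.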
return Response({"status": "you don't have permissions for this task"}, status=status.HTTP_400_BAD_REQUEST) | 40.752475 | 130 | 0.688047 | 478 | 4,116 | 5.740586 | 0.198745 | 0.061224 | 0.069971 | 0.045918 | 0.581268 | 0.514942 | 0.505831 | 0.470481 | 0.416545 | 0.388484 | 0 | 0.019743 | 0.224733 | 4,116 | 101 | 131 | 40.752475 | 0.840175 | 0.005588 | 0 | 0.455696 | 0 | 0 | 0.04521 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.088608 | false | 0 | 0.151899 | 0 | 0.481013 | 0.012658 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
891c8b6da0ab48f652a004e57633049c482f9248 | 15,003 | py | Python | bookwyrm/preview_images.py | mouse-reeve/fedireads | e3471fcc3500747a1b1deaaca662021aae5b08d4 | [
"CC0-1.0"
] | 270 | 2020-01-27T06:06:07.000Z | 2020-06-21T00:28:18.000Z | bookwyrm/preview_images.py | mouse-reeve/fedireads | e3471fcc3500747a1b1deaaca662021aae5b08d4 | [
"CC0-1.0"
] | 158 | 2020-02-10T20:36:54.000Z | 2020-06-26T17:12:54.000Z | bookwyrm/preview_images.py | mouse-reeve/fedireads | e3471fcc3500747a1b1deaaca662021aae5b08d4 | [
"CC0-1.0"
] | 15 | 2020-02-13T21:53:33.000Z | 2020-06-17T16:52:46.000Z | """ Generate social media preview images for twitter/mastodon/etc """
import math
import os
import textwrap
from io import BytesIO
from uuid import uuid4
import logging
import colorsys
from colorthief import ColorThief
from PIL import Image, ImageDraw, ImageFont, ImageOps, ImageColor
from django.core.files.base import ContentFile
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.core.files.storage import default_storage
from django.db.models import Avg
from bookwyrm import models, settings
from bookwyrm.tasks import app
logger = logging.getLogger(__name__)
IMG_WIDTH = settings.PREVIEW_IMG_WIDTH
IMG_HEIGHT = settings.PREVIEW_IMG_HEIGHT
BG_COLOR = settings.PREVIEW_BG_COLOR
TEXT_COLOR = settings.PREVIEW_TEXT_COLOR
DEFAULT_COVER_COLOR = settings.PREVIEW_DEFAULT_COVER_COLOR
DEFAULT_FONT = settings.PREVIEW_DEFAULT_FONT
TRANSPARENT_COLOR = (0, 0, 0, 0)
margin = math.floor(IMG_HEIGHT / 10)
gutter = math.floor(margin / 2)
inner_img_height = math.floor(IMG_HEIGHT * 0.8)
inner_img_width = math.floor(inner_img_height * 0.7)
def get_imagefont(name, size):
"""Loads an ImageFont based on config"""
try:
config = settings.FONTS[name]
path = os.path.join(settings.FONT_DIR, config["directory"], config["filename"])
return ImageFont.truetype(path, size)
except KeyError:
logger.error("Font %s not found in config", name)
except OSError:
logger.error("Could not load font %s from file", name)
return ImageFont.load_default()
def get_font(weight, size=28):
"""Gets a custom font with the given weight and size"""
font = get_imagefont(DEFAULT_FONT, size)
try:
if weight == "light":
font.set_variation_by_name("Light")
if weight == "bold":
font.set_variation_by_name("Bold")
if weight == "regular":
font.set_variation_by_name("Regular")
except AttributeError:
pass
return font
def get_wrapped_text(text, font, content_width):
"""text wrap length depends on the max width of the content"""
low = 0
high = len(text)
try:
# ideal length is determined via binary search
while low < high:
            mid = math.ceil((low + high) / 2)  # upper-biased midpoint so `low = mid` below always makes progress
wrapped_text = textwrap.fill(text, width=mid)
width = font.getsize_multiline(wrapped_text)[0]
if width < content_width:
low = mid
else:
high = mid - 1
except AttributeError:
wrapped_text = text
return wrapped_text
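# Hedged usage sketch: get_wrapped_text("A fairly long book title", font, 550)
# returns the title re-wrapped at the largest character width whose rendered
# pixel width still fits inside 550px (illustrative numbers only).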
def generate_texts_layer(texts, content_width):
"""Adds text for images"""
font_text_zero = get_font("bold", size=20)
font_text_one = get_font("bold", size=48)
font_text_two = get_font("bold", size=40)
font_text_three = get_font("regular", size=40)
text_layer = Image.new("RGBA", (content_width, IMG_HEIGHT), color=TRANSPARENT_COLOR)
text_layer_draw = ImageDraw.Draw(text_layer)
text_y = 0
if "text_zero" in texts and texts["text_zero"]:
        # Text zero (small header line, e.g. the instance domain)
text_zero = get_wrapped_text(texts["text_zero"], font_text_zero, content_width)
text_layer_draw.multiline_text(
(0, text_y), text_zero, font=font_text_zero, fill=TEXT_COLOR
)
try:
text_y = text_y + font_text_zero.getsize_multiline(text_zero)[1] + 16
except (AttributeError, IndexError):
text_y = text_y + 26
if "text_one" in texts and texts["text_one"]:
# Text one (Book title)
text_one = get_wrapped_text(texts["text_one"], font_text_one, content_width)
text_layer_draw.multiline_text(
(0, text_y), text_one, font=font_text_one, fill=TEXT_COLOR
)
try:
text_y = text_y + font_text_one.getsize_multiline(text_one)[1] + 16
except (AttributeError, IndexError):
text_y = text_y + 26
if "text_two" in texts and texts["text_two"]:
        # Text two (Book subtitle)
text_two = get_wrapped_text(texts["text_two"], font_text_two, content_width)
text_layer_draw.multiline_text(
(0, text_y), text_two, font=font_text_two, fill=TEXT_COLOR
)
try:
            text_y = text_y + font_text_two.getsize_multiline(text_two)[1] + 16
except (AttributeError, IndexError):
text_y = text_y + 26
if "text_three" in texts and texts["text_three"]:
# Text three (Book authors)
text_three = get_wrapped_text(
texts["text_three"], font_text_three, content_width
)
text_layer_draw.multiline_text(
(0, text_y), text_three, font=font_text_three, fill=TEXT_COLOR
)
text_layer_box = text_layer.getbbox()
return text_layer.crop(text_layer_box)
def generate_instance_layer(content_width):
"""Places components for instance preview"""
font_instance = get_font("light", size=28)
site = models.SiteSettings.objects.get()
if site.logo_small:
logo_img = Image.open(site.logo_small)
else:
try:
static_path = os.path.join(settings.STATIC_ROOT, "images/logo-small.png")
logo_img = Image.open(static_path)
except FileNotFoundError:
logo_img = None
instance_layer = Image.new("RGBA", (content_width, 62), color=TRANSPARENT_COLOR)
instance_text_x = 0
if logo_img:
logo_img.thumbnail((50, 50), Image.ANTIALIAS)
instance_layer.paste(logo_img, (0, 0))
instance_text_x = instance_text_x + 60
instance_layer_draw = ImageDraw.Draw(instance_layer)
instance_layer_draw.text(
(instance_text_x, 10), site.name, font=font_instance, fill=TEXT_COLOR
)
line_width = 50 + 10 + font_instance.getsize(site.name)[0]
line_layer = Image.new(
"RGBA", (line_width, 2), color=(*(ImageColor.getrgb(TEXT_COLOR)), 50)
)
instance_layer.alpha_composite(line_layer, (0, 60))
return instance_layer
def generate_rating_layer(rating, content_width):
"""Places components for rating preview"""
try:
icon_star_full = Image.open(
os.path.join(settings.STATIC_ROOT, "images/icons/star-full.png")
)
icon_star_empty = Image.open(
os.path.join(settings.STATIC_ROOT, "images/icons/star-empty.png")
)
icon_star_half = Image.open(
os.path.join(settings.STATIC_ROOT, "images/icons/star-half.png")
)
except FileNotFoundError:
return None
icon_size = 64
icon_margin = 10
rating_layer_base = Image.new(
"RGBA", (content_width, icon_size), color=TRANSPARENT_COLOR
)
rating_layer_color = Image.new("RGBA", (content_width, icon_size), color=TEXT_COLOR)
rating_layer_mask = Image.new(
"RGBA", (content_width, icon_size), color=TRANSPARENT_COLOR
)
position_x = 0
for _ in range(math.floor(rating)):
rating_layer_mask.alpha_composite(icon_star_full, (position_x, 0))
position_x = position_x + icon_size + icon_margin
if math.floor(rating) != math.ceil(rating):
rating_layer_mask.alpha_composite(icon_star_half, (position_x, 0))
position_x = position_x + icon_size + icon_margin
for _ in range(5 - math.ceil(rating)):
rating_layer_mask.alpha_composite(icon_star_empty, (position_x, 0))
position_x = position_x + icon_size + icon_margin
rating_layer_mask = rating_layer_mask.getchannel("A")
rating_layer_mask = ImageOps.invert(rating_layer_mask)
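    # Image.composite takes rating_layer_base where the inverted mask is 255
    # (the transparent background) and rating_layer_color where it is 0 (the
    # opaque star pixels), so only the stars end up tinted TEXT_COLOR.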
rating_layer_composite = Image.composite(
rating_layer_base, rating_layer_color, rating_layer_mask
)
return rating_layer_composite
def generate_default_inner_img():
"""Adds cover image"""
font_cover = get_font("light", size=28)
default_cover = Image.new(
"RGB", (inner_img_width, inner_img_height), color=DEFAULT_COVER_COLOR
)
default_cover_draw = ImageDraw.Draw(default_cover)
text = "no image :("
text_dimensions = font_cover.getsize(text)
text_coords = (
math.floor((inner_img_width - text_dimensions[0]) / 2),
math.floor((inner_img_height - text_dimensions[1]) / 2),
)
default_cover_draw.text(text_coords, text, font=font_cover, fill="white")
return default_cover
# pylint: disable=too-many-locals
# pylint: disable=too-many-statements
def generate_preview_image(
texts=None, picture=None, rating=None, show_instance_layer=True
):
"""Puts everything together"""
texts = texts or {}
# Cover
try:
inner_img_layer = Image.open(picture)
inner_img_layer.thumbnail((inner_img_width, inner_img_height), Image.ANTIALIAS)
color_thief = ColorThief(picture)
dominant_color = color_thief.get_color(quality=1)
except: # pylint: disable=bare-except
inner_img_layer = generate_default_inner_img()
dominant_color = ImageColor.getrgb(DEFAULT_COVER_COLOR)
# Color
if BG_COLOR in ["use_dominant_color_light", "use_dominant_color_dark"]:
red, green, blue = dominant_color
image_bg_color = f"rgb({red}, {green}, {blue})"
        # clamp lightness in HLS space so the text color stays readable
image_bg_color_rgb = [x / 255.0 for x in ImageColor.getrgb(image_bg_color)]
image_bg_color_hls = colorsys.rgb_to_hls(*image_bg_color_rgb)
if BG_COLOR == "use_dominant_color_light":
lightness = max(0.9, image_bg_color_hls[1])
else:
lightness = min(0.15, image_bg_color_hls[1])
image_bg_color_hls = (
image_bg_color_hls[0],
lightness,
image_bg_color_hls[2],
)
image_bg_color = tuple(
math.ceil(x * 255) for x in colorsys.hls_to_rgb(*image_bg_color_hls)
)
else:
image_bg_color = BG_COLOR
# Background (using the color)
img = Image.new("RGBA", (IMG_WIDTH, IMG_HEIGHT), color=image_bg_color)
# Contents
inner_img_x = margin + inner_img_width - inner_img_layer.width
inner_img_y = math.floor((IMG_HEIGHT - inner_img_layer.height) / 2)
content_x = margin + inner_img_width + gutter
content_width = IMG_WIDTH - content_x - margin
contents_layer = Image.new(
"RGBA", (content_width, IMG_HEIGHT), color=TRANSPARENT_COLOR
)
contents_composite_y = 0
if show_instance_layer:
instance_layer = generate_instance_layer(content_width)
contents_layer.alpha_composite(instance_layer, (0, contents_composite_y))
contents_composite_y = contents_composite_y + instance_layer.height + gutter
texts_layer = generate_texts_layer(texts, content_width)
contents_layer.alpha_composite(texts_layer, (0, contents_composite_y))
contents_composite_y = contents_composite_y + texts_layer.height + gutter
if rating:
# Add some more margin
contents_composite_y = contents_composite_y + gutter
rating_layer = generate_rating_layer(rating, content_width)
if rating_layer:
contents_layer.alpha_composite(rating_layer, (0, contents_composite_y))
contents_composite_y = contents_composite_y + rating_layer.height + gutter
contents_layer_box = contents_layer.getbbox()
contents_layer_height = contents_layer_box[3] - contents_layer_box[1]
contents_y = math.floor((IMG_HEIGHT - contents_layer_height) / 2)
if show_instance_layer:
# Remove Instance Layer from centering calculations
contents_y = contents_y - math.floor((instance_layer.height + gutter) / 2)
contents_y = max(contents_y, margin)
# Composite layers
img.paste(
inner_img_layer, (inner_img_x, inner_img_y), inner_img_layer.convert("RGBA")
)
img.alpha_composite(contents_layer, (content_x, contents_y))
return img.convert("RGB")
def save_and_cleanup(image, instance=None):
"""Save and close the file"""
if not isinstance(instance, (models.Book, models.User, models.SiteSettings)):
return False
image_buffer = BytesIO()
try:
try:
file_name = instance.preview_image.name
except ValueError:
file_name = None
if not file_name or file_name == "":
uuid = uuid4()
file_name = f"{instance.id}-{uuid}.jpg"
# Clean up old file before saving
if file_name and default_storage.exists(file_name):
default_storage.delete(file_name)
# Save
image.save(image_buffer, format="jpeg", quality=75)
instance.preview_image = InMemoryUploadedFile(
ContentFile(image_buffer.getvalue()),
"preview_image",
file_name,
"image/jpg",
image_buffer.tell(),
None,
)
save_without_broadcast = isinstance(instance, (models.Book, models.User))
if save_without_broadcast:
instance.save(broadcast=False, update_fields=["preview_image"])
else:
instance.save(update_fields=["preview_image"])
finally:
image_buffer.close()
return True
# pylint: disable=invalid-name
@app.task(queue="low_priority")
def generate_site_preview_image_task():
"""generate preview_image for the website"""
if not settings.ENABLE_PREVIEW_IMAGES:
return
site = models.SiteSettings.objects.get()
if site.logo:
logo = site.logo
else:
logo = os.path.join(settings.STATIC_ROOT, "images/logo.png")
texts = {
"text_zero": settings.DOMAIN,
"text_one": site.name,
"text_three": site.instance_tagline,
}
image = generate_preview_image(texts=texts, picture=logo, show_instance_layer=False)
save_and_cleanup(image, instance=site)
# pylint: disable=invalid-name
@app.task(queue="low_priority")
def generate_edition_preview_image_task(book_id):
"""generate preview_image for a book"""
if not settings.ENABLE_PREVIEW_IMAGES:
return
book = models.Book.objects.select_subclasses().get(id=book_id)
rating = models.Review.objects.filter(
privacy="public",
deleted=False,
book__in=[book_id],
).aggregate(Avg("rating"))["rating__avg"]
texts = {
"text_one": book.title,
"text_two": book.subtitle,
"text_three": book.author_text,
}
image = generate_preview_image(texts=texts, picture=book.cover, rating=rating)
save_and_cleanup(image, instance=book)
@app.task(queue="low_priority")
def generate_user_preview_image_task(user_id):
"""generate preview_image for a book"""
if not settings.ENABLE_PREVIEW_IMAGES:
return
user = models.User.objects.get(id=user_id)
texts = {
"text_one": user.display_name,
"text_three": f"@{user.localname}@{settings.DOMAIN}",
}
if user.avatar:
avatar = user.avatar
else:
avatar = os.path.join(settings.STATIC_ROOT, "images/default_avi.jpg")
image = generate_preview_image(texts=texts, picture=avatar)
save_and_cleanup(image, instance=user)
| 31.585263 | 88 | 0.672799 | 1,965 | 15,003 | 4.82799 | 0.150127 | 0.020238 | 0.017708 | 0.013281 | 0.336671 | 0.255297 | 0.212607 | 0.188152 | 0.162854 | 0.162854 | 0 | 0.009937 | 0.228621 | 15,003 | 474 | 89 | 31.651899 | 0.809816 | 0.063787 | 0 | 0.160606 | 1 | 0 | 0.055659 | 0.018052 | 0 | 0 | 0 | 0.00211 | 0 | 1 | 0.036364 | false | 0.00303 | 0.045455 | 0 | 0.127273 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
64dfbc4c6711f4cdd56dde0eb4ae1be40d05958f | 1,334 | py | Python | capsule.py | lebek/reversible-raytracer | 9b502737da0e0a7cfd664a795b3a38c1809c1774 | [
"MIT"
] | 15 | 2015-04-11T14:40:35.000Z | 2020-06-05T14:17:53.000Z | capsule.py | lebek/RRT | 9b502737da0e0a7cfd664a795b3a38c1809c1774 | [
"MIT"
] | null | null | null | capsule.py | lebek/RRT | 9b502737da0e0a7cfd664a795b3a38c1809c1774 | [
"MIT"
] | 3 | 2016-02-09T18:12:51.000Z | 2018-05-24T13:07:52.000Z | import numpy as np
import theano
import theano.tensor as T
class Capsule():
def __init__(self, name, n_hidden, n_output, num_caps):
self.name = name
bias = np.asarray([0,0, 3 * num_caps,1,1,1], dtype=theano.config.floatX)/ num_caps
self.params = [self.init_capsule_weight(n_hidden),
theano.shared(bias, borrow=True)]
def init_capsule_weight(self, n_hidden_l3):
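        # NOTE (editor, hedged): '6. / 6+n_hidden_l3' parses as (6./6) + n_hidden_l3.
        # A Glorot-style bound would presumably be 6. / (6 + n_hidden_l3); kept
        # as-is to preserve the original behaviour.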
l3_to_center = 0.05*np.asarray(
np.random.uniform(
low=-4 * np.sqrt(6. / 6+n_hidden_l3),
high=4 * np.sqrt(6. / 6+n_hidden_l3),
size=(n_hidden_l3, 3)
), dtype=theano.config.floatX)
l3_to_radius = 0.0005*np.asarray( np.random.uniform(
low=-4 * np.sqrt(6. / 6+n_hidden_l3),
high=4 * np.sqrt(6. / 6+n_hidden_l3),
size=(n_hidden_l3, 3)
), dtype=theano.config.floatX)
return theano.shared(np.concatenate((l3_to_center, l3_to_radius), 1))
#return theano.shared(0.07*np.asarray(
# np.random.uniform(
# low=-4 * np.sqrt(6. / n_output+n_hidden_l3),
# high=4 * np.sqrt(6. / n_output+n_hidden_l3),
# size=(n_hidden_l3, n_output)
# ), dtype=theano.config.floatX),borrow=True)
| 33.35 | 90 | 0.55997 | 192 | 1,334 | 3.65625 | 0.25 | 0.119658 | 0.128205 | 0.068376 | 0.42735 | 0.42735 | 0.42735 | 0.408832 | 0.403134 | 0.346154 | 0 | 0.054054 | 0.306597 | 1,334 | 39 | 91 | 34.205128 | 0.704865 | 0.1994 | 0 | 0.363636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.136364 | 0 | 0.318182 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
64e0d083a4907b69b2dd393b09adcf2074033d97 | 60,731 | py | Python | SubExperiment.py | zhangyintai/Experiment_Manager | 800f95068a12b64d4a7e524fe406d5ef3b47f521 | [
"MIT"
] | null | null | null | SubExperiment.py | zhangyintai/Experiment_Manager | 800f95068a12b64d4a7e524fe406d5ef3b47f521 | [
"MIT"
] | null | null | null | SubExperiment.py | zhangyintai/Experiment_Manager | 800f95068a12b64d4a7e524fe406d5ef3b47f521 | [
"MIT"
] | null | null | null | # For controlling experiments for the ion trap lab led by Prof. Yiheng Lin
# The code is written by Yintai Zhang, School of Physical Sciences, USTC
# Last updated: April 29th, 2019
from PyQt5 import QtWidgets, QtCore, QtGui
# import pylint
from Ui_SubExperiment import Ui_SubExperiment_Dialog
import sys
import os
import Functions
import DataType
import time
##-------------------------------------------------------------------------------
class SubExperiment(QtWidgets.QWidget, Ui_SubExperiment_Dialog):
def __init__(self, exp_name):
##Configure window
self.SubExperiment_Dialog = QtWidgets.QDialog()
super(SubExperiment, self).__init__()
self.setupUi(self.SubExperiment_Dialog)
self._translate = QtCore.QCoreApplication.translate
##Initiate Parametres
self.FVar_num = 0
self.TVar_num = 0
self.AmpVar_num = 0
self.PhVar_num = 0
self.OVar_num = 0
self.exp_name = exp_name
#self.channels = 16 ## this number is for test
self.FScan = 0
self.TScan = 0
self.AmpScan = 0
self.PhScan = 0
self.OScan = 0
self.FScan_step = 0
self.TScan_step = 0
self.AmpScan_step = 0
self.PhScan_step = 0
self.OScan_step = 0
self.name = ''
self.exp_dir = ''
self.script_dir = ''
self.winconfig_dir = ''
self.FVar_list = []
self.TVar_list = []
self.AmpVar_list = []
self.PhVar_list = []
self.OVar_list = []
##Initiate Widgets
self.FVar_scan_CheckBox.setDisabled(True)
self.TVar_scan_CheckBox.setDisabled(True)
self.AmpVar_scan_CheckBox.setDisabled(True)
self.PhVar_scan_CheckBox.setDisabled(True)
self.OVar_scan_CheckBox.setDisabled(True)
self.FVar_step_SpinBox.setDisabled(True)
self.TVar_step_SpinBox.setDisabled(True)
self.AmpVar_step_SpinBox.setDisabled(True)
self.PhVar_step_SpinBox.setDisabled(True)
self.OVar_step_SpinBox.setDisabled(True)
self.FVar_lb_SpinBox.setDisabled(True)
self.FVar_ub_SpinBox.setDisabled(True)
self.FVar_var_SpinBox.setDisabled(True)
self.OVar_lb_SpinBox.setDisabled(True)
self.OVar_ub_SpinBox.setDisabled(True)
self.OVar_var_SpinBox.setDisabled(True)
self.TVar_lb_SpinBox.setDisabled(True)
self.TVar_ub_SpinBox.setDisabled(True)
self.TVar_var_SpinBox.setDisabled(True)
self.AmpVar_lb_SpinBox.setDisabled(True)
self.AmpVar_ub_SpinBox.setDisabled(True)
self.AmpVar_var_SpinBox.setDisabled(True)
self.PhVar_lb_SpinBox.setDisabled(True)
self.PhVar_ub_SpinBox.setDisabled(True)
self.PhVar_var_SpinBox.setDisabled(True)
#self.FVarChannel_ComboBox.setDisabled(True)
#self.AmpVarChannel_ComboBox.setDisabled(True)
#self.TVarChannel_ComboBox.setDisabled(True)
#self.PhVarChannel_ComboBox.setDisabled(True)
self.ScriptSave_Button.setDisabled(True)
self.ScriptDirectoryBrowse_Button.setDisabled(True)
self.SetDir_Button.setDisabled(True)
self.FVar_times_SpinBox.setDisabled(True)
self.TVar_times_SpinBox.setDisabled(True)
self.PhVar_times_SpinBox.setDisabled(True)
self.AmpVar_times_SpinBox.setDisabled(True)
self.OVar_times_SpinBox.setDisabled(True)
self.ExpScriptRun_Button.setDisabled(True)
self.ExpScriptView_Button.setDisabled(True)
self.TitleConfirm_Button.setDisabled(True)
self.WinConfigView_Button.setDisabled(True)
self.ParaScriptView_Button.setDisabled(True)
self.f1shortcut = QtWidgets.QShortcut(QtGui.QKeySequence(QtCore.Qt.Key_F1), self.FVar_Label)
self.f1shortcut.activated.connect(self.bilibili)
self.f5shortcut = QtWidgets.QShortcut(QtGui.QKeySequence(QtCore.Qt.Key_F5), self.FVar_Label)
self.f5shortcut.activated.connect(self.ExpScriptRun)
self.f2shortcut = QtWidgets.QShortcut(QtGui.QKeySequence(QtCore.Qt.Key_F2), self.FVar_Label)
self.f2shortcut.activated.connect(self.arxiv)
##
self.SubExperiment_Dialog.setWindowTitle("Experiment Name: " + self.exp_name + "[*]")
##Connect Widgets
self.ConfigFileBrowse_Button.clicked.connect(self.ConfigFileBrowse)
self.ConfigFileConfirm_Button.clicked.connect(self.ConfigFileConfirm)
self.ScriptDirectoryBrowse_Button.clicked.connect(self.ScriptDirectoryBrowse)
self.SetDir_Button.clicked.connect(self.SetDir)
self.ScriptSave_Button.clicked.connect(self.ScriptSave)
self.FVar_ComboBox.currentIndexChanged.connect(self.FVarIndexChanged)
self.TVar_ComboBox.currentIndexChanged.connect(self.TVarIndexChanged)
self.AmpVar_ComboBox.currentIndexChanged.connect(self.AmpVarIndexChanged)
self.PhVar_ComboBox.currentIndexChanged.connect(self.PhVarIndexChanged)
self.OVar_ComboBox.currentIndexChanged.connect(self.OVarIndexChanged)
self.FVar_lb_SpinBox.valueChanged.connect(self.FVar_lbChanged)
self.FVar_ub_SpinBox.valueChanged.connect(self.FVar_ubChanged)
self.FVar_var_SpinBox.valueChanged.connect(self.FVar_varChanged)
self.FVar_step_SpinBox.valueChanged.connect(self.FVar_stepChanged)
self.FVar_scan_CheckBox.stateChanged.connect(self.FVar_scanChanged)
self.OVar_lb_SpinBox.valueChanged.connect(self.OVar_lbChanged)
self.OVar_ub_SpinBox.valueChanged.connect(self.OVar_ubChanged)
self.OVar_var_SpinBox.valueChanged.connect(self.OVar_varChanged)
self.OVar_step_SpinBox.valueChanged.connect(self.OVar_stepChanged)
self.OVar_scan_CheckBox.stateChanged.connect(self.OVar_scanChanged)
self.TVar_lb_SpinBox.valueChanged.connect(self.TVar_lbChanged)
self.TVar_ub_SpinBox.valueChanged.connect(self.TVar_ubChanged)
self.TVar_var_SpinBox.valueChanged.connect(self.TVar_varChanged)
self.TVar_step_SpinBox.valueChanged.connect(self.TVar_stepChanged)
self.TVar_scan_CheckBox.stateChanged.connect(self.TVar_scanChanged)
self.AmpVar_lb_SpinBox.valueChanged.connect(self.AmpVar_lbChanged)
self.AmpVar_ub_SpinBox.valueChanged.connect(self.AmpVar_ubChanged)
self.AmpVar_var_SpinBox.valueChanged.connect(self.AmpVar_varChanged)
self.AmpVar_step_SpinBox.valueChanged.connect(self.AmpVar_stepChanged)
self.AmpVar_scan_CheckBox.stateChanged.connect(self.AmpVar_scanChanged)
self.PhVar_lb_SpinBox.valueChanged.connect(self.PhVar_lbChanged)
self.PhVar_ub_SpinBox.valueChanged.connect(self.PhVar_ubChanged)
self.PhVar_var_SpinBox.valueChanged.connect(self.PhVar_varChanged)
self.PhVar_step_SpinBox.valueChanged.connect(self.PhVar_stepChanged)
self.PhVar_scan_CheckBox.stateChanged.connect(self.PhVar_scanChanged)
#self.FVarChannel_ComboBox.currentIndexChanged.connect(self.FVarChannel_Change)
#self.TVarChannel_ComboBox.currentIndexChanged.connect(self.TVarChannel_Change)
#self.AmpVarChannel_ComboBox.currentIndexChanged.connect(self.AmpVarChannel_Change)
#self.PhVarChannel_ComboBox.currentIndexChanged.connect(self.PhVarChannel_Change)
self.FVar_times_SpinBox.valueChanged.connect(self.FVar_timesChanged)
self.TVar_times_SpinBox.valueChanged.connect(self.TVar_timesChanged)
self.AmpVar_times_SpinBox.valueChanged.connect(self.AmpVar_timesChanged)
self.PhVar_times_SpinBox.valueChanged.connect(self.PhVar_timesChanged)
self.OVar_times_SpinBox.valueChanged.connect(self.OVar_timesChanged)
self.ExpDirBrowse_Button.clicked.connect(self.ExpDirBrowse)
self.ExpDirSet_Button.clicked.connect(self.ExpDirSet)
self.ExpScriptView_Button.clicked.connect(self.ExpScriptView)
self.ExpScriptRun_Button.clicked.connect(self.ExpScriptRun)
self.WinConfigView_Button.clicked.connect(self.WinConfigView)
self.TitleConfirm_Button.clicked.connect(self.TitleConfirm)
self.Help_Button.clicked.connect(self.bilibili)
self.ParaScriptView_Button.clicked.connect(self.ParaScriptView)
def arxiv(self):
os.system("explorer https://arxiv.org/")
def bilibili(self):
os.system("explorer https://www.bilibili.com/")
def TitleConfirm(self):
try:
text = self.Title_LEdit.text()
if text != '':
text = Functions.RemoveSpace(text)
self.Title_LEdit.setText(text)
self.name = text
else:
                self.Title_LEdit.setText(self.name)
except:
pass
def test(self):
print("test passed!")
def ConfigFileBrowse(self):
try:
path = QtWidgets.QFileDialog.getOpenFileName(self, "Browse Configuration File", "explorer", "(*.zyt)")
self.ConfigFile_LEdit.setText(path[0])
if os.path.exists(path[0]):
self.WinConfigView_Button.setEnabled(True)
self.winconfig_dir = path[0]
else:
self.WinConfigView_Button.setDisabled(True)
except:
self.ConfigFile_LEdit.clear()
def ExpDirBrowse(self):
try:
path = QtWidgets.QFileDialog.getOpenFileName(self, "Browse Experiment Script Directory", "explorer", "(*.py)")
print(path)
self.ExpDir_LineEdit.setText(path[0])
except:
self.ExpDir_LineEdit.clear()
def ScriptDirectoryBrowse(self):
try:
path = QtWidgets.QFileDialog.getExistingDirectory(self, "Browse Parameters Script Directory", "explorer")
print(path)
self.ScriptDirectory_LineEdit.setText(path)
self.script_dir = path
self.ParaScriptView_Button.setEnabled(True)
except:
self.ScriptDirectory_LineEdit.clear()
self.ParaScriptView_Button.setDisabled(True)
def ParaScriptView(self):
try:
print(self.script_dir + "/" + self.name + "_para.py")
if (os.path.exists(self.script_dir + "/" + self.name + "_para.py")):
print("exists!")
os.system("notepad " + self.script_dir + "/" + self.name + "_para.py")
except:
pass
def SetDir(self):
directory = self.ScriptDirectory_LineEdit.text()
if os.path.exists(directory):
self.ScriptSave_Button.setEnabled(True)
else:
self.ScriptSave_Button.setDisabled(True)
def ConfigFileConfirm(self):##Read Configuration File
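        # Hedged sketch of the expected .zyt layout, inferred from the parser below:
        #   line 1: experiment title
        #   then, for F/T/Amp/Ph/O variables in that order: an integer count line
        #   followed by one "name lb ub var llb uub" line per variable.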
if not os.path.exists(self.ConfigFile_LEdit.text()):
self.FVar_step_SpinBox.setDisabled(True)
self.OVar_step_SpinBox.setDisabled(True)
self.TVar_step_SpinBox.setDisabled(True)
self.AmpVar_step_SpinBox.setDisabled(True)
self.PhVar_step_SpinBox.setDisabled(True)
self.FVar_lb_SpinBox.setDisabled(True)
self.FVar_ub_SpinBox.setDisabled(True)
self.FVar_var_SpinBox.setDisabled(True)
self.OVar_lb_SpinBox.setDisabled(True)
self.OVar_ub_SpinBox.setDisabled(True)
self.OVar_var_SpinBox.setDisabled(True)
self.TVar_lb_SpinBox.setDisabled(True)
self.TVar_ub_SpinBox.setDisabled(True)
self.TVar_var_SpinBox.setDisabled(True)
self.AmpVar_lb_SpinBox.setDisabled(True)
self.AmpVar_ub_SpinBox.setDisabled(True)
self.AmpVar_var_SpinBox.setDisabled(True)
self.PhVar_lb_SpinBox.setDisabled(True)
self.PhVar_ub_SpinBox.setDisabled(True)
self.PhVar_var_SpinBox.setDisabled(True)
#self.FVarChannel_ComboBox.setDisabled(True)
#self.TVarChannel_ComboBox.setDisabled(True)
#self.AmpVarChannel_ComboBox.setDisabled(True)
#self.PhVarChannel_ComboBox.setDisabled(True)
self.FVar_times_SpinBox.setDisabled(True)
self.OVar_times_SpinBox.setDisabled(True)
self.TVar_times_SpinBox.setDisabled(True)
self.AmpVar_times_SpinBox.setDisabled(True)
self.PhVar_times_SpinBox.setDisabled(True)
self.TitleConfirm_Button.setDisabled(True)
self.ScriptDirectoryBrowse_Button.setDisabled(True)
self.SetDir_Button.setDisabled(True)
try:
inputfilename = self.ConfigFile_LEdit.text()
inputfile = open(inputfilename, 'r+')
text = inputfile.readlines()
flag = 0
flag_another = 0
self.FVar_list.clear()
self.OVar_list.clear()
self.TVar_list.clear()
self.AmpVar_list.clear()
self.PhVar_list.clear()
for line in text:
if flag == 0:
self.name = line.replace("\n", "")
self.Title_LEdit.setText(self.name)
flag = flag + 1
else:
try:
num = int(line)
if flag_another == 0:
self.FVar_num = num
elif flag_another == 1:
self.TVar_num = num
elif flag_another == 2:
self.AmpVar_num = num
elif flag_another == 3:
self.PhVar_num = num
elif flag_another == 4:
self.OVar_num = num
else:
pass
flag_another = flag_another + 1
except:
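                    # non-integer line: parse a "name lb ub var llb uub" variable definition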
if True:
s_list = Functions.StringSeparate(line)
name = s_list[0]
lb = float(s_list[1])
ub = float(s_list[2])
var = float(s_list[3])
llb = float(s_list[4])
uub = float(s_list[5])
##print(flag_another)
if flag_another == 1:
self.FVar_list.append(DataType.FVar(name, lb, ub, var, llb, uub))
elif flag_another == 2:
self.TVar_list.append(DataType.TVar(name, lb, ub, var, llb, uub))
elif flag_another == 3:
self.AmpVar_list.append(DataType.AmpVar(name, lb, ub, var, llb, uub))
elif flag_another == 4:
self.PhVar_list.append(DataType.PhVar(name, lb, ub, var, llb, uub))
elif flag_another == 5:
self.OVar_list.append(DataType.OVar(name, lb, ub, var, llb, uub))
print("Input Finished!")
self.ScriptDirectoryBrowse_Button.setEnabled(True)
self.SetDir_Button.setEnabled(True)
self.VarCombo_Init()
##break
inputfile.close()
except:
pass
def VarCombo_Init(self):
#self.FVarChannel_ComboBox.clear()
#self.TVarChannel_ComboBox.clear()
#self.AmpVarChannel_ComboBox.clear()
#self.PhVarChannel_ComboBox.clear()
self.FVar_ComboBox.clear()
self.TVar_ComboBox.clear()
self.AmpVar_ComboBox.clear()
self.PhVar_ComboBox.clear()
self.OVar_ComboBox.clear()
#for i in range(0, self.channels):
#self.FVarChannel_ComboBox.addItem(str(i))
#self.TVarChannel_ComboBox.addItem(str(i))
#self.AmpVarChannel_ComboBox.addItem(str(i))
#self.PhVarChannel_ComboBox.addItem(str(i))
##pass
##Add items to each combobox
##The index of each combobox starts from 0
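        # Each attribute is cached and restored around addItem below because
        # adding an item fires currentIndexChanged, whose spinbox handlers would
        # otherwise overwrite the stored values.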
for fvar in self.FVar_list:
var = fvar.var
ub = fvar.ub
lb = fvar.lb
step = fvar.step
scan = fvar.scan
self.FVar_ComboBox.addItem(fvar.name)
fvar.var = var
fvar.ub = ub
fvar.lb = lb
fvar.step = step
fvar.scan = scan
##print(self.FVar_list[0].ub, self.FVar_list[0].lb)
for tvar in self.TVar_list:
var = tvar.var
ub = tvar.ub
lb = tvar.lb
step = tvar.step
scan = tvar.scan
self.TVar_ComboBox.addItem(tvar.name)
tvar.var = var
tvar.ub = ub
tvar.lb = lb
tvar.step = step
tvar.scan = scan
for ampvar in self.AmpVar_list:
var = ampvar.var
ub = ampvar.ub
lb = ampvar.lb
step = ampvar.step
scan = ampvar.scan
self.AmpVar_ComboBox.addItem(ampvar.name)
ampvar.var = var
ampvar.ub = ub
ampvar.lb = lb
ampvar.step = step
ampvar.scan = scan
for phvar in self.PhVar_list:
var = phvar.var
ub = phvar.ub
lb = phvar.lb
step = phvar.step
scan = phvar.scan
self.PhVar_ComboBox.addItem(phvar.name)
phvar.var = var
phvar.ub = ub
phvar.lb = lb
phvar.step = step
phvar.scan = scan
for ovar in self.OVar_list:
var = ovar.var
ub = ovar.ub
lb = ovar.lb
step = ovar.step
scan = ovar.scan
self.OVar_ComboBox.addItem(ovar.name)
ovar.var = var
ovar.ub = ub
ovar.lb = lb
ovar.step = step
ovar.scan = scan
##Initiate the rest part
self.FVar_step_SpinBox.setEnabled(True)
self.TVar_step_SpinBox.setEnabled(True)
self.AmpVar_step_SpinBox.setEnabled(True)
self.PhVar_step_SpinBox.setEnabled(True)
self.OVar_step_SpinBox.setEnabled(True)
self.FVar_lb_SpinBox.setEnabled(True)
self.FVar_ub_SpinBox.setEnabled(True)
self.FVar_var_SpinBox.setEnabled(True)
self.TVar_lb_SpinBox.setEnabled(True)
self.TVar_ub_SpinBox.setEnabled(True)
self.TVar_var_SpinBox.setEnabled(True)
self.AmpVar_lb_SpinBox.setEnabled(True)
self.AmpVar_ub_SpinBox.setEnabled(True)
self.AmpVar_var_SpinBox.setEnabled(True)
self.PhVar_lb_SpinBox.setEnabled(True)
self.PhVar_ub_SpinBox.setEnabled(True)
self.PhVar_var_SpinBox.setEnabled(True)
self.OVar_lb_SpinBox.setEnabled(True)
self.OVar_ub_SpinBox.setEnabled(True)
self.OVar_var_SpinBox.setEnabled(True)
#self.FVarChannel_ComboBox.setEnabled(True)
#self.TVarChannel_ComboBox.setEnabled(True)
#self.AmpVarChannel_ComboBox.setEnabled(True)
#self.PhVarChannel_ComboBox.setEnabled(True)
self.FVar_times_SpinBox.setEnabled(True)
self.TVar_times_SpinBox.setEnabled(True)
self.AmpVar_times_SpinBox.setEnabled(True)
self.PhVar_times_SpinBox.setEnabled(True)
self.TitleConfirm_Button.setEnabled(True)
self.OVar_times_SpinBox.setEnabled(True)
try:
self.FVarIndexChanged(0)
except:
pass
try:
self.TVarIndexChanged(0)
except:
pass
try:
self.AmpVarIndexChanged(0)
except:
pass
try:
self.PhVarIndexChanged(0)
except:
pass
try:
self.OVarIndexChanged(0)
except:
pass
def FVarIndexChanged(self, i):
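        # Widen the spinbox ranges to the hard limits (llb/uub) first so the
        # setValue calls below are not clamped, then narrow them again so that
        # lb <= var <= ub stays mutually consistent.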
self.FVar_lb_SpinBox.setMinimum(self.FVar_list[i].llb)
self.FVar_lb_SpinBox.setMaximum(self.FVar_list[i].uub)
self.FVar_ub_SpinBox.setMinimum(self.FVar_list[i].llb)
self.FVar_ub_SpinBox.setMaximum(self.FVar_list[i].uub)
self.FVar_var_SpinBox.setMaximum(self.FVar_list[i].uub)
self.FVar_var_SpinBox.setMinimum(self.FVar_list[i].llb)
##print(self.FVar_list[i].lb, self.FVar_list[i].ub, self.FVar_list[i].var)
self.FVar_lb_SpinBox.setValue(self.FVar_list[i].lb)
self.FVar_ub_SpinBox.setValue(self.FVar_list[i].ub)
self.FVar_var_SpinBox.setValue(self.FVar_list[i].var)
self.FVar_times_SpinBox.setValue(self.FVar_list[i].times)
self.FVar_lb_SpinBox.setMinimum(self.FVar_list[i].llb)
self.FVar_lb_SpinBox.setMaximum(self.FVar_list[i].ub)
self.FVar_ub_SpinBox.setMinimum(self.FVar_list[i].lb)
self.FVar_ub_SpinBox.setMaximum(self.FVar_list[i].uub)
self.FVar_var_SpinBox.setMaximum(self.FVar_list[i].ub)
self.FVar_var_SpinBox.setMinimum(self.FVar_list[i].lb)
self.FVar_step_SpinBox.setValue(self.FVar_list[i].step)
self.FVar_step_SpinBox.setMaximum(self.FVar_list[i].uub - self.FVar_list[i].llb)
self.FVar_step_SpinBox.setMinimum(-(self.FVar_list[i].uub - self.FVar_list[i].llb))
if self.FVar_list[i].step == 0:
self.FVar_scan_CheckBox.setDisabled(True)
else:
self.FVar_scan_CheckBox.setEnabled(True)
self.FVar_scan_CheckBox.setCheckState(self.FVar_list[i].scan)
#self.FVarChannel_ComboBox.setCurrentIndex(self.FVar_list[i].channel)
def TVarIndexChanged(self, i):
self.TVar_lb_SpinBox.setMinimum(self.TVar_list[i].llb)
self.TVar_lb_SpinBox.setMaximum(self.TVar_list[i].uub)
self.TVar_var_SpinBox.setMaximum(self.TVar_list[i].uub)
self.TVar_var_SpinBox.setMinimum(self.TVar_list[i].llb)
self.TVar_ub_SpinBox.setMinimum(self.TVar_list[i].llb)
self.TVar_ub_SpinBox.setMaximum(self.TVar_list[i].uub)
self.TVar_lb_SpinBox.setValue(self.TVar_list[i].lb)
self.TVar_ub_SpinBox.setValue(self.TVar_list[i].ub)
self.TVar_var_SpinBox.setValue(self.TVar_list[i].var)
self.TVar_times_SpinBox.setValue(self.TVar_list[i].times)
self.TVar_lb_SpinBox.setMinimum(self.TVar_list[i].llb)
self.TVar_lb_SpinBox.setMaximum(self.TVar_list[i].ub)
self.TVar_var_SpinBox.setMaximum(self.TVar_list[i].ub)
self.TVar_var_SpinBox.setMinimum(self.TVar_list[i].lb)
self.TVar_ub_SpinBox.setMinimum(self.TVar_list[i].lb)
self.TVar_ub_SpinBox.setMaximum(self.TVar_list[i].uub)
self.TVar_step_SpinBox.setValue(self.TVar_list[i].step)
self.TVar_step_SpinBox.setMaximum(self.TVar_list[i].uub - self.TVar_list[i].llb)
self.TVar_step_SpinBox.setMinimum(-(self.TVar_list[i].uub - self.TVar_list[i].llb))
if self.TVar_list[i].step == 0:
self.TVar_scan_CheckBox.setDisabled(True)
else:
self.TVar_scan_CheckBox.setEnabled(True)
self.TVar_scan_CheckBox.setCheckState(self.TVar_list[i].scan)
# self.TVarChannel_ComboBox.setCurrentIndex(self.TVar_list[i].channel)
def AmpVarIndexChanged(self, i):
self.AmpVar_lb_SpinBox.setMinimum(self.AmpVar_list[i].llb)
self.AmpVar_lb_SpinBox.setMaximum(self.AmpVar_list[i].uub)
self.AmpVar_var_SpinBox.setMaximum(self.AmpVar_list[i].uub)
self.AmpVar_var_SpinBox.setMinimum(self.AmpVar_list[i].llb)
self.AmpVar_ub_SpinBox.setMinimum(self.AmpVar_list[i].llb)
self.AmpVar_ub_SpinBox.setMaximum(self.AmpVar_list[i].uub)
self.AmpVar_lb_SpinBox.setValue(self.AmpVar_list[i].lb)
self.AmpVar_ub_SpinBox.setValue(self.AmpVar_list[i].ub)
self.AmpVar_var_SpinBox.setValue(self.AmpVar_list[i].var)
self.AmpVar_times_SpinBox.setValue(self.AmpVar_list[i].times)
self.AmpVar_lb_SpinBox.setMinimum(self.AmpVar_list[i].llb)
self.AmpVar_lb_SpinBox.setMaximum(self.AmpVar_list[i].ub)
self.AmpVar_var_SpinBox.setMaximum(self.AmpVar_list[i].ub)
self.AmpVar_var_SpinBox.setMinimum(self.AmpVar_list[i].lb)
self.AmpVar_ub_SpinBox.setMinimum(self.AmpVar_list[i].lb)
self.AmpVar_ub_SpinBox.setMaximum(self.AmpVar_list[i].uub)
        self.AmpVar_step_SpinBox.setValue(self.AmpVar_list[i].step)  # presumed omission; mirrors the F/T/O variants
        self.AmpVar_step_SpinBox.setMaximum(self.AmpVar_list[i].uub - self.AmpVar_list[i].llb)
self.AmpVar_step_SpinBox.setMinimum(-(self.AmpVar_list[i].uub - self.AmpVar_list[i].llb))
if self.AmpVar_list[i].step == 0:
self.AmpVar_scan_CheckBox.setDisabled(True)
else:
self.AmpVar_scan_CheckBox.setEnabled(True)
self.AmpVar_scan_CheckBox.setCheckState(self.AmpVar_list[i].scan)
# self.AmpVarChannel_ComboBox.setCurrentIndex(self.AmpVar_list[i].channel)
def PhVarIndexChanged(self, i):
self.PhVar_lb_SpinBox.setMinimum(self.PhVar_list[i].llb)
self.PhVar_lb_SpinBox.setMaximum(self.PhVar_list[i].uub)
self.PhVar_ub_SpinBox.setMinimum(self.PhVar_list[i].llb)
self.PhVar_ub_SpinBox.setMaximum(self.PhVar_list[i].uub)
self.PhVar_var_SpinBox.setMaximum(self.PhVar_list[i].uub)
self.PhVar_var_SpinBox.setMinimum(self.PhVar_list[i].llb)
self.PhVar_lb_SpinBox.setValue(self.PhVar_list[i].lb)
self.PhVar_ub_SpinBox.setValue(self.PhVar_list[i].ub)
self.PhVar_var_SpinBox.setValue(self.PhVar_list[i].var)
self.PhVar_times_SpinBox.setValue(self.PhVar_list[i].times)
self.PhVar_lb_SpinBox.setMinimum(self.PhVar_list[i].llb)
self.PhVar_lb_SpinBox.setMaximum(self.PhVar_list[i].ub)
self.PhVar_ub_SpinBox.setMinimum(self.PhVar_list[i].lb)
self.PhVar_ub_SpinBox.setMaximum(self.PhVar_list[i].uub)
self.PhVar_var_SpinBox.setMaximum(self.PhVar_list[i].ub)
self.PhVar_var_SpinBox.setMinimum(self.PhVar_list[i].lb)
        self.PhVar_step_SpinBox.setValue(self.PhVar_list[i].step)  # presumed omission; mirrors the F/T/O variants
        self.PhVar_step_SpinBox.setMaximum(self.PhVar_list[i].uub - self.PhVar_list[i].llb)
self.PhVar_step_SpinBox.setMinimum(-(self.PhVar_list[i].uub - self.PhVar_list[i].llb))
if self.PhVar_list[i].step == 0:
self.PhVar_scan_CheckBox.setDisabled(True)
else:
self.PhVar_scan_CheckBox.setEnabled(True)
self.PhVar_scan_CheckBox.setCheckState(self.PhVar_list[i].scan)
# self.PhVarChannel_ComboBox.setCurrentIndex(self.PhVar_list[i].channel)
def OVarIndexChanged(self, i):
self.OVar_lb_SpinBox.setMinimum(self.OVar_list[i].llb)
self.OVar_lb_SpinBox.setMaximum(self.OVar_list[i].uub)
self.OVar_var_SpinBox.setMaximum(self.OVar_list[i].uub)
self.OVar_var_SpinBox.setMinimum(self.OVar_list[i].llb)
self.OVar_ub_SpinBox.setMinimum(self.OVar_list[i].llb)
self.OVar_ub_SpinBox.setMaximum(self.OVar_list[i].uub)
self.OVar_lb_SpinBox.setValue(self.OVar_list[i].lb)
self.OVar_ub_SpinBox.setValue(self.OVar_list[i].ub)
self.OVar_var_SpinBox.setValue(self.OVar_list[i].var)
self.OVar_times_SpinBox.setValue(self.OVar_list[i].times)
self.OVar_lb_SpinBox.setMinimum(self.OVar_list[i].llb)
self.OVar_lb_SpinBox.setMaximum(self.OVar_list[i].ub)
self.OVar_var_SpinBox.setMaximum(self.OVar_list[i].ub)
self.OVar_var_SpinBox.setMinimum(self.OVar_list[i].lb)
self.OVar_ub_SpinBox.setMinimum(self.OVar_list[i].lb)
self.OVar_ub_SpinBox.setMaximum(self.OVar_list[i].uub)
self.OVar_step_SpinBox.setValue(self.OVar_list[i].step)
self.OVar_step_SpinBox.setMaximum(self.OVar_list[i].uub - self.OVar_list[i].llb)
self.OVar_step_SpinBox.setMinimum(-(self.OVar_list[i].uub - self.OVar_list[i].llb))
if self.OVar_list[i].step == 0:
self.OVar_scan_CheckBox.setDisabled(True)
else:
self.OVar_scan_CheckBox.setEnabled(True)
self.OVar_scan_CheckBox.setCheckState(self.OVar_list[i].scan)  # was TVar_list: a copy-paste slip that showed the wrong scan state
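# The *Select handlers below read the current combo-box index and forward it
# to the matching *IndexChanged updater defined above.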
def FVarSelect(self):
index = self.FVar_ComboBox.currentIndex()
print("Current FVar index is", index)
self.FVarIndexChanged(index)  # the updaters are defined as *IndexChanged; the trailing 'd' was missing in these calls
def TVarSelect(self):
index = self.TVar_ComboBox.currentIndex()
self.TVarIndexChanged(index)
def AmpVarSelect(self):
index = self.AmpVar_ComboBox.currentIndex()
self.AmpVarIndexChanged(index)
def PhVarSelect(self):
index = self.PhVar_ComboBox.currentIndex()
self.PhVarIndexChanged(index)
def OVarSelect(self):
index = self.OVar_ComboBox.currentIndex()
print("Current OVar index is", index)
self.OVarIndexChanged(index)
def FVar_lbChanged(self):
try:
index = self.FVar_ComboBox.currentIndex()
self.FVar_list[index].set_lb(self.FVar_lb_SpinBox.value())
if self.FVar_list[index].var < self.FVar_list[index].lb:
self.FVar_list[index].set_var(self.FVar_list[index].lb)
self.FVar_var_SpinBox.setValue(self.FVar_list[index].lb)
self.FVar_var_SpinBox.setMinimum(self.FVar_list[index].lb)
self.FVar_ub_SpinBox.setMinimum(self.FVar_list[index].lb)
except:
print("FVAR LB CHANGE Warning!")
def FVar_ubChanged(self):
try:
index = self.FVar_ComboBox.currentIndex()
self.FVar_list[index].set_ub(self.FVar_ub_SpinBox.value())
if self.FVar_list[index].var > self.FVar_list[index].ub:
self.FVar_list[index].set_var(self.FVar_list[index].ub)
self.FVar_var_SpinBox.setValue(self.FVar_list[index].ub)
self.FVar_var_SpinBox.setMaximum(self.FVar_list[index].ub)
self.FVar_lb_SpinBox.setMaximum(self.FVar_list[index].ub)
except:
print("FVAR UB CHANGE Warning!")
def FVar_varChanged(self):
try:
index = self.FVar_ComboBox.currentIndex()
self.FVar_list[index].set_var(self.FVar_var_SpinBox.value())
except:
print("FVAR VAR CHANGE Warning!")
def FVar_timesChanged(self):
try:
index = self.FVar_ComboBox.currentIndex()
self.FVar_list[index].set_times(self.FVar_times_SpinBox.value())
except:
print("FVAR TIMES CHANGE Warning!")
def FVar_stepChanged(self):
try:
index = self.FVar_ComboBox.currentIndex()
self.FVar_list[index].set_step(self.FVar_step_SpinBox.value())
if self.FVar_list[index].step == 0:
self.FVar_scan_CheckBox.setDisabled(True)
self.FVar_list[index].set_scan(0)
else:
self.FVar_scan_CheckBox.setEnabled(True)
self.FVar_scan_CheckBox.setCheckState(self.FVar_list[index].scan)
except:
print("FVar step Warning!")
def FVar_scanChanged(self):
try:
print("FVar scan changed")
index = self.FVar_ComboBox.currentIndex()
self.FVar_list[index].set_scan(self.FVar_scan_CheckBox.checkState())
except:
print("Fvar scan Warning!")
def TVar_lbChanged(self):
try:
index = self.TVar_ComboBox.currentIndex()
self.TVar_list[index].set_lb(self.TVar_lb_SpinBox.value())
print(self.TVar_list[index].lb)
if self.TVar_list[index].var < self.TVar_list[index].lb:
self.TVar_list[index].set_var(self.TVar_list[index].lb)
self.TVar_var_SpinBox.setValue(self.TVar_list[index].lb)
self.TVar_var_SpinBox.setMinimum(self.TVar_list[index].lb)
self.TVar_ub_SpinBox.setMinimum(self.TVar_list[index].lb)
except:
pass
def TVar_ubChanged(self):
try:
index = self.TVar_ComboBox.currentIndex()
self.TVar_list[index].set_ub(self.TVar_ub_SpinBox.value())
if self.TVar_list[index].var > self.TVar_list[index].ub:
self.TVar_list[index].set_var(self.TVar_list[index].ub)
self.TVar_var_SpinBox.setValue(self.TVar_list[index].ub)
self.TVar_var_SpinBox.setMaximum(self.TVar_list[index].ub)
self.TVar_lb_SpinBox.setMaximum(self.TVar_list[index].ub)
except:
print("TVar ub change Warning!")
def TVar_varChanged(self):
try:
index = self.TVar_ComboBox.currentIndex()
self.TVar_list[index].set_var(self.TVar_var_SpinBox.value())
print(self.TVar_var_SpinBox.value(), self.TVar_list[index].var)
except:
print("TVar var change Warning!")
def TVar_timesChanged(self):
try:
index = self.TVar_ComboBox.currentIndex()
self.TVar_list[index].set_times(self.TVar_times_SpinBox.value())
except:
print("TVar var change Warning!")
def TVar_stepChanged(self):
try:
index = self.TVar_ComboBox.currentIndex()
self.TVar_list[index].set_step(self.TVar_step_SpinBox.value())
if self.TVar_list[index].step == 0:
self.TVar_scan_CheckBox.setDisabled(True)
self.TVar_list[index].set_scan(0)
else:
self.TVar_scan_CheckBox.setEnabled(True)
self.TVar_scan_CheckBox.setCheckState(self.TVar_list[index].scan)
except:
print("TVar step changeWarning!")
def TVar_scanChanged(self):
try:
print("changed")
index = self.TVar_ComboBox.currentIndex()
self.TVar_list[index].set_scan(self.TVar_scan_CheckBox.checkState())
except:
print("TVar scan changed Warning!")
def OVar_lbChanged(self):
try:
index = self.OVar_ComboBox.currentIndex()
self.OVar_list[index].set_lb(self.OVar_lb_SpinBox.value())
if self.OVar_list[index].var < self.OVar_list[index].lb:
self.OVar_list[index].set_var(self.OVar_list[index].lb)
self.OVar_var_SpinBox.setValue(self.OVar_list[index].lb)
self.OVar_var_SpinBox.setMinimum(self.OVar_list[index].lb)
self.OVar_ub_SpinBox.setMinimum(self.OVar_list[index].lb)
except:
print("OVar LB CHANGE Warning!")
def OVar_ubChanged(self):
try:
index = self.OVar_ComboBox.currentIndex()
self.OVar_list[index].set_ub(self.OVar_ub_SpinBox.value())
if self.OVar_list[index].var > self.OVar_list[index].ub:
self.OVar_list[index].set_var(self.OVar_list[index].ub)
self.OVar_var_SpinBox.setValue(self.OVar_list[index].ub)
self.OVar_var_SpinBox.setMaximum(self.OVar_list[index].ub)
self.OVar_lb_SpinBox.setMaximum(self.OVar_list[index].ub)
except:
print("OVar UB CHANGE Warning!")
def OVar_varChanged(self):
try:
index = self.OVar_ComboBox.currentIndex()
self.OVar_list[index].set_var(self.OVar_var_SpinBox.value())
except:
print("OVar VAR CHANGE Warning!")
def OVar_timesChanged(self):
try:
index = self.OVar_ComboBox.currentIndex()
self.OVar_list[index].set_times(self.OVar_times_SpinBox.value())
except:
print("OVar TIMES CHANGE Warning!")
def OVar_stepChanged(self):
try:
index = self.OVar_ComboBox.currentIndex()
self.OVar_list[index].set_step(self.OVar_step_SpinBox.value())
if self.OVar_list[index].step == 0:
self.OVar_scan_CheckBox.setDisabled(True)
self.OVar_list[index].set_scan(0)
else:
self.OVar_scan_CheckBox.setEnabled(True)
self.OVar_scan_CheckBox.setCheckState(self.OVar_list[index].scan)
except:
print("OVar step Warning!")
def OVar_scanChanged(self):
try:
print("OVar scan changed")
index = self.OVar_ComboBox.currentIndex()
self.OVar_list[index].set_scan(self.OVar_scan_CheckBox.checkState())
except:
print("OVar scan Warning!")
def AmpVar_lbChanged(self):
try:
index = self.AmpVar_ComboBox.currentIndex()
self.AmpVar_list[index].set_lb(self.AmpVar_lb_SpinBox.value())
if self.AmpVar_list[index].var < self.AmpVar_list[index].lb:
self.AmpVar_list[index].set_var(self.AmpVar_list[index].lb)
self.AmpVar_var_SpinBox.setValue(self.AmpVar_list[index].lb)
self.AmpVar_var_SpinBox.setMinimum(self.AmpVar_list[index].lb)
self.AmpVar_ub_SpinBox.setMinimum(self.AmpVar_list[index].lb)
except:
print("AmpVar Warning!")
def AmpVar_ubChanged(self):
try:
index = self.AmpVar_ComboBox.currentIndex()
self.AmpVar_list[index].set_ub(self.AmpVar_ub_SpinBox.value())
if self.AmpVar_list[index].var > self.AmpVar_list[index].ub:
self.AmpVar_list[index].set_var(self.AmpVar_list[index].ub)
self.AmpVar_var_SpinBox.setValue(self.AmpVar_list[index].ub)
self.AmpVar_var_SpinBox.setMaximum(self.AmpVar_list[index].ub)  # mirror the other *_ubChanged handlers
self.AmpVar_lb_SpinBox.setMaximum(self.AmpVar_list[index].ub)
except:
print("AmpVar Warning!")
def AmpVar_varChanged(self):
try:
index = self.AmpVar_ComboBox.currentIndex()
self.AmpVar_list[index].set_var(self.AmpVar_var_SpinBox.value())
except:
print("AmpVar Warning!")
def AmpVar_timesChanged(self):
try:
index = self.AmpVar_ComboBox.currentIndex()
self.AmpVar_list[index].set_times(self.AmpVar_times_SpinBox.value())
except:
print("AmpVar Warning!")
def AmpVar_stepChanged(self):
try:
index = self.AmpVar_ComboBox.currentIndex()
self.AmpVar_list[index].set_step(self.AmpVar_step_SpinBox.value())
if self.AmpVar_list[index].step == 0:
self.AmpVar_scan_CheckBox.setDisabled(True)
self.AmpVar_list[index].set_scan(0)
else:
self.AmpVar_scan_CheckBox.setEnabled(True)
self.AmpVar_scan_CheckBox.setCheckState(self.AmpVar_list[index].scan)
except:
print("AmpVar Warning!")
def AmpVar_scanChanged(self):
try:
print("changed")
index = self.AmpVar_ComboBox.currentIndex()
self.AmpVar_list[index].set_scan(self.AmpVar_scan_CheckBox.checkState())
except:
print("AmpVar Warning!")
def PhVar_lbChanged(self):
try:
index = self.PhVar_ComboBox.currentIndex()
self.PhVar_list[index].set_lb(self.PhVar_lb_SpinBox.value())
self.PhVar_var_SpinBox.setMinimum(self.PhVar_list[index].lb)
self.PhVar_ub_SpinBox.setMinimum(self.PhVar_list[index].lb)
if self.PhVar_list[index].var < self.PhVar_list[index].lb:
self.PhVar_list[index].set_var(self.PhVar_list[index].lb)
self.PhVar_var_SpinBox.setValue(self.PhVar_list[index].lb)
except:
print("PhVar Warning!")
def PhVar_ubChanged(self):
try:
index = self.PhVar_ComboBox.currentIndex()
self.PhVar_list[index].set_ub(self.PhVar_ub_SpinBox.value())
self.PhVar_var_SpinBox.setMaximum(self.PhVar_list[index].ub)
self.PhVar_lb_SpinBox.setMaximum(self.PhVar_list[index].ub)
if self.PhVar_list[index].var > self.PhVar_list[index].ub:
self.PhVar_list[index].set_var(self.PhVar_list[index].ub)
self.PhVar_var_SpinBox.setValue(self.PhVar_list[index].ub)
except:
print("PhVar Warning!")
def PhVar_varChanged(self):
try:
index = self.PhVar_ComboBox.currentIndex()
self.PhVar_list[index].set_var(self.PhVar_var_SpinBox.value())
except:
print("PhVar Warning!")
def PhVar_timesChanged(self):
try:
index = self.PhVar_ComboBox.currentIndex()
self.PhVar_list[index].set_times(self.PhVar_times_SpinBox.value())
except:
print("PhVar Warning!")
def PhVar_stepChanged(self):
try:
index = self.PhVar_ComboBox.currentIndex()
self.PhVar_list[index].set_step(self.PhVar_step_SpinBox.value())
if self.PhVar_list[index].step == 0:
self.PhVar_scan_CheckBox.setDisabled(True)
self.PhVar_list[index].set_scan(0)
else:
self.PhVar_scan_CheckBox.setEnabled(True)
self.PhVar_scan_CheckBox.setCheckState(self.PhVar_list[index].scan)
except:
print("PhVar Warning!")
def PhVar_scanChanged(self):
try:
print("changed")
index = self.PhVar_ComboBox.currentIndex()
self.PhVar_list[index].set_scan(self.PhVar_scan_CheckBox.checkState())
except:
print("PhVar Warning!")
"""
def FVarChannel_Change(self):
try:
index = self.FVar_ComboBox.currentIndex()
self.FVar_list[index].set_channel(self.FVarChannel_ComboBox.currentIndex())
except:
print("FVar Warning!")
def TVarChannel_Change(self):
try:
index = self.TVar_ComboBox.currentIndex()
self.TVar_list[index].set_channel(self.TVarChannel_ComboBox.currentIndex())
except:
print("TVar Channel Change Warning!")
def AmpVarChannel_Change(self):
try:
index = self.AmpVar_ComboBox.currentIndex()
self.AmpVar_list[index].set_channel(self.AmpVarChannel_ComboBox.currentIndex())
except:
print("AmpVar Warning!")
def PhVarChannel_Change(self):
try:
index = self.PhVar_ComboBox.currentIndex()
self.PhVar_list[index].set_channel(self.PhVarChannel_ComboBox.currentIndex())
except:
print("PhVar Warning!")
"""
def Configure_change(self):
print("Configure change Warning!")
def ScriptSave(self):
script_name = self.script_dir + "/" + self.name + "_para.py"
try:
script_file = open(script_name, "w")
print("#This is a the list of all defined variables!", file = script_file)
f_count = 0
t_count = 0
ph_count = 0
amp_count = 0
o_count = 0
for var in self.FVar_list:
if var.name != "None":
print(var.name, " = ", var.var, file = script_file)
print(var.name + "_lb", " = ", var.lb, file = script_file)
print(var.name + "_ub", " = ", var.ub, file = script_file)
#print(var.name + "_channel", " = ", var.channel, file = script_file)
print(var.name+"_times", " = ", var.times, file = script_file)
print(var.name + "_step", " = ", var.step, file = script_file)
print(var.name + "_type", " = \'fvar\'", file = script_file)
print(var.name + "_name = \'" + var.name + '\'', file = script_file)
if var.scan == 0:
print(var.name + "_scan = False", file = script_file)
else:
print(var.name + "_scan = True", file = script_file)
f_count = f_count + 1
print(file = script_file)
print("n_fvar =", f_count, file = script_file)
print(file = script_file)
for var in self.TVar_list:
if var.name != "None":
print(var.name, " = ", var.var, file = script_file)
print(var.name + "_lb", " = ", var.lb, file = script_file)
print(var.name + "_ub", " = ", var.ub, file = script_file)
#print(var.name + "_channel", " = ", var.channel, file = script_file)
print(var.name+"_times", " = ", var.times, file = script_file)
print(var.name + "_step", " = ", var.step, file = script_file)
print(var.name + "_type", " = \'tvar\'", file = script_file)
print(var.name + "_name = \'" + var.name + '\'', file = script_file)
if var.scan == 0:
print(var.name + "_scan = False", file = script_file)
else:
print(var.name + "_scan = True", file = script_file)
t_count = t_count + 1
print(file = script_file)
print("n_tvar =", t_count, file = script_file)
print(file = script_file)
for var in self.AmpVar_list:
if var.name != "None":
print(var.name, " = ", var.var, file = script_file)
print(var.name + "_lb", " = ", var.lb, file = script_file)
print(var.name + "_ub", " = ", var.ub, file = script_file)
#print(var.name + "_channel", " = ", var.channel, file = script_file)
print(var.name+"_times", " = ", var.times, file = script_file)
print(var.name + "_step", " = ", var.step, file = script_file)
print(var.name + "_type", " = \'ampvar\'", file = script_file)
print(var.name + "_name = \'" + var.name + '\'', file = script_file)
if var.scan == 0:
print(var.name + "_scan = False", file = script_file)
else:
print(var.name + "_scan = True", file = script_file)
amp_count = amp_count + 1
print(file = script_file)
print("n_ampvar =", amp_count, file = script_file)
print(file = script_file)
for var in self.PhVar_list:
if var.name != "None":
print(var.name, " = ", var.var, file = script_file)
print(var.name + "_lb", " = ", var.lb, file = script_file)
print(var.name + "_ub", " = ", var.ub, file = script_file)
#print(var.name + "_channel", " = ", var.channel, file = script_file)
print(var.name+"_times", " = ", var.times, file = script_file)
print(var.name + "_step", " = ", var.step, file = script_file)
print(var.name + "_type", " = \'phvar\'", file = script_file)
print(var.name + "_name = \'" + var.name + '\'', file = script_file)
if var.scan == 0:
print(var.name + "_scan = False", file = script_file)
else:
print(var.name + "_scan = True", file = script_file)
ph_count = ph_count + 1
print(file = script_file)
print("n_phvar =", ph_count, file = script_file)
print(file = script_file)
for var in self.OVar_list:
if var.name != "None":
print(var.name, " = ", var.var, file = script_file)
print(var.name + "_lb", " = ", var.lb, file = script_file)
print(var.name + "_ub", " = ", var.ub, file = script_file)
#print(var.name + "_channel", " = ", var.channel, file = script_file)
print(var.name+"_times", " = ", var.times, file = script_file)
print(var.name + "_step", " = ", var.step, file = script_file)
print(var.name + "_type", " = \'ovar\'", file = script_file)
print(var.name + "_name = \'" + var.name + '\'', file = script_file)
if var.scan == 0:
print(var.name + "_scan = False", file = script_file)
else:
print(var.name + "_scan = True", file = script_file)
o_count = o_count + 1
print("n_ovar =", o_count, file = script_file)
print(file = script_file)
print(file = script_file)
print("#___________________________________________", file = script_file)
print ("var_list = [", end = '', file = script_file)
for var in self.FVar_list:
if var.name != "None":
print(var.name, ", ", sep = "", end = '', file = script_file)
for var in self.TVar_list:
if var.name != "None":
print(var.name, ", ", sep = "", end = '', file = script_file)
for var in self.AmpVar_list:
if var.name != "None":
print(var.name, ", ", sep = "", end = '', file = script_file)
for var in self.PhVar_list:
if var.name != "None":
print(var.name, ", ", sep = "", end = '', file = script_file)
for var in self.OVar_list:
if var.name != "None":
print(var.name, ", ", sep = "", end = '', file = script_file)
print("]", file = script_file)
print ("var_lb_list = [", end = '', file = script_file)
for var in self.FVar_list:
if var.name != "None":
print(var.name + "_lb", ", ", sep = "", end = '', file = script_file)
for var in self.TVar_list:
if var.name != "None":
print(var.name + "_lb", ", ", sep = "", end = '', file = script_file)
for var in self.AmpVar_list:
if var.name != "None":
print(var.name + "_lb", ", ", sep = "", end = '', file = script_file)
for var in self.PhVar_list:
if var.name != "None":
print(var.name + "_lb", ", ", sep = "", end = '', file = script_file)
for var in self.OVar_list:
if var.name != "None":
print(var.name + "_lb", ", ", sep = "", end = '', file = script_file)
print("]", file = script_file)
print ("var_ub_list = [", end = '', file = script_file)
for var in self.FVar_list:
if var.name != "None":
print(var.name + "_ub", ", ", sep = "", end = '', file = script_file)
for var in self.TVar_list:
if var.name != "None":
print(var.name + "_ub", ", ", sep = "", end = '', file = script_file)
for var in self.AmpVar_list:
if var.name != "None":
print(var.name + "_ub", ", ", sep = "", end = '', file = script_file)
for var in self.PhVar_list:
if var.name != "None":
print(var.name + "_ub", ", ", sep = "", end = '', file = script_file)
for var in self.OVar_list:
if var.name != "None":
print(var.name + "_ub", ", ", sep = "", end = '', file = script_file)
print("]", file = script_file)
print ("var_step_list = [", end = '', file = script_file)
for var in self.FVar_list:
if var.name != "None":
print(var.name + "_step", ", ", sep = "", end = '', file = script_file)
for var in self.TVar_list:
if var.name != "None":
print(var.name + "_step", ", ", sep = "", end = '', file = script_file)
for var in self.AmpVar_list:
if var.name != "None":
print(var.name + "_step", ", ", sep = "", end = '', file = script_file)
for var in self.PhVar_list:
if var.name != "None":
print(var.name + "_step", ", ", sep = "", end = '', file = script_file)
for var in self.OVar_list:
if var.name != "None":
print(var.name + "_step", ", ", sep = "", end = '', file = script_file)
print("]", file = script_file)
print ("var_times_list = [", end = '', file = script_file)
for var in self.FVar_list:
if var.name != "None":
print(var.name + "_times", ", ", sep = "", end = '', file = script_file)
for var in self.TVar_list:
if var.name != "None":
print(var.name + "_times", ", ", sep = "", end = '', file = script_file)
for var in self.AmpVar_list:
if var.name != "None":
print(var.name + "_times", ", ", sep = "", end = '', file = script_file)
for var in self.PhVar_list:
if var.name != "None":
print(var.name + "_times", ", ", sep = "", end = '', file = script_file)
for var in self.OVar_list:
if var.name != "None":
print(var.name + "_times", ", ", sep = "", end = '', file = script_file)
print("]", file = script_file)
print ("var_scan_list = [", end = '', file = script_file)
for var in self.FVar_list:
if var.name != "None":
print(var.name + "_scan", ", ", sep = "", end = '', file = script_file)
for var in self.TVar_list:
if var.name != "None":
print(var.name + "_scan", ", ", sep = "", end = '', file = script_file)
for var in self.AmpVar_list:
if var.name != "None":
print(var.name + "_scan", ", ", sep = "", end = '', file = script_file)
for var in self.PhVar_list:
if var.name != "None":
print(var.name + "_scan", ", ", sep = "", end = '', file = script_file)
for var in self.OVar_list:
if var.name != "None":
print(var.name + "_scan", ", ", sep = "", end = '', file = script_file)
print("]", file = script_file)
print ("var_type_list = [", end = '', file = script_file)
for var in self.FVar_list:
if var.name != "None":
print(var.name + "_type", ", ", sep = "", end = '', file = script_file)
for var in self.TVar_list:
if var.name != "None":
print(var.name + "_type", ", ", sep = "", end = '', file = script_file)
for var in self.AmpVar_list:
if var.name != "None":
print(var.name + "_type", ", ", sep = "", end = '', file = script_file)
for var in self.PhVar_list:
if var.name != "None":
print(var.name + "_type", ", ", sep = "", end = '', file = script_file)
for var in self.OVar_list:
if var.name != "None":
print(var.name + "_type", ", ", sep = "", end = '', file = script_file)
print("]", file = script_file)
print ("var_name_list = [", end = '', file = script_file)
for var in self.FVar_list:
if var.name != "None":
print(var.name + "_name", ", ", sep = "", end = '', file = script_file)
for var in self.TVar_list:
if var.name != "None":
print(var.name + "_name", ", ", sep = "", end = '', file = script_file)
for var in self.AmpVar_list:
if var.name != "None":
print(var.name + "_name", ", ", sep = "", end = '', file = script_file)
for var in self.PhVar_list:
if var.name != "None":
print(var.name + "_name", ", ", sep = "", end = '', file = script_file)
for var in self.OVar_list:
if var.name != "None":
print(var.name + "_name", ", ", sep = "", end = '', file = script_file)
print("]", file = script_file)
print("#____________________________________________", file = script_file)
print("#END", file = script_file)
script_file.close()
except:
print("SCRIPT SAVE Warning!")
def ExpDirSet(self):
try:
directory = self.ExpDir_LineEdit.text()
print(directory)
if os.path.exists(directory):
self.exp_dir = directory
self.ExpScriptRun_Button.setEnabled(True)
self.ExpScriptView_Button.setEnabled(True)
else:
self.ExpScriptRun_Button.setDisabled(True)
self.ExpScriptView_Button.setDisabled(True)
except:
pass
def ExpScriptView(self):
time.sleep(0.001)
try:
if os.path.exists(self.exp_dir):
os.system("notepad " + self.exp_dir)
except:
pass
def WinConfigView(self):
try:
if os.path.exists(self.winconfig_dir):
os.system("notepad " + self.winconfig_dir)
except:
pass
def ExpScriptRun(self):
print("~")
try:
if os.path.exists(self.exp_dir):
print("python \" "+ self.exp_dir + "\"")
os.system("python \""+ self.exp_dir + "\"")
except:
pass
##--------------------------------------------------------------------------------
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
win = SubExperiment("TEST")
##win.SubExperiment_Dialog.setCentralWidget(win.centralWidget)
win.SubExperiment_Dialog.show()
sys.exit(app.exec_())
| 41.941298 | 123 | 0.556783 | 6,650 | 60,731 | 4.847669 | 0.041654 | 0.03648 | 0.054285 | 0.037131 | 0.730124 | 0.603127 | 0.573627 | 0.542451 | 0.495704 | 0.433663 | 0 | 0.002493 | 0.332993 | 60,731 | 1,447 | 124 | 41.970283 | 0.793325 | 0.042334 | 0 | 0.462255 | 0 | 0 | 0.040159 | 0.0016 | 0 | 0 | 0 | 0 | 0 | 1 | 0.054054 | false | 0.01398 | 0.006524 | 0 | 0.06151 | 0.157502 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
64e1becee21ec789ab3fba8c1362708a1fcff647 | 1,509 | py | Python | train_DT.py | caspase-like-homolog-identifier/c14_witcher | e2c481607b85fed749daec0e9b3b29b65d6b448f | [
"MIT"
] | null | null | null | train_DT.py | caspase-like-homolog-identifier/c14_witcher | e2c481607b85fed749daec0e9b3b29b65d6b448f | [
"MIT"
] | null | null | null | train_DT.py | caspase-like-homolog-identifier/c14_witcher | e2c481607b85fed749daec0e9b3b29b65d6b448f | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.tree import export_graphviz
from IPython.display import Image
from sklearn import metrics
from six import StringIO
import pandas as pd
import pydotplus
import pickle
c14reference = pd.read_csv("c14reference.tsv", delimiter = "\t")
print(c14reference.shape)  # was a bare notebook expression; print so the script reports it
c14_ref = c14reference.dropna()
c14_ref = c14_ref[['p20', 'linker', 'p10','Classification']]
feature_cols = c14_ref.columns[:-1]  # ['p20', 'linker', 'p10'] -- now matches the features the tree is trained on
# +
# labels
y = c14_ref.loc[:,"Classification"].values
# attributes (features)
X = c14_ref.drop(["Classification"], axis = 1).values
# -
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)
c14classifier = DecisionTreeClassifier(random_state=0)
c14classifier.fit(X_train, y_train)
y_pred = c14classifier.predict(X_test)
print("predictions:", y_pred)
print("ground truth:", y_test)
print("Accuracy:",metrics.accuracy_score(y_test, y_pred))
dot_data = StringIO()
export_graphviz(c14classifier,
out_file=dot_data,
filled=True,
rounded=True,
special_characters=True,
feature_names = feature_cols,class_names=['MCP','Type_I','Type_II', 'Type_III'])
graph = pydotplus.graph_from_dot_data(dot_data.getvalue())
graph.write_png('c14classifier.png')
Image(graph.create_png())
pkl_filename = "c14classifier.pickle"
with open(pkl_filename, 'wb') as file:
pickle.dump(c14classifier, file)
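# Hedged check (added, not part of the original pipeline): reload the pickled
# classifier and confirm it reproduces the predictions computed above.
with open(pkl_filename, 'rb') as file:
    reloaded_clf = pickle.load(file)
assert (reloaded_clf.predict(X_test) == y_pred).all()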
| 24.33871 | 96 | 0.726972 | 203 | 1,509 | 5.162562 | 0.458128 | 0.034351 | 0.026718 | 0.040076 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.0347 | 0.159708 | 1,509 | 61 | 97 | 24.737705 | 0.791798 | 0.027833 | 0 | 0 | 0 | 0 | 0.098698 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.27027 | 0 | 0.27027 | 0.027027 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
64e21c7ea2001708b9da0332c531d7724a4113b2 | 2,505 | py | Python | nbprocess/clean.py | fastai/nbprocess | cf4290b31357101bc96103ee2525e14c0c8ef162 | [
"Apache-2.0"
] | 15 | 2021-08-09T22:50:46.000Z | 2022-03-31T10:11:20.000Z | nbprocess/clean.py | fastai/nbprocess | cf4290b31357101bc96103ee2525e14c0c8ef162 | [
"Apache-2.0"
] | 15 | 2021-08-20T03:07:43.000Z | 2022-03-28T16:45:42.000Z | nbprocess/clean.py | fastai/nbprocess | cf4290b31357101bc96103ee2525e14c0c8ef162 | [
"Apache-2.0"
] | null | null | null | # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/05_clean.ipynb.
# %% auto 0
__all__ = ['clean_nb', 'nbdev_clean_nbs']
# %% ../nbs/05_clean.ipynb 2
from fastcore.script import *
from fastcore.utils import *
from fastcore.imports import *
from .imports import *
from .read import *
from .sync import *
# from pathlib import Path
# import io,sys,json,glob,re
# %% ../nbs/05_clean.ipynb 6
def _clean_cell_output(cell):
"Remove execution count in `cell`"
if 'outputs' in cell:
for o in cell['outputs']:
if 'execution_count' in o: o['execution_count'] = None
o.get('data',{}).pop("application/vnd.google.colaboratory.intrinsic+json", None)
o.get('metadata', {}).pop('tags', None)
# %% ../nbs/05_clean.ipynb 7
def _clean_cell(cell, clear_all=False):
"Clean `cell` by removing superfluous metadata or everything except the input if `clear_all`"
if 'execution_count' in cell: cell['execution_count'] = None
if 'outputs' in cell:
if clear_all: cell['outputs'] = []
else: _clean_cell_output(cell)
if cell['source'] == ['']: cell['source'] = []
cell['metadata'] = {} if clear_all else {
k:v for k,v in cell['metadata'].items() if k=="hide_input"}
# %% ../nbs/05_clean.ipynb 8
def clean_nb(nb, clear_all=False):
"Clean `nb` from superfluous metadata"
for c in nb['cells']: _clean_cell(c, clear_all=clear_all)
nb['metadata'] = {k:v for k,v in nb['metadata'].items() if k in
("kernelspec", "jekyll", "jupytext", "doc")}
# %% ../nbs/05_clean.ipynb 11
def _wrapio(strm): return io.TextIOWrapper(strm, encoding='utf-8', line_buffering=True)
def _clean_write(f_in, f_out=None, clear_all=False):
    "Clean the notebook read from `f_in`, then write it to `f_out` (defaults to `f_in`)"
    if not f_out: f_out = f_in
    # `f_in` may be a path (from the directory walk below) or an open stream (stdin);
    # the stray `nb` parameter in the original shadowed an undefined name at the call sites
    nb = json.load(open(f_in, encoding='utf-8')) if isinstance(f_in, (str, Path)) else json.load(f_in)
    clean_nb(nb, clear_all=clear_all)
    write_nb(nb, f_out)
# %% ../nbs/05_clean.ipynb 12
@call_parse
def nbdev_clean_nbs(
fname:str=None, # A notebook name or glob to convert
clear_all:bool_arg=False, # Clean all metadata and outputs
read_stdin:bool_arg=False # Read input stream and not nb folder
):
"Clean all notebooks in `fname` to avoid merge conflicts"
# Git hooks will pass the notebooks in stdin
    if read_stdin: return _clean_write(_wrapio(sys.stdin), _wrapio(sys.stdout), clear_all=clear_all)
if fname is None: fname = get_config().path("nbs_path")
for f in globtastic(fname, file_glob='*.ipynb', skip_folder_re='^[_.]'):
        _clean_write(f, clear_all=clear_all)
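# Hedged usage note (added): with fastcore's `@call_parse` this is normally
# invoked from the shell; the flag names follow the parameters above, and the
# console-script name is an assumption:
#   nbdev_clean_nbs --fname nbs/05_clean.ipynb --clear_all True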
| 36.838235 | 104 | 0.663872 | 386 | 2,505 | 4.108808 | 0.310881 | 0.075662 | 0.044136 | 0.066204 | 0.032787 | 0.011349 | 0 | 0 | 0 | 0 | 0 | 0.011858 | 0.192016 | 2,505 | 67 | 105 | 37.38806 | 0.771739 | 0.261078 | 0 | 0.044444 | 1 | 0 | 0.244282 | 0.024331 | 0 | 0 | 0 | 0 | 0 | 1 | 0.133333 | false | 0 | 0.133333 | 0.022222 | 0.266667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
64e2f7bd0c5248b10b722215e23712c6705e3215 | 3,205 | py | Python | data/process_data.py | YvesDeutschmann/disaster-response-pipeline-project | 6ce33642a9bc05ed063c5adfab42fd69c076bd40 | [
"MIT"
] | null | null | null | data/process_data.py | YvesDeutschmann/disaster-response-pipeline-project | 6ce33642a9bc05ed063c5adfab42fd69c076bd40 | [
"MIT"
] | null | null | null | data/process_data.py | YvesDeutschmann/disaster-response-pipeline-project | 6ce33642a9bc05ed063c5adfab42fd69c076bd40 | [
"MIT"
] | null | null | null | import sys
import pandas as pd
from sqlalchemy import create_engine
def load_data(messages_filepath, categories_filepath):
"""
Loads the data.
Args:
messages_filepath: String - csv file containing disaster messages.
categories_filepath: String - csv file containing categories for each disaster message.
Returns:
df: DataFrame containing messages and categories.
"""
messages = pd.read_csv(messages_filepath)
categories = pd.read_csv(categories_filepath)
df = messages.merge(categories)
return df
def clean_data(df):
"""
Clean up the message dataframe.
Args:
df: DataFrame containing messages and categories.
Returns:
df: cleaned DataFrame containing messages and categories.
"""
# create a dataframe of the 36 individual category columns
categories = df.categories.str.split(';', expand=True)
# extract column names for categories from first row
row = categories.iloc[0,:]
colnames = [column[:-2] for column in row.values]
categories.columns = colnames
# Convert category values to 0 or 1
for column in categories:
# set each value to be the last character of the string
categories[column] = categories[column].apply(lambda x: x[-1])
# convert column from string to numeric
categories[column] = pd.to_numeric(categories[column])
# Replace categories column in df with new category columns
df.drop('categories', axis=1, inplace=True)
df = pd.concat([df, categories], axis=1)
# remove duplicates
df.drop_duplicates(inplace=True)
# remove rows with a value of 2 in 'related' column
df.drop(df[df.related==2].index, inplace=True)
return df
def save_data(df, database_filename):
"""
Store data in database.
Args:
df: cleaned DataFrame containing messages and categories.
database_filename: String - Name of Database the DataFrame is stored in.
"""
engine = create_engine(r'sqlite:///{}'.format(database_filename))
df.to_sql('CleanData', engine, index=False, if_exists='replace')
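# Hedged example (added): once save_data has run, the cleaned table can be
# read back for a quick sanity check; the database name is whatever filename
# was passed in, e.g.:
#   engine = create_engine('sqlite:///DisasterResponse.db')
#   print(pd.read_sql_table('CleanData', engine).head())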
def main():
if len(sys.argv) == 4:
messages_filepath, categories_filepath, database_filepath = sys.argv[1:]
print('Loading data...\n MESSAGES: {}\n CATEGORIES: {}'
.format(messages_filepath, categories_filepath))
df = load_data(messages_filepath, categories_filepath)
print('Cleaning data...')
df = clean_data(df)
print('Saving data...\n DATABASE: {}'.format(database_filepath))
save_data(df, database_filepath)
print('Cleaned data saved to database!')
else:
print('Please provide the filepaths of the messages and categories '\
'datasets as the first and second argument respectively, as '\
'well as the filepath of the database to save the cleaned data '\
'to as the third argument. \n\nExample: python process_data.py '\
'disaster_messages.csv disaster_categories.csv '\
'DisasterResponse.db')
if __name__ == '__main__':
main() | 32.05 | 95 | 0.659282 | 391 | 3,205 | 5.294118 | 0.319693 | 0.046377 | 0.062802 | 0.0657 | 0.158454 | 0.128502 | 0.047343 | 0 | 0 | 0 | 0 | 0.00541 | 0.250234 | 3,205 | 100 | 96 | 32.05 | 0.856013 | 0.294228 | 0 | 0.044444 | 0 | 0 | 0.226512 | 0.020465 | 0 | 0 | 0 | 0 | 0 | 1 | 0.088889 | false | 0 | 0.088889 | 0 | 0.222222 | 0.111111 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
64e3373b39465c8e986539b86fbdf9497ff35c51 | 1,914 | py | Python | library/twisted/trtl.py | Kelbec-Nef/EVE-bot-discord | 30432beb482ca56a10fda8aefdb4c1b9802e4ceb | [
"MIT"
] | 59 | 2016-06-27T13:12:18.000Z | 2021-06-23T12:09:16.000Z | trtl.py | andimiller/twistedbot | f462385ab951f1b74e19a2ebb6b3d58da4e2a03f | [
"MIT"
] | 112 | 2016-07-06T15:32:09.000Z | 2022-03-08T07:22:54.000Z | trtl.py | andimiller/twistedbot | f462385ab951f1b74e19a2ebb6b3d58da4e2a03f | [
"MIT"
] | 59 | 2016-07-04T22:22:26.000Z | 2020-05-15T20:38:08.000Z | """
_____________________________________________
___ < trtl - TwistedBot REPL for Testing Libraries >_
//_\\\\ _ ---------------------------------------------
/_|_|_('> /
" "
"""
import os
import sys
import traceback
import readline
import re
regex = re.compile("\x03(?:\d{1,2}(?:,\d{1,2})?)?", re.UNICODE)
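# "\x03" introduces an mIRC colour code ("\x03<fg>[,<bg>]" with one- or
# two-digit colour numbers); this pattern matches those codes so they can be
# stripped from bot output before it is echoed to the terminal.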
sys.path.append("./modules/")
TO_LOAD = [filename[:-3] for dirname, dirnames, filenames in os.walk('./modules') for filename in filenames if filename[-3:] == ".py"]
MODULES = {}
from test.fake_tbot import TestedBot
TBOT = TestedBot()
for module in TO_LOAD:
try:
MODULES[module] = __import__(module)
for function in dir(MODULES[module]):
glob = MODULES[module].__dict__[function]
if hasattr(glob, 'rule'):
TBOT.register(glob, function)
except:
pass
USER = "[USER]"
CHANNEL = "[CHANNEL]"
print __doc__
while True:
try:
msg = raw_input("> ")
except EOFError:
print ""
print "Bye!"
sys.exit()
except KeyboardInterrupt:
print ""
print "Bye!"
sys.exit()
if msg and msg[0] == "/":
try:
(command, value) = msg.split(" ", 1)
except ValueError:
command, value = msg, ""  # default the argument so commands given without one don't raise a NameError
if command == "/nick":
USER = value
print "(Nick has changed to '%s')" % USER
elif command == "/j":
CHANNEL = value
print "(Channel has changed to '%s')" % CHANNEL
elif command == "/rules":
print "\n".join(TBOT.rules())
else:
print "(Not a recognised command)"
else:
try:
TBOT.listen(USER, CHANNEL, msg)
if [] != TBOT.bot_messages:
print regex.sub('', "\n".join([x[1] for x in TBOT.bot_messages]))
TBOT.bot_messages = []
except:
print traceback.format_exc()
| 26.583333 | 134 | 0.521421 | 202 | 1,914 | 4.574257 | 0.430693 | 0.042208 | 0.048701 | 0.034632 | 0.04329 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008397 | 0.315569 | 1,914 | 71 | 135 | 26.957746 | 0.696947 | 0 | 0 | 0.263158 | 0 | 0 | 0.105386 | 0.016979 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.017544 | 0.122807 | null | null | 0.192982 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
64e34eb0a032af8b9bc9db3ed58fc47178b0d893 | 447 | py | Python | swot_item/migrations/0013_auto_20180609_1915.py | imranariffin/liveswot-api | a2acc05fd2c51adc30e8e1785b857a94af81677d | [
"MIT"
] | null | null | null | swot_item/migrations/0013_auto_20180609_1915.py | imranariffin/liveswot-api | a2acc05fd2c51adc30e8e1785b857a94af81677d | [
"MIT"
] | 25 | 2018-03-25T05:25:22.000Z | 2021-06-10T19:51:12.000Z | swot_item/migrations/0013_auto_20180609_1915.py | imranariffin/liveswot-api | a2acc05fd2c51adc30e8e1785b857a94af81677d | [
"MIT"
] | 2 | 2018-07-02T02:59:24.000Z | 2018-08-21T02:58:21.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2018-06-09 19:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('swot_item', '0012_swotitem_score'),
]
operations = [
migrations.AlterField(
model_name='swotitem',
name='score',
field=models.FloatField(default=0),
),
]
| 21.285714 | 48 | 0.612975 | 49 | 447 | 5.408163 | 0.795918 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.067278 | 0.268456 | 447 | 20 | 49 | 22.35 | 0.743119 | 0.152125 | 0 | 0 | 1 | 0 | 0.109043 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.153846 | 0 | 0.384615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
64e5cb2f12494c402629102fa0444b71af4c9e8d | 1,006 | py | Python | bnpy/datasets/zzz_unsupported/StandardNormalK1.py | jun2tong/bnp-anomaly | c7fa106b5bb29ed6688a3d91e3f302a0a130b896 | [
"BSD-3-Clause"
] | 184 | 2016-12-13T21:05:48.000Z | 2022-02-28T11:47:23.000Z | bnpy/datasets/zzz_unsupported/StandardNormalK1.py | jun2tong/bnp-anomaly | c7fa106b5bb29ed6688a3d91e3f302a0a130b896 | [
"BSD-3-Clause"
] | 37 | 2016-12-18T14:07:53.000Z | 2022-03-13T10:58:14.000Z | bnpy/datasets/zzz_unsupported/StandardNormalK1.py | jun2tong/bnp-anomaly | c7fa106b5bb29ed6688a3d91e3f302a0a130b896 | [
"BSD-3-Clause"
] | 50 | 2017-01-25T19:44:34.000Z | 2022-03-15T10:22:01.000Z | '''
StandardNormalK1.py
Simple toy dataset from standard normal distribution.
'''
import numpy as np
from bnpy.data import XData
def get_data(seed=8675309, nObsTotal=25000, **kwargs):
''' Create and return toy dataset from 1D standard normal distribution.
Args
-------
seed : integer seed for random number generator,
used for actually *generating* the data
nObsTotal : total number of observations for the dataset.
Returns
-------
Data : bnpy XData object, with nObsTotal observations
'''
X, TrueZ = generate_data(seed, nObsTotal)
Data = XData(X=X, TrueZ=TrueZ)
Data.name = get_short_name()
Data.summary = get_data_info()
return Data
def get_data_info():
return 'Standard Normal Data. All from one true cluster.'
def get_short_name():
return "StandardNormalK1"
def generate_data(seed, nObsTotal):
PRNG = np.random.RandomState(seed)
X = PRNG.randn(nObsTotal, 1)
TrueZ = np.ones(nObsTotal)
return X, TrueZ
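if __name__ == '__main__':
    # Minimal smoke test (added as a hedged example; not part of the original
    # module): draw a small dataset and report what was generated.
    Data = get_data(nObsTotal=100)
    print(Data.name)
    print(Data.summary)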
| 22.355556 | 75 | 0.686879 | 131 | 1,006 | 5.19084 | 0.450382 | 0.061765 | 0.041176 | 0.073529 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020305 | 0.2167 | 1,006 | 44 | 76 | 22.863636 | 0.84264 | 0.375746 | 0 | 0 | 0 | 0 | 0.110919 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.235294 | false | 0 | 0.117647 | 0.117647 | 0.588235 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
64e6916efe37fc956ae95e126abe46204022a77b | 6,259 | py | Python | scripts/rewrite_fcns.py | dolphingarlic/sketch-frontend | e646b7d51405e8a693f45472aa3cc6991a6f38af | [
"X11"
] | 1 | 2020-12-06T03:40:53.000Z | 2020-12-06T03:40:53.000Z | scripts/rewrite_fcns.py | dolphingarlic/sketch-frontend | e646b7d51405e8a693f45472aa3cc6991a6f38af | [
"X11"
] | null | null | null | scripts/rewrite_fcns.py | dolphingarlic/sketch-frontend | e646b7d51405e8a693f45472aa3cc6991a6f38af | [
"X11"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# author: gatoatigrado (nicholas tung) [ntung at ntung]
# Copyright 2010 University of California, Berkeley
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain a
# copy of the License at http://www.apache.org/licenses/LICENSE-2.0 .
from __future__ import division, print_function
from collections import namedtuple
from amara import bindery
from gatoatigrado_lib import (ExecuteIn, Path, SubProc, dict, get_singleton,
list, memoize_file, pprint, process_jinja2, set, sort_asc, sort_desc)
import re
import gatoatigrado_lib.subproc
from gatoatigrado_lib.util import GetSingletonEmptyException
import sys
REWR_STR = " /* automatically rewritten */"
FIRST_CHAR = re.compile(r"[^\s]")
def get_info(fname):
assert fname.isfile()
try:
print("running %s" %(fname))
SubProc(["java", "-classpath", "sketch-noarch.jar",
"sketch.compiler.main.other.ParseFunctions",
fname]).start_wait()
except gatoatigrado_lib.subproc.ProcessException:
print("failed with %s" %(fname))
couldntfindexception = False
def read_info(fname):
all = bindery.parse(fname.read()).vector
info = [v for v in all.xml_children if v.xml_type == "element"]
def setImplements(fcns):
for fcn in fcns:
fcn.name = str(fcn.nameStr)
fcn.impl = getattr(fcn, "implName", None)
fcn.impl = (str(fcn.impl) if fcn.impl else fcn.impl)
fcn.line_idx = int(str(fcn.lineNum)) - 1
fcn.is_generator = str(fcn.isGenerator) == "true"
for fcn in fcns:
fcn.is_toplevel = getattr(fcn, "is_toplevel", False) or bool(fcn.impl)
if fcn.impl:
try:
get_singleton(v for v in fcns if v.name == fcn.impl).is_toplevel = True
except GetSingletonEmptyException:
global couldntfindexception
couldntfindexception = True
print("couldn't find function %s for file %s" %(fcn.impl, fcn.srcFile))
return fcns
return dict(list(info).equiv_classes(lambda a: Path(str(a.srcFile)))).map_values(setImplements)
class RewriteException(Exception): pass
def try_rewrite(line, first_char, fcn):
end_whitespace = re.search(r"\s*$", line).group(0)
if fcn.is_generator and not fcn.is_toplevel:
line = line[:first_char] + "generator " + line[first_char:].rstrip() + REWR_STR + end_whitespace
elif not fcn.is_toplevel:
if "static " in line:
line = line.replace("static ", "", 1).rstrip() + REWR_STR + end_whitespace
elif "static" in line:
line = line.replace("static", "", 1).rstrip() + REWR_STR + end_whitespace
else:
raise RewriteException("no static keyword to delete")
return line
def main(*sketchfiles):
# make sure sketch-noarch.jar exists
assembly_file = Path("sketch-noarch.jar")
if not assembly_file.isfile():
raise Exception("please run renamer-script to generate sketch-noarch.jar")
# use all sketch files which are subpaths of directory if they exist
if not sketchfiles:
sketchfiles = [v for v in Path(".").walk_files() if v.extension() in ["sk", "skh"]]
else:
sketchfiles = [Path(v) for v in sketchfiles]
# sketchfiles = sketchfiles[:1000]
# run the Java program
outpath = Path("function_list.xml")
outpath.exists() and outpath.unlink()
#[get_info(v) for v in sketchfiles]
for coarse_idx in range(0, len(sketchfiles), 100):
subset = map(str, sketchfiles[coarse_idx:(coarse_idx + 100)])
SubProc(["java", "-classpath", "sketch-noarch.jar",
"sketch.compiler.main.other.ParseFunctions"] + subset).start_wait()
fcns_by_fname = read_info(outpath)
if couldntfindexception:
print("(press enter to continue)", end="")
sys.stdin.readline()
for fname, fcns in fcns_by_fname.items():
lines = open(fname).readlines()
for fcn in fcns:
success = False
if not (0 <= fcn.line_idx < len(lines)):
print("line offset not in range, assuming it came from a header file")
success = True
elif not str(fcn.name) in lines[fcn.line_idx]:
print("function name not in line, assuming it came from a header file")
success = True
else:
for err_offset in [0, -1, 1, -2, 2]:
if not (0 <= fcn.line_idx + err_offset < len(lines)):
continue
line = lines[fcn.line_idx + err_offset]
if REWR_STR in line:
success = True
break
first_char = FIRST_CHAR.search(line)
if not first_char:
continue
try:
lines[fcn.line_idx + err_offset] = try_rewrite(line, first_char.start(), fcn)
success = True
break
except RewriteException as e:  # modern 'as' form; the old comma syntax is Python-2-only
print("WARNING / REWRITE EXCEPTION -- %s -- %s:%d"
%(fname, e, fcn.line_idx + err_offset))
continue
if not success:
print(" WARNING -- couldn't perform rewrite on all neighboring lines!")
Path(fname + ".rewrite").write("".join(lines))
if __name__ == "__main__":
import optparse
cmdopts = optparse.OptionParser()
noptions = len(cmdopts.option_list) - 1
varargs = bool(main.__code__.co_flags & 0x04)
required_args = main.__code__.co_argcount - noptions
if varargs:
cmdopts.usage = "%%prog [options] <<list %s>>" % (main.__code__.co_varnames[0])
else:
cmdopts.usage = "%prog [options] " + " ".join(
v for v in main.__code__.co_varnames[:required_args])
options, args = cmdopts.parse_args()
if not varargs and required_args != len(args):
cmdopts.error("%d arguments required." % (required_args))
main(*args, **options.__dict__)
| 41.177632 | 104 | 0.602333 | 768 | 6,259 | 4.763021 | 0.316406 | 0.017223 | 0.019136 | 0.011482 | 0.176326 | 0.125205 | 0.09404 | 0.09404 | 0.09404 | 0.072171 | 0 | 0.008296 | 0.287426 | 6,259 | 151 | 105 | 41.450331 | 0.811883 | 0.087873 | 0 | 0.191667 | 0 | 0 | 0.135159 | 0.014394 | 0.008333 | 0 | 0.000702 | 0 | 0.008333 | 0 | null | null | 0.008333 | 0.075 | null | null | 0.083333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
64e6d9802ff131b02145f06b9894c085a64f01d6 | 795 | py | Python | streamselect/concept_representations/__init__.py | BenHals/streamselect | ca5e80f3a8a31a38ac52bccfd92528d73f387a6a | [
"BSD-3-Clause"
] | null | null | null | streamselect/concept_representations/__init__.py | BenHals/streamselect | ca5e80f3a8a31a38ac52bccfd92528d73f387a6a | [
"BSD-3-Clause"
] | null | null | null | streamselect/concept_representations/__init__.py | BenHals/streamselect | ca5e80f3a8a31a38ac52bccfd92528d73f387a6a | [
"BSD-3-Clause"
] | null | null | null | """ Base classes for concept representations.
A concept is a joint distribution between x and y.
A concept representation is a finite-sized approximation of this distribution built using a given classifier.
Each concept representation should provide a method of construction from a window of observations and
a similarity method for comparing it with another concept representation. Ideally, it should also support
online updates."""
from .base import ConceptRepresentation
from .error_rate_representation import ErrorRateRepresentation
from .meta_feature_distributions import (
DistributionTypes,
GaussianDistribution,
SingleValueDistribution,
)
__all__ = [
"ConceptRepresentation",
"ErrorRateRepresentation",
"DistributionTypes",
"SingleValueDistribution",
"GaussianDistribution",
]
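# Hedged usage sketch (added; the constructor arguments and method names below
# are illustrative assumptions, not this package's documented API):
#
#   rep = ErrorRateRepresentation(window_size=100)
#   for x, y in window:                 # build from a window of observations
#       rep.learn_one(x, y)
#   score = rep.similarity(other_rep)   # compare two concept representations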
| 34.565217 | 103 | 0.798742 | 86 | 795 | 7.290698 | 0.604651 | 0.025518 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.153459 | 795 | 22 | 104 | 36.136364 | 0.931649 | 0.50566 | 0 | 0 | 0 | 0 | 0.26943 | 0.173575 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.214286 | 0 | 0.214286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
64e7f07288cd11bd5d0f4da97dbcf8d4318f8b8b | 9,586 | py | Python | 4 model_batch/train_for_hist_alldata_loop_result.py | bneiluj/crop_yield_prediction | c73baca3da09e072065c55a1b645e92084df54a1 | [
"MIT"
] | 1 | 2021-04-08T09:14:00.000Z | 2021-04-08T09:14:00.000Z | 4 model_batch/train_for_hist_alldata_loop_result.py | bneiluj/crop_yield_prediction | c73baca3da09e072065c55a1b645e92084df54a1 | [
"MIT"
] | null | null | null | 4 model_batch/train_for_hist_alldata_loop_result.py | bneiluj/crop_yield_prediction | c73baca3da09e072065c55a1b645e92084df54a1 | [
"MIT"
] | 2 | 2020-12-02T04:36:44.000Z | 2021-03-01T12:58:35.000Z | from nnet_for_hist_dropout_stride import *
from GP_crop_v3 import *
if __name__ == "__main__":
config = Config()
summary_train_loss = []
summary_eval_loss = []
summary_RMSE = []
summary_ME = []
# load data to memory
filename = 'histogram_all' + '.npz'
# filename = 'histogram_all_soilweather' + '.npz'
content = np.load(config.load_path + filename)
image_all = content['output_image']
yield_all = content['output_yield']
year_all = content['output_year']
locations_all = content['output_locations']
index_all = content['output_index']
# delete broken image
list_delete=[]
for i in range(image_all.shape[0]):
if np.sum(image_all[i,:,:,:])<=287:
if year_all[i]<2016:
list_delete.append(i)
image_all=np.delete(image_all,list_delete,0)
yield_all=np.delete(yield_all,list_delete,0)
year_all = np.delete(year_all,list_delete, 0)
locations_all = np.delete(locations_all, list_delete, 0)
index_all = np.delete(index_all, list_delete, 0)
# keep major counties
list_keep=[]
for i in range(image_all.shape[0]):
if index_all[i,0] in (5, 17, 18, 19, 20, 27, 29, 31, 38, 39, 46):
list_keep.append(i)
image_all=image_all[list_keep,:,:,:]
yield_all=yield_all[list_keep]
year_all = year_all[list_keep]
locations_all = locations_all[list_keep,:]
index_all = index_all[list_keep,:]
ME_test_mean=np.zeros([6])
ME_val_mean=np.zeros([6])
count = 0
for predict_year in range(2014,2011,-1):
# split into train and validate
index_train = np.nonzero(year_all < predict_year)[0]
index_validate = np.nonzero(year_all == predict_year)[0]
index_test = np.nonzero(year_all == predict_year)[0]
print 'train size',index_train.shape[0]
print 'validate size',index_validate.shape[0]
print 'test size',index_test.shape[0]
# calc train image mean (for each band), then subtract it (broadcast)
image_mean=np.mean(image_all[index_train],(0,1,2))
image_all = image_all - image_mean
year_mean = np.mean(year_all)
print 'year_mean',year_mean
image_validate=image_all[index_validate]
yield_validate=yield_all[index_validate]
image_test=image_all[index_test]
yield_test=yield_all[index_test]
for loop in range(0,1):
RMSE_test_all=[]
ME_test_all=[]
RMSE_val_all=[]
ME_val_all=[]
for time in range(10,31,4):
g = tf.Graph()
with g.as_default():
print 'year',predict_year,'loop',loop,'time',time
# modify config
config = Config()
config.H=time
model= NeuralModel(config,'net')
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.22)
# Launch the graph.
sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))
sess.run(tf.initialize_all_variables())
saver=tf.train.Saver()
saver.restore(sess, config.save_path+str(loop)+str(time)+str(predict_year)+"CNN_model.ckpt")
# Restore log results
# npzfile = np.load(config.save_path+str(loop)+str(time) + str(predict_year)+'result.npz')
# summary_train_loss = npzfile['summary_train_loss'].tolist()
# summary_eval_loss = npzfile['summary_eval_loss'].tolist()
# summary_RMSE = npzfile['summary_RMSE'].tolist()
# summary_ME = npzfile['summary_ME'].tolist()
# print("Model restored.")
# do test
pred = []
real = []
for j in range(image_test.shape[0] / config.B):
real_temp = yield_test[j * config.B:(j + 1) * config.B]
pred_temp= sess.run(model.logits, feed_dict={
model.x: image_test[j * config.B:(j + 1) * config.B,:,0:config.H,:],
model.y: yield_test[j * config.B:(j + 1) * config.B],
model.keep_prob: 1,
model.year: year_all[j * config.B:(j + 1) * config.B,np.newaxis]-year_mean
})
pred.append(pred_temp)
real.append(real_temp)
pred=np.concatenate(pred)
real=np.concatenate(real)
RMSE_test=np.sqrt(np.mean((pred-real)**2))
ME_test=np.mean(pred-real)/np.mean(real)*100
RMSE_test_all.append(RMSE_test)
ME_test_all.append(ME_test)
print 'Test set','RMSE',RMSE_test,'ME',ME_test
# do validation
pred = []
real = []
for j in range(image_validate.shape[0] / config.B):
real_temp = yield_validate[j * config.B:(j + 1) * config.B]
pred_temp= sess.run(model.logits, feed_dict={
model.x: image_validate[j * config.B:(j + 1) * config.B,:,0:config.H,:],
model.y: yield_validate[j * config.B:(j + 1) * config.B],
model.keep_prob: 1,
model.year: year_all[j * config.B:(j + 1) * config.B,np.newaxis]-year_mean
})
pred.append(pred_temp)
real.append(real_temp)
pred=np.concatenate(pred)
real=np.concatenate(real)
RMSE_val=np.sqrt(np.mean((pred-real)**2))
ME_val=np.mean(pred-real)/np.mean(real)*100
RMSE_val_all.append(RMSE_val)
ME_val_all.append(ME_val)
print 'Validation set','RMSE',RMSE_val,'ME',ME_val
# save result
pred_out = []
real_out = []
feature_out = []
year_out = []
locations_out =[]
index_out = []
for i in range(image_all.shape[0] / config.B):
feature,pred = sess.run(
[model.fc6,model.logits], feed_dict={
model.x: image_all[i * config.B:(i + 1) * config.B,:,0:config.H,:],
model.y: yield_all[i * config.B:(i + 1) * config.B],
model.keep_prob: config.drop_out,
model.year: year_all[i * config.B:(i + 1) * config.B,np.newaxis]-year_mean
})
real = yield_all[i * config.B:(i + 1) * config.B]
pred_out.append(pred)
real_out.append(real)
feature_out.append(feature)
year_out.append(year_all[i * config.B:(i + 1) * config.B])
locations_out.append(locations_all[i * config.B:(i + 1) * config.B])
index_out.append(index_all[i * config.B:(i + 1) * config.B])
# print i
weight_out, b_out = sess.run(
[model.dense_W, model.dense_B], feed_dict={
model.x: image_all[0 * config.B:(0 + 1) * config.B, :, 0:config.H, :],
model.y: yield_all[0 * config.B:(0 + 1) * config.B],
model.keep_prob: config.drop_out,
model.year: year_all[i * config.B:(i + 1) * config.B,np.newaxis]-year_mean
})
pred_out=np.concatenate(pred_out)
real_out=np.concatenate(real_out)
feature_out=np.concatenate(feature_out)
year_out=np.concatenate(year_out)
locations_out=np.concatenate(locations_out)
index_out=np.concatenate(index_out)
path = config.save_path+str(loop)+str(time)+str(predict_year)+'result_prediction.npz'
np.savez(path,
pred_out=pred_out,real_out=real_out,feature_out=feature_out,
year_out=year_out,locations_out=locations_out,weight_out=weight_out,b_out=b_out,index_out=index_out)
RMSE_GP,ME_GP,Average_GP=GaussianProcess(predict_year,path)
print 'RMSE_GP',RMSE_GP
print 'ME_GP',ME_GP
print 'Average_GP',Average_GP
ME_test_mean+=np.absolute(np.array(ME_test_all))
ME_val_mean+=np.absolute(np.array(ME_val_all))
count += 1
print count
print 'theoretical count', 32
ME_test_mean/=count
ME_val_mean/=count
plt.plot(range(len(ME_val_mean)),ME_val_mean)
plt.plot(range(len(ME_test_mean)),ME_test_mean)
plt.legend(['val','test'])
plt.show()
# plt.bar(range(len(ME_val_mean)),ME_val_mean)
plt.bar(range(len(ME_test_mean)),ME_test_mean)
# plt.legend(['val','test'])
plt.show()
| 45.647619 | 251 | 0.52368 | 1,211 | 9,586 | 3.892651 | 0.14038 | 0.057913 | 0.030547 | 0.023335 | 0.409419 | 0.359567 | 0.345779 | 0.314383 | 0.26644 | 0.210013 | 0 | 0.019586 | 0.355518 | 9,586 | 209 | 252 | 45.866029 | 0.743444 | 0.072502 | 0 | 0.197531 | 0 | 0 | 0.029206 | 0.002368 | 0.006173 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.012346 | null | null | 0.074074 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
64ea22c140e09fcc03e94150afada75a7e282353 | 1,099 | py | Python | src/layers/tsm.py | zhaojieting/e3d_lstm | e77d5523ad3a6f062042c095f1d40a29ee054db4 | [
"Apache-2.0"
] | null | null | null | src/layers/tsm.py | zhaojieting/e3d_lstm | e77d5523ad3a6f062042c095f1d40a29ee054db4 | [
"Apache-2.0"
] | null | null | null | src/layers/tsm.py | zhaojieting/e3d_lstm | e77d5523ad3a6f062042c095f1d40a29ee054db4 | [
"Apache-2.0"
] | null | null | null | """Module for constructing TSM (Temporal Shift Module) layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
def TSM_layer(inputs, output_channels, kernel_shape, padding='same', fold_div=8):
    """Temporal Shift Module: shift a fraction of the channels one step along
    the time axis, then convolve every frame with a shared 2D convolution.

    The original body was non-functional pseudocode (numpy buffers holding
    tensors, a nonexistent `.views` method, and a discarded conv result), so
    this is a hedged reconstruction of the intended behavior; it assumes
    `inputs` has a static shape of [batch, time, height, width, channels].
    """
    with tf.variable_scope('generator'):
        _, t, h, w, c = inputs.get_shape().as_list()
        fold = c // fold_div
        # The first `fold` channels look one frame ahead, the next `fold` one
        # frame back; zero padding fills the vacated time step.
        fwd = tf.pad(inputs[:, 1:, :, :, :fold],
                     [[0, 0], [0, 1], [0, 0], [0, 0], [0, 0]])
        bwd = tf.pad(inputs[:, :-1, :, :, fold:2 * fold],
                     [[0, 0], [1, 0], [0, 0], [0, 0], [0, 0]])
        shifted = tf.concat([fwd, bwd, inputs[:, :, :, :, 2 * fold:]], axis=-1)
        # Fold time into the batch so one conv2d processes all frames, then
        # restore the time axis.
        out_puts = tf.reshape(shifted, [-1, h, w, c])
        out_puts = tf.layers.conv2d(out_puts, output_channels, kernel_shape, padding=padding)
        out_puts = tf.reshape(out_puts, [-1, t, h, w, output_channels])
        return out_puts | 42.269231 | 121 | 0.622384 | 142 | 1,099 | 4.521127 | 0.373239 | 0.17134 | 0.074766 | 0.065421 | 0.165109 | 0.105919 | 0.105919 | 0.105919 | 0.105919 | 0.105919 | 0 | 0.016518 | 0.283894 | 1,099 | 26 | 122 | 42.269231 | 0.799238 | 0.037307 | 0 | 0 | 0 | 0 | 0.012346 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05 | false | 0 | 0.3 | 0 | 0.4 | 0.05 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
64ec1fd31776137216ede3cc1ef1e18c0071a86e | 167 | py | Python | django_dicom/views/utils.py | ZviBaratz/django-dicom | fc5d5443ebcab9af9705a2e81c58662789a34c62 | [
"Apache-2.0"
] | 8 | 2018-12-25T11:00:31.000Z | 2022-02-03T12:05:56.000Z | django_dicom/views/utils.py | ZviBaratz/django-dicom | fc5d5443ebcab9af9705a2e81c58662789a34c62 | [
"Apache-2.0"
] | 11 | 2019-01-03T11:30:42.000Z | 2021-05-12T06:53:20.000Z | django_dicom/views/utils.py | ZviBaratz/django-dicom | fc5d5443ebcab9af9705a2e81c58662789a34c62 | [
"Apache-2.0"
] | 4 | 2019-06-23T18:09:07.000Z | 2019-08-30T15:43:18.000Z | """
Utilities for the :mod:`~django_dicom.views` module.
"""
CONTENT_DISPOSITION = "attachment; filename={name}.zip"
ZIP_CONTENT_TYPE = "application/x-zip-compressed"
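# Worked example (a sketch, not part of the module):
#   CONTENT_DISPOSITION.format(name="series_42")
#   -> 'attachment; filename=series_42.zip'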
| 27.833333 | 55 | 0.754491 | 21 | 167 | 5.809524 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.083832 | 167 | 5 | 56 | 33.4 | 0.797386 | 0.311377 | 0 | 0 | 0 | 0 | 0.551402 | 0.261682 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
64ecd16af3bba6904bb50e63ae398928437708ec | 1,946 | py | Python | tests/tmp.py | ribeirojose/d6tstack | 4d974cca4dc75ff988269443a6622ca9922127e6 | [
"MIT"
] | 176 | 2018-04-30T15:40:34.000Z | 2022-03-16T09:31:08.000Z | tests/tmp.py | tsering10/d6tstack | 7b6c5851b53bdd221466facfb7aebdc96006bf41 | [
"MIT"
] | 29 | 2018-10-28T15:35:24.000Z | 2022-01-31T03:23:35.000Z | tests/tmp.py | tsering10/d6tstack | 7b6c5851b53bdd221466facfb7aebdc96006bf41 | [
"MIT"
] | 45 | 2018-07-27T04:16:28.000Z | 2022-01-10T18:29:21.000Z | import importlib
import d6tstack.utils
importlib.reload(d6tstack.utils)
import time
import yaml
config = yaml.safe_load(open('tests/.test-cred.yaml'))
cfg_uri_psql = config['rds']
cfg_uri_psql = config['wlo']
import pandas as pd
df = pd.DataFrame({'a':range(10),'b':range(10)})
d6tstack.utils.pd_to_psql(df,cfg_uri_psql,'quick',sep='\t',if_exists='replace')
import sqlalchemy
sqlengine = sqlalchemy.create_engine(cfg_uri_psql)  # the engine must exist before reading back
print(pd.read_sql_table('quick', sqlengine))
import yaml
config = yaml.safe_load(open('.test-cred.yaml'))
cfg_uri_psql = config['wlo']
import pandas as pd
df = pd.DataFrame({'a':range(10),'b':range(10),'name':['name,first name']*10})
import d6tstack.utils
d6tstack.utils.pd_to_psql(df,cfg_uri_psql,'quick',sep='\t',if_exists='replace')
import sqlalchemy
sqlengine = sqlalchemy.create_engine(cfg_uri_psql)
print(pd.read_sql_table('quick',sqlengine))
config = yaml.safe_load(open('tests/.test-cred.yaml'))
cfg_uri_mysql = config['local-mysql']
sqlengine = sqlalchemy.create_engine(cfg_uri_mysql)
importlib.reload(d6tstack.utils)
d6tstack.utils.pd_to_mysql(df,cfg_uri_mysql,'quick',if_exists='replace')
print(pd.read_sql_table('quick',sqlengine))
import sqlalchemy
sqlengine = sqlalchemy.create_engine(cfg_uri_psql)
sqlengine = sqlalchemy.create_engine(cfg_uri_mysql)
sqlengine = sqlalchemy.create_engine(cfg_uri_psql)
print(pd.read_sql_table('benchmark',sqlengine).head())
dft = pd.read_sql_table('benchmark',sqlengine)
dft.shape
# cursor = sqlengine.cursor()
sql = sqlengine.execute("SELECT * FROM benchmark;")
dft2 = pd.DataFrame(sql.fetchall())
dft2.shape
sql.keys()
importlib.reload(d6tstack.utils)
start_time = time.time()
dft2 = d6tstack.utils.pd_from_sqlengine(cfg_uri_psql, "SELECT * FROM benchmark;")
assert dft2.shape==(100000, 23)
print("--- %s seconds ---" % (time.time() - start_time))
start_time = time.time()
dft = pd.read_sql_table('benchmark',sqlengine)
assert dft.shape==(100000, 23)
print("--- %s seconds ---" % (time.time() - start_time))
d6tstack.utils.test()
| 25.605263 | 81 | 0.751285 | 293 | 1,946 | 4.791809 | 0.215017 | 0.055556 | 0.064103 | 0.059829 | 0.696581 | 0.696581 | 0.630342 | 0.491453 | 0.491453 | 0.443732 | 0 | 0.022396 | 0.08222 | 1,946 | 75 | 82 | 25.946667 | 0.763718 | 0.013875 | 0 | 0.632653 | 0 | 0 | 0.138831 | 0.021921 | 0 | 0 | 0 | 0 | 0.040816 | 1 | 0 | false | 0 | 0.265306 | 0 | 0.265306 | 0.122449 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
64ed7fa0bd919eed84109ce8a4e03eedc4ba591b | 467 | py | Python | tests/py/test_event.py | phlax/playground | ca661f7adcc2c3502f63e630c96e87e31aa9309a | [
"Apache-2.0"
] | 8 | 2020-11-23T21:08:32.000Z | 2021-12-18T10:37:25.000Z | tests/py/test_event.py | phlax/playground | ca661f7adcc2c3502f63e630c96e87e31aa9309a | [
"Apache-2.0"
] | 273 | 2020-11-23T19:27:06.000Z | 2020-12-21T17:34:49.000Z | tests/py/test_event.py | phlax/playground | ca661f7adcc2c3502f63e630c96e87e31aa9309a | [
"Apache-2.0"
] | 2 | 2020-11-24T09:49:29.000Z | 2020-12-30T10:39:10.000Z |
from playground.control import attribs, event
class DummyAttribs(attribs.ValidatingAttribs):
def __init__(self):
pass
def test_event():
_kwargs = dict()
_event = event.PlaygroundEvent(_kwargs)
assert _event._kwargs is _kwargs
assert _event._attribs is None
dummy_attribs = DummyAttribs()
_event = event.PlaygroundEvent(_kwargs, dummy_attribs)
assert _event._kwargs is _kwargs
assert _event._attribs is dummy_attribs
| 22.238095 | 58 | 0.736617 | 53 | 467 | 6.056604 | 0.396226 | 0.137072 | 0.158879 | 0.193146 | 0.280374 | 0.280374 | 0.280374 | 0.280374 | 0.280374 | 0 | 0 | 0 | 0.197002 | 467 | 20 | 59 | 23.35 | 0.856 | 0 | 0 | 0.153846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.307692 | 1 | 0.153846 | false | 0.076923 | 0.076923 | 0 | 0.307692 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
64eea514dc744490d62df2dbfa0ff759f1bd366a | 13,313 | py | Python | pi_weather.py | mitgobla/Pi-Weather | aa5a8a4a543d721ba9c7ebe3a69444512133d4cc | [
"MIT"
] | 1 | 2021-08-22T20:56:37.000Z | 2021-08-22T20:56:37.000Z | pi_weather.py | mitgobla/Pi-Weather | aa5a8a4a543d721ba9c7ebe3a69444512133d4cc | [
"MIT"
] | null | null | null | pi_weather.py | mitgobla/Pi-Weather | aa5a8a4a543d721ba9c7ebe3a69444512133d4cc | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import os
from sys import argv
from time import sleep
from papirus import PapirusComposite
from weather import Weather
DIRECTORY = os.path.dirname(os.path.realpath(__file__))
class PiWeather:
def __init__(self):
self.config = self.load_config()["weather"]
self.unit = self.get_unit()
self.weather = Weather(unit=self.unit)
self.location = self.get_location()
self.lookup = {}
self.compass_dirs = ["N", "NNE", "NE", "ENE", "E", "ESE", "SE", "SSE",
"S", "SSW", "SW", "WSW", "W", "WNW", "NW", "NNW"]
self.compass_dirs_simple = ["N", "NE", "NE", "NE", "E", "SE", "SE", "SE",
"S", "SW", "SW", "SW", "W", "NW", "NW", "NW"]
@staticmethod
def load_config():
"""Load PiWeather Config
Returns:
            dict -- Dictionary of config options
"""
with open(os.path.join(DIRECTORY, 'config.json')) as config_file:
return json.load(config_file)
def get_unit(self):
"""Read the selected temperature unit from config
Returns:
str -- String of unit in lowercase
"""
if "unit" in self.config:
return self.config["unit"].lower()
return "c"
def get_location(self):
"""Read the location set in the config
Returns:
str -- String of the location
"""
if len(argv) > 1:
return str(argv[1])
if "location" in self.config:
return self.config["location"]
return "London"
def get_wind_direction(self, direction):
"""Converts the direction from degrees to compass
Arguments:
direction {int} -- Direction in degrees
Returns:
str -- Compass/Degrees direction depending on config
"""
ix = int((int(direction) + 11.25)/22.5 - 0.02)
if self.config["wind_direction"] == "compass":
return self.compass_dirs[ix % 16]
elif self.config["wind_direction"] == "simplecompass":
return self.compass_dirs_simple[ix % 16]
return direction
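    # Worked example (a sketch): direction=95 gives ix = int((95 + 11.25) / 22.5 - 0.02) = 4,
    # so "compass" mode returns "E" and "simplecompass" also returns "E".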
@staticmethod
def convert24(time, meridiem):
"""Convert hour to 24 hour format
Arguments:
time {list} -- Array of Hour, Minute
meridiem {str} -- String of meridiem
Returns:
int -- 24 Hour format of hour
"""
if meridiem == 'am' and time[0] == '12':
return 0
elif meridiem == 'am':
return int(time[0])
elif meridiem == 'pm' and time[0] == '12':
return int(time[0])
return int(time[0])+12
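    # Worked examples (a sketch): convert24(['7', '30'], 'pm') returns 19,
    # while convert24(['12', '05'], 'am') returns 0.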
def get_suntime(self, suntime):
"""Convert sunrise/set to 24 hour
Arguments:
suntime {str} -- String of time in 'HH:MM pm' format
Returns:
str -- Returns HH:MM in 24 format
"""
meridiem = suntime.split(' ')[-1]
suntime = suntime.split(' ')[0].split(':')
sun_hour = self.convert24(suntime, meridiem)
sun_minute = int(suntime[1])
return str(sun_hour)+":"+str(sun_minute)
def get_weather(self):
"""Get weather and populate lookup dictonary
"""
lookup_data = self.weather.lookup_by_location(self.location)
self.lookup = {
"temperature": lookup_data.condition.temp+"°"+lookup_data.units.temperature,
"humidity": lookup_data.atmosphere.humidity+"%",
"wind": {
"speed": lookup_data.wind.speed+lookup_data.units.speed,
"direction": self.get_wind_direction(lookup_data.wind.direction)
},
"pressure": lookup_data.atmosphere.pressure+lookup_data.units.pressure,
"visibility": lookup_data.atmosphere.visibility+lookup_data.units.distance,
"sunrise": self.get_suntime(lookup_data.astronomy.sunrise),
"sunset": self.get_suntime(lookup_data.astronomy.sunset),
"weather_type": lookup_data.condition.text,
"weather_code": lookup_data.condition.code,
"forecast": lookup_data.forecast
}
class PiDisplay(PiWeather):
def __init__(self):
PiWeather.__init__(self)
self.display = PapirusComposite(False)
self.unknown_icon = "3200.png"
self.order = []
self.gotWeather = False
        self.initialize_order()
        self.initialize_display()
    def initialize_order(self):
        """Create the order in which information is displayed
"""
for stat in self.config["stats"]:
if self.config["stats"][stat]:
self.order.append(stat)
    def initialize_display(self):
"""Add all the screen elements to the e-ink display
"""
if self.config["forecast"]["enabled"]:
self.display.AddImg(os.path.join(
DIRECTORY, 'images', 'weather', self.unknown_icon), 0, 0, (48, 48), Id="WeatherIcon")
self.display.AddText("Loading...", 48, 0, size=13, Id="LineOne",
fontPath='/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf')
self.display.AddText("Loading...", 48, 20, size=12, Id="LineTwo")
self.display.AddText("Loading...", 48, 34,
size=12, Id="LineThree")
if self.config["forecast"]["sixday"]:
self.display.AddText("...", 3, 49, size=12, Id="ForecastOne")
self.display.AddText("...", 35, 49, size=12, Id="ForecastTwo")
self.display.AddText(
"...", 68, 49, size=12, Id="ForecastThree")
self.display.AddText(
"...", 101, 49, size=12, Id="ForecastFour")
self.display.AddText(
"...", 135, 49, size=12, Id="ForecastFive")
self.display.AddText("...", 167, 49, size=12, Id="ForecastSix")
self.display.AddImg(os.path.join(
DIRECTORY, 'images', 'weather', self.unknown_icon), 1, 63, (32, 32), Id="ForecastIconOne")
self.display.AddImg(os.path.join(
DIRECTORY, 'images', 'weather', self.unknown_icon), 34, 63, (32, 32), Id="ForecastIconTwo")
self.display.AddImg(os.path.join(
DIRECTORY, 'images', 'weather', self.unknown_icon), 67, 63, (32, 32), Id="ForecastIconThree")
self.display.AddImg(os.path.join(
DIRECTORY, 'images', 'weather', self.unknown_icon), 100, 63, (32, 32), Id="ForecastIconFour")
self.display.AddImg(os.path.join(
DIRECTORY, 'images', 'weather', self.unknown_icon), 133, 63, (32, 32), Id="ForecastIconFive")
self.display.AddImg(os.path.join(
DIRECTORY, 'images', 'weather', self.unknown_icon), 166, 63, (32, 32), Id="ForecastIconSix")
else:
self.display.AddText("Today: ...", 25, 51,
size=12, Id="ForecastOne")
self.display.AddText("Tomorrow: ...", 25,
74, size=12, Id="ForecastTwo")
self.display.AddImg(os.path.join(
DIRECTORY, 'images', 'weather', self.unknown_icon), 1, 49, (23, 23), Id="ForecastIconOne")
self.display.AddImg(os.path.join(
DIRECTORY, 'images', 'weather', self.unknown_icon), 1, 72, (23, 23), Id="ForecastIconTwo")
else:
self.display.AddImg(os.path.join(
DIRECTORY, 'images', 'weather', self.unknown_icon), 1, 15, (80, 80), Id="WeatherIcon")
self.display.AddText("Loading...", 1, 1, size=13, Id="LineOne",
fontPath='/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf')
self.display.AddText("Loading...", 82, 15, size=12, Id="LineTwo")
self.display.AddText("Loading...", 82, 30,
size=12, Id="LineThree")
self.display.WriteAll()
def update(self):
"""Regurlarly update the screen with new information
"""
self.gotWeather = False
while not self.gotWeather:
try:
self.get_weather()
self.gotWeather = True
            except Exception:
sleep(60)
if not self.lookup:
print("Invalid Location")
exit()
self.display.UpdateImg("WeatherIcon", os.path.join(
DIRECTORY, 'images', 'weather', str(self.lookup["weather_code"])+'.png'))
self.display.UpdateText("LineOne", self.lookup["weather_type"])
if self.config["forecast"]["enabled"]:
if self.config["forecast"]["sixday"]:
self.display.UpdateText(
"ForecastOne", self.lookup["forecast"][0].day)
self.display.UpdateText(
"ForecastTwo", self.lookup["forecast"][1].day)
self.display.UpdateText(
"ForecastThree", self.lookup["forecast"][2].day)
self.display.UpdateText(
"ForecastFour", self.lookup["forecast"][3].day)
self.display.UpdateText(
"ForecastFive", self.lookup["forecast"][4].day)
self.display.UpdateText(
"ForecastSix", self.lookup["forecast"][5].day)
self.display.UpdateImg("ForecastIconOne", os.path.join(
DIRECTORY, 'images', 'weather', str(self.lookup["forecast"][0].code)+'.png'))
self.display.UpdateImg("ForecastIconTwo", os.path.join(
DIRECTORY, 'images', 'weather', str(self.lookup["forecast"][1].code)+'.png'))
self.display.UpdateImg("ForecastIconThree", os.path.join(
DIRECTORY, 'images', 'weather', str(self.lookup["forecast"][2].code)+'.png'))
self.display.UpdateImg("ForecastIconFour", os.path.join(
DIRECTORY, 'images', 'weather', str(self.lookup["forecast"][3].code)+'.png'))
self.display.UpdateImg("ForecastIconFive", os.path.join(
DIRECTORY, 'images', 'weather', str(self.lookup["forecast"][4].code)+'.png'))
self.display.UpdateImg("ForecastIconSix", os.path.join(
DIRECTORY, 'images', 'weather', str(self.lookup["forecast"][5].code)+'.png'))
else:
self.display.UpdateText(
"ForecastOne", "Today: "+self.lookup["forecast"][0].text)
self.display.UpdateText(
"ForecastTwo", "Tomorrow: "+self.lookup["forecast"][1].text)
self.display.UpdateImg("ForecastIconOne", os.path.join(
DIRECTORY, 'images', 'weather', str(self.lookup["forecast"][0].code)+'.png'))
self.display.UpdateImg("ForecastIconTwo", os.path.join(
DIRECTORY, 'images', 'weather', str(self.lookup["forecast"][1].code)+'.png'))
for stat in self.order:
if stat == "temperature":
self.display.UpdateText("LineTwo", "Temp: "+self.lookup[stat])
self.display.UpdateText(
"LineThree", "Hi: "+self.lookup["forecast"][0].high+" Lo: "+self.lookup["forecast"][0].low)
elif stat == "humidity":
self.display.UpdateText(
"LineTwo", "Humidity: "+self.lookup[stat])
humidity = int(self.lookup[stat][:-1])
scale = ""
if humidity < 25:
scale = "Very Dry"
elif humidity < 60:
scale = "Dry"
elif humidity < 80:
scale = "Wet"
else:
scale = "Very Wet"
self.display.UpdateText("LineThree", scale)
elif stat == "wind":
self.display.UpdateText(
"LineTwo", "Speed: "+self.lookup[stat]["speed"])
self.display.UpdateText(
"LineThree", "Direction: "+self.lookup[stat]["direction"])
elif stat == "pressure":
self.display.UpdateText("LineTwo", "Pressure")
self.display.UpdateText("LineThree", self.lookup[stat])
elif stat == "visibility":
self.display.UpdateText("LineTwo", "Visibility")
self.display.UpdateText("LineThree", self.lookup[stat])
elif stat == "sunrise":
self.display.UpdateText("LineTwo", "Sunrise")
self.display.UpdateText("LineThree", self.lookup[stat])
elif stat == "sunset":
self.display.UpdateText("LineTwo", "Sunset")
self.display.UpdateText("LineThree", self.lookup[stat])
self.display.WriteAll()
if len(self.order) >= 3:
sleep(20)
else:
sleep(int(60/len(self.order)))
# Can only request weather data every 43 seconds (2000 calls a day)
# 20 seconds per slide is safe
PI = PiDisplay()
if __name__ == "__main__":
while True:
PI.update()
| 40.588415 | 113 | 0.534966 | 1,380 | 13,313 | 5.096377 | 0.188406 | 0.092279 | 0.068676 | 0.054031 | 0.349637 | 0.309683 | 0.274563 | 0.247263 | 0.235888 | 0.207308 | 0 | 0.027526 | 0.320514 | 13,313 | 327 | 114 | 40.712538 | 0.749834 | 0.082401 | 0 | 0.264317 | 0 | 0 | 0.154065 | 0.008531 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052863 | false | 0.026432 | 0.026432 | 0 | 0.14978 | 0.004405 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
64eedb6c77b1e955f83b08c16eb463ea0891406d | 961 | py | Python | crystallography/cube_symmetry.py | rpw199912j/matsci_animation | cd613853a40cdee73f9cdff7bdf23a02451bb1ef | [
"MIT"
] | null | null | null | crystallography/cube_symmetry.py | rpw199912j/matsci_animation | cd613853a40cdee73f9cdff7bdf23a02451bb1ef | [
"MIT"
] | null | null | null | crystallography/cube_symmetry.py | rpw199912j/matsci_animation | cd613853a40cdee73f9cdff7bdf23a02451bb1ef | [
"MIT"
] | null | null | null | from manim import *
class CubeSymmetry(ThreeDScene):
def construct(self):
# define a 3D axes
axes_3d = ThreeDAxes(
tips=False
)
# define a cube with side length 2 placed at the origin
cube = Cube(stroke_color=YELLOW, stroke_width=3)
        # define a line that aligns with one of the edges
line = Line3D(
start=np.array([1, -1, 1]),
end=np.array([1, 1, 1]),
stroke_color=PURPLE
)
self.add(axes_3d)
self.wait()
self.play(
FadeIn(cube)
)
self.wait()
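        # 35.26 is approximately arctan(1/sqrt(2)) in degrees, so this move
        # points the camera roughly along the cube's [1, -1, 1] body diagonal.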
self.move_camera(phi=(90 - 35.26) * DEGREES, theta=-45 * DEGREES)
self.wait()
self.play(
Create(line)
)
self.wait()
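        # Three 120-degree rotations about the [1, -1, 1] body diagonal map the
        # cube onto itself, illustrating its 3-fold rotational symmetry axis.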
for _ in range(3):
self.play(
Rotate(VGroup(cube, line), angle=120 * DEGREES, axis=np.array([1, -1, 1]))
)
self.wait()
| 23.439024 | 90 | 0.495317 | 115 | 961 | 4.078261 | 0.556522 | 0.025586 | 0.051173 | 0.057569 | 0.063966 | 0 | 0 | 0 | 0 | 0 | 0 | 0.046233 | 0.3923 | 961 | 40 | 91 | 24.025 | 0.756849 | 0.119667 | 0 | 0.275862 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.034483 | false | 0 | 0.034483 | 0 | 0.103448 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
64f18694f99e323c1c04d6f1bc14cbfb5fcf7280 | 3,226 | py | Python | src/python/services/rpc_services/rpc_requests.py | rockmind/LoveAndMarriage | 2877d6af626eff2a3134a05ab7f03c52f14fde5c | [
"Apache-2.0"
] | null | null | null | src/python/services/rpc_services/rpc_requests.py | rockmind/LoveAndMarriage | 2877d6af626eff2a3134a05ab7f03c52f14fde5c | [
"Apache-2.0"
] | 1 | 2021-12-18T16:07:39.000Z | 2021-12-18T16:07:39.000Z | src/python/services/rpc_services/rpc_requests.py | rockmind/LoveAndMarriage | 2877d6af626eff2a3134a05ab7f03c52f14fde5c | [
"Apache-2.0"
] | null | null | null | from asyncio import sleep, get_event_loop
from aiohttp import ClientSession
from typing import OrderedDict, Union, List
from numpy.random import randint
from oauthlib.oauth2 import LegacyApplicationClient
from requests_oauthlib import OAuth2Session
from services import json_dumps, json_loads
class RequestRpc:
REFRESH_TOKEN_TIME = 30*60 # in sec
    def __init__(self, url: str, username: str, password: str, token_url: str):
self.url = url
self.username = username
self.password = password
self.token_url = token_url
self._oauth = OAuth2Session(client=LegacyApplicationClient(client_id=username))
self._token = None
self._token_refresh_task = get_event_loop().create_task(self.refresh_token_loop())
self._session = None
async def refresh_token_loop(self):
while True:
try:
await self.refresh_token()
            except Exception:
                pass
await sleep(self.REFRESH_TOKEN_TIME)
async def refresh_token(self):
self._token = self._oauth.fetch_token(
token_url=self.token_url,
username=self.username,
password=self.password
)
async def rpc_request(self, methods: Union[OrderedDict, List[str], str]):
if not self._token:
await self.refresh_token()
if isinstance(methods, OrderedDict):
body = [{
"jsonrpc": "2.0",
"method": m,
"params": v or dict(),
"id": randint(10000000)
} for m, v in methods.items()]
elif isinstance(methods, List):
body = [{
"jsonrpc": "2.0",
"method": m,
"id": randint(10000000)
} for m in methods]
elif isinstance(methods, str):
body = [{
"jsonrpc": "2.0",
"method": methods,
"id": randint(10000000)
}]
else:
            raise TypeError('Unexpected type for methods parameter.')
if not self._session:
self._session = ClientSession(json_serialize=json_dumps)
for i in range(4):
async with self._session.get(self.url+'authentication_check', headers=self._prepare_headers()) as resp:
if resp.status == 200:
results = await resp.json(loads=json_loads)
if results.get('Status') == 'OK':
break
await sleep(5**i)
await self.refresh_token()
continue
async with self._session.post(self.url, headers=self._prepare_headers(), json=body) as resp:
results = await resp.json(loads=json_loads)
for result in results:
if 'error' in result:
raise Exception(result['error'].get('message'))
if isinstance(methods, str):
return results[0]
return results
def _prepare_headers(self):
headers = {
'Authorization': f'{self._token["token_type"]} {self._token["access_token"]}',
'Content-Type': 'application/json'
}
return headers
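# Usage sketch (the URLs and method name are assumptions, not from this repo):
#   rpc = RequestRpc(url='https://api.example.com/rpc/',
#                    username='user', password='secret',
#                    token_url='https://api.example.com/oauth/token/')
#   result = await rpc.rpc_request('system.list_methods')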
| 33.604167 | 115 | 0.567886 | 348 | 3,226 | 5.091954 | 0.316092 | 0.054176 | 0.045147 | 0.035553 | 0.095372 | 0.060948 | 0.038375 | 0 | 0 | 0 | 0 | 0.020075 | 0.33602 | 3,226 | 95 | 116 | 33.957895 | 0.80719 | 0.00186 | 0 | 0.2 | 0 | 0 | 0.07023 | 0.017402 | 0 | 0 | 0 | 0 | 0 | 1 | 0.025 | false | 0.05 | 0.0875 | 0 | 0.175 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
64f2e2b530af5be77d35403bbf827b176b5a071a | 2,297 | py | Python | Chinese_zodiac.py | 2019-fall-csc-226/a01-breaking-bad-iransi_a01 | 720a648e9068af9e1202893942e311163bd0e1c1 | [
"MIT"
] | null | null | null | Chinese_zodiac.py | 2019-fall-csc-226/a01-breaking-bad-iransi_a01 | 720a648e9068af9e1202893942e311163bd0e1c1 | [
"MIT"
] | null | null | null | Chinese_zodiac.py | 2019-fall-csc-226/a01-breaking-bad-iransi_a01 | 720a648e9068af9e1202893942e311163bd0e1c1 | [
"MIT"
] | null | null | null | birthyear = int(input("what year were you born? {2000-2011}"))
if birthyear == 2000 :
print("you're a fire breathing dragon")
elif birthyear == 2001:
print("sssss you're a snake ")
if birthyear == 2002:
print("Haaayyy. Get it? cause your a horse :)")
if birthyear == 2003 :
print("you are the GOAT")
elif birthyear == 2004 :
print("what did one monkey say to another? 'I was born in 2004'")
if birthyear == 2005 :
print("why did the turkey cross the road? "
"To prove he wasn't chicken and "
"neither are you because you're a rooster!")
elif birthyear == 2006 :
print("woof woof you're a dog")
if birthyear == 2007:
print("oink oink you're a pig")
elif birthyear == 2008 :
print("pitty pat pat you are a rat")
if birthyear == 2009 :
print("you are an ox")
elif birthyear == 2010 :
print("you're a tiger grrr")
if birthyear == 2011 :
print(" is your name Thumper? Because you're a rabbit!")
elif birthyear < 2000 :
print("you're too old get out out here !")
elif birthyear > 2011 :
print(" umm I said a year between 2000 and 2011. TRy Again")
birthyear = int(input("now put in a friend's birth year {2000 and up}"))
if birthyear == 2000 :
print("you're a fire breathing dragon!!")
elif birthyear == 2001:
print("sssss you're a snake ")
if birthyear == 2002:
print("Haaayyy. Get it? cause you're a horse :)")
if birthyear == 2003 :
print("you are the GOAT")
elif birthyear == 2004 :
print("what did one monkey say to another?")
print( '"I was born in 2004"')
if birthyear == 2005 :
print("why did the turkey cross the road? to prove he wasn't chicken and neither are you you're a rooster")
elif birthyear == 2006 :
print("woof woof you're a dog")
if birthyear == 2007:
print("oink oink you're a pig")
elif birthyear == 2008 :
print("pitty pat pat you are a rat!")
if birthyear == 2009 :
print("you are an ox")
elif birthyear == 2010 :
print("a tiger grrr")
if birthyear == 2011 :
print(" is your name Thumper? Because you're a rabbit!")
elif birthyear < 2000 :
print("you're too old get out of here !")
elif birthyear > 2011 :
print(" too young ! try again with someone who was born between 2000 and 2011") | 31.902778 | 111 | 0.637353 | 361 | 2,297 | 4.055402 | 0.265928 | 0.054645 | 0.057377 | 0.057377 | 0.849727 | 0.814208 | 0.814208 | 0.814208 | 0.814208 | 0.814208 | 0 | 0.0855 | 0.246408 | 2,297 | 72 | 112 | 31.902778 | 0.760254 | 0 | 0 | 0.655738 | 0 | 0.016393 | 0.475196 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.47541 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
64f449e7051ecea8b2412b9c5d7d5fca434151bc | 13,150 | py | Python | pyrallis/wrappers/field_wrapper.py | eladrich/pyrallis | 1e0586f9de9ed5d8d67d061dac1fb44c73f9d4a4 | [
"MIT"
] | 22 | 2021-12-30T16:06:09.000Z | 2022-03-09T23:27:30.000Z | pyrallis/wrappers/field_wrapper.py | eladrich/pyrallis | 1e0586f9de9ed5d8d67d061dac1fb44c73f9d4a4 | [
"MIT"
] | 5 | 2022-01-18T14:05:52.000Z | 2022-03-03T17:23:03.000Z | pyrallis/wrappers/field_wrapper.py | eladrich/pyrallis | 1e0586f9de9ed5d8d67d061dac1fb44c73f9d4a4 | [
"MIT"
] | null | null | null | import argparse
import dataclasses
import inspect
from logging import getLogger
from typing import Any, Optional, List, Type, Dict, Set, Union, Tuple
from . import docstring
from .wrapper import Wrapper
from .. import utils
logger = getLogger(__name__)
class FieldWrapper(Wrapper[dataclasses.Field]):
"""
The FieldWrapper class acts a bit like an 'argparse.Action' class, which
essentially just creates the `option_strings` and `arg_options` that get
passed to the `add_argument(*option_strings, **arg_options)` function of the
`argparse._ArgumentGroup` (in this case represented by the `parent`
attribute, an instance of the class `DataclassWrapper`).
The `option_strings`, `required`, `help`, `default`, etc.
attributes just autogenerate the argument of the same name of the
above-mentioned `add_argument` function. The `arg_options` attribute fills
in the rest and may overwrite these values, depending on the type of field.
The `field` argument is the actually wrapped `dataclasses.Field` instance.
"""
def __init__(self, field: dataclasses.Field, parent: Any = None, prefix: str = ""):
super().__init__(wrapped=field, name=field.name)
self.field: dataclasses.Field = field
self.prefix: str = prefix
self._parent: Any = parent
# Holders used to 'cache' the properties.
# (could've used cached_property with Python 3.8).
self._option_strings: Optional[Set[str]] = None
self._required: Optional[bool] = None
self._docstring: docstring.AttributeDocString = docstring.AttributeDocString()
self._help: Optional[str] = None
self._default: Optional[Union[Any, List[Any]]] = None
self._dest: Optional[str] = None
# the argparse-related options:
self._arg_options: Dict[str, Any] = {}
self._type: Optional[Type[Any]] = None
# stores the resulting values for each of the destination attributes.
self._results: Dict[str, Any] = {}
@property
def arg_options(self) -> Dict[str, Any]:
"""Dictionary of values to be passed to the `add_argument` method.
The main feature of this package is to infer these arguments
automatically using features of the built-in `dataclasses` package, as
well as Python's type annotations.
By passing additional keyword arguments to the `field()`
function, the autogenerated arguments can be overwritten,
        giving access to all of the usual argparse features you know and love.
NOTE: When passing an `action` keyword argument, we remove all the
autogenerated options that aren't required by the Action class
constructor.
For example, when specifying a custom `action` like "store_true" or
"store_false", the `type` argument autogenerated here shouldn't be
passed to the constructor of the `argparse._StoreFalseAction`, so we
discard it.
"""
if self._arg_options:
return self._arg_options
# get the auto-generated options.
options = self.get_arg_options()
# overwrite the auto-generated options with given ones, if any.
options.update(self.custom_arg_options)
# only keep the arguments used by the Action constructor.
action = options.get("action", "store")
self._arg_options = only_keep_action_args(options, action)
return self._arg_options
def get_arg_options(self) -> Dict[str, Any]:
"""Create the `parser.add_arguments` kwargs for this field."""
if not self.field.init:
return {}
# TODO: Refactor this:
# 1. Create a `get_argparse_options_for_field` function
# 2. Use `get_argparse_options_for_annotation` below as part of that function
# 3. Update the dict returned from 1. with values set in the field() function
# 4. Update the dict from 3. with the values set by the DataclassWrapper, or
# when this field is reused. (are they ever modified externally?)
# 5. Return that dictionary.
_arg_options: Dict[str, Any] = {}
_arg_options["required"] = False # Required arguments can also be set from yaml,
# so do not enforce with argparse
_arg_options["dest"] = self.dest
_arg_options["default"] = self.default
if self.help:
_arg_options["help"] = self.help
elif self.default is not None:
# issue 64: Need to add an empty 'help' string, so that the formatter
# automatically adds the (default: '123')
_arg_options["help"] = " "
_arg_options['type'] = self.type
try:
_arg_options['type'].__name__ = self.type.__repr__().replace('typing.', '')
except Exception as e:
            # Only to prettify printing; if it fails, just continue
pass
return _arg_options
@property
def action(self) -> Union[str, Type[argparse.Action]]:
"""The `action` argument to be passed to `add_argument(...)`."""
return self.custom_arg_options.get("action", "store")
@property
def action_str(self) -> str:
if isinstance(self.action, str):
return self.action
return self.action.__name__
@property
def custom_arg_options(self) -> Dict[str, Any]:
"""Custom argparse options that overwrite those in `arg_options`.
Can be set by using the `field` function, passing in a keyword argument
that would usually be passed to the parser.add_argument(
*option_strings, **kwargs) method.
"""
return self.field.metadata.get("custom_args", {})
@property
def option_strings(self) -> List[str]:
"""Generates the `option_strings` argument to the `add_argument` call.
`parser.add_argument(*name_or_flags, **arg_options)`
## Notes:
- Additional names for the same argument can be added via the `field`
function.
- Whenever the name of an attribute includes underscores ("_"), the same
argument can be passed by using dashes ("-") instead. This also includes
aliases.
- If an alias contained leading dashes, either single or double, the
same number of dashes will be used, even in the case where a prefix is
added.
For an illustration of this, see the aliases example.
"""
dashes: List[str] = [] # contains the leading dashes.
options: List[str] = [] # contains the name following the dashes.
# Currently create only a single option name, no support for aliases
dashes.append('--')
options.append(self.dest)
# remove duplicates by creating a set.
option_strings = set(f"{dash}{option}" for dash, option in zip(dashes, options))
return list(sorted(option_strings, key=len))
@property
def dest(self) -> str:
"""Where the attribute will be stored in the Namespace."""
self._dest = super().dest
return self._dest
@property
def nargs(self):
return self.custom_arg_options.get("nargs", None)
@property
def default(self) -> Any:
"""Either a single default value, when parsing a single argument, or
the list of default values, when this argument is reused multiple times
(which only happens with the `ConflictResolution.ALWAYS_MERGE` option).
In order of increasing priority, this could either be:
1. The default attribute of the field
2. the value of the corresponding attribute on the parent,
if it has a default value
"""
if self._default is not None:
return self._default
default: Any = utils.default_value(self.field)
if default is dataclasses.MISSING:
default = None
self._default = default
return self._default
@default.setter
def default(self, value: Any):
self._default = value
@property
def required(self) -> bool:
if self._required is not None:
return self._required
if self.action_str.startswith("store_"):
# all the store_* actions do not require a value.
self._required = False
elif self.is_optional:
self._required = False
elif self.parent.required:
# if the parent dataclass is required, then this attribute is too.
# TODO: does that make sense though?
self._required = True
elif self.nargs in {"?", "*"}:
self._required = False
elif self.nargs == "+":
self._required = True
elif self.default is None:
self._required = True
else:
self._required = False
return self._required
@required.setter
def required(self, value: bool):
self._required = value
@property
def type(self) -> Type[Any]:
"""Returns the wrapped field's type annotation."""
if self._type is None:
self._type = self.field.type
return self._type
def __str__(self):
return f"""<FieldWrapper for field '{self.dest}'>"""
@property
def help(self) -> Optional[str]:
if self._help:
return self._help
try:
self._docstring = docstring.get_attribute_docstring(
self.parent.dataclass, self.field.name
)
except (SystemExit, Exception) as e:
logger.debug(
f"Couldn't find attribute docstring for field {self.name}, {e}"
)
self._docstring = docstring.AttributeDocString()
if self._docstring.docstring_below:
self._help = self._docstring.docstring_below
elif self._docstring.comment_above:
self._help = self._docstring.comment_above
elif self._docstring.comment_inline:
self._help = self._docstring.comment_inline
return self._help
@help.setter
def help(self, value: str):
self._help = value
@property
def name(self) -> str:
return self.field.name
@property
def is_list(self):
return utils.is_list(self.type)
@property
def is_enum(self) -> bool:
return utils.is_enum(self.type)
@property
def is_tuple(self) -> bool:
return utils.is_tuple(self.type)
@property
def is_bool(self) -> bool:
return utils.is_bool(self.type)
@property
def is_optional(self) -> bool:
return utils.is_optional(self.field.type)
@property
def is_union(self) -> bool:
return utils.is_union(self.field.type)
@property
def type_arguments(self) -> Optional[Tuple[Type, ...]]:
return utils.get_type_arguments(self.type)
@property
def parent(self) -> "DataclassWrapper":
return self._parent
def only_keep_action_args(
options: Dict[str, Any], action: Union[str, Any]
) -> Dict[str, Any]:
"""Remove all the arguments in `options` that aren't required by the Action.
Parameters
----------
options : Dict[str, Any]
A dictionary of options that would usually be passed to
`add_arguments(*option_strings, **options)`.
action : Union[str, Any]
The action class or name.
Returns
-------
Dict[str, Any]
[description]
"""
# TODO: explicitly tests these custom actions?
argparse_action_classes: Dict[str, Type[argparse.Action]] = {
"store": argparse._StoreAction,
"store_const": argparse._StoreConstAction,
"store_true": argparse._StoreTrueAction,
"store_false": argparse._StoreFalseAction,
"append": argparse._AppendAction,
"append_const": argparse._AppendConstAction,
"count": argparse._CountAction,
"help": argparse._HelpAction,
"version": argparse._VersionAction,
"parsers": argparse._SubParsersAction,
}
if action not in argparse_action_classes:
# the provided `action` is not a standard argparse-action.
# We don't remove any of the provided options.
return options
# Remove all the keys that aren't needed by the action constructor:
action_class = argparse_action_classes[action]
argspec = inspect.getfullargspec(action_class)
if argspec.varargs is not None or argspec.varkw is not None:
# if the constructor takes variable arguments, pass all the options.
logger.debug("Constructor takes var args. returning all options.")
return options
args_to_keep = argspec.args + ["action"]
kept_options, deleted_options = utils.keep_keys(options, args_to_keep)
if deleted_options:
logger.debug(
f"Some auto-generated options were deleted, as they were "
f"not required by the Action constructor: {deleted_options}."
)
if deleted_options:
logger.debug(f"Kept options: \t{kept_options.keys()}")
logger.debug(f"Removed options: \t{deleted_options.keys()}")
return kept_options
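# Worked example (a sketch): with action="store_true", keys that
# argparse._StoreTrueAction's constructor does not accept are dropped:
#   only_keep_action_args(
#       {"action": "store_true", "type": bool, "dest": "flag", "default": False},
#       "store_true")
#   -> {"action": "store_true", "dest": "flag", "default": False}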
| 35.831063 | 89 | 0.63924 | 1,620 | 13,150 | 5.048765 | 0.201852 | 0.031789 | 0.012226 | 0.011615 | 0.125076 | 0.037657 | 0.008559 | 0.008559 | 0 | 0 | 0 | 0.001668 | 0.270646 | 13,150 | 366 | 90 | 35.928962 | 0.85111 | 0.363422 | 0 | 0.215 | 0 | 0 | 0.069293 | 0.006207 | 0 | 0 | 0 | 0.008197 | 0 | 1 | 0.135 | false | 0.005 | 0.04 | 0.055 | 0.335 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
64f496f7a964f4a0c2008a4e6ef5318ca9d938ee | 7,183 | py | Python | up/settings/base.py | rodlukas/UP-admin | 08f36de0773f39c6222da82016bf1384af2cce18 | [
"MIT"
] | 4 | 2019-07-19T17:39:04.000Z | 2022-03-22T21:02:15.000Z | up/settings/base.py | rodlukas/UP-admin | 08f36de0773f39c6222da82016bf1384af2cce18 | [
"MIT"
] | 53 | 2019-08-04T14:25:40.000Z | 2022-03-26T20:30:55.000Z | up/settings/base.py | rodlukas/UP-admin | 08f36de0773f39c6222da82016bf1384af2cce18 | [
"MIT"
] | 3 | 2020-03-09T07:11:03.000Z | 2020-09-11T01:22:50.000Z | """
Base configuration of the Django project.
It serves as the foundation for the configurations in local.py and production.py.
"""
import os
import sys
from datetime import timedelta
import environ
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# environment variables
env = environ.Env(
    # type declarations and, where applicable, default values
    BANK_ACTIVE=(bool, True),  # enables the bank integration
    BANK_RENT_PRICE=(int, 0),  # rent amount (in CZK)
    DATABASE_URL=str,  # URL of the database in use (e.g. postgresql://postgres:postgres@localhost:5432/up)
    DEBUG=(bool, False),  # enables the debug environment
    ENVIRONMENT=str,  # name of the environment the application is running in (for Sentry)
    FIO_API_KEY=(str, ""),  # token for accessing Fio
    HEADLESS=(bool, True),  # indicates headless mode for UI tests
    HEROKU=(bool, False),  # flag marking that the application is deployed on Heroku
    MANUAL_PRODUCTION=(bool, False),  # set to True to simulate the production version
    SECRET_KEY=str,  # Django secret key
    SENTRY_DSN=str,  # Sentry DSN key
    TESTS_RUNNING=(bool, False),  # indicates that tests are running
)
# reading from the .env file
environ.Env.read_env(os.path.join(BASE_DIR, ".env"))
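# A sketch of a matching .env file (all values here are illustrative only):
#   DATABASE_URL=postgresql://postgres:postgres@localhost:5432/up
#   ENVIRONMENT=local
#   SECRET_KEY=change-me
#   SENTRY_DSN=https://examplekey@o0.ingest.sentry.io/0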
# custom constants
CONST_AUTH_EXPIRATION = 60 * 8  # minutes -> 8 hours (60*8)
CONST_DB_CON_AGE = 600
# custom constants loaded from the environment/.env file
BANK_ACTIVE = env("BANK_ACTIVE")
BANK_RENT_PRICE = env("BANK_RENT_PRICE")
ENVIRONMENT = env("ENVIRONMENT")
FIO_API_KEY = env("FIO_API_KEY")
HEADLESS = env("HEADLESS")
HEROKU = env("HEROKU")
MANUAL_PRODUCTION = env("MANUAL_PRODUCTION")
SENTRY_DSN = env("SENTRY_DSN")
# handling for running tests - detects launching from the command line/an environment variable (because of IDEs)
TESTS_RUNNING = env("TESTS_RUNNING") or (len(sys.argv) > 1 and sys.argv[1] in ["test", "behave"])
# Django constants
DEBUG = env("DEBUG")
SECRET_KEY = env("SECRET_KEY")
# Application definition
INSTALLED_APPS = [
"whitenoise.runserver_nostatic",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"admin.apps.AdminConfig",
"rest_framework",
"api.apps.ApiConfig",
"django_filters",
"debug_toolbar",
]
if not HEROKU:
INSTALLED_APPS.append("behave_django")
# API
REST_FRAMEWORK = {
    # JWTTokenUserAuthentication is used so that a DB lookup for the user is not performed on every request
"DEFAULT_AUTHENTICATION_CLASSES": (
        # BasicAuthentication for the OpenAPI documentation and the Browsable API
"rest_framework.authentication.BasicAuthentication",
        # JWTTokenUserAuthentication for accessing the API from the frontend
"rest_framework_simplejwt.authentication.JWTTokenUserAuthentication",
),
"DEFAULT_FILTER_BACKENDS": ("django_filters.rest_framework.DjangoFilterBackend",),
"DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
"TEST_REQUEST_DEFAULT_FORMAT": "json",
}
SIMPLE_JWT = {
    # Sliding tokens are used - one and the same token for both authentication and refresh
"SLIDING_TOKEN_LIFETIME": timedelta(minutes=CONST_AUTH_EXPIRATION),
"SLIDING_TOKEN_REFRESH_LIFETIME": timedelta(days=2),
"AUTH_TOKEN_CLASSES": ("rest_framework_simplejwt.tokens.SlidingToken",),
"AUTH_HEADER_TYPES": ("Bearer",),
}
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"csp.middleware.CSPMiddleware",
"whitenoise.middleware.WhiteNoiseMiddleware",
"debug_toolbar.middleware.DebugToolbarMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "up.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(BASE_DIR, "admin/templates")],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
]
},
}
]
WSGI_APPLICATION = "up.wsgi.application"
CACHES = {"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}}
# Database
DATABASES = {"default": env.db()}
# enable persistent DB connections (except during tests - it causes problems there)
if not TESTS_RUNNING:
DATABASES["default"]["CONN_MAX_AGE"] = CONST_DB_CON_AGE
# https://docs.djangoproject.com/fr/3.2/releases/3.2/#customizing-type-of-auto-created-primary-keys
DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
# Password validation
AUTH_PASSWORD_VALIDATORS = [
{"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"},
{"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
]
# Internationalization
LANGUAGE_CODE = "cs"
TIME_ZONE = "Europe/Prague"
USE_I18N = True
USE_L10N = True
USE_TZ = True
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
# Static files
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATIC_URL = "/static/"
# debug toolbar
DEBUG_TOOLBAR_CONFIG = {
"SHOW_TOOLBAR_CALLBACK": lambda request: True if DEBUG else False,
"SHOW_COLLAPSED": True,
}
# Django security constants
SECURE_REFERRER_POLICY = "strict-origin-when-cross-origin"  # the Referer needs to be sent to Sentry
X_FRAME_OPTIONS = "DENY"
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
# CSP
# CSP for Google Analytics, see https://developers.google.com/tag-manager/web/csp#universal_analytics_google_analytics
CSPURL_GOOGLE_ANALYTICS = "https://www.google-analytics.com"
CSPURL_GOOGLE_ANALYTICS_SSL = "https://ssl.google-analytics.com"
# CSP for Google Fonts
CSPURL_GOOGLE_FONTS_STYLE = "fonts.googleapis.com"
CSPURL_GOOGLE_FONTS_FONT = "fonts.gstatic.com"
# CSP for Sentry
CSPURL_SENTRY = "https://sentry.io"
# CSP for unpkg.com
CSPURL_UNPKG = "https://unpkg.com"
CSP_SELF = "'self'"
CSP_NONE = "'none'"
# CSP configuration
CSP_DEFAULT_SRC = (CSP_NONE,)
CSP_STYLE_SRC = (
CSP_SELF,
"'unsafe-inline'",
CSPURL_GOOGLE_FONTS_STYLE,
CSPURL_UNPKG,
)  # 'unsafe-inline' because of inline CSS in the Sentry feedback form
CSP_CONNECT_SRC = (CSP_SELF, CSPURL_GOOGLE_ANALYTICS, CSPURL_SENTRY)
CSP_SCRIPT_SRC = (
CSP_SELF,
CSPURL_SENTRY,
CSPURL_GOOGLE_ANALYTICS,
CSPURL_GOOGLE_ANALYTICS_SSL,
CSPURL_UNPKG,
)
CSP_FONT_SRC = (CSP_SELF, CSPURL_GOOGLE_FONTS_FONT)
CSP_IMG_SRC = (CSP_SELF, CSPURL_GOOGLE_ANALYTICS, "data:")
CSP_FRAME_ANCESTORS = (CSP_NONE,)
CSP_FORM_ACTION = (CSP_NONE,)
CSP_BASE_URI = (CSP_NONE,)
CSP_MANIFEST_SRC = (CSP_SELF,) # site.webmanifest
| 35.384236 | 118 | 0.738828 | 873 | 7,183 | 5.853379 | 0.399771 | 0.035616 | 0.023288 | 0.010959 | 0.091194 | 0.059491 | 0.01683 | 0.008806 | 0 | 0 | 0 | 0.0044 | 0.145761 | 7,183 | 202 | 119 | 35.559406 | 0.82839 | 0.244466 | 0 | 0.027027 | 0 | 0 | 0.418496 | 0.301824 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.033784 | 0.027027 | 0 | 0.027027 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
64f4ed4a482b455825429f140a47289347e72d8a | 1,731 | py | Python | common/src/plugins/__init__.py | krisshol/bach-kmno | f40d85b3397bb340e26a671c54d4a753dbbb0d43 | [
"Apache-2.0"
] | 248 | 2015-01-08T09:36:44.000Z | 2022-01-12T10:29:21.000Z | common/src/plugins/__init__.py | krisshol/bach-kmno | f40d85b3397bb340e26a671c54d4a753dbbb0d43 | [
"Apache-2.0"
] | 50 | 2015-01-09T08:31:57.000Z | 2022-03-30T10:41:13.000Z | common/src/plugins/__init__.py | krisshol/bach-kmno | f40d85b3397bb340e26a671c54d4a753dbbb0d43 | [
"Apache-2.0"
] | 74 | 2015-01-05T09:11:21.000Z | 2022-03-29T02:16:54.000Z | from irma.common.plugins.plugin import PluginBase
from irma.common.plugins.plugin import PluginMetaClass
from irma.common.plugins.manager import PluginManager
from irma.common.plugins.exceptions import PluginError
from irma.common.plugins.exceptions import PluginLoadError
from irma.common.plugins.exceptions import PluginFormatError
from irma.common.plugins.exceptions import PluginCrashed
from irma.common.plugins.exceptions import DependencyMissing
from irma.common.plugins.exceptions import ModuleDependencyMissing
from irma.common.plugins.exceptions import BinaryDependencyMissing
from irma.common.plugins.exceptions import FileDependencyMissing
from irma.common.plugins.exceptions import FolderDependencyMissing
from irma.common.plugins.exceptions import PlatformDependencyMissing
from irma.common.plugins.dependencies import Dependency
from irma.common.plugins.dependencies import ModuleDependency
from irma.common.plugins.dependencies import BinaryDependency
from irma.common.plugins.dependencies import FileDependency
from irma.common.plugins.dependencies import FolderDependency
from irma.common.plugins.dependencies import PlatformDependency
__all__ = [
# from plugin.py
'PluginBase',
'PluginMetaClass',
# from manager.py
'PluginManager',
# from exception.py
'PluginError',
'PluginLoadError',
'PluginFormatError',
'PluginCrashed',
'DependencyMissing',
'ModuleDependencyMissing',
'BinaryDependencyMissing',
'FileDependencyMissing',
'FolderDependencyMissing',
'PlatformDependencyMissing',
# from dependency.py
'Dependency',
'ModuleDependency',
'BinaryDependency',
'FileDependency',
'FolderDependency',
'PlatformDependency',
]
| 35.326531 | 68 | 0.808203 | 165 | 1,731 | 8.454545 | 0.187879 | 0.108961 | 0.190681 | 0.286022 | 0.480287 | 0.480287 | 0 | 0 | 0 | 0 | 0 | 0 | 0.119584 | 1,731 | 48 | 69 | 36.0625 | 0.915354 | 0.038706 | 0 | 0 | 0 | 0 | 0.190476 | 0.069319 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.475 | 0 | 0.475 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
64f4ede9bde7f503aa60b397301d64722811e7ff | 253 | py | Python | data/image_name_list.py | wangkingkingking/synme | 82b6b7e640939b7946256b7f4e8b6d51357b97be | [
"MIT"
] | null | null | null | data/image_name_list.py | wangkingkingking/synme | 82b6b7e640939b7946256b7f4e8b6d51357b97be | [
"MIT"
] | null | null | null | data/image_name_list.py | wangkingkingking/synme | 82b6b7e640939b7946256b7f4e8b6d51357b97be | [
"MIT"
] | null | null | null | def get_image_name_list(dataset_file):
# get synme image name list
with open(dataset_file, 'r') as f:
lines = f.readlines()
imagenames = [x.strip().split()[0].split('/')[1].split('.')[0] for x in lines]
return imagenames
| 36.142857 | 86 | 0.612648 | 37 | 253 | 4.054054 | 0.648649 | 0.12 | 0.173333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015306 | 0.225296 | 253 | 6 | 87 | 42.166667 | 0.75 | 0.098814 | 0 | 0 | 0 | 0 | 0.013274 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
64f589c595e6fd6aa5cda5244976e2e886627844 | 2,080 | py | Python | py_wake/tests/test_utils/test_xarray_utils.py | aemoser/PyWake | 889a2c10882195af21339e9bcf2ede0db9b58319 | [
"MIT"
] | 30 | 2019-03-18T14:10:27.000Z | 2022-03-13T17:39:04.000Z | py_wake/tests/test_utils/test_xarray_utils.py | aemoser/PyWake | 889a2c10882195af21339e9bcf2ede0db9b58319 | [
"MIT"
] | 1 | 2020-11-12T06:13:00.000Z | 2020-11-12T06:43:26.000Z | py_wake/tests/test_utils/test_xarray_utils.py | aemoser/PyWake | 889a2c10882195af21339e9bcf2ede0db9b58319 | [
"MIT"
] | 20 | 2019-01-11T14:45:13.000Z | 2021-12-13T19:55:29.000Z | from py_wake.examples.data.hornsrev1 import Hornsrev1Site
import numpy as np
import pytest
import pandas as pd
@pytest.mark.parametrize(['ti', 'dims'], [
(0.1, ()),
(np.full((2), 0.1), ('i',)),
(np.full((2, 360), 0.1), ('i', 'wd')),
(np.full((2, 360, 23), 0.1), ('i', 'wd', 'ws')),
(np.full((360), 0.1), ('wd',)),
(np.full((360, 23), 0.1), ('wd', 'ws')),
(np.full((23), 0.1), ('ws',)),
])
def test_add_ilk(ti, dims):
site = Hornsrev1Site()
wt, wd, ws = np.arange(2), np.arange(360), np.arange(3, 26)
lw = site.local_wind(wt * 1000, wt * 0, wt * 0 + 70, wd, ws, time=False)
lw.add_ilk('TI', ti)
assert lw.TI.dims == dims
@pytest.mark.parametrize(['ti', 'dims'], [
(0.1, ()),
(np.full((2), 0.1), ('i',)),
(np.full((2, 100), 0.1), ('i', 'time')),
(np.full((100), 0.1), ('time',)),
])
def test_add_ilk_time(ti, dims):
site = Hornsrev1Site()
wt, wd, ws = np.arange(2), np.arange(100), np.arange(100) % 20 + 3
t = pd.date_range("2000-01-01", freq="10T", periods=100)
lw = site.local_wind(wt * 1000, wt * 0, wt * 0 + 70, wd, ws, time=t)
lw.add_ilk('TI', ti)
assert lw.TI.dims == dims
@pytest.mark.parametrize(['shape'], [
[(360, 2)],
[(23, 360)],
[(23, 2)],
[(2, 23, 360)],
[(360, 2, 23)],
[(360, 23, 2)],
[(23, 360, 2)],
[(23, 2, 360)],
])
def test_add_ilk_wrong_dim(shape):
site = Hornsrev1Site()
wt, wd, ws = np.arange(2), np.arange(360), np.arange(3, 26)
lw = site.local_wind(wt * 1000, wt * 0, wt * 0 + 70, wd, ws, time=False)
with pytest.raises(ValueError):
lw.add_ilk('TI', np.full(shape, 0.1))
@pytest.mark.parametrize(['shape'], [
[(100, 2)],
[(2, 100, 23)],
])
def test_add_ilk_time_wrong_dim(shape):
site = Hornsrev1Site()
wt, wd, ws = np.arange(2), np.arange(100), np.arange(100) % 20 + 3
t = pd.date_range("2000-01-01", freq="10T", periods=100)
lw = site.local_wind(wt * 1000, wt * 0, wt * 0 + 70, wd, ws, time=t)
with pytest.raises(ValueError):
lw.add_ilk('TI', np.full(shape, 0.1))
| 29.714286 | 76 | 0.538462 | 348 | 2,080 | 3.146552 | 0.172414 | 0.023744 | 0.032877 | 0.047489 | 0.751598 | 0.720548 | 0.69863 | 0.69863 | 0.69863 | 0.69863 | 0 | 0.125076 | 0.212019 | 2,080 | 69 | 77 | 30.144928 | 0.543014 | 0 | 0 | 0.576271 | 0 | 0 | 0.039904 | 0 | 0 | 0 | 0 | 0 | 0.033898 | 1 | 0.067797 | false | 0 | 0.067797 | 0 | 0.135593 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
64f5cef49fb1fe4a41de9751140db69b63b91e0f | 212 | py | Python | scripts/library/__init__.py | geozeke/ubuntu | 49b7649b4306e6f3eb39c5dd9419cddc5c10d077 | [
"MIT"
] | null | null | null | scripts/library/__init__.py | geozeke/ubuntu | 49b7649b4306e6f3eb39c5dd9419cddc5c10d077 | [
"MIT"
] | 77 | 2020-07-08T18:52:48.000Z | 2022-01-21T20:13:31.000Z | scripts/library/__init__.py | geozeke/ubuntu | 49b7649b4306e6f3eb39c5dd9419cddc5c10d077 | [
"MIT"
] | null | null | null | from .classes import Environment
from .utilities import clear
from .utilities import runOneCommand
from .utilities import runManyArguments
from .utilities import minPythonVersion
from .utilities import copyFiles
| 30.285714 | 39 | 0.858491 | 24 | 212 | 7.583333 | 0.416667 | 0.357143 | 0.521978 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.113208 | 212 | 6 | 40 | 35.333333 | 0.968085 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
64f625ff3b06630f6e5652636658877b09051eea | 10,470 | py | Python | acme/agents/jax/ail/learning.py | wookayin/acme | 71b2ab8577a118c103718f034fa62c5ad2c0fd97 | [
"Apache-2.0"
] | null | null | null | acme/agents/jax/ail/learning.py | wookayin/acme | 71b2ab8577a118c103718f034fa62c5ad2c0fd97 | [
"Apache-2.0"
] | null | null | null | acme/agents/jax/ail/learning.py | wookayin/acme | 71b2ab8577a118c103718f034fa62c5ad2c0fd97 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AIL learner implementation."""
import functools
import itertools
import time
from typing import Any, Callable, Iterator, List, NamedTuple, Optional, Tuple
import acme
from acme import types
from acme.agents.jax.ail import losses
from acme.agents.jax.ail import networks as ail_networks
from acme.jax import networks as networks_lib
from acme.jax import utils
from acme.utils import counting
from acme.utils import loggers
from acme.utils import reverb_utils
import jax
import optax
import reverb
class DiscriminatorTrainingState(NamedTuple):
"""Contains training state for the discriminator."""
# State of the optimizer used to optimize the discriminator parameters.
optimizer_state: optax.OptState
# Parameters of the discriminator.
discriminator_params: networks_lib.Params
# State of the discriminator
discriminator_state: losses.State
# For AIRL variants, we need the policy params to compute the loss.
policy_params: Optional[networks_lib.Params]
# Key for random number generation.
key: networks_lib.PRNGKey
# Training step of the discriminator.
steps: int
class TrainingState(NamedTuple):
"""Contains training state of the AIL learner."""
rewarder_state: DiscriminatorTrainingState
learner_state: Any
def ail_update_step(
state: DiscriminatorTrainingState, data: Tuple[types.Transition,
types.Transition],
optimizer: optax.GradientTransformation,
ail_network: ail_networks.AILNetworks,
loss_fn: losses.Loss) -> Tuple[DiscriminatorTrainingState, losses.Metrics]:
"""Run an update steps on the given transitions.
Args:
state: The learner state.
data: Demo and rb transitions.
optimizer: Discriminator optimizer.
ail_network: AIL networks.
loss_fn: Discriminator loss to minimize.
Returns:
A new state and metrics.
"""
demo_transitions, rb_transitions = data
key, discriminator_key, loss_key = jax.random.split(state.key, 3)
def compute_loss(
discriminator_params: networks_lib.Params) -> losses.LossOutput:
discriminator_fn = functools.partial(
ail_network.discriminator_network.apply,
discriminator_params,
state.policy_params,
is_training=True,
rng=discriminator_key)
return loss_fn(discriminator_fn, state.discriminator_state,
demo_transitions, rb_transitions, loss_key)
loss_grad = jax.grad(compute_loss, has_aux=True)
grads, (loss, new_discriminator_state) = loss_grad(state.discriminator_params)
update, optimizer_state = optimizer.update(
grads,
state.optimizer_state,
params=state.discriminator_params)
discriminator_params = optax.apply_updates(state.discriminator_params, update)
new_state = DiscriminatorTrainingState(
optimizer_state=optimizer_state,
discriminator_params=discriminator_params,
discriminator_state=new_discriminator_state,
policy_params=state.policy_params, # Not modified.
key=key,
steps=state.steps + 1,
)
return new_state, loss
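# A sketch of how this step is driven (the batches are assumed to come from an
# AILSample iterator, as in the learner below):
#   state, metrics = ail_update_step(state, (demo_transitions, rb_transitions),
#                                    optimizer=optimizer, ail_network=ail_network,
#                                    loss_fn=loss_fn)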
class AILSample(NamedTuple):
discriminator_sample: types.Transition
direct_sample: reverb.ReplaySample
demonstration_sample: types.Transition
class AILLearner(acme.Learner):
"""AIL learner."""
def __init__(
self,
counter: counting.Counter,
direct_rl_learner_factory: Callable[[Iterator[reverb.ReplaySample]],
acme.Learner],
loss_fn: losses.Loss,
iterator: Iterator[AILSample],
discriminator_optimizer: optax.GradientTransformation,
ail_network: ail_networks.AILNetworks,
discriminator_key: networks_lib.PRNGKey,
is_sequence_based: bool,
num_sgd_steps_per_step: int = 1,
policy_variable_name: Optional[str] = None,
logger: Optional[loggers.Logger] = None):
"""AIL Learner.
Args:
counter: Counter.
direct_rl_learner_factory: Function that creates the direct RL learner
when passed a replay sample iterator.
loss_fn: Discriminator loss.
iterator: Iterator that returns AILSamples.
discriminator_optimizer: Discriminator optax optimizer.
ail_network: AIL networks.
discriminator_key: RNG key.
is_sequence_based: If True, a direct rl algorithm is using SequenceAdder
data format. Otherwise the learner assumes that the direct rl algorithm
is using NStepTransitionAdder.
num_sgd_steps_per_step: Number of discriminator gradient updates per step.
policy_variable_name: The name of the policy variable to retrieve
direct_rl policy parameters.
logger: Logger.
"""
self._is_sequence_based = is_sequence_based
state_key, networks_key = jax.random.split(discriminator_key)
# Generator expression that works the same as an iterator.
# https://pymbook.readthedocs.io/en/latest/igd.html#generator-expressions
iterator, direct_rl_iterator = itertools.tee(iterator)
direct_rl_iterator = (
self._process_sample(sample.direct_sample)
for sample in direct_rl_iterator)
self._direct_rl_learner = direct_rl_learner_factory(direct_rl_iterator)
self._iterator = iterator
if policy_variable_name is not None:
def get_policy_params():
return self._direct_rl_learner.get_variables([policy_variable_name])[0]
self._get_policy_params = get_policy_params
else:
self._get_policy_params = lambda: None
# General learner book-keeping and loggers.
self._counter = counter or counting.Counter()
self._logger = logger or loggers.make_default_logger(
'learner',
asynchronous=True,
serialize_fn=utils.fetch_devicearray,
steps_key=self._counter.get_steps_key())
# Use the JIT compiler.
self._update_step = functools.partial(
ail_update_step,
optimizer=discriminator_optimizer,
ail_network=ail_network,
loss_fn=loss_fn)
self._update_step = utils.process_multiple_batches(self._update_step,
num_sgd_steps_per_step)
self._update_step = jax.jit(self._update_step)
discriminator_params, discriminator_state = (
ail_network.discriminator_network.init(networks_key))
self._state = DiscriminatorTrainingState(
optimizer_state=discriminator_optimizer.init(discriminator_params),
discriminator_params=discriminator_params,
discriminator_state=discriminator_state,
policy_params=self._get_policy_params(),
key=state_key,
steps=0,
)
# Do not record timestamps until after the first learning step is done.
# This is to avoid including the time it takes for actors to come online and
# fill the replay buffer.
self._timestamp = None
self._get_reward = jax.jit(
functools.partial(
ail_networks.compute_ail_reward, networks=ail_network))
def _process_sample(self, sample: reverb.ReplaySample) -> reverb.ReplaySample:
"""Updates the reward of the replay sample.
Args:
      sample: Replay sample whose reward should be updated.
Returns:
The replay sample with an updated reward.
"""
transitions = reverb_utils.replay_sample_to_sars_transition(
sample, is_sequence=self._is_sequence_based)
rewards = self._get_reward(self._state.discriminator_params,
self._state.discriminator_state,
self._state.policy_params, transitions)
return sample._replace(data=sample.data._replace(reward=rewards))
def step(self):
sample = next(self._iterator)
rb_transitions = sample.discriminator_sample
demo_transitions = sample.demonstration_sample
if demo_transitions.reward.shape != rb_transitions.reward.shape:
raise ValueError(
'Different shapes for demo transitions and rb_transitions: '
f'{demo_transitions.reward.shape} != {rb_transitions.reward.shape}')
# Update the parameters of the policy before doing a gradient step.
state = self._state._replace(policy_params=self._get_policy_params())
self._state, metrics = self._update_step(state,
(demo_transitions, rb_transitions))
    # The order is important for AIRL: the discriminator update depends on the
    # log-probability (logpi) of the direct RL policy, and when updating the
    # discriminator we want the logpi with which the transitions were
    # generated, not an updated one.
# Get data from replay (dropping extras if any). Note there is no
# extra data here because we do not insert any into Reverb.
self._direct_rl_learner.step()
# Compute elapsed time.
timestamp = time.time()
elapsed_time = timestamp - self._timestamp if self._timestamp else 0
self._timestamp = timestamp
# Increment counts and record the current time.
counts = self._counter.increment(steps=1, walltime=elapsed_time)
# Attempts to write the logs.
self._logger.write({**metrics, **counts})
def get_variables(self, names: List[str]) -> List[Any]:
rewarder_dict = {'discriminator': self._state.discriminator_params}
learner_names = [name for name in names if name not in rewarder_dict]
learner_dict = {}
if learner_names:
learner_dict = dict(
zip(learner_names,
self._direct_rl_learner.get_variables(learner_names)))
variables = [
rewarder_dict.get(name, learner_dict.get(name, None)) for name in names
]
return variables
def save(self) -> TrainingState:
return TrainingState(
rewarder_state=self._state,
learner_state=self._direct_rl_learner.save())
def restore(self, state: TrainingState):
self._state = state.rewarder_state
self._direct_rl_learner.restore(state.learner_state)
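# --- Illustrative sketch (not part of the original module). ---------------
# AILLearner.__init__ uses itertools.tee to duplicate one sample iterator so
# the discriminator and the direct RL learner each consume the same stream,
# with one branch lazily post-processed (as _process_sample does above).
def _example_tee_pattern():
  samples = iter(range(5))
  disc_it, rl_it = itertools.tee(samples)
  rl_it = (s * 2 for s in rl_it)  # lazy per-sample post-processing
  return list(disc_it), list(rl_it)  # -> ([0, 1, 2, 3, 4], [0, 2, 4, 6, 8])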
| 35.612245 | 80 | 0.720153 | 1,279 | 10,470 | 5.671618 | 0.228303 | 0.019851 | 0.020678 | 0.015715 | 0.14654 | 0.084367 | 0.032534 | 0.032534 | 0 | 0 | 0 | 0.001811 | 0.209074 | 10,470 | 293 | 81 | 35.733788 | 0.87417 | 0.288252 | 0 | 0.02439 | 0 | 0 | 0.019535 | 0.008254 | 0 | 0 | 0 | 0 | 0 | 1 | 0.054878 | false | 0 | 0.097561 | 0.012195 | 0.280488 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
64f62b0e9a31a02193441b2301ca86a8c2d36616 | 9,878 | py | Python | json_database/utils/__init__.py | NeonJarbas/json_database | 026d01faff79f178ab8e5a8505666959279761cb | [
"MIT"
] | 8 | 2020-05-30T12:44:35.000Z | 2022-02-14T15:12:53.000Z | json_database/utils/__init__.py | NeonJarbas/json_database | 026d01faff79f178ab8e5a8505666959279761cb | [
"MIT"
] | 4 | 2021-08-18T23:40:45.000Z | 2021-09-30T00:43:42.000Z | json_database/utils/__init__.py | NeonJarbas/json_database | 026d01faff79f178ab8e5a8505666959279761cb | [
"MIT"
] | 7 | 2020-05-30T12:44:41.000Z | 2021-09-30T00:27:04.000Z | import json
from difflib import SequenceMatcher
def fuzzy_match(x, against):
"""Perform a 'fuzzy' comparison between two strings.
Returns:
float: match percentage -- 1.0 for perfect match,
down to 0.0 for no match at all.
"""
return SequenceMatcher(None, x, against).ratio()
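# Illustrative usage (not part of the original file):
#   fuzzy_match("apple", "appel")  # -> 0.8
#   fuzzy_match("hello", "world")  # -> 0.2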
def match_one(query, choices):
"""
Find best match from a list or dictionary given an input
Arguments:
query: string to test
choices: list or dictionary of choices
Returns: tuple with best match, score
"""
if isinstance(choices, dict):
_choices = list(choices.keys())
elif isinstance(choices, list):
_choices = choices
else:
raise ValueError('a list or dict of choices must be provided')
best = (_choices[0], fuzzy_match(query, _choices[0]))
for c in _choices[1:]:
score = fuzzy_match(query, c)
if score > best[1]:
best = (c, score)
if isinstance(choices, dict):
return (choices[best[0]], best[1])
else:
return best
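# Illustrative usage (not part of the original file): with a dict, the best
# matching key is found but its *value* is returned.
#   match_one("kitchen", {"kitchen light": 1, "tv": 2})  # -> (1, <score>)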
def merge_dict(base, delta, merge_lists=True, skip_empty=True,
no_dupes=True, new_only=False):
"""
Recursively merging configuration dictionaries.
Args:
base: Target for merge
delta: Dictionary to merge into base
        merge_lists: if a list is found, merge its contents instead of replacing
        skip_empty: if an item in delta is empty, don't overwrite base
        no_dupes: when merging lists, deduplicate entries
        new_only: only merge keys not yet in base
"""
for k, d in delta.items():
b = base.get(k)
if isinstance(d, dict) and isinstance(b, dict):
merge_dict(b, d, merge_lists, skip_empty, no_dupes, new_only)
else:
if new_only and k in base:
continue
if skip_empty and not d and d is not False:
                # don't replace if the new entry is empty
pass
elif all((isinstance(b, list), isinstance(d, list), merge_lists)):
if no_dupes:
base[k] += [item for item in d if item not in base[k]]
else:
base[k] += d
else:
base[k] = d
return base
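# Illustrative usage (not part of the original file): merging is in-place and
# recursive, and lists are merged without duplicates by default.
#   merge_dict({"a": [1], "b": {"c": 1}}, {"a": [1, 2], "b": {"d": 2}})
#   # -> {"a": [1, 2], "b": {"c": 1, "d": 2}}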
def load_commented_json(filename):
""" Loads an JSON file, ignoring comments
Supports a trivial extension to the JSON file format. Allow comments
to be embedded within the JSON, requiring that a comment be on an
independent line starting with '//' or '#'.
NOTE: A file created with these style comments will break strict JSON
parsers. This is similar to but lighter-weight than "human json"
proposed at https://hjson.org
Args:
filename (str): path to the commented JSON file
Returns:
obj: decoded Python object
"""
with open(filename, encoding='utf-8') as f:
contents = f.read()
return json.loads(uncomment_json(contents))
def uncomment_json(commented_json_str):
""" Removes comments from a JSON string.
Supporting a trivial extension to the JSON format. Allow comments
to be embedded within the JSON, requiring that a comment be on an
independent line starting with '//' or '#'.
Example...
{
// comment
'name' : 'value'
}
Args:
commented_json_str (str): a JSON string
Returns:
str: uncommented, legal JSON
"""
lines = commented_json_str.splitlines()
# remove all comment lines, starting with // or #
nocomment = []
for line in lines:
stripped = line.lstrip()
if stripped.startswith("//") or stripped.startswith("#"):
continue
nocomment.append(line)
return " ".join(nocomment)
def is_jsonifiable(thing):
    if not isinstance(thing, dict):
        if isinstance(thing, str):
            try:
                json.loads(thing)
                return True
            except Exception:
                pass
        else:
            try:
                thing.__dict__  # objects exposing __dict__ can be serialized
                return True
            except Exception:
                pass
        return False
    return True
def get_key_recursively(search_dict, field, filter_None=True):
"""
Takes a dict with nested lists and dicts,
and searches all dicts for a key of the field
provided.
"""
if not is_jsonifiable(search_dict):
raise ValueError("unparseable format")
fields_found = []
for key, value in search_dict.items():
if value is None and filter_None:
continue
if key == field:
fields_found.append(search_dict)
elif isinstance(value, dict):
fields_found += get_key_recursively(value, field, filter_None)
elif isinstance(value, list):
for item in value:
if not isinstance(item, dict):
try:
if get_key_recursively(item.__dict__, field, filter_None):
fields_found.append(item)
                    except Exception:
                        continue  # can't parse
else:
fields_found += get_key_recursively(item, field, filter_None)
return fields_found
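# Illustrative usage (not part of the original file): every dict containing
# the field is returned, however deeply nested.
#   data = {"a": {"name": 1}, "b": [{"name": 2}]}
#   get_key_recursively(data, "name")  # -> [{'name': 1}, {'name': 2}]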
def get_key_recursively_fuzzy(search_dict, field, thresh=0.6, filter_None=True):
"""
Takes a dict with nested lists and dicts,
and searches all dicts for a key of the field
provided.
"""
if not is_jsonifiable(search_dict):
raise ValueError("unparseable format")
fields_found = []
for key, value in search_dict.items():
if value is None and filter_None:
continue
score = 0
if isinstance(key, str):
score = fuzzy_match(key, field)
if score >= thresh:
fields_found.append((search_dict, score))
elif isinstance(value, dict):
fields_found += get_key_recursively_fuzzy(value, field, thresh, filter_None)
elif isinstance(value, list):
for item in value:
if not isinstance(item, dict):
try:
if get_key_recursively_fuzzy(item.__dict__, field, thresh, filter_None):
fields_found.append((item, score))
                    except Exception:
                        continue  # can't parse
else:
fields_found += get_key_recursively_fuzzy(item, field, thresh, filter_None)
    return sorted(fields_found, key=lambda i: i[1], reverse=True)
def get_value_recursively(search_dict, field, target_value):
"""
Takes a dict with nested lists and dicts,
and searches all dicts for a key of the field
provided.
"""
if not is_jsonifiable(search_dict):
raise ValueError("unparseable format")
fields_found = []
for key, value in search_dict.items():
if key == field and value == target_value:
fields_found.append(search_dict)
elif isinstance(value, dict):
fields_found += get_value_recursively(value, field, target_value)
elif isinstance(value, list):
for item in value:
if not isinstance(item, dict):
try:
if get_value_recursively(item.__dict__, field, target_value):
fields_found.append(item)
                    except Exception:
                        continue  # can't parse
else:
fields_found += get_value_recursively(item, field, target_value)
return fields_found
def get_value_recursively_fuzzy(search_dict, field, target_value, thresh=0.6):
"""
Takes a dict with nested lists and dicts,
and searches all dicts for a key of the field
provided.
"""
if not is_jsonifiable(search_dict):
raise ValueError("unparseable format")
fields_found = []
for key, value in search_dict.items():
if key == field:
if isinstance(value, str):
score = fuzzy_match(target_value, value)
if score >= thresh:
fields_found.append((search_dict, score))
elif isinstance(value, list):
for item in value:
score = fuzzy_match(target_value, item)
if score >= thresh:
fields_found.append((search_dict, score))
elif isinstance(value, dict):
fields_found += get_value_recursively_fuzzy(value, field, target_value, thresh)
elif isinstance(value, list):
for item in value:
if not isinstance(item, dict):
try:
found = get_value_recursively_fuzzy(item.__dict__, field, target_value, thresh)
if len(found):
fields_found.append((item, found[0][1]))
                    except Exception:
                        continue  # can't parse
else:
fields_found += get_value_recursively_fuzzy(item, field, target_value, thresh)
    return sorted(fields_found, key=lambda i: i[1], reverse=True)
def jsonify_recursively(thing):
if isinstance(thing, list):
jsonified = list(thing)
for idx, item in enumerate(thing):
jsonified[idx] = jsonify_recursively(item)
elif isinstance(thing, dict):
try:
# can't import at top level to do proper check
jsonified = dict(thing.db)
        except Exception:
jsonified = dict(thing)
for key in jsonified.keys():
value = jsonified[key]
jsonified[key] = jsonify_recursively(value)
else:
try:
jsonified = thing.__dict__
        except Exception:
jsonified = thing
return jsonified
| 32.071429 | 103 | 0.57542 | 1,161 | 9,878 | 4.745047 | 0.179156 | 0.049918 | 0.027773 | 0.020875 | 0.476493 | 0.414413 | 0.387185 | 0.387185 | 0.383917 | 0.373026 | 0 | 0.003096 | 0.346021 | 9,878 | 307 | 104 | 32.175896 | 0.84969 | 0.215934 | 0 | 0.519126 | 0 | 0 | 0.016651 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.060109 | false | 0.016393 | 0.010929 | 0 | 0.153005 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
64f676f9dc39b1b750355b9ece3b4cb04859aa3d | 51 | py | Python | master/bopytest-code/code/tasks_proj/tests/func/test_delete.py | AlexRogalskiy/DevArtifacts | 931aabb8cbf27656151c54856eb2ea7d1153203a | [
"MIT"
] | 4 | 2018-09-07T15:35:24.000Z | 2019-03-27T09:48:12.000Z | master/bopytest-code/code/tasks_proj/tests/func/test_delete.py | AlexRogalskiy/DevArtifacts | 931aabb8cbf27656151c54856eb2ea7d1153203a | [
"MIT"
] | 371 | 2020-03-04T21:51:56.000Z | 2022-03-31T20:59:11.000Z | master/bopytest-code/code/tasks_proj/tests/func/test_delete.py | AlexRogalskiy/DevArtifacts | 931aabb8cbf27656151c54856eb2ea7d1153203a | [
"MIT"
] | 3 | 2019-06-18T19:57:17.000Z | 2020-11-06T03:55:08.000Z | def test_delete():
"""Placeholder test"""
pass
| 12.75 | 24 | 0.647059 | 6 | 51 | 5.333333 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.176471 | 51 | 3 | 25 | 17 | 0.761905 | 0.313725 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0.5 | 0 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 5 |
64f6cfdf3012940080f0da657680c3dfea60fa6b | 17,679 | py | Python | AutomatedTesting/Gem/PythonTests/largeworlds/dyn_veg/TestSuite_Periodic.py | LB-JakubSkorupka/o3de | e224fc2ee5ec2a12e75a10acae268b7b38ae3a32 | [
"Apache-2.0",
"MIT"
] | 11 | 2021-07-08T09:58:26.000Z | 2022-03-17T17:59:26.000Z | AutomatedTesting/Gem/PythonTests/largeworlds/dyn_veg/TestSuite_Periodic.py | LB-JakubSkorupka/o3de | e224fc2ee5ec2a12e75a10acae268b7b38ae3a32 | [
"Apache-2.0",
"MIT"
] | 29 | 2021-07-06T19:33:52.000Z | 2022-03-22T10:27:49.000Z | AutomatedTesting/Gem/PythonTests/largeworlds/dyn_veg/TestSuite_Periodic.py | LB-JakubSkorupka/o3de | e224fc2ee5ec2a12e75a10acae268b7b38ae3a32 | [
"Apache-2.0",
"MIT"
] | 4 | 2021-07-06T19:24:43.000Z | 2022-03-31T12:42:27.000Z | """
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""
import os
import pytest
import sys
import ly_test_tools.environment.waiter as waiter
import ly_test_tools.environment.file_system as file_system
import editor_python_test_tools.hydra_test_utils as hydra
from ly_remote_console.remote_console_commands import RemoteConsole as RemoteConsole
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../../automatedtesting_shared')
from base import TestAutomationBase
@pytest.fixture
def remove_test_slice(request, workspace, project):
file_system.delete([os.path.join(workspace.paths.engine_root(), project, "slices", "TestSlice_1.slice")], True,
True)
file_system.delete([os.path.join(workspace.paths.engine_root(), project, "slices", "TestSlice_2.slice")], True,
True)
def teardown():
file_system.delete([os.path.join(workspace.paths.engine_root(), project, "slices", "TestSlice_1.slice")], True,
True)
file_system.delete([os.path.join(workspace.paths.engine_root(), project, "slices", "TestSlice_2.slice")], True,
True)
request.addfinalizer(teardown)
@pytest.fixture
def remote_console_instance(request):
console = RemoteConsole()
def teardown():
if console.connected:
console.stop()
request.addfinalizer(teardown)
return console
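# Note (not part of the original suite): an equivalent, arguably more
# idiomatic way to express the fixture above is a yield fixture, where the
# code after the yield runs as teardown:
#
# @pytest.fixture
# def remote_console_instance():
#     console = RemoteConsole()
#     yield console
#     if console.connected:
#         console.stop()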
@pytest.mark.SUITE_periodic
@pytest.mark.parametrize("launcher_platform", ['windows_editor'])
@pytest.mark.parametrize("project", ["AutomatedTesting"])
class TestAutomation(TestAutomationBase):
def test_AltitudeFilter_ComponentAndOverrides_InstancesPlantAtSpecifiedAltitude(self, request, workspace, editor, launcher_platform):
from .EditorScripts import AltitudeFilter_ComponentAndOverrides_InstancesPlantAtSpecifiedAltitude as test_module
self._run_test(request, workspace, editor, test_module)
def test_AltitudeFilter_ShapeSample_InstancesPlantAtSpecifiedAltitude(self, request, workspace, editor, launcher_platform):
from .EditorScripts import AltitudeFilter_ShapeSample_InstancesPlantAtSpecifiedAltitude as test_module
self._run_test(request, workspace, editor, test_module)
def test_AltitudeFilter_FilterStageToggle(self, request, workspace, editor, launcher_platform):
from .EditorScripts import AltitudeFilter_FilterStageToggle as test_module
self._run_test(request, workspace, editor, test_module)
def test_SpawnerSlices_SliceCreationAndVisibilityToggleWorks(self, request, workspace, editor, remove_test_slice, launcher_platform):
from .EditorScripts import SpawnerSlices_SliceCreationAndVisibilityToggleWorks as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_AssetListCombiner_CombinedDescriptorsExpressInConfiguredArea(self, request, workspace, editor, launcher_platform):
from .EditorScripts import AssetListCombiner_CombinedDescriptorsExpressInConfiguredArea as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_AssetWeightSelector_InstancesExpressBasedOnWeight(self, request, workspace, editor, launcher_platform):
from .EditorScripts import AssetWeightSelector_InstancesExpressBasedOnWeight as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
@pytest.mark.xfail(reason="https://github.com/o3de/o3de/issues/4155")
def test_DistanceBetweenFilter_InstancesPlantAtSpecifiedRadius(self, request, workspace, editor, launcher_platform):
from .EditorScripts import DistanceBetweenFilter_InstancesPlantAtSpecifiedRadius as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
@pytest.mark.xfail(reason="https://github.com/o3de/o3de/issues/4155")
def test_DistanceBetweenFilterOverrides_InstancesPlantAtSpecifiedRadius(self, request, workspace, editor, launcher_platform):
from .EditorScripts import DistanceBetweenFilterOverrides_InstancesPlantAtSpecifiedRadius as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_SurfaceDataRefreshes_RemainsStable(self, request, workspace, editor, launcher_platform):
from .EditorScripts import SurfaceDataRefreshes_RemainsStable as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_VegetationInstances_DespawnWhenOutOfRange(self, request, workspace, editor, launcher_platform):
from .EditorScripts import VegetationInstances_DespawnWhenOutOfRange as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_InstanceSpawnerPriority_LayerAndSubPriority_HigherValuesPlantOverLower(self, request, workspace, editor, launcher_platform):
from .EditorScripts import InstanceSpawnerPriority_LayerAndSubPriority as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_LayerBlocker_InstancesBlockedInConfiguredArea(self, request, workspace, editor, launcher_platform):
from .EditorScripts import LayerBlocker_InstancesBlockedInConfiguredArea as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_LayerSpawner_InheritBehaviorFlag(self, request, workspace, editor, launcher_platform):
from .EditorScripts import LayerSpawner_InheritBehaviorFlag as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_LayerSpawner_InstancesPlantInAllSupportedShapes(self, request, workspace, editor, launcher_platform):
from .EditorScripts import LayerSpawner_InstancesPlantInAllSupportedShapes as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_LayerSpawner_FilterStageToggle(self, request, workspace, editor, launcher_platform):
from .EditorScripts import LayerSpawner_FilterStageToggle as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
@pytest.mark.xfail(reason="https://github.com/o3de/o3de/issues/2038")
def test_LayerSpawner_InstancesRefreshUsingCorrectViewportCamera(self, request, workspace, editor, launcher_platform):
from .EditorScripts import LayerSpawner_InstancesRefreshUsingCorrectViewportCamera as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_MeshBlocker_InstancesBlockedByMesh(self, request, workspace, editor, launcher_platform):
from .EditorScripts import MeshBlocker_InstancesBlockedByMesh as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_MeshBlocker_InstancesBlockedByMeshHeightTuning(self, request, workspace, editor, launcher_platform):
from .EditorScripts import MeshBlocker_InstancesBlockedByMeshHeightTuning as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_MeshSurfaceTagEmitter_DependentOnMeshComponent(self, request, workspace, editor, launcher_platform):
from .EditorScripts import MeshSurfaceTagEmitter_DependentOnMeshComponent as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_MeshSurfaceTagEmitter_SurfaceTagsAddRemoveSuccessfully(self, request, workspace, editor, launcher_platform):
from .EditorScripts import MeshSurfaceTagEmitter_SurfaceTagsAddRemoveSuccessfully as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_PhysXColliderSurfaceTagEmitter_E2E_Editor(self, request, workspace, editor, launcher_platform):
from .EditorScripts import PhysXColliderSurfaceTagEmitter_E2E_Editor as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_PositionModifier_ComponentAndOverrides_InstancesPlantAtSpecifiedOffsets(self, request, workspace, editor, launcher_platform):
from .EditorScripts import PositionModifier_ComponentAndOverrides_InstancesPlantAtSpecifiedOffsets as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_PositionModifier_AutoSnapToSurfaceWorks(self, request, workspace, editor, launcher_platform):
from .EditorScripts import PositionModifier_AutoSnapToSurfaceWorks as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_RotationModifier_InstancesRotateWithinRange(self, request, workspace, editor, launcher_platform):
from .EditorScripts import RotationModifier_InstancesRotateWithinRange as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_RotationModifierOverrides_InstancesRotateWithinRange(self, request, workspace, editor, launcher_platform):
from .EditorScripts import RotationModifierOverrides_InstancesRotateWithinRange as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_ScaleModifier_InstancesProperlyScale(self, request, workspace, editor, launcher_platform):
from .EditorScripts import ScaleModifier_InstancesProperlyScale as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_ScaleModifierOverrides_InstancesProperlyScale(self, request, workspace, editor, launcher_platform):
from .EditorScripts import ScaleModifierOverrides_InstancesProperlyScale as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_ShapeIntersectionFilter_InstancesPlantInAssignedShape(self, request, workspace, editor, launcher_platform):
from .EditorScripts import ShapeIntersectionFilter_InstancesPlantInAssignedShape as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_ShapeIntersectionFilter_FilterStageToggle(self, request, workspace, editor, launcher_platform):
from .EditorScripts import ShapeIntersectionFilter_FilterStageToggle as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_SlopeAlignmentModifier_InstanceSurfaceAlignment(self, request, workspace, editor, launcher_platform):
from .EditorScripts import SlopeAlignmentModifier_InstanceSurfaceAlignment as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_SlopeAlignmentModifierOverrides_InstanceSurfaceAlignment(self, request, workspace, editor, launcher_platform):
from .EditorScripts import SlopeAlignmentModifierOverrides_InstanceSurfaceAlignment as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_SurfaceMaskFilter_BasicSurfaceTagCreation(self, request, workspace, editor, launcher_platform):
from .EditorScripts import SurfaceMaskFilter_BasicSurfaceTagCreation as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_SurfaceMaskFilter_ExclusiveSurfaceTags_Function(self, request, workspace, editor, launcher_platform):
from .EditorScripts import SurfaceMaskFilter_ExclusionList as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_SurfaceMaskFilter_InclusiveSurfaceTags_Function(self, request, workspace, editor, launcher_platform):
from .EditorScripts import SurfaceMaskFilter_InclusionList as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_SurfaceMaskFilterOverrides_MultipleDescriptorOverridesPlantAsExpected(self, request, workspace, editor, launcher_platform):
from .EditorScripts import SurfaceMaskFilterOverrides_MultipleDescriptorOverridesPlantAsExpected as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_SystemSettings_SectorPointDensity(self, request, workspace, editor, launcher_platform):
from .EditorScripts import SystemSettings_SectorPointDensity as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_SystemSettings_SectorSize(self, request, workspace, editor, launcher_platform):
from .EditorScripts import SystemSettings_SectorSize as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
def test_SlopeFilter_ComponentAndOverrides_InstancesPlantOnValidSlopes(self, request, workspace, editor, launcher_platform):
from .EditorScripts import SlopeFilter_ComponentAndOverrides_InstancesPlantOnValidSlope as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
@pytest.mark.SUITE_periodic
@pytest.mark.parametrize("project", ["AutomatedTesting"])
@pytest.mark.parametrize("level", ["tmp_level"])
class TestAutomationE2E(TestAutomationBase):
    # The following tests must run in order; please do not reorder them.
@pytest.mark.parametrize("launcher_platform", ['windows_editor'])
def test_DynamicSliceInstanceSpawner_Embedded_E2E_Editor(self, request, workspace, project, level, editor, launcher_platform):
# Ensure our test level does not already exist
file_system.delete([os.path.join(workspace.paths.engine_root(), project, "Levels", level)], True, True)
from .EditorScripts import DynamicSliceInstanceSpawner_Embedded_E2E as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
@pytest.mark.parametrize("launcher_platform", ['windows'])
def test_DynamicSliceInstanceSpawner_Embedded_E2E_Launcher(self, workspace, launcher, level,
remote_console_instance, project, launcher_platform):
expected_lines = [
"Instances found in area = 400"
]
hydra.launch_and_validate_results_launcher(launcher, level, remote_console_instance, expected_lines, launch_ap=False)
# Cleanup our temp level
file_system.delete([os.path.join(workspace.paths.engine_root(), project, "Levels", level)], True, True)
@pytest.mark.parametrize("launcher_platform", ['windows_editor'])
def test_DynamicSliceInstanceSpawner_External_E2E_Editor(self, request, workspace, project, level, editor, launcher_platform):
# Ensure our test level does not already exist
file_system.delete([os.path.join(workspace.paths.engine_root(), project, "Levels", level)], True, True)
from .EditorScripts import DynamicSliceInstanceSpawner_External_E2E as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
@pytest.mark.parametrize("launcher_platform", ['windows'])
def test_DynamicSliceInstanceSpawner_External_E2E_Launcher(self, workspace, launcher, level,
remote_console_instance, project, launcher_platform):
expected_lines = [
"Instances found in area = 400"
]
hydra.launch_and_validate_results_launcher(launcher, level, remote_console_instance, expected_lines, launch_ap=False)
# Cleanup our temp level
file_system.delete([os.path.join(workspace.paths.engine_root(), project, "Levels", level)], True, True)
@pytest.mark.parametrize("launcher_platform", ['windows_editor'])
def test_LayerBlender_E2E_Editor(self, request, workspace, project, level, editor, launcher_platform):
# Ensure our test level does not already exist
file_system.delete([os.path.join(workspace.paths.engine_root(), project, "Levels", level)], True, True)
from .EditorScripts import LayerBlender_E2E_Editor as test_module
self._run_test(request, workspace, editor, test_module, enable_prefab_system=False)
@pytest.mark.parametrize("launcher_platform", ['windows'])
@pytest.mark.xfail(reason="https://github.com/o3de/o3de/issues/4170")
def test_LayerBlender_E2E_Launcher(self, workspace, launcher, level,
remote_console_instance, project, launcher_platform):
launcher.args.extend(["-rhi=Null"])
launcher.start(launch_ap=False)
assert launcher.is_alive(), "Launcher failed to start"
        # Wait for the test script to quit the launcher. If wait_for raises, the test was not successful.
waiter.wait_for(lambda: not launcher.is_alive(), timeout=300)
# Verify launcher quit successfully and did not crash
ret_code = launcher.get_returncode()
assert ret_code == 0, "Test failed. See Game.log for details"
# Cleanup our temp level
file_system.delete([os.path.join(workspace.paths.engine_root(), project, "Levels", level)], True, True)
| 62.25 | 138 | 0.783755 | 1,868 | 17,679 | 7.131156 | 0.123662 | 0.099692 | 0.130471 | 0.049246 | 0.736131 | 0.720892 | 0.720892 | 0.712859 | 0.712859 | 0.668944 | 0 | 0.003509 | 0.14571 | 17,679 | 283 | 139 | 62.469965 | 0.878501 | 0.035353 | 0 | 0.423469 | 0 | 0 | 0.041202 | 0.001761 | 0 | 0 | 0 | 0 | 0.010204 | 1 | 0.244898 | false | 0 | 0.25 | 0 | 0.510204 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
64f819eba5fc083ede53de05b85e42707d3caf74 | 1,192 | py | Python | Modulo_construcao_regras/Frase_pt.py | mattgoncalves/PE2LGP | dc74d496637217170b5287ca877d86e2e18068ab | [
"MIT"
] | 1 | 2021-12-16T13:13:19.000Z | 2021-12-16T13:13:19.000Z | PE2LGP/Modulo_construcao_regras/Frase_pt.py | ineslacerda/PE2LGP-Translator | 6ff22da887444d8be6cd50b3c3cdc3ce5bd2449a | [
"MIT"
] | null | null | null | PE2LGP/Modulo_construcao_regras/Frase_pt.py | ineslacerda/PE2LGP-Translator | 6ff22da887444d8be6cd50b3c3cdc3ce5bd2449a | [
"MIT"
] | 2 | 2021-03-27T10:59:28.000Z | 2021-12-16T11:51:43.000Z | class Frase_pt:
#informações com as dependências da frase em português e das classes gramaticais.
def __init__(self, frase_pt= ""):
self.frase_pt = frase_pt
self.lemmas = {}
self.classes_gramaticais = {}
self.analise_sintatica = {}
def append_classes_gramaticais(self, glosa, classe):
self.classes_gramaticais[glosa] = classe
def append_analise_sintatica(self, glosa, classe):
self.analise_sintatica[glosa] = classe
def append_lemmas(self, glosa, lemma):
self.lemmas[glosa] = lemma
    def converter_classes_gramaticais(self):
        # Collapse fine-grained POS tags into coarse classes by tag prefix.
        for k, v in self.classes_gramaticais.items():
            if v.startswith('V'):
                self.classes_gramaticais[k] = "V"
            elif v.startswith('N'):
                self.classes_gramaticais[k] = "N"
            elif v.startswith('AQ') or v.startswith('AO'):
                self.classes_gramaticais[k] = "ADJ"
            elif v.startswith('R'):
                self.classes_gramaticais[k] = "ADV"
            elif v.startswith('C'):
                self.classes_gramaticais[k] = "CONJ"
            elif v.startswith('Z'):
                self.classes_gramaticais[k] = "NUM"
            elif v.startswith('I'):
                self.classes_gramaticais[k] = "INT"
            elif v.startswith('P'):
                self.classes_gramaticais[k] = "PRO"
            elif v.startswith('D'):
self.classes_gramaticais[k]="DET" | 30.564103 | 82 | 0.698826 | 165 | 1,192 | 4.872727 | 0.29697 | 0.335821 | 0.328358 | 0.257463 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.15604 | 1,192 | 39 | 83 | 30.564103 | 0.799205 | 0.067114 | 0 | 0 | 0 | 0 | 0.032374 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.15625 | false | 0 | 0 | 0 | 0.1875 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
8f01c902b327cbd9d3e3b498fc969a42f9707e07 | 6,994 | py | Python | sysinv/sysinv/sysinv/sysinv/puppet/heat.py | etaivan/stx-config | 281e1f110973f96e077645fb01f67b646fc253cc | [
"Apache-2.0"
] | null | null | null | sysinv/sysinv/sysinv/sysinv/puppet/heat.py | etaivan/stx-config | 281e1f110973f96e077645fb01f67b646fc253cc | [
"Apache-2.0"
] | null | null | null | sysinv/sysinv/sysinv/sysinv/puppet/heat.py | etaivan/stx-config | 281e1f110973f96e077645fb01f67b646fc253cc | [
"Apache-2.0"
] | 1 | 2021-01-05T16:24:58.000Z | 2021-01-05T16:24:58.000Z | #
# Copyright (c) 2017 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from sysinv.puppet import openstack
from sysinv.common import constants
class HeatPuppet(openstack.OpenstackBasePuppet):
"""Class to encapsulate puppet operations for heat configuration"""
SERVICE_NAME = 'heat'
SERVICE_PORT = 8004
SERVICE_PORT_CFN = 8000
SERVICE_PORT_CLOUDWATCH = 8003
SERVICE_PATH = 'v1/%(tenant_id)s'
SERVICE_PATH_WAITCONDITION = 'v1/waitcondition'
DEFAULT_DOMAIN_NAME = 'heat'
DEFAULT_STACK_ADMIN = 'heat_admin'
SERVICE_NAME_DOMAIN = 'heat-domain'
def get_static_config(self):
dbuser = self._get_database_username(self.SERVICE_NAME)
return {
'heat::db::postgresql::user': dbuser,
}
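    # Illustrative note (not part of the original class): each get_*_config
    # method returns a plain dict of hiera keys, e.g. get_static_config()
    # yields something like {'heat::db::postgresql::user': '<db username>'},
    # where the value comes from the system's configured database username.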
def get_secure_static_config(self):
dbpass = self._get_database_password(self.SERVICE_NAME)
kspass = self._get_service_password(self.SERVICE_NAME)
dkspass = self._get_service_password(self.SERVICE_NAME_DOMAIN)
return {
'heat::db::postgresql::password': dbpass,
'heat::keystone::auth::password': kspass,
'heat::keystone::auth_cfn::password': kspass,
'heat::keystone::authtoken::password': kspass,
'heat::keystone::domain::domain_password': dkspass,
'heat::engine::auth_encryption_key':
self._generate_random_password(length=32),
'openstack::heat::params::domain_pwd': dkspass,
}
def get_system_config(self):
ksuser = self._get_service_user_name(self.SERVICE_NAME)
config = {
'heat::keystone_ec2_uri': self._operator.keystone.get_auth_url(),
'heat::region_name': self.get_region_name(),
'heat::engine::heat_metadata_server_url':
self._get_metadata_url(),
'heat::engine::heat_waitcondition_server_url':
self._get_waitcondition_url(),
'heat::engine::heat_watch_server_url':
self._get_cloudwatch_url(),
'heat::keystone::domain::domain_name': self._get_stack_domain(),
'heat::keystone::domain::domain_admin': self._get_stack_admin(),
'heat::keystone::auth::region': self.get_region_name(),
'heat::keystone::auth::public_url': self.get_public_url(),
'heat::keystone::auth::internal_url': self.get_internal_url(),
'heat::keystone::auth::admin_url': self.get_admin_url(),
'heat::keystone::auth::auth_name': ksuser,
'heat::keystone::auth::tenant': self._get_service_tenant_name(),
'heat::keystone::auth_cfn::region':
self.get_region_name(),
'heat::keystone::auth_cfn::public_url':
self.get_public_url_cfn(),
'heat::keystone::auth_cfn::internal_url':
self.get_internal_url_cfn(),
'heat::keystone::auth_cfn::admin_url':
self.get_admin_url_cfn(),
'heat::keystone::auth_cfn::auth_name': ksuser,
'heat::keystone::auth_cfn::tenant':
self._get_service_tenant_name(),
'heat::keystone::authtoken::auth_url':
self._keystone_identity_uri(),
'heat::keystone::authtoken::auth_uri':
self._keystone_auth_uri(),
'heat::keystone::authtoken::user_domain_name':
self._get_service_user_domain_name(),
'heat::keystone::authtoken::project_domain_name':
self._get_service_project_domain_name(),
'heat::keystone::authtoken::project_name':
self._get_service_tenant_name(),
'heat::keystone::authtoken::username': ksuser,
'openstack::heat::params::domain_name': self._get_stack_domain(),
'openstack::heat::params::domain_admin': self._get_stack_admin(),
'openstack::heat::params::region_name': self.get_region_name(),
'openstack::heat::params::domain_pwd':
self._get_service_password(self.SERVICE_NAME_DOMAIN),
'openstack::heat::params::service_tenant':
self._get_service_tenant_name(),
'openstack::heat::params::service_create':
self._to_create_services(),
}
if (self._distributed_cloud_role() ==
constants.DISTRIBUTED_CLOUD_ROLE_SYSTEMCONTROLLER):
config.update({'openstack::heat::params::service_enabled': False,
'heat::keystone::auth::configure_endpoint': False,
'heat::keystone::auth_cfn::configure_endpoint':
False})
return config
def get_secure_system_config(self):
config = {
'heat::database_connection':
self._format_database_connection(self.SERVICE_NAME),
}
return config
def get_public_url(self):
return self._format_public_endpoint(self.SERVICE_PORT,
path=self.SERVICE_PATH)
def get_internal_url(self):
return self._format_private_endpoint(self.SERVICE_PORT,
path=self.SERVICE_PATH)
def get_admin_url(self):
return self._format_private_endpoint(self.SERVICE_PORT,
path=self.SERVICE_PATH)
def get_public_url_cfn(self):
return self._format_public_endpoint(self.SERVICE_PORT_CFN,
path=self.SERVICE_PATH)
def get_internal_url_cfn(self):
return self._format_private_endpoint(self.SERVICE_PORT_CFN,
path=self.SERVICE_PATH)
def get_admin_url_cfn(self):
return self._format_private_endpoint(self.SERVICE_PORT_CFN,
path=self.SERVICE_PATH)
def _get_metadata_url(self):
return self._format_public_endpoint(self.SERVICE_PORT_CFN)
def get_region_name(self):
return self._get_service_region_name(self.SERVICE_NAME)
def _get_waitcondition_url(self):
return self._format_public_endpoint(
self.SERVICE_PORT_CFN, path=self.SERVICE_PATH_WAITCONDITION)
def _get_cloudwatch_url(self):
return self._format_public_endpoint(self.SERVICE_PORT_CLOUDWATCH)
def _get_stack_domain(self):
if self._region_config():
service_config = self._get_service_config(self.SERVICE_NAME)
if service_config is not None:
return service_config.capabilities.get('admin_domain_name')
return self.DEFAULT_DOMAIN_NAME
def _get_stack_admin(self):
if self._region_config():
service_config = self._get_service_config(self.SERVICE_NAME)
if service_config is not None:
return service_config.capabilities.get('admin_user_name')
return self.DEFAULT_STACK_ADMIN
| 39.292135 | 77 | 0.622391 | 769 | 6,994 | 5.253576 | 0.141743 | 0.055446 | 0.063366 | 0.044554 | 0.486634 | 0.457426 | 0.319307 | 0.310149 | 0.22896 | 0.22896 | 0 | 0.004508 | 0.270518 | 6,994 | 177 | 78 | 39.514124 | 0.787338 | 0.020303 | 0 | 0.186567 | 0 | 0 | 0.230488 | 0.214411 | 0 | 0 | 0 | 0 | 0 | 1 | 0.119403 | false | 0.08209 | 0.014925 | 0.074627 | 0.343284 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
8f02541a7ae70196b397d818b7763843b81bed3a | 6,213 | py | Python | pong_v2b.py | jasonj2333/Pico-Pong-2021 | dabd57a9b6a44723a1d690a614265e1a6d5d9b1f | [
"MIT"
] | null | null | null | pong_v2b.py | jasonj2333/Pico-Pong-2021 | dabd57a9b6a44723a1d690a614265e1a6d5d9b1f | [
"MIT"
] | null | null | null | pong_v2b.py | jasonj2333/Pico-Pong-2021 | dabd57a9b6a44723a1d690a614265e1a6d5d9b1f | [
"MIT"
] | 1 | 2021-03-11T08:34:21.000Z | 2021-03-11T08:34:21.000Z | #####################################################
####### Pico Pong 2021 by Jerzy Jasonek ########
####### version 2.0 ########
####### add training mode - 1 player mode ########
#####################################################
import machine  # needed for the machine.Pin(...) calls below
from machine import Pin, I2C, ADC
from ssd1306 import SSD1306_I2C
import framebuf
from utime import sleep
from random import randint
################################### Hardware Settings #################################
WIDTH = 128
HEIGHT = 64
i2c = I2C(1, scl = Pin(3), sda = Pin(2), freq=400000)
oled = SSD1306_I2C(WIDTH, HEIGHT, i2c)
Pot = ADC(26) # player 1 controller
Pot2 = ADC(27) # player 2 controller / scroll max left on start screen to turn on training mode
conversion_factor = 3.3 / (65535) # Conversion from Pin read to proper voltage
button = machine.Pin(14, machine.Pin.IN, machine.Pin.PULL_DOWN)  # level button (cycles difficulty via button_handler)
start_button = machine.Pin(15, machine.Pin.IN, machine.Pin.PULL_DOWN)  # start button (sets start via button_start)
global level1, level2,level3
level1 = machine.Pin(13, machine.Pin.OUT) # level 1 led
level2 = machine.Pin(12, machine.Pin.OUT) #level 2 led
level3 = machine.Pin(11, machine.Pin.OUT) #level 3 led
led_one_player_game = machine.Pin(1, machine.Pin.OUT) #training mode - 1 player game - led
################################### Game Settings #################################
one_player_game = False # training mode - you control player2
one_player_game_score = 0
game_over = False
#ball = bytearray(b'?\x00\x7f\x80\xff\xc0\xff\xc0\xff\xc0\xff\xc0\xff\xc0\xff\xc0\x7f\x80?\x00')
ball = bytearray(b'x\xfc\xfc\xfc\xfcx')
ball_x = 1
ball_y = 1
player1 = bytearray(b'\xe0\xe0\xe0\xe0\xe0\xe0\xe0\xe0\xe0\xe0\xe0\xe0\xe0\xe0\xe0\xe0\xe0\xe0\xe0\xe0')
player1X = 5
player1Y = int((HEIGHT-20)/2)
player2X = WIDTH-8
player2Y = int((HEIGHT-20)/2)
player1_score = 0
player2_score = 0
ball_buff = framebuf.FrameBuffer(ball, 6, 6, framebuf.MONO_HLSB)
player1_buff = framebuf.FrameBuffer(player1, 3, 20, framebuf.MONO_HLSB)
player2_buff = framebuf.FrameBuffer(player1, 3, 20, framebuf.MONO_HLSB)
global level
level = 1
level1.value(0)
level2.value(0)
level3.value(0)
global start
start = False
################################### Function #################################
def button_handler(pin):
global level
level +=1
if level == 4:
level=1
def button_start(pin):
global start
if not start:
start = True
button.irq(trigger=machine.Pin.IRQ_RISING, handler=button_handler)
start_button.irq(trigger=machine.Pin.IRQ_RISING, handler=button_start)
def check_level(level):
global level1, level2,level3
if level == 1:
level1.value(1)
level2.value(0)
level3.value(0)
elif level == 2:
level1.value(1)
level2.value(1)
level3.value(0)
elif level == 3:
level1.value(1)
level2.value(1)
level3.value(1)
# Map a value from one input range to another (Arduino-style map())
def convert(x, in_min, in_max, out_min, out_max):
return (x - in_min) * (out_max - out_min) / (in_max - in_min) + out_min
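# Illustrative (not part of the original sketch): convert() maps the 16-bit
# ADC reading onto the paddle's 0..44 pixel travel range, e.g.
#   convert(32767, 0, 65535, 0, 44)  # -> ~22.0 (mid-travel)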
def set_ball_y(y, playerY):
pY = int(playerY)
if (y >= pY-3 and y <= pY+2):
return -2
elif (y >= pY+16 and y <= pY+19):
return 2
elif y >= pY+3 and y <= pY+6:
return -1
elif y >= pY+12 and y <= pY+15:
return 1
else:
return 0
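# Illustrative (not part of the original sketch): the vertical deflection
# depends on where the ball hits the 20-pixel paddle; with playerY = 20:
#   set_ball_y(18, 20)  # -> -2 (near the top edge: steep upward bounce)
#   set_ball_y(28, 20)  # ->  0 (dead centre: straight return)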
################################### Start Screen #################################
oled.fill(0)
x = int((WIDTH-4)/2)
y = int((HEIGHT-4)/2)
oled.text('Pico Pong 2021', 10,21)
oled.text('by Jerzy Jasonek', 0,41)
oled.show()
check_level(level)
sleep(2)
################################### Game loop #################################
while not game_over:
check_level(level)
if not start:
player2Y = (Pot2.read_u16())
if player2Y < 1000:
one_player_game = True
led_one_player_game.value(1)
else:
one_player_game = False
led_one_player_game.value(0)
else:
#update player position
if not one_player_game:
player1Y = (Pot.read_u16() * conversion_factor)
player1Y = convert(player1Y, 0, 3.3, 0, 44)
else:
player1Y = y-10
player2Y = (Pot2.read_u16() * conversion_factor)
player2Y = convert(player2Y, 0, 3.3, 0, 44)
#draw screen
oled.fill(0)
oled.text(str(player1_score), 40,3)
oled.text(str(player2_score), 88,3)
oled.blit(ball_buff, x, y)
oled.blit(player1_buff, player1X, int(player1Y))
oled.blit(player2_buff, player2X, int(player2Y))
if one_player_game:
oled.text('Score:'+str(one_player_game_score), 28,55)
oled.show()
#check collinsion with z wall
if y > HEIGHT-7 or y < 0:
ball_y *= -1
if x < 0 or x > WIDTH-6:
if x < 0:
player2_score +=1
else:
player1_score +=1
x = int((WIDTH-4)/2)
y = int((HEIGHT-4)/2)
ball_x *= -1
if player1_score == 15 or player2_score == 15:
game_over = True
else:
sleep(1)
    # collision with the paddles
if player1X+3 <=x and player1X+4 >=x and player1Y-3 <= y and player1Y+21 >= y:
ball_x *= -1
ball_y = set_ball_y(y, player1Y)
if one_player_game:
ball_y = randint(0,2)
if player2X-5 <= x and player2X-4 >= x and player2Y-3 <= y and player2Y+21 >= y:
ball_x *= -1
ball_y = set_ball_y(y, player2Y)
if one_player_game:
one_player_game_score +=level
x += ball_x*level
y += ball_y*level
################################### Game over screen #################################
oled.fill(0)
oled.text(str(player1_score), 40,3)
oled.text(str(player2_score), 88,3)
if one_player_game:
oled.text('Score:'+str(one_player_game_score), 28,55)
oled.text('Game over', 30,31)
oled.show()
| 30.014493 | 104 | 0.541445 | 839 | 6,213 | 3.882002 | 0.191895 | 0.035002 | 0.049739 | 0.062634 | 0.30089 | 0.254222 | 0.233344 | 0.214922 | 0.19343 | 0.133866 | 0 | 0.072368 | 0.266055 | 6,213 | 206 | 105 | 30.160194 | 0.641886 | 0.110736 | 0 | 0.355705 | 0 | 0.006711 | 0.030113 | 0.016168 | 0 | 0 | 0 | 0 | 0 | 1 | 0.033557 | false | 0 | 0.033557 | 0.006711 | 0.107383 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f0774dc003329dbb4aee294ed1abf6b04d27049 | 14,904 | py | Python | gh/views.py | Gepetto/dashboard | a24bbcec7c13c00b2a783c840658130083ad3b30 | [
"BSD-2-Clause"
] | null | null | null | gh/views.py | Gepetto/dashboard | a24bbcec7c13c00b2a783c840658130083ad3b30 | [
"BSD-2-Clause"
] | 7 | 2018-02-21T18:03:36.000Z | 2021-04-29T15:17:59.000Z | gh/views.py | Gepetto/dashboard | a24bbcec7c13c00b2a783c840658130083ad3b30 | [
"BSD-2-Clause"
] | 1 | 2018-07-10T15:19:31.000Z | 2018-07-10T15:19:31.000Z | """Views for dashboard_apps."""
import hmac
import logging
import re
import traceback
from hashlib import sha1
from ipaddress import ip_address, ip_network
from json import loads
from asgiref.sync import sync_to_async, async_to_sync
from django.conf import settings
from django.core.mail import mail_admins
from django.http import HttpRequest
from django.http.response import (HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseRedirect,
HttpResponseServerError)
from django.shortcuts import get_object_or_404, reverse
from django.utils.encoding import force_bytes
from django.views.decorators.csrf import csrf_exempt
import git
import github
from gitlab import GitlabDeleteError
from autoslug.utils import slugify
from dashboard.middleware import ip_laas
from rainboard.models import Namespace, Project
from rainboard.utils import SOURCES
from . import models
logger = logging.getLogger(__name__)
PR_MASTER_MSG = """Hi ! This project doesn't usually accept pull requests on master. If this wasn't intentionnal, you
can change the base branch of this pull request to devel (No need to close it for that). Best, a bot."""
async def check_suite(request: HttpRequest, rep: str) -> HttpResponse:
"""Manage Github's check suites."""
data = loads(request.body.decode())
slug = slugify(data['repository']['name'])
if 'ros-release' in slug: # Don't run check suites on ros-release repositories
return HttpResponse(rep)
await sync_to_async(models.GithubCheckSuite.objects.get_or_create)(id=data['check_suite']['id'])
return HttpResponse(rep)
async def pull_request(request: HttpRequest, rep: str) -> HttpResponse:
"""Manage Github's Pull Requests."""
logger.info('process gh pr')
data = loads(request.body.decode())
event = data['action']
branch = f'pr/{data["number"]}'
login = slugify(data["pull_request"]["head"]["repo"]["owner"]["login"])
namespace = await sync_to_async(get_object_or_404)(Namespace,
slug_github=slugify(data['repository']['owner']['login']))
project = await sync_to_async(get_object_or_404)(Project,
main_namespace=namespace,
slug=slugify(data['repository']['name']))
git_repo = await sync_to_async(project.git)()
logger.debug(f'{namespace.slug}/{project.slug}: Pull request on {branch}: {event}')
# Prevent pull requests on master when necessary
if event in ['opened', 'reopened']:
gh = await sync_to_async(project.github)()
pr = await sync_to_async(gh.get_pull)(data["number"])
pr_branch = pr.base.ref
branches = [b.name for b in await sync_to_async(gh.get_branches)()]
if (not project.accept_pr_to_master and pr_branch == 'master' and 'devel' in branches
and login != namespace.slug_github):
logger.info(f"{namespace.slug}/{project.slug}: New pr {data['number']} to master")
await sync_to_async(pr.create_issue_comment)(PR_MASTER_MSG)
gh_remote_name = f'github/{login}'
if gh_remote_name not in git_repo.remotes:
remote = await sync_to_async(git_repo.create_remote)(gh_remote_name,
data["pull_request"]["head"]["repo"]["clone_url"])
else:
remote = await sync_to_async(git_repo.remote)(gh_remote_name)
# Sync the pull request with the pr/XX branch on Gitlab
if event in ['opened', 'reopened', 'synchronize']:
remote.fetch()
commit = data['pull_request']['head']['sha']
# Update branch to the latest commit
if branch in git_repo.branches:
git_repo.heads[branch].commit = commit
else:
await sync_to_async(git_repo.create_head)(branch, commit=commit)
# Create a gitlab remote if it doesn't exist
gl_remote_name = f'gitlab/{namespace.slug}'
if gl_remote_name not in git_repo.remotes:
url = await sync_to_async(project.remote_url_gitlab)()
await sync_to_async(git_repo.create_remote)(gl_remote_name, url=url)
# Push the changes to gitlab
logger.info(f'{namespace.slug}/{project.slug}: Pushing {commit} on {branch} on gitlab')
try:
git_repo.git.push(gl_remote_name, branch)
except git.exc.GitCommandError:
logger.warning(f'{namespace.slug}/{project.slug}: Failed to push on {branch} on gitlab, force pushing ...')
git_repo.git.push(gl_remote_name, branch, force=True)
# The pull request was closed, delete the branch pr/XX on Gitlab
elif event == 'closed':
if branch in git_repo.branches:
git_repo.delete_head(branch, force=True)
git_repo.delete_remote(gh_remote_name)
gitlab = await sync_to_async(project.gitlab)()
try:
await sync_to_async(gitlab.branches.delete)(branch)
logger.info(f'{namespace.slug}/{project.slug}: Deleted branch {branch}')
except GitlabDeleteError as e:
logger.info(f'{namespace.slug}/{project.slug}: branch {branch} not delete: {e}')
return HttpResponse(rep)
async def push(request: HttpRequest, source: SOURCES, rep: str) -> HttpResponse:
"""Someone pushed on github or gitlab. Synchronise local & remote repos."""
data = loads(request.body.decode())
slug = slugify(data['repository']['name'])
if 'ros-release' in slug: # Don't sync ros-release repositories
return HttpResponse(rep)
if source == SOURCES.gitlab:
namespace = await sync_to_async(get_object_or_404)(Namespace,
slug_gitlab=slugify(
data['project']['path_with_namespace'].split('/')[0]))
else:
namespace = await sync_to_async(get_object_or_404)(Namespace,
slug_github=slugify(data['repository']['owner']['login']))
project = await sync_to_async(get_object_or_404)(Project, main_namespace=namespace, slug=slug)
branch = data['ref'][11:] # strip 'refs/heads/'
commit = data['after']
gl_remote_name = f'gitlab/{namespace.slug}'
gh_remote_name = f'github/{namespace.slug}'
git_repo = await sync_to_async(project.git)()
logger.debug(f'{namespace.slug}/{slug}: Push detected on {source.name} {branch} (commit {commit})')
if branch.startswith('pr/'): # Don't sync pr/XX branches here, they are already handled by pull_request()
return HttpResponse(rep)
if branch.startswith('release/'): # Don't sync release/X.Y.Z branches at all
return HttpResponse(rep)
# Fetch the latest commit from gitlab
if gl_remote_name in git_repo.remotes:
gl_remote = await sync_to_async(git_repo.remote)(gl_remote_name)
else:
url = await sync_to_async(project.remote_url_gitlab)()
gl_remote = await sync_to_async(git_repo.create_remote)(gl_remote_name, url=url)
gl_remote.fetch()
# Fetch the latest commit from github
if gh_remote_name in git_repo.remotes:
gh_remote = await sync_to_async(git_repo.remote)(gh_remote_name)
else:
url = await sync_to_async(project.remote_url_github)()
gh_remote = await sync_to_async(git_repo.create_remote)(gh_remote_name, url=url)
gh_remote.fetch()
# The branch was deleted on one remote, delete the branch on the other remote as well
if commit == "0000000000000000000000000000000000000000":
if branch in git_repo.branches:
git_repo.delete_head(branch, force=True)
if source == SOURCES.gitlab:
github = await sync_to_async(project.github)()
github.get_git_ref(f'heads/{branch}').delete()
else:
gitlab = await sync_to_async(project.gitlab)()
gitlab.branches.delete(branch)
logger.info(f'{namespace.slug}/{slug}: Deleted branch {branch}')
return HttpResponse(rep)
# Make sure we fetched the latest commit
ref = gl_remote.refs[branch] if source == SOURCES.gitlab else gh_remote.refs[branch]
if str(ref.commit) != commit:
fail = f'Push: wrong commit: {ref.commit} vs {commit}'
logger.error(f'{namespace.slug}/{slug}: ' + fail)
return HttpResponseBadRequest(fail)
# Update the branch to the latest commit
if branch in git_repo.branches:
git_repo.heads[branch].commit = commit
else:
await sync_to_async(git_repo.create_head)(branch, commit=commit)
# Push the changes to other remote
try:
if source == SOURCES.gitlab and (branch not in gh_remote.refs or str(gh_remote.refs[branch].commit) != commit):
logger.info(f'{namespace.slug}/{slug}: Pushing {commit} on {branch} on github')
await sync_to_async(git_repo.git.push)(gh_remote_name, branch)
elif branch not in gl_remote.refs or str(gl_remote.refs[branch].commit) != commit:
logger.info(f'{namespace.slug}/{slug}: Pushing {commit} on {branch} on gitlab')
await sync_to_async(git_repo.git.push)(gl_remote_name, branch)
else:
return HttpResponse('already synced')
except git.exc.GitCommandError:
# Probably failed because of a force push
logger.exception(f'{namespace.slug}/{slug}: Forge sync failed')
message = traceback.format_exc()
message = re.sub(r'://.*@', '://[REDACTED]@', message) # Hide access tokens in the mail
await sync_to_async(mail_admins)(f'Forge sync failed for {namespace.slug}/{slug}', message)
return HttpResponse(rep)
async def pipeline(request: HttpRequest, rep: str) -> HttpResponse:
"""Something happened on a Gitlab pipeline. Tell Github if necessary."""
data = loads(request.body.decode())
branch, commit, gl_status, pipeline_id = (data['object_attributes'][key] for key in ['ref', 'sha', 'status', 'id'])
namespace = await sync_to_async(get_object_or_404)(Namespace,
slug_gitlab=slugify(
data['project']['path_with_namespace'].split('/')[0]))
project = await sync_to_async(get_object_or_404)(Project,
main_namespace=namespace,
slug=slugify(data['project']['name']))
gh_repo = await sync_to_async(project.github)()
ci_web_url = f'{project.url_gitlab()}/pipelines/{pipeline_id}'
logger.debug(f'{namespace.slug}/{project.slug}: Pipeline #{pipeline_id} on commit {commit} for branch {branch}, '
f'status: {gl_status}')
# Report the status to Github
if gl_status in ['pending', 'success', 'failed']:
gh_status = gl_status if gl_status != 'failed' else 'failure'
if branch.startswith('pr/'):
sha = await sync_to_async(gh_repo.get_commit)(sha=commit)
await sync_to_async(sha.create_status)(state=gh_status, target_url=ci_web_url, context='gitlab-ci')
else:
try:
sha = await sync_to_async(gh_repo.get_branch)(branch)
await sync_to_async(sha.commit.create_status)(state=gh_status,
target_url=ci_web_url,
context='gitlab-ci')
except github.GithubException as e:
if e.status == 404:
# Happens when a new branch is created on gitlab and the pipeline event comes before the push event
logger.warning(f"Branch {branch} does not exist on github, unable to report the pipeline status.")
else:
raise
return HttpResponse(rep)
@sync_to_async
@csrf_exempt
@async_to_sync
async def webhook(request: HttpRequest) -> HttpResponse:
"""
Process request incoming from a github webhook.
    Based on https://simpleisbetterthancomplex.com/tutorial/2016/10/31/how-to-handle-github-webhooks-using-django.html
"""
    # validate ip source
    forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR', '').split(', ')[0]
    if not forwarded_for:
        logger.warning('no X-Forwarded-For header')
        return HttpResponseRedirect(reverse('login'))
# networks = httpx.get('https://api.github.com/meta').json()['hooks'] # Fails if API rate limit exceeded
networks = ['185.199.108.0/22', '140.82.112.0/20']
if not any(ip_address(forwarded_for) in ip_network(net) for net in networks):
logger.warning('not from github IP')
return HttpResponseRedirect(reverse('login'))
# validate signature
signature = request.META.get('HTTP_X_HUB_SIGNATURE')
if signature is None:
logger.warning('no signature')
return HttpResponseRedirect(reverse('login'))
algo, signature = signature.split('=')
if algo != 'sha1':
logger.warning('signature not sha-1')
        # 501 Not Implemented is the intended status; a plain HttpResponse makes
        # that explicit instead of overriding HttpResponseServerError's 500.
        return HttpResponse('I only speak sha1.', status=501)
mac = hmac.new(force_bytes(settings.GITHUB_WEBHOOK_KEY), msg=force_bytes(request.body), digestmod=sha1)
if not hmac.compare_digest(force_bytes(mac.hexdigest()), force_bytes(signature)):
logger.warning('wrong signature')
return HttpResponseForbidden('wrong signature.')
# process event
event = request.META.get('HTTP_X_GITHUB_EVENT', 'ping')
if event == 'ping':
return HttpResponse('pong')
if event == 'push':
return await push(request, SOURCES.github, 'push event detected')
if event == 'check_suite':
return await check_suite(request, 'check_suite event detected')
if event == 'pull_request':
return await pull_request(request, 'pull_request event detected')
return HttpResponseForbidden('event not found')
@sync_to_async
@csrf_exempt
@async_to_sync
async def gl_webhook(request: HttpRequest) -> HttpResponse:
"""Process request incoming from a gitlab webhook."""
# validate ip source
if not ip_laas(request):
logger.warning('not from LAAS IP')
return HttpResponseRedirect(reverse('login'))
# validate token
token = request.META.get('HTTP_X_GITLAB_TOKEN')
if token is None:
logger.warning('no token')
return HttpResponseRedirect(reverse('login'))
if token != settings.GITLAB_WEBHOOK_KEY:
logger.warning('wrong token')
return HttpResponseForbidden('wrong token.')
event = request.META.get('HTTP_X_GITLAB_EVENT')
if event == 'ping':
return HttpResponse('pong')
elif event == 'Pipeline Hook':
return await pipeline(request, 'pipeline event detected')
elif event == 'Push Hook':
return await push(request, SOURCES.gitlab, 'push event detected')
return HttpResponseForbidden('event not found')
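# A minimal sketch of exposing these views in a Django URLconf (the module
# path and route names below are assumptions, not part of this file):
#
# from django.urls import path
# from . import views
#
# urlpatterns = [
#     path('webhooks/github', views.webhook),
#     path('webhooks/gitlab', views.gl_webhook),
# ]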
| 45.577982 | 119 | 0.650631 | 1,900 | 14,904 | 4.933684 | 0.164737 | 0.026243 | 0.048112 | 0.06486 | 0.449755 | 0.387881 | 0.332836 | 0.279923 | 0.243759 | 0.213783 | 0 | 0.009806 | 0.240472 | 14,904 | 326 | 120 | 45.717791 | 0.818286 | 0.078502 | 0 | 0.349794 | 0 | 0.020576 | 0.18201 | 0.041201 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.09465 | 0 | 0.205761 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f0ac5d355c9a9038a09d1d81879dac676f628c0 | 2,844 | py | Python | plugins/serializables/global_packets.py | wesleyd1124/WLUS | ce319962d57d91dc9c8b06cc435469c7b24da826 | [
"MIT"
] | 25 | 2018-06-05T22:45:03.000Z | 2021-09-01T08:15:38.000Z | plugins/serializables/global_packets.py | wesleyd1124/WLUS | ce319962d57d91dc9c8b06cc435469c7b24da826 | [
"MIT"
] | 15 | 2018-07-10T10:39:55.000Z | 2021-07-01T20:56:26.000Z | plugins/serializables/global_packets.py | wesleyd1124/WLUS | ce319962d57d91dc9c8b06cc435469c7b24da826 | [
"MIT"
] | 13 | 2018-05-19T19:44:59.000Z | 2021-07-18T18:45:58.000Z | """
Contains all the packets which are sent by either the client or server
"""
from pyraknet import bitstream
class HandshakePacket(bitstream.Serializable):
"""
[53-00-00-00]
Global handshake packet serializable.
This packet is sent to establish a connection.
"""
def __init__(self):
self.game_version = 171022
self.unknown_0 = 0
self.remote_connection_type = 0 # For auth this is 1, otherwise it is 4
self.process_id = 1124
self.local_port = 0xff
def serialize(self, stream: bitstream.WriteStream) -> None:
stream.write(bitstream.c_uint32(self.game_version))
stream.write(bitstream.c_uint32(self.unknown_0))
stream.write(bitstream.c_uint32(self.remote_connection_type))
stream.write(bitstream.c_uint32(self.process_id))
stream.write(bitstream.c_uint16(self.local_port))
stream.write("127.0.0.1", allocated_length=33)
@classmethod
def deserialize(cls, stream: bitstream.ReadStream) -> bitstream.Serializable:
packet = HandshakePacket()
packet.game_version = stream.read(bitstream.c_uint32)
packet.unknown_0 = stream.read(bitstream.c_uint32)
packet.remote_connection_type = stream.read(bitstream.c_uint32)
packet.process_id = stream.read(bitstream.c_uint32)
packet.local_port = stream.read(bitstream.c_uint16)
return packet
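# A quick round-trip sketch for the packet above. It assumes pyraknet's
# WriteStream can be converted with bytes() and that ReadStream accepts raw
# bytes; adjust to the actual pyraknet API if those assumptions do not hold:
#
# out = bitstream.WriteStream()
# HandshakePacket().serialize(out)
# echoed = HandshakePacket.deserialize(bitstream.ReadStream(bytes(out)))
# assert echoed.game_version == 171022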
class DisconnectNotifyPacket(bitstream.Serializable):
"""
[53-00-00-01]
This packet is sent when the server and client disconnect from each other
"""
def __init__(self):
self.disconnect_id = 0
def serialize(self, stream: bitstream.WriteStream) -> None:
stream.write(bitstream.c_uint32(self.disconnect_id))
@classmethod
def deserialize(cls, stream: bitstream.ReadStream) -> bitstream.Serializable:
packet = DisconnectNotifyPacket()
packet.disconnect_id = stream.read(bitstream.c_uint32)
return packet
def send(self, generic_game_server, address):
disconnect_packet = bitstream.WriteStream()
disconnect_packet.write(b"S\x00\x00\x01\x00\x00\x00\x00")
disconnect_packet.write(self)
generic_game_server.send(disconnect_packet, address)
generic_game_server.delete_session(ip_address=address[0])
class GeneralNotifyPacket(bitstream.Serializable):
"""
[53-00-00-02]
This packet is sent to notify the player?
"""
def __init__(self):
self.notify_id = 0
def serialize(self, stream: bitstream.WriteStream) -> None:
stream.write(bitstream.c_uint32(self.notify_id))
@classmethod
def deserialize(cls, stream: bitstream.ReadStream) -> bitstream.Serializable:
packet = GeneralNotifyPacket()
packet.notify_id = stream.read(bitstream.c_uint32)
return packet
| 35.55 | 81 | 0.697961 | 347 | 2,844 | 5.538905 | 0.262248 | 0.072841 | 0.099896 | 0.076483 | 0.468783 | 0.407908 | 0.291883 | 0.291883 | 0.25026 | 0.25026 | 0 | 0.041998 | 0.204641 | 2,844 | 79 | 82 | 36 | 0.807692 | 0.123769 | 0 | 0.3 | 0 | 0 | 0.015683 | 0.011969 | 0 | 0 | 0.001651 | 0 | 0 | 1 | 0.2 | false | 0 | 0.02 | 0 | 0.34 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f0b154543475c7fee417eb340ebc23a9a65e18b | 1,278 | py | Python | tests/test_rasterize.py | PADAS/django-raster | 68b2d181c70827dffad3c07f4f38d3490872a3eb | [
"BSD-3-Clause"
] | null | null | null | tests/test_rasterize.py | PADAS/django-raster | 68b2d181c70827dffad3c07f4f38d3490872a3eb | [
"BSD-3-Clause"
] | null | null | null | tests/test_rasterize.py | PADAS/django-raster | 68b2d181c70827dffad3c07f4f38d3490872a3eb | [
"BSD-3-Clause"
] | null | null | null | from django.contrib.gis.gdal import GDALRaster, OGRGeometry
from django.test import TestCase
from raster.rasterize import rasterize
class RasterizeGeometryTests(TestCase):
def setUp(self):
self.rast = GDALRaster({
'datatype': 1,
'driver': 'MEM',
'width': 2,
'height': 2,
'nr_of_bands': 1,
'srid': 3086,
'origin': (500000, 400000),
'scale': (100, -100),
'skew': (0, 0),
'bands': [{
'nodata_value': 10,
'data': range(4)
}],
})
def test_covering_geom_rasterization(self):
geom = OGRGeometry.from_bbox(self.rast.extent)
geom.srid = 3086
result = rasterize(geom, self.rast)
self.assertEqual(result.bands[0].data().ravel().tolist(), [1, 1, 1, 1])
self.assertEqual(result.geotransform, self.rast.geotransform)
self.assertEqual(result.srs.wkt, self.rast.srs.wkt)
def test_half_covering_geom_rasterization(self):
geom = OGRGeometry.from_bbox((500000.0, 399800.0, 500200.0, 399900.0))
geom.srid = 3086
result = rasterize(geom, self.rast)
self.assertEqual(result.bands[0].data().ravel().tolist(), [0, 0, 1, 1])
| 32.769231 | 79 | 0.571205 | 145 | 1,278 | 4.951724 | 0.406897 | 0.066852 | 0.116992 | 0.08078 | 0.370474 | 0.370474 | 0.370474 | 0.370474 | 0.225627 | 0.225627 | 0 | 0.084615 | 0.28795 | 1,278 | 38 | 80 | 33.631579 | 0.704396 | 0 | 0 | 0.125 | 0 | 0 | 0.061815 | 0 | 0 | 0 | 0 | 0 | 0.125 | 1 | 0.09375 | false | 0 | 0.09375 | 0 | 0.21875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f0b38a7be3883ab570ebdb5fd63d7f0c704c7d9 | 340 | py | Python | array/shuffle_the_array.py | elenaborisova/LeetCode-Solutions | 98376aab7fd150a724e316357ae5ea46988d9eac | [
"MIT"
] | null | null | null | array/shuffle_the_array.py | elenaborisova/LeetCode-Solutions | 98376aab7fd150a724e316357ae5ea46988d9eac | [
"MIT"
] | null | null | null | array/shuffle_the_array.py | elenaborisova/LeetCode-Solutions | 98376aab7fd150a724e316357ae5ea46988d9eac | [
"MIT"
] | null | null | null | def shuffle(nums, n):
shuffled_array = []
i = 0
while len(shuffled_array) < len(nums):
shuffled_array.append(nums[i])
shuffled_array.append(nums[i + n])
i += 1
return shuffled_array
print(shuffle([2, 5, 1, 3, 4, 7], 3))
print(shuffle([1, 2, 3, 4, 4, 3, 2, 1], 4))
print(shuffle([1, 1, 2, 2], 2))
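# An equivalent formulation of the interleaving using zip(); this is a sketch
# of an alternative, not part of the original solution. Both versions run in
# O(n) time with O(n) extra space.
def shuffle_zip(nums, n):
    return [value for pair in zip(nums[:n], nums[n:]) for value in pair]

assert shuffle_zip([2, 5, 1, 3, 4, 7], 3) == [2, 3, 5, 4, 1, 7]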
| 21.25 | 43 | 0.558824 | 57 | 340 | 3.245614 | 0.333333 | 0.351351 | 0.205405 | 0.248649 | 0.259459 | 0 | 0 | 0 | 0 | 0 | 0 | 0.090909 | 0.255882 | 340 | 15 | 44 | 22.666667 | 0.640316 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0 | 0 | 0.181818 | 0.272727 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
8f0ecc69fcbb900b73340911f3b2c0dbdb93c8a1 | 6,577 | py | Python | otcs.py | neckro/mr-otcs | 5782f3664afb7213729e207881ae855fb60e43a0 | [
"MIT"
] | null | null | null | otcs.py | neckro/mr-otcs | 5782f3664afb7213729e207881ae855fb60e43a0 | [
"MIT"
] | null | null | null | otcs.py | neckro/mr-otcs | 5782f3664afb7213729e207881ae855fb60e43a0 | [
"MIT"
] | null | null | null | import datetime
import errno
import itertools
import os
import subprocess
import sys
###############################################################################
# Configuration.
# Program paths. Use absolute paths.
MEDIA_PLAYER_PATH = "/usr/bin/vlc"
FFPROBE_PATH = "/usr/bin/ffprobe"
# Base path for all video files, including trailing slash.
BASE_PATH = "/media/videos/"
# This path will also contain play_index.txt and play_history.txt.
# Video files, including subdirectories.
MEDIA_PLAYLIST = ['video1.mp4','video2.mp4','Series/E01.mp4']
# Number of videos to keep in history log, saved in play_history.txt in
# BASE_PATH. Set to 0 to disable.
PLAY_HISTORY_LENGTH = 100
# Path for HTML schedule written by write_schedule().
# See template.html for the file to be read by this script.
# Set to None to disable writing schedule.
SCHEDULE_PATH = "/var/www/schedule.html"
# Number of upcoming shows to write in schedule.
# High settings can cause delays in playing next file.
# Setting too high can cause MemoryError.
SCHEDULE_UPCOMING_LENGTH = 10
###############################################################################
# Function definitions.
def get_length(file):
"""Run ffprobe and retrieve length of file."""
result = subprocess.run([FFPROBE_PATH,"-v","error","-select_streams","v:0",
"-show_entries","stream=duration","-of",
"default=noprint_wrappers=1:nokey=1",file],
capture_output=True,text=True).stdout
return result
def write_schedule(file_list,previous_file = None):
"""
Write an HTML file containing file names and lengths read from a list
containing video file paths. Optionally, include the most recently played
file as well.
"""
# next_time contains start times of upcoming videos.
# For the first file in file_list, this is the current system time.
# Time is retrieved in UTC, to be converted to user's local time when
# they load the schedule in their browser.
next_time = datetime.datetime.utcnow()
coming_up_next = []
for filename in file_list:
# Get length of next video in seconds from ffprobe.
duration = float(get_length(os.path.join(BASE_PATH,filename)))
# Remove .mp4 extension from file names and convert backslashes to
# forward slashes.
filename = os.path.splitext(filename)[0].replace("\\","/")
# Append duration and stripped filename to list as tuple.
coming_up_next.append((next_time,filename))
# Add length of current video to current time and use as starting time
# for next video. Format to ISO 8601 string for Day.js.
next_time = next_time + datetime.timedelta(seconds=duration)
# Format coming_up_next list into string suitable for assigning as
# JavaScript array of objects.
js_array = "[" + ",".join(["{{time:'{}',name:' {}'}}".format(i,n) for i,n in coming_up_next]) + "]"
# Generate HTML contents.
with open(os.path.join(sys.path[0],"template.html"),"r") as html_template:
html_contents = html_template.read()
html_contents = html_contents.format(js_array=js_array)
with open(SCHEDULE_PATH,"w") as html_file:
html_file.write(html_contents)
###############################################################################
# Main loop.
# Keep playlist index and store in file play_index.txt. Create it if it does
# not exist.
try:
with open(os.path.join(BASE_PATH,"play_index.txt"),"r") as index_file:
play_index = int(index_file.read())
except FileNotFoundError:
with open(os.path.join(BASE_PATH,"play_index.txt"),"w") as index_file:
index_file.write("0")
play_index = 0
# Loop over playlist indefinitely.
while True:
if play_index < len(MEDIA_PLAYLIST):
video_time = datetime.datetime.now()
video_file = MEDIA_PLAYLIST[play_index]
video_file_fullpath = os.path.join(BASE_PATH,video_file)
# Check if video_file exists and raise exception if it does not.
if not os.path.isfile(video_file_fullpath):
raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
video_file_fullpath)
# Write history of played video files and timestamps, limited to
# PLAY_HISTORY_LENGTH.
        if PLAY_HISTORY_LENGTH > 0:
            # Start with an empty history on the first run instead of
            # crashing when play_history.txt does not exist yet.
            try:
                with open(os.path.join(BASE_PATH,"play_history.txt"),"r") as play_history:
                    play_history_buffer = play_history.readlines()
            except FileNotFoundError:
                play_history_buffer = []
            with open(os.path.join(BASE_PATH,"play_history.txt"),"w+") as play_history:
                play_history_buffer.append("{},{}\n".format(video_time,video_file))
                play_history.writelines(play_history_buffer[-PLAY_HISTORY_LENGTH:])
# TODO: Write schedule in second thread.
# If HTML schedule writing is enabled, retrieve next videos in list up
# to SCHEDULE_UPCOMING_LENGTH and pass to write_schedule.
        if SCHEDULE_PATH is not None:
# Copy of media list sliced from current video to the end.
media_progress = MEDIA_PLAYLIST[play_index:]
# Pass sliced list to write_schedule.
if len(media_progress) >= SCHEDULE_UPCOMING_LENGTH:
media_copy = media_progress[:SCHEDULE_UPCOMING_LENGTH + 1]
# If media_progress is shorter than SCHEDULE_UPCOMING_LENGTH, copy
# full media playlist until the correct length is reached.
else:
media_copy = media_progress + list(
itertools.islice(itertools.cycle(MEDIA_PLAYLIST),
SCHEDULE_UPCOMING_LENGTH
- len(media_progress) + 1))
write_schedule(media_copy,
previous_file=MEDIA_PLAYLIST[play_index - 1])
# TODO: Delay playback for several seconds to account for window capture
# delay.
print("Now playing: " + video_file)
result = subprocess.run([MEDIA_PLAYER_PATH,video_file_fullpath,"--play-and-exit"])
# Increment play_index and write play_index.txt in BASE_PATH.
play_index = play_index + 1
with open(os.path.join(BASE_PATH,"play_index.txt"),"w") as index_file:
index_file.write(str(play_index))
else:
# Reset index at end of playlist.
play_index = 0
with open(os.path.join(BASE_PATH,"play_index.txt"),"w") as index_file:
index_file.write("0")
| 39.383234 | 103 | 0.636764 | 854 | 6,577 | 4.728337 | 0.289227 | 0.040119 | 0.022288 | 0.027737 | 0.145864 | 0.09262 | 0.077761 | 0.077761 | 0.077761 | 0.077266 | 0 | 0.006401 | 0.239927 | 6,577 | 166 | 104 | 39.620482 | 0.80136 | 0.35168 | 0 | 0.121622 | 0 | 0 | 0.092335 | 0.014166 | 0.013514 | 0 | 0 | 0.006024 | 0 | 1 | 0.027027 | false | 0 | 0.081081 | 0 | 0.121622 | 0.027027 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f0f210fb96be3418eb569e273c87bbeadbc980a | 4,674 | py | Python | src/lambda_functions/lex_v2_cfn_cr/lex_v2_cfn_cr/slot.py | mohsenari/aws-lex-v2-cfn-cr | 619b223d5b6fb4561ca3adb4c278ad03cc978cf0 | [
"Apache-2.0"
] | 11 | 2021-06-24T23:23:16.000Z | 2021-09-07T16:38:01.000Z | src/lambda_functions/lex_v2_cfn_cr/lex_v2_cfn_cr/slot.py | mohsenari/aws-lex-v2-cfn-cr | 619b223d5b6fb4561ca3adb4c278ad03cc978cf0 | [
"Apache-2.0"
] | 3 | 2021-09-23T00:07:36.000Z | 2021-11-24T00:29:33.000Z | src/lambda_functions/lex_v2_cfn_cr/lex_v2_cfn_cr/slot.py | mohsenari/aws-lex-v2-cfn-cr | 619b223d5b6fb4561ca3adb4c278ad03cc978cf0 | [
"Apache-2.0"
] | 4 | 2021-07-11T02:46:36.000Z | 2022-01-13T22:47:39.000Z | #!/usr/bin/env python3.8
################################################################################
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"). #
# You may not use this file except in compliance with the License. #
# A copy of the License is located at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# or in the 'license' file accompanying this file. This file is distributed #
# on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express #
# or implied. See the License for the specific language governing #
# permissions and limitations under the License. #
################################################################################
"""Amazon Lex CloudFormation Custom Resource Slot Manager"""
import logging
from typing import Any, Dict, Optional, TYPE_CHECKING
import boto3
from .shared.api import get_api_parameters
if TYPE_CHECKING:
from mypy_boto3_lexv2_models import LexModelsV2Client
from mypy_boto3_lexv2_models.type_defs import (
CreateSlotResponseTypeDef,
UpdateSlotResponseTypeDef,
)
else:
LexModelsV2Client = object
CreateSlotResponseTypeDef = object
UpdateSlotResponseTypeDef = object
class Slot:
"""Lex V2 CloudFormation Custom Resource Slot"""
def __init__(
self,
client: Optional[LexModelsV2Client] = None,
logger: Optional[logging.Logger] = None,
):
self._client = client or boto3.client("lexv2-models")
self._logger = logger or logging.getLogger(__name__)
def get_slot_id(
self,
bot_id: str,
bot_version: str,
intent_id: str,
locale_id: str,
slot_name: str,
) -> str:
"""Get Slot ID from Name"""
list_slots_args: Dict[str, Any] = dict(
botId=bot_id,
botVersion=bot_version,
localeId=locale_id,
intentId=intent_id,
filters=[
{
"name": "SlotName",
"values": [slot_name],
"operator": "EQ",
}
],
sortBy={
"attribute": "SlotName",
"order": "Ascending",
},
)
while True:
response = self._client.list_slots(**list_slots_args)
self._logger.debug(response)
slot_summaries = response["slotSummaries"]
slot_id = slot_summaries[0]["slotId"] if slot_summaries else ""
if slot_id:
break
next_token = response.get("nextToken")
if next_token:
list_slots_args["nextToken"] = next_token
else:
break
        if not slot_id:
            self._logger.warning("could not find slot named: %s", slot_name)
return slot_id
def create_slot(self, input_parameters: Dict[str, Any]) -> CreateSlotResponseTypeDef:
"""Create Slot"""
operation = "CreateSlot"
operation_parameters = get_api_parameters(
operation=operation,
input_parameters=input_parameters,
client=self._client,
logger=self._logger,
)
response = self._client.create_slot(**operation_parameters)
self._logger.debug(response)
return response
def delete_slot(self, input_parameters: Dict[str, Any]) -> None:
"""Delete Slot"""
operation = "DeleteSlot"
operation_parameters = get_api_parameters(
operation=operation,
input_parameters=input_parameters,
client=self._client,
logger=self._logger,
)
self._client.delete_slot(**operation_parameters)
def update_slot(self, input_parameters: Dict[str, Any]) -> UpdateSlotResponseTypeDef:
"""Update Slot"""
operation = "UpdateSlot"
operation_parameters = get_api_parameters(
operation=operation,
input_parameters=input_parameters,
client=self._client,
logger=self._logger,
)
response = self._client.update_slot(**operation_parameters)
self._logger.debug(response)
return response
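# A minimal usage sketch (the identifiers below are placeholders, not real
# Lex resources):
#
# slot_manager = Slot()
# slot_id = slot_manager.get_slot_id(
#     bot_id="BOTID12345",
#     bot_version="DRAFT",
#     intent_id="INTENTID12",
#     locale_id="en_US",
#     slot_name="PizzaSize",
# )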
| 34.367647 | 89 | 0.537441 | 431 | 4,674 | 5.614849 | 0.345708 | 0.03719 | 0.026446 | 0.028512 | 0.267769 | 0.247934 | 0.247934 | 0.207025 | 0.207025 | 0.157438 | 0 | 0.005921 | 0.349594 | 4,674 | 135 | 90 | 34.622222 | 0.790132 | 0.232135 | 0 | 0.27957 | 0 | 0 | 0.04994 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.053763 | false | 0 | 0.064516 | 0 | 0.16129 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f0f7ac7b18d123aa39d6ab27ba2f2362e38f9ef | 3,939 | py | Python | pyiomica/visibilityGraphAuxiliaryFunctions.py | benstear/pyiomica | bc26032b610fc911cc03b54115d6abdf53a56fce | [
"MIT"
] | 12 | 2019-11-22T21:44:13.000Z | 2022-03-06T21:46:31.000Z | pyiomica/visibilityGraphAuxiliaryFunctions.py | benstear/pyiomica | bc26032b610fc911cc03b54115d6abdf53a56fce | [
"MIT"
] | null | null | null | pyiomica/visibilityGraphAuxiliaryFunctions.py | benstear/pyiomica | bc26032b610fc911cc03b54115d6abdf53a56fce | [
"MIT"
] | 5 | 2019-07-25T21:03:42.000Z | 2021-06-09T02:14:30.000Z | '''Functions to generate adjacency matrix of visibility graphs'''
import numba
numba.config.NUMBA_DEFAULT_NUM_THREADS = 4
from .globalVariables import *
@numba.jit(cache=True)
def getAdjacencyMatrixOfNVG(data, times):
"""Calculate adjacency matrix of visibility graph.
JIT-accelerated version (a bit faster than NumPy-accelerated version).
Allows use of Multiple CPUs.
Parameters:
        data: 1d numpy.array
Numpy array of floats
times: 1d numpy.array
Numpy array of floats
Returns:
2d numpy.array
Adjacency matrix
Usage:
A = getAdjacencyMatrixOfNVG(data, times)
"""
dimension = len(data)
V = np.zeros((dimension,dimension))
for i in range(dimension):
for j in range(i + 1, dimension):
V[i,j] = V[j,i] = (data[i] - data[j]) / (times[i] - times[j])
A = np.zeros((dimension,dimension))
for i in range(dimension):
for j in range(i + 1, dimension):
no_conflict = True
for a in list(range(i+1,j)):
if V[a,i] > V[j,i]:
no_conflict = False
break
if no_conflict:
A[i,j] = A[j,i] = 1
return A
def getAdjacencyMatrixOfNVGbyNUMPY(data, times):
"""Calculate adjacency matrix of visibility graph.
NumPy-accelerated version. Somewhat slower than JIT-accelerated version.
Use in serial applications.
Parameters:
        data: 1d numpy.array
Numpy array of floats
times: 1d numpy.array
Numpy array of floats
Returns:
2d numpy.array
Adjacency matrix
Usage:
A = getAdjacencyMatrixOfNVGbyNUMPY(data, times)
"""
dimension = len(data)
    # The identity matrix keeps the diagonal of the denominator non-zero;
    # the slope of a point to itself is never used when building A below.
    V = (np.subtract.outer(data, data))/(np.subtract.outer(times, times) + np.identity(dimension))
A = np.zeros((dimension,dimension))
for i in range(dimension):
if i<dimension-1:
A[i,i+1] = A[i+1,i] = 1
for j in range(i + 2, dimension):
if np.max(V[i+1:j,i])<=V[j,i]:
A[i,j] = A[j,i] = 1
return A
@numba.jit(cache=True)
def getAdjacencyMatrixOfHVG(data):
"""Calculate adjacency matrix of horizontal visibility graph.
JIT-accelerated version (a bit faster than NumPy-accelerated version).
Single-threaded beats NumPy up to 2k data sizes.
Allows use of Multiple CPUs.
Parameters:
        data: 1d numpy.array
Numpy array of floats
Returns:
2d numpy.array
Adjacency matrix
Usage:
A = getAdjacencyMatrixOfHVG(data)
"""
A = np.zeros((len(data),len(data)))
for i in range(len(data)):
for j in range(i + 1, len(data)):
no_conflict = True
for a in list(range(i+1,j)):
if data[a] > data[i] or data[a] > data[j]:
no_conflict = False
break
if no_conflict:
A[i,j] = A[j,i] = 1
return A
def getAdjacencyMatrixOfHVGbyNUMPY(data):
"""Calculate adjacency matrix of horizontal visibility graph.
NumPy-accelerated version.
Use with datasets larger than 2k.
Use in serial applications.
Parameters:
        data: 1d numpy.array
Numpy array of floats
Returns:
2d numpy.array
Adjacency matrix
Usage:
A = getAdjacencyMatrixOfHVGbyNUMPY(data)
"""
dimension = len(data)
A = np.zeros((dimension,dimension))
for i in range(dimension):
if i<dimension-1:
A[i,i+1] = A[i+1,i] = 1
for j in range(i + 2, dimension):
if np.max(data[i+1:j])<=min(data[i], data[j]):
A[i,j] = A[j,i] = 1
return A
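# A small worked example for the horizontal visibility graph. For the series
# [1, 3, 2, 4], nodes 0-1, 1-2 and 2-3 are adjacent, and node 1 also "sees"
# node 3 because the value 2 between them is below min(3, 4); node 0 cannot
# see nodes 2 or 3 because the value 3 at node 1 blocks the horizontal line.
#
# import numpy as np
# A = getAdjacencyMatrixOfHVGbyNUMPY(np.array([1., 3., 2., 4.]))
# # edges: (0,1), (1,2), (2,3), (1,3)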
| 24.465839 | 99 | 0.550394 | 491 | 3,939 | 4.397149 | 0.175153 | 0.015748 | 0.044465 | 0.055581 | 0.694766 | 0.654933 | 0.648912 | 0.6239 | 0.540528 | 0.527559 | 0 | 0.013286 | 0.350343 | 3,939 | 160 | 100 | 24.61875 | 0.830403 | 0.387408 | 0 | 0.690909 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.072727 | false | 0 | 0.036364 | 0 | 0.181818 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8f10177907a54de871e4ea66e85d002d100e36d1 | 1,557 | py | Python | turkApi/reject_assignments.py | arunchaganty/kbp-online | 9f8763d8f4bfb1fb8a01f1f4f506f56625dd38d8 | [
"MIT"
] | 4 | 2017-08-09T14:05:48.000Z | 2018-12-25T01:34:23.000Z | turkApi/reject_assignments.py | arunchaganty/kbp-online | 9f8763d8f4bfb1fb8a01f1f4f506f56625dd38d8 | [
"MIT"
] | 12 | 2017-01-19T23:18:18.000Z | 2018-12-23T18:57:54.000Z | turkApi/reject_assignments.py | arunchaganty/kbp-online | 9f8763d8f4bfb1fb8a01f1f4f506f56625dd38d8 | [
"MIT"
] | 2 | 2017-08-08T09:48:20.000Z | 2018-07-09T09:12:43.000Z | import boto
from boto.mturk.connection import MTurkConnection
from boto.mturk.question import ExternalQuestion
from connection import connect
import urllib
import argparse
import ConfigParser
import sys, os
import time
import pandas as pd
parser = argparse.ArgumentParser()
parser.add_argument('answers_file', nargs=1, type=argparse.FileType('r'), default=sys.stdin, help="File or stdin containing the answers to review")
parser.add_argument('config_file', type=str, help="Config file containing parameters to spin the batch")
args = parser.parse_args()
config = ConfigParser.ConfigParser()
config.read(args.config_file)
mtc = connect(config.get('default', 'target'))
answers_file = pd.read_csv(args.answers_file[0], sep='\t')
for assignmentId, answer in zip(answers_file['assignmentId'], answers_file[config.get('default', 'answer_field')]):
while True:
try:
# Note: Python 2.x users should use raw_input, the equivalent of 3.x's input
print "Answer: ", answer
response = raw_input("Reject assignment (y/n)?")
except ValueError:
print("Sorry, I didn't understand that.")
#better try again... Return to the start of the loop
continue
else:
if response == 'y' or response == 'Y':
print "Rejected"
mtc.reject_assignment(assignmentId)
break
elif response == 'n' or response == 'N':
print "Not rejected"
break
else:
continue
| 35.386364 | 141 | 0.652537 | 192 | 1,557 | 5.213542 | 0.520833 | 0.054945 | 0.025974 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003416 | 0.247913 | 1,557 | 43 | 142 | 36.209302 | 0.851409 | 0.080283 | 0 | 0.166667 | 0 | 0 | 0.174614 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.277778 | null | null | 0.111111 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8f104b5661836f5be7edc0ffb289f0055a1c9b2f | 2,265 | py | Python | api/models.py | DominikWolek/survey | 358794ea1ed81ba7ba2ffe4a0897d9867455cf31 | [
"MIT"
] | null | null | null | api/models.py | DominikWolek/survey | 358794ea1ed81ba7ba2ffe4a0897d9867455cf31 | [
"MIT"
] | null | null | null | api/models.py | DominikWolek/survey | 358794ea1ed81ba7ba2ffe4a0897d9867455cf31 | [
"MIT"
] | null | null | null | import datetime
from django.db import models
from django.contrib.postgres.fields import ArrayField
from django.utils import timezone
class Survey(models.Model):
name = models.CharField(max_length=200)
def __str__(self):
return self.name
def questions(self):
return self.question_set
class Question(models.Model):
value = models.CharField(max_length=200)
answer_type = models.CharField(max_length=20, default='single_select')
survey = models.ForeignKey(Survey, on_delete=models.CASCADE)
def __str__(self):
return self.value
def answers(self):
return self.answer_set
class Answer(models.Model):
value = models.CharField(max_length=200)
question = models.ForeignKey(Question, on_delete=models.CASCADE)
votes = models.IntegerField(default=0)
def __str__(self):
return self.value
class Day(models.Model):
    day = models.DateField('Day', default=timezone.localdate)
def __str__(self):
return str(self.day)
def rooms(self):
return self.place_set
class Place(models.Model):
day = models.ForeignKey(Day, on_delete=models.CASCADE)
room = models.CharField(max_length=100)
def __str__(self):
return str(self.room) + ' ' + str(self.day)
def lectures(self):
return self.lecture_set
def default_rates():
    """Callable default for Lecture.rates: five zeroed rating buckets."""
    return [0 for _ in range(5)]

class Lecture(models.Model):
name = models.CharField(max_length=200)
start_time = models.TimeField('Start time of lecture', default=timezone.localtime)
    end_time = models.TimeField('End time of lecture', default=timezone.localtime)
room = models.ForeignKey(Place, on_delete=models.CASCADE)
speaker = models.CharField(max_length=200)
    # The default must be a callable so each row gets its own fresh list
    # rather than all rows sharing one mutable default instance.
    rates = ArrayField(base_field=models.SmallIntegerField(blank=False), size=5, default=default_rates)
def __str__(self):
return self.name
def attendance(self):
return sum(self.rates)
def average_rate(self):
if self.attendance() == 0:
return 0
sum_of_rates = 0
for i in range(len(self.rates)):
sum_of_rates += (i + 1) * self.rates[i]
return sum_of_rates / self.attendance()
def rate_lecture(self, rate):
self.rates[rate - 1] += 1
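# A short usage sketch (assumes a Lecture row already exists):
#
# lecture = Lecture.objects.first()
# lecture.rate_lecture(5)   # one attendee rated it 5
# lecture.rate_lecture(3)   # another rated it 3
# lecture.save()
# lecture.attendance()      # -> 2
# lecture.average_rate()    # -> 4.0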
| 26.964286 | 117 | 0.671082 | 294 | 2,265 | 4.993197 | 0.268707 | 0.074932 | 0.076294 | 0.114441 | 0.286785 | 0.268392 | 0.152589 | 0.115804 | 0 | 0 | 0 | 0.017007 | 0.221192 | 2,265 | 83 | 118 | 27.289157 | 0.815193 | 0 | 0 | 0.245614 | 0 | 0 | 0.025166 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.22807 | false | 0 | 0.070175 | 0.192982 | 0.912281 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
8f10cb27e5a5f26016387bdf0a6b631a9bfd71b3 | 92 | py | Python | my_PP4E/system/adder3.py | BeacherHou/Python-_Markdown- | 015d79a02d32f49395b80ca10919b3a09b72c4df | [
"MIT"
] | null | null | null | my_PP4E/system/adder3.py | BeacherHou/Python-_Markdown- | 015d79a02d32f49395b80ca10919b3a09b72c4df | [
"MIT"
] | null | null | null | my_PP4E/system/adder3.py | BeacherHou/Python-_Markdown- | 015d79a02d32f49395b80ca10919b3a09b72c4df | [
"MIT"
] | null | null | null | import sys
sum_int = 0
for line_str in sys.stdin: sum_int += int(line_str)
print(sum_int)
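# Example invocation from a shell, one integer per line on stdin:
#   $ printf '1\n2\n3\n' | python3 adder3.py
#   6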
| 13.142857 | 51 | 0.73913 | 19 | 92 | 3.315789 | 0.578947 | 0.285714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012987 | 0.163043 | 92 | 6 | 52 | 15.333333 | 0.805195 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0.25 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
8f170c370bacf2243ee7c2b4a5545bf2f5612009 | 7,102 | py | Python | okcupyd_testing/util.py | sphericalcow/okcupyd | ae0a99d248c515eea9a6d21a9c89f51e299b33f5 | [
"MIT"
] | 89 | 2015-01-09T19:58:07.000Z | 2022-03-03T21:56:50.000Z | okcupyd_testing/util.py | sphericalcow/okcupyd | ae0a99d248c515eea9a6d21a9c89f51e299b33f5 | [
"MIT"
] | 51 | 2015-01-18T23:09:35.000Z | 2017-04-24T03:16:03.000Z | okcupyd_testing/util.py | sphericalcow/okcupyd | ae0a99d248c515eea9a6d21a9c89f51e299b33f5 | [
"MIT"
] | 24 | 2015-01-16T17:43:21.000Z | 2020-09-18T12:19:15.000Z | import copy
import inspect
import logging
import os
import zlib
from six.moves import urllib
import simplejson
import vcr
import wrapt
from okcupyd import settings
from okcupyd import util
log = logging.getLogger(__name__)
TESTING_USERNAME = 'username'
TESTING_PASSWORD = 'password'
WBITS = 16 + zlib.MAX_WBITS
SHOULD_SCRUB = False
REPLACEMENTS = []
REMOVE_OLD_CASSETTES = False
@wrapt.decorator
def check_should_scrub(function, instance, args, kwargs):
if SHOULD_SCRUB:
return function(*args)
else:
return args[0] # The request or response
@util.curry
def remove_headers(request, headers_to_remove=()):
headers = copy.copy(request.headers)
headers_to_remove = [h.lower() for h in headers_to_remove]
keys = [k for k in headers if k.lower() in headers_to_remove]
if keys:
for k in keys:
headers.pop(k)
request.headers = headers
return request
def scrub_request_body(request):
if urllib.parse.urlsplit(request.uri).path == '/login':
request.body = scrub_query_string(request.body)
request.uri = scrub_uri(request.uri)
return request
def scrub_uri(uri):
replaced = util.replace_all_case_insensitive(uri, settings.USERNAME,
TESTING_USERNAME)
return util.replace_all_case_insensitive(replaced, settings.PASSWORD,
TESTING_PASSWORD)
def scrub_query_string(query_string):
request_dict = urllib.parse.parse_qs(query_string)
if 'password' not in request_dict:
return query_string
for key in request_dict:
request_dict[key] = request_dict[key][0]
request_dict['username'] = TESTING_USERNAME
request_dict['password'] = TESTING_PASSWORD
return urllib.parse.urlencode(request_dict)
def gzip_string(incoming):
if isinstance(incoming, str) and bytes is not str:
incoming = bytes(incoming, 'utf8')
else:
incoming = incoming.encode('utf8')
compress_object = zlib.compressobj(6, zlib.DEFLATED, WBITS)
start = compress_object.compress(incoming)
end = compress_object.flush()
if not isinstance(start, str):
return start + end
return ''.join([start, end])
def scrub_response_headers(response):
for item in ('location', 'Location'):
if item in response['headers']:
response['headers'][item] = [scrub_uri(uri)
for uri in response['headers'][item]]
return response
def replace_json_fields(body):
try:
response_dict = simplejson.loads(body)
    except Exception:
return body
if 'screenname' not in response_dict:
return body
if response_dict['screenname'] is not None:
response_dict['screenname'] = TESTING_USERNAME
response_dict['userid'] = 1
response_dict['thumbnail'] = ''
return simplejson.dumps(response_dict)
def scrub_response(response):
if not SHOULD_SCRUB:
return response
response = response.copy()
response = scrub_response_headers(response)
body = response['body']['string']
try:
body = zlib.decompress(response['body']['string'], WBITS).decode('utf8')
    except Exception:
should_recompress = False
else:
should_recompress = True
body = replace_json_fields(body)
body = util.replace_all_case_insensitive(body, settings.USERNAME,
TESTING_USERNAME)
if should_recompress:
body = gzip_string(body)
response['body']['string'] = body
return response
before_record = check_should_scrub(util.compose(
scrub_request_body, remove_headers(headers_to_remove=(
'Set-Cookie',
'Cookie'
))
))
def _maybe_decode(maybe_bytes):
try:
return maybe_bytes.decode('utf-8')
except (AttributeError, UnicodeDecodeError):
return maybe_bytes
def _match_search_query(left, right):
left_filter = set([value for param_name, value in left
if 'filter' in _maybe_decode(param_name)])
right_filter = set([value for param_name, value in right
if 'filter' in _maybe_decode(param_name)])
left_rest = set([(param_name, value) for param_name, value in left
if 'filter' not in _maybe_decode(param_name)])
right_rest = set([(param_name, value) for param_name, value in right
if 'filter' not in _maybe_decode(param_name)])
try:
log.info(simplejson.dumps(
{
'filter_differences': list(
left_filter.symmetric_difference(right_filter)
),
'rest_differences': list(
left_rest.symmetric_difference(right_rest)
),
}, encoding='utf-8'
))
except Exception as e:
log.warning(e)
return left_filter == right_filter and left_rest == right_rest
def match_search_query(left, right):
return _match_search_query(left.query, right.query)
def body_as_query_string(left, right):
if left.path == right.path and 'ajaxuploader' in left.path:
return True # We can't seem to handle matching photo uploads likely
# because of requests internals.
try:
left_qs_items = list(urllib.parse.parse_qs(left.body).items())
right_qs_items = list(urllib.parse.parse_qs(right.body).items())
except Exception as exc:
log.debug(exc)
return left.body == right.body
else:
left_qs_items = [(k, tuple(v)) for k, v in left_qs_items]
right_qs_items = [(k, tuple(v)) for k, v in right_qs_items]
return _match_search_query(left_qs_items, right_qs_items)
cassette_library_directory = os.path.join(os.path.dirname(os.path.dirname(__file__)),
'tests', 'vcr_cassettes')
okcupyd_vcr = vcr.VCR(match_on=('path', 'method', 'match_search_query',
'body_as_query_string'),
before_record=(before_record,),
before_record_response=scrub_response,
cassette_library_dir=cassette_library_directory,
path_transformer=vcr.VCR.ensure_suffix('.yaml'))
okcupyd_vcr.register_matcher('body_as_query_string', body_as_query_string)
okcupyd_vcr.register_matcher('match_search_query', match_search_query)
match_on_no_body = list(filter(lambda x: 'body' not in x, okcupyd_vcr.match_on))
@wrapt.adapter_factory
def add_request_to_signature(function):
argspec = inspect.getargspec(function)
return inspect.ArgSpec(argspec.args + ['request'], argspec.varargs, argspec.keywords, argspec.defaults)
@wrapt.decorator(adapter=add_request_to_signature)
def skip_if_live(function, instance, args, kwargs):
request = kwargs.pop('request')
if request.config.getoption('skip_vcrpy'):
log.debug("Skipping {0} because vcrpy is being skipped.".format(
function.__name__
))
else:
return function(*args, **kwargs)
use_cassette = okcupyd_vcr.use_cassette
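# A minimal sketch of how a test might consume the configured VCR object
# (the test name and body are placeholders):
#
# @use_cassette
# def test_profile_fetch():
#     ...  # HTTP traffic here is recorded to / replayed from tests/vcr_cassettes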
| 31.286344 | 107 | 0.656998 | 870 | 7,102 | 5.106897 | 0.222989 | 0.020257 | 0.025208 | 0.018006 | 0.141796 | 0.111411 | 0.087779 | 0.064821 | 0.049516 | 0.018006 | 0 | 0.002248 | 0.248381 | 7,102 | 226 | 108 | 31.424779 | 0.830086 | 0.015207 | 0 | 0.177143 | 0 | 0 | 0.063949 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.085714 | false | 0.028571 | 0.062857 | 0.005714 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f199845284d48a07aa7f005ade6fd7c86f09c1e | 68 | py | Python | clips/training_session/plotsequence.py | thomasbazeille/public_protocols | 8d8dd051eda7eec2b8358dae42ab363b7d83e1d0 | [
"BSD-3-Clause"
] | 3 | 2019-09-19T13:06:59.000Z | 2021-07-03T18:09:32.000Z | clips/training_session/plotsequence.py | thomasbazeille/public_protocols | 8d8dd051eda7eec2b8358dae42ab363b7d83e1d0 | [
"BSD-3-Clause"
] | 2 | 2017-11-30T19:32:24.000Z | 2020-09-03T19:40:13.000Z | clips/training_session/plotsequence.py | thomasbazeille/public_protocols | 8d8dd051eda7eec2b8358dae42ab363b7d83e1d0 | [
"BSD-3-Clause"
] | 3 | 2019-09-19T13:07:10.000Z | 2021-01-14T16:07:16.000Z | import numpy as np
import sys
import pylab as pl
_, f = sys.argv
| 8.5 | 18 | 0.705882 | 13 | 68 | 3.615385 | 0.692308 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 68 | 7 | 19 | 9.714286 | 0.921569 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
8f1d431916ff72728e735e8e734dd11974fd8bb1 | 1,474 | py | Python | utils/checkpoint.py | Jackson-Kang/VQVC-Pytorch | d2267b5c52253b6ae11a5767963a65320ae335c2 | [
"MIT"
] | 13 | 2021-02-11T17:48:40.000Z | 2022-02-08T06:37:12.000Z | utils/checkpoint.py | Jackson-Kang/VQVC-Pytorch | d2267b5c52253b6ae11a5767963a65320ae335c2 | [
"MIT"
] | 1 | 2022-01-17T17:07:22.000Z | 2022-01-18T06:51:21.000Z | utils/checkpoint.py | Jackson-Kang/VQVC-Pytorch | d2267b5c52253b6ae11a5767963a65320ae335c2 | [
"MIT"
] | 3 | 2021-03-10T08:40:00.000Z | 2022-01-17T17:08:48.000Z | import torch
import os
import glob
from .path import create_dir, get_path
def load_checkpoint(checkpoint_path, model, optimizer=None, scheduler=None):

    if optimizer is not None:
        if not os.path.exists(checkpoint_path):
            # Nothing to restore yet; return early so the final print below
            # never references an undefined last_model_path.
            print("[WARNING] No checkpoint exists. Start from scratch.")
            return 0

        print("[WARNING] Already exists. Restart to train model.")

        last_model_path = sorted(glob.glob(get_path(checkpoint_path, '*.pth.tar')))[-1]
        state = torch.load(last_model_path)

        model.load_state_dict(state['model'])
        global_step = state['global_step']
        optimizer.load_state_dict(state['optimizer'])
        scheduler.load_state_dict(state['scheduler'])
    else:
        last_model_path = sorted(glob.glob(get_path(checkpoint_path, '*.pth.tar')))[-1]
        state = torch.load(last_model_path)

        model.load_state_dict(state['model'])
        global_step = 0

    print("[WARNING] Model: {} has been loaded.".format(last_model_path.split("/")[-1].replace(".pth.tar", "")))

    return global_step
def save_checkpoint(checkpoint_path, global_step, model, optimizer, scheduler):
create_dir("/".join(checkpoint_path.split("/")[:-1]))
checkpoint_path = create_dir(checkpoint_path)
cur_checkpoint_name = "model-{:03d}k.pth.tar".format(global_step//1000)
state = {
'global_step': global_step,
'model': model.state_dict(),
'optimizer': optimizer.state_dict(),
'scheduler': scheduler.state_dict()
}
torch.save(state, get_path(checkpoint_path, cur_checkpoint_name))
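# A minimal usage sketch (model, optimizer and scheduler stand in for the
# caller's actual objects):
#
# step = load_checkpoint('checkpoints/vqvc', model, optimizer, scheduler)
# ... training loop ...
# save_checkpoint('checkpoints/vqvc', step, model, optimizer, scheduler)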
| 30.708333 | 111 | 0.729308 | 205 | 1,474 | 4.990244 | 0.263415 | 0.123167 | 0.063539 | 0.070381 | 0.29521 | 0.234604 | 0.234604 | 0.234604 | 0.234604 | 0.234604 | 0 | 0.009252 | 0.120081 | 1,474 | 47 | 112 | 31.361702 | 0.779491 | 0 | 0 | 0.294118 | 0 | 0 | 0.175832 | 0.014257 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058824 | false | 0 | 0.088235 | 0 | 0.176471 | 0.088235 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f1dda3eb63e47622769b82f7f990a3303f3a4ae | 2,514 | py | Python | ui/Pytest/test_LineEditMinimumMaximumController.py | MoisesHenr/OCEAN | e99c853893adc89652794ace62fcc8ffa78aa7ac | [
"MIT"
] | 15 | 2021-06-15T13:48:03.000Z | 2022-01-26T13:51:46.000Z | ui/Pytest/test_LineEditMinimumMaximumController.py | MoisesHenr/OCEAN | e99c853893adc89652794ace62fcc8ffa78aa7ac | [
"MIT"
] | 1 | 2021-07-04T02:58:29.000Z | 2021-07-04T02:58:29.000Z | ui/Pytest/test_LineEditMinimumMaximumController.py | MoisesHenr/OCEAN | e99c853893adc89652794ace62fcc8ffa78aa7ac | [
"MIT"
] | 2 | 2021-06-21T20:44:01.000Z | 2021-06-23T11:10:56.000Z | # Author: Moises Henrique Pereira
# This module tests the controller functions of the numerical-features component.
import pytest
import sys
from PyQt5 import QtWidgets
from ui.mainTest import StaticObjects
@pytest.mark.parametrize('featureName', [1, 2.9, False, ('t1', 't2'), None])
def test_CILEMMC_initializeView_wrong_type_featureName_parameter(featureName):
with pytest.raises(AssertionError):
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceLineEditMinimumMaximumController = StaticObjects.staticCounterfactualInterfaceLineEditMinimumMaximumController()
counterfactualInterfaceLineEditMinimumMaximumController.initializeView(featureName, 0, 1)
def test_CILEMMC_initializeView_none_min_parameter():
with pytest.raises(AssertionError):
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceLineEditMinimumMaximumController = StaticObjects.staticCounterfactualInterfaceLineEditMinimumMaximumController()
counterfactualInterfaceLineEditMinimumMaximumController.initializeView('featureName', None, 1)
def test_CILEMMC_initializeView_none_max_parameter():
with pytest.raises(AssertionError):
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceLineEditMinimumMaximumController = StaticObjects.staticCounterfactualInterfaceLineEditMinimumMaximumController()
counterfactualInterfaceLineEditMinimumMaximumController.initializeView('featureName', 0, None)
def test_CILEMMC_initializeView_right_parameters():
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceLineEditMinimumMaximumController = StaticObjects.staticCounterfactualInterfaceLineEditMinimumMaximumController()
counterfactualInterfaceLineEditMinimumMaximumController.initializeView('featureName', 0, 1)
def test_CILEMMC_setSelectedValue_none_parameter():
with pytest.raises(AssertionError):
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceLineEditMinimumMaximumController = StaticObjects.staticCounterfactualInterfaceLineEditMinimumMaximumController()
counterfactualInterfaceLineEditMinimumMaximumController.setSelectedValue(None)
def test_CILEMMC_setSelectedValue_right_parameter():
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceLineEditMinimumMaximumController = StaticObjects.staticCounterfactualInterfaceLineEditMinimumMaximumController()
counterfactualInterfaceLineEditMinimumMaximumController.setSelectedValue(0.5) | 55.866667 | 143 | 0.838902 | 175 | 2,514 | 11.891429 | 0.32 | 0.020183 | 0.040365 | 0.077847 | 0.825084 | 0.79433 | 0.769822 | 0.769822 | 0.769822 | 0.769822 | 0 | 0.006206 | 0.102625 | 2,514 | 45 | 144 | 55.866667 | 0.916223 | 0.050119 | 0 | 0.484848 | 0 | 0 | 0.020134 | 0 | 0 | 0 | 0 | 0 | 0.121212 | 1 | 0.181818 | false | 0 | 0.121212 | 0 | 0.30303 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8f1e2d5980fe5fec1c6bd18c363575e550ec387e | 1,055 | py | Python | retropongbuild/randomMusicplayer.py | MatthewAlgo/RetroPongBuild | 9ab6770928b7e48e638655acd7ac6bfdd7ae85c1 | [
"Unlicense"
] | null | null | null | retropongbuild/randomMusicplayer.py | MatthewAlgo/RetroPongBuild | 9ab6770928b7e48e638655acd7ac6bfdd7ae85c1 | [
"Unlicense"
] | null | null | null | retropongbuild/randomMusicplayer.py | MatthewAlgo/RetroPongBuild | 9ab6770928b7e48e638655acd7ac6bfdd7ae85c1 | [
"Unlicense"
] | null | null | null | import multiprocessing
import os
import random
import threading
import time
from multiprocessing import Process
import concurrent.futures
import pygame
import retropong
class WavPlayerRandom():
def __init__(self):
# th = threading.Thread(target=self.fileChooser(), args=(), daemon=True)
process = multiprocessing.Process(self.fileChooser(), args=())
main.threads.append(process)
def fileChooser(self):
print("Wav Player engine started")
while True:
if len(os.listdir("WAVSelectorDirectory")) != 0:
random_file = random.choice(os.listdir("WAVSelectorDirectory"))
soundObj = pygame.mixer.Sound(
f"WAVSelectorDirectory/{random_file}")
print(f"Loaded WAVSelectorDirectory/{random_file}")
soundObj.play()
timenow = time.time()
while True:
timethen = time.time()
if timethen - timenow > soundObj.get_length():
break
| 31.029412 | 80 | 0.605687 | 99 | 1,055 | 6.373737 | 0.515152 | 0.047544 | 0.060222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001355 | 0.300474 | 1,055 | 33 | 81 | 31.969697 | 0.853659 | 0.066351 | 0 | 0.074074 | 0 | 0 | 0.142421 | 0.069176 | 0 | 0 | 0 | 0 | 0 | 1 | 0.074074 | false | 0 | 0.333333 | 0 | 0.444444 | 0.074074 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
8f1edf521362329d1afd7290b5a0f0db4b9cc960 | 1,277 | py | Python | Interviews/goldmansachs/1802.Grid Game/Solution.py | Zhenye-Na/LxxxCode | afd79d790d0a7495d75e6650f80adaa99bd0ff07 | [
"MIT"
] | 12 | 2019-05-04T04:21:27.000Z | 2022-03-02T07:06:57.000Z | Interviews/goldmansachs/1802.Grid Game/Solution.py | Zhenye-Na/LxxxCode | afd79d790d0a7495d75e6650f80adaa99bd0ff07 | [
"MIT"
] | 1 | 2019-07-24T18:43:53.000Z | 2019-07-24T18:43:53.000Z | Interviews/goldmansachs/1802.Grid Game/Solution.py | Zhenye-Na/LxxxCode | afd79d790d0a7495d75e6650f80adaa99bd0ff07 | [
"MIT"
] | 10 | 2019-07-01T04:03:04.000Z | 2022-03-09T03:57:37.000Z | class Solution:
"""
@param grid: a 2D array of integers
@param rules: an array of strings
@param k: an integer that denotes the number of steps to perform
@return: return a grid
"""
ALIVE = []
dx = [1, 1, 1, 0, 0, -1, -1, -1]
dy = [-1, 0, 1, -1, 1, -1, 0, 1]
def GridGame(self, grid, rules, k):
# write your code here
if not grid or len(grid) == 0 or len(grid[0]) == 0:
return [[]]
n, m = len(grid), len(grid[0])
for idx, rule in enumerate(rules):
if rule == 'alive':
self.ALIVE.append(idx)
for _ in range(k):
print(grid)
new_grid = [[0 for _ in range(m)] for _ in range(n)]
for i in range(n):
for j in range(m):
if self.count(i, j, grid, n, m) in self.ALIVE:
new_grid[i][j] = 1
grid = new_grid
return grid
def count(self, i, j, grid, n, m):
count = 0
for idx in range(len(self.dx)):
temp_x = self.dx[idx] + i
temp_y = self.dy[idx] + j
if temp_x >= 0 and temp_x < n and temp_y >= 0 and temp_y < m and grid[temp_x][temp_y] == 1:
count += 1
return count
| 28.377778 | 103 | 0.47455 | 197 | 1,277 | 3.005076 | 0.284264 | 0.023649 | 0.02027 | 0.013514 | 0.027027 | 0 | 0 | 0 | 0 | 0 | 0 | 0.036411 | 0.397807 | 1,277 | 44 | 104 | 29.022727 | 0.73342 | 0.140172 | 0 | 0 | 0 | 0 | 0.004673 | 0 | 0 | 0 | 0 | 0.022727 | 0 | 1 | 0.071429 | false | 0 | 0 | 0 | 0.321429 | 0.035714 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8f21a46afa8fc39f3ecf728165185537c6f76296 | 1,712 | py | Python | lib/tsetmc_api/core/symbol.py | mahs4d/tsetmc-api | 4d7252b9e9aeda870e0340d7641aa244427a4ab1 | [
"MIT"
] | 18 | 2020-06-01T06:12:41.000Z | 2021-05-08T07:57:47.000Z | lib/tsetmc_api/core/symbol.py | mahs4d/tsetmc-api | 4d7252b9e9aeda870e0340d7641aa244427a4ab1 | [
"MIT"
] | 3 | 2020-08-07T11:25:53.000Z | 2021-04-09T12:37:00.000Z | lib/tsetmc_api/core/symbol.py | mahs4d/tsetmc-api | 4d7252b9e9aeda870e0340d7641aa244427a4ab1 | [
"MIT"
] | 6 | 2021-04-09T12:37:40.000Z | 2021-11-08T20:50:16.000Z | from datetime import date
import requests
from bs4 import BeautifulSoup
def get_symbol_details(symbol_id):
raw = requests.get(f'http://www.tsetmc.com/Loader.aspx?Partree=15131M&i={symbol_id}', timeout=20, verify=False).text
ret = {}
trs = BeautifulSoup(raw, 'lxml').find_all('tr')
for tr in trs:
tds = tr.find_all('td')
ret[tds[0].contents[0]] = str(tds[1].contents[0])
return ret
def get_daily_history(symbol_id):
daily_content = requests.get(
f'http://members.tsetmc.com/tsev2/data/InstTradeHistory.aspx?i={symbol_id}&Top=99999&A=0',
timeout=20, verify=False).text
raw_ticks = daily_content.split(';')
ticks = []
for raw_tick in raw_ticks:
if raw_tick == '':
continue
tick_data = raw_tick.split('@')
date_raw = tick_data[0]
high_price = tick_data[1]
low_price = tick_data[2]
close_price = tick_data[3]
last_price = tick_data[4]
first_price = tick_data[5]
yesterday_price = tick_data[6]
value = tick_data[7]
volume = tick_data[8]
count = tick_data[9]
ticks.append({
'date': date(year=int(date_raw[:4]), month=int(date_raw[4:6]), day=int(date_raw[6:])),
'first_price': int(first_price[:-3]),
'high_price': int(high_price[:-3]),
'low_price': int(low_price[:-3]),
'close_price': int(close_price[:-3]),
'last_price': int(last_price[:-3]),
'yesterday_price': int(yesterday_price[:-3]),
'value': int(float(value)),
'volume': int(float(volume)),
'count': int(float(count)),
})
return ticks
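# A brief usage sketch (the numeric id below is a placeholder for a real
# TSETMC instrument id):
#
# history = get_daily_history('46348559193224090')
# print(history[0]['date'], history[0]['close_price'])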
| 30.035088 | 120 | 0.586449 | 234 | 1,712 | 4.076923 | 0.350427 | 0.092243 | 0.081761 | 0.033543 | 0.050314 | 0 | 0 | 0 | 0 | 0 | 0 | 0.032385 | 0.260514 | 1,712 | 56 | 121 | 30.571429 | 0.721169 | 0 | 0 | 0 | 0 | 0.022727 | 0.142523 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.045455 | false | 0 | 0.068182 | 0 | 0.159091 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f231583cf8cbaed36b765c3c82ea90dabd12d76 | 975 | py | Python | shamester_api/handlers/new_website_handler.py | heynemann/shamester | b098c922be941037410d3c7b3214a9aecde67495 | [
"MIT"
] | 1 | 2015-01-25T13:13:23.000Z | 2015-01-25T13:13:23.000Z | shamester_api/handlers/new_website_handler.py | heynemann/shamester | b098c922be941037410d3c7b3214a9aecde67495 | [
"MIT"
] | null | null | null | shamester_api/handlers/new_website_handler.py | heynemann/shamester | b098c922be941037410d3c7b3214a9aecde67495 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
from tornado.web import RequestHandler, asynchronous
import tornado.gen
import motor
from ujson import loads, dumps
from shamester_api.models import Website
class NewWebsiteHandler(RequestHandler):
@property
def websites(self):
return self.application.mongo.websites
@asynchronous
@tornado.gen.coroutine
def post(self):
website = loads(self.request.body)
        if website.get('url', None) is None:
            self.write(dumps({
                "success": False,
                "reason": "Url is required!"
            }))
            # Finish and bail out; without this return the handler would fall
            # through and try to create a Website with a missing url.
            self.finish()
            return
website = Website(url=website['url'])
website_data = website.to_dict()
new_website = yield motor.Op(self.websites.insert, website_data)
self.application.redis.publish("new-website", website_data)
self.write(dumps({
"success": True,
"websiteId": str(new_website)
}))
self.finish()
| 23.780488 | 72 | 0.61641 | 107 | 975 | 5.551402 | 0.53271 | 0.055556 | 0.047138 | 0.070707 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001403 | 0.268718 | 975 | 40 | 73 | 24.375 | 0.831697 | 0.038974 | 0 | 0.148148 | 0 | 0 | 0.06631 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.074074 | false | 0 | 0.185185 | 0.037037 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f246adac761633444e7917b2e83510f4f23aa8d | 8,368 | py | Python | TM271A-ctrl.py | wb4bxo/TM-271A-ctrl | 45f9f931553b08f4bc1b30360c2c946b07b54074 | [
"Unlicense"
] | null | null | null | TM271A-ctrl.py | wb4bxo/TM-271A-ctrl | 45f9f931553b08f4bc1b30360c2c946b07b54074 | [
"Unlicense"
] | null | null | null | TM271A-ctrl.py | wb4bxo/TM-271A-ctrl | 45f9f931553b08f4bc1b30360c2c946b07b54074 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python3
import sys
import os
import time
import serial
# This is a quick and dirty control program for the Kenwood TM-271A and TM-281A
# transceiver to allow remote base like operations for use with Allstar or
# other digital modes. It is primarily targeted at the Raspberry Pi but being
# in Python allows it to be built and run on multiple platforms including
# Windows and Linux.
#
# This is targeting Python3 and you must install the pyserial libraries by
# issuing "pip3 install pyserial"
### Some global variables most for configuration and operation modes
usage = """
Arguments passed in can be:
ser xxx
Where xxx is the name for the serial port appropriate for the OS.
For example "ser COM3" for Windows or "ser /dev/tty0" for linux.
NOTE - must be first argument if used. Environment variable
"TM271Aser" or "TM281Aser: is read if it exists as the default
port to use.
mem xxx
Where xxx is up to a 3 digit memory number
vfo xxxxxxxxxx{-|+}
Where xxxxxxxxxx is the 10 digit frequency in Hz.
If the leading character is not "1" a zero is appended as the GHz value.
If 10 digits is not supplied, "0"s are appended to the end to 13 digits.
Thus you can enter 0147330000 or 14733 for the same thing.
The optional + or - sets the offset
This command clears any tone setting, set desired tone afterwards
tone {x}xx.x
Where {x}xx.x is a 2 or 3 digit whole number followed by a decimal.
For example tone 141.3
Note these must match exactly the standard tones
ctcss {x}xx.x
Where {x}xx.x is a 2 or 3 digit whole number followed by a decimal.
For example tone 141.3
Note these must match exactly the standard tones
pow [h|l]
Set transmit power to high or low (h or l)
freq
Read frequency from display suitable for use with TTS.
Multiple arguments can be passed like "mem 33 freq" to change to a memory
and read back what the frequency is. Or "vfo 147330+ tone 100.0".
"""
serialName = os.getenv("TM271Aser")
if serialName is None:
    serialName = os.getenv("TM281Aser")
if serialName is None:
    serialName = "/dev/ttyUSB0"
verbose = 0
radioID = ""
CTCSS_Tones = { # dictionary for tone to control number for the radio
"67.0" : "00",
"69.3" : "01",
"71.9" : "02",
"74.4" : "03",
"77.0" : "04",
"79.7" : "05",
"82.5" : "06",
"85.4" : "07",
"88.5" : "08",
"91.5" : "09",
"94.8" : "10",
"97.4" : "11",
"100.0" : "12",
"103.5" : "13",
"107.2" : "14",
"110.9" : "15",
"114.8" : "16",
"118.8" : "17",
"123.0" : "18",
"127.3" : "19",
"131.8" : "20",
"136.5" : "21",
"141.3" : "22",
"146.2" : "23",
"151.4" : "24",
"156.7" : "25",
"162.2" : "26",
"167.9" : "27",
"173.8" : "28",
"179.9" : "29",
"186.2" : "30",
"192.8" : "31",
"203.5" : "32",
"206.5" : "33",
"210.7" : "34",
"218.1" : "35",
"225.7" : "36",
"229.1" : "37",
"233.6" : "38",
"241.8" : "39",
"250.3" : "40",
"254.1" : "41"
}
### Some functions we'll use
# Send and check for same thing to echo, try to resync if needed.
def sendAndWait(data):
    cnt = 50
    while 1:
        if cnt == 0:
            return "ERR"
        cnt -= 1
        ser.read(1000)
        ser.write((data + "\r").encode())
        rtn = ser.readline().decode()
        if rtn[0:2] == data[0:2]:
            break
        # Sometimes the radio gets out of sync and will return ?, E or the tail of something else...
        # It has not taken the command if it doesn't echo it back.
        if verbose >= 2:
            print("Retrying - Sent: " + data + " Got: " + rtn)
        # time.sleep(0.25)
        ser.write(("\r").encode())
        ser.read(1000)  # force timeout to flush buffers
        ser.read(1000)  # force timeout to flush buffers
    if verbose >= 2:
        print(rtn)
    return rtn
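# Illustrative exchange (reply text is hypothetical): sendAndWait("MR 005")
# writes "MR 005\r" and accepts any reply whose first two characters echo
# "MR"; anything else (e.g. "?" or "E") takes the resync/retry path above.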
# Select a memory channel. Should be 3 digits but will fix it up if not
def memorySelect(mem):
    data = "VM 1"
    sendAndWait(data)
    if len(mem) > 3:  # sanity check in case more digits are passed in than the radio can handle
        mem = mem[-3:]  # keep the last 3 digits (the original "mem[-3]" kept only one character)
    while len(mem) < 3:  # radio requires 3 digit memory numbers
        mem = "0" + mem
    data = "MR " + mem
    sendAndWait(data)
    return
# Select and set the vfo frequency passed in as string.
# freq should be 10 digits as Hz. as in 0147330000
# An appended + or - is used to signify offset
# VF format: (spaces only to align with description, omit when sending to radio)
# 3 14 16 18 20 22 24 26 29 32 36 45 47
# VF 0147330000, 0, 0, 0, 1, 0, 0, 13, 13,056,00600000,0 ,0
# freq,step,shift,reverse,Tone,CTCSS,DCS,ENC,DEC,DCS,Offset ,Narrow,BeatShift
def vfoSelect(freq):
    data = "VM 0"
    sendAndWait(data)
    current = sendAndWait("VF")
    if current[-1] == "\r":
        current = current[0:-1]
    if freq[-1] == "-":
        shift = "2"
        freq = freq[0:-1]
    elif freq[-1] == "+":
        shift = "1"
        freq = freq[0:-1]
    else:
        shift = "0"
    if freq[0] != "0":
        freq = "0" + freq
    if len(freq) > 10:
        freq = freq[0:10]
    while len(freq) < 10:
        freq = freq + "0"
    data = current[0:3] + freq + ",0," + shift + current[17:20] + "0,0,0" + current[25:]
    sendAndWait(data)
    return
# Set the tone parameters for the current VFO setting. Reads what is in the radio,
# makes the changes, then writes it back.
# VF format: (spaces only to align with description, omit when sending to radio)
# 3 14 16 18 20 22 24 26 29 32 36 45 47
# VF 0147330000, 0, 0, 0, 1, 0, 0, 13, 13,056,00600000,0 ,0
# freq,step,shift,reverse,Tone,CTCSS,DCS,ENC,DEC,DCS,Offset ,Narrow,BeatShift
def vfoTone(toneFreq, tx, rx):
    if rx == 1:  # there can only be one
        tx = 0
    current = sendAndWait("VF")
    if current[-1] == "\r":
        current = current[0:-1]
    if toneFreq == "0":  # a tone of zero turns the tone off
        tx = 0
        rx = 0
        theToneNumber = "00"
    else:
        theToneNumber = CTCSS_Tones[toneFreq]
    if verbose >= 2:
        print("Tone set to: " + theToneNumber)
    data = current[0:20] + str(tx) + "," + str(rx) + ",0," + theToneNumber + "," + theToneNumber + current[31:]
    if verbose >= 2:
        print("Setting: " + data)
    sendAndWait(data)
    return
def powerSelect(pow):
    pow = pow.lower()[0:1]
    if pow == "h":
        sendAndWait("PC 0")
    elif pow == "l":
        sendAndWait("PC 2")
    return
# Read radio frequency
def getFreq():
    rtn = sendAndWait("FQ")
    # rtn will be "FQ 0147330000,0"
    mhz = rtn[4:7]
    khz = rtn[7:13]
    print(mhz + "." + khz)
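# For example, a reply of "FQ 0147330000,0" prints "147.330000", which a
# TTS engine can read back as the current frequency.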
# Initialize the serial port as global variable ser
def serialInit(serPort):
    ser = serial.Serial(
        port=serPort,  # Replace ttyS0 with ttyAM0 for Pi1, Pi2, Pi0
        baudrate=9600,
        parity=serial.PARITY_NONE,
        stopbits=serial.STOPBITS_ONE,
        bytesize=serial.EIGHTBITS,
        rtscts=False,
        timeout=0.100
    )
    time.sleep(0.5)  # mostly needed on Windows to allow the port to settle in the background
    return ser
#### Start of execution
i = 1
ser = None
if (len(sys.argv) > i) and ((sys.argv[i].lower())[0:2] == "-v"):
    # verbose must be first
    verbose = len(sys.argv[i]) - 1
    i += 1
    print("Verbose: " + str(verbose))
try:
    # serial init must happen first or second
    if (len(sys.argv) > i) and (sys.argv[i].lower() == "ser"):
        serialName = sys.argv[i+1]
        i += 2
    ser = serialInit(serialName)
    radioID = sendAndWait("ID")
except Exception:
    print("Could not open: " + serialName)
    sys.exit(1)
while i < len(sys.argv):
    if sys.argv[i].lower() == "mem":
        memorySelect(sys.argv[i+1])
        i += 2
    elif sys.argv[i].lower() == "vfo":
        vfoSelect(sys.argv[i+1])
        i += 2
    elif sys.argv[i].lower() == "tone":
        vfoTone(sys.argv[i+1], 1, 0)
        i += 2
    elif sys.argv[i].lower() == "ctcss":
        vfoTone(sys.argv[i+1], 0, 1)
        i += 2
    elif sys.argv[i].lower()[0:3] == "pow":
        powerSelect(sys.argv[i+1])
        i += 2
    elif sys.argv[i].lower()[0:4] == "freq":
        getFreq()
        i += 1
    elif sys.argv[i].lower() == "help":
        print(usage)
        break
    else:
        print("Error input: " + sys.argv[i])
        break
# while
if ser is not None:
    ser.close()
| 30.652015 | 112 | 0.58162 | 1,296 | 8,368 | 3.752315 | 0.320988 | 0.028789 | 0.031256 | 0.024059 | 0.229899 | 0.206046 | 0.196381 | 0.192474 | 0.177257 | 0.172733 | 0 | 0.097666 | 0.278083 | 8,368 | 272 | 113 | 30.764706 | 0.707333 | 0.275215 | 0 | 0.214286 | 0 | 0 | 0.31249 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.03125 | false | 0.008929 | 0.017857 | 0 | 0.080357 | 0.040179 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f247daa65827948d6dd176860fe6b66dc1abfcd | 2,347 | py | Python | snake/games/snake_gen.py | TeamSerpentine/retro-baselines | 9b2c725604496aca9c382a53f456d31fdbcaa5b1 | [
"BSD-3-Clause"
] | 2 | 2019-12-09T08:41:13.000Z | 2020-10-22T02:29:22.000Z | snake/games/snake_gen.py | TeamSerpentine/retro-baselines | 9b2c725604496aca9c382a53f456d31fdbcaa5b1 | [
"BSD-3-Clause"
] | null | null | null | snake/games/snake_gen.py | TeamSerpentine/retro-baselines | 9b2c725604496aca9c382a53f456d31fdbcaa5b1 | [
"BSD-3-Clause"
] | null | null | null |
import itertools
import numpy as np
from snake.objects.utils import Point
from snake.games.base_game import SnakeGame
from snake.boards.classic import Board
from snake.displays.single_image import SingleImage
class Snake(SnakeGame):
    """
    Classic snake game, which outputs a numpy ndarray of size (24,)
    containing, for each of 8 scan directions, the distance to the
    wall, the snake and the food.
    """

    def __init__(self):
        board = Board()
        render = SingleImage(board.width, board.height)
        self._image = np.zeros((board.width, board.height, 3), dtype=np.uint8)
        super().__init__(board, render)

    def obs(self):
        """
        Generates the output array.
        The output will be a (24,) numpy array, with 3 times 8 directions:
        wall distance, snake distance, food distance for
        ["UP", "DOWN", "LEFT", "LEFT UP", "LEFT DOWN", "RIGHT", "RIGHT UP", "RIGHT DOWN"]
        """
        object_types = [v for k, v in self.board.object_types.items() if k != "ground"]
        obs_directions = [x for x in itertools.product([0, 1, -1], repeat=2)][1:]
        obs_out = np.zeros((len(object_types), len(obs_directions)), dtype=int)  # np.int is deprecated
        snake = self.board.objects['snake'][0]
        for idx_direction, direction in enumerate(obs_directions):
            scan_direction = Point(*direction)
            object_found = False
            scan_counter = 1
            while not object_found:
                scan_x = snake.position.x + scan_direction.x * scan_counter
                scan_y = snake.position.y + scan_direction.y * scan_counter
                for idx_object, object_type in enumerate(object_types):
                    if isinstance(self.board.board[scan_x, scan_y], object_type):
                        obs_out[idx_object, idx_direction] = scan_counter
                        object_found = True
                scan_counter += 1
        return obs_out.flatten()

    def reward(self):
        """ Returns the number of apples eaten during the entire game. """
        snake = self.board.objects['snake'][0]
        return len(snake) - snake.LEN_SNAKE_START

    def render(self):
        obs = self.board._get_obs(attribute="colour")
        for x in range(obs.shape[0]):
            for y in range(obs.shape[1]):
                self._image[x, y] = obs[x, y]
        return self.display.render(self._image)
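
# Minimal usage sketch (illustrative, not part of the original module):
#   game = Snake()
#   assert game.obs().shape == (24,)  # 3 object types x 8 scan directions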
| 39.116667 | 93 | 0.607158 | 306 | 2,347 | 4.5 | 0.349673 | 0.039216 | 0.020334 | 0.030501 | 0.039216 | 0.039216 | 0 | 0 | 0 | 0 | 0 | 0.011926 | 0.285471 | 2,347 | 59 | 94 | 39.779661 | 0.809183 | 0.157648 | 0 | 0.051282 | 0 | 0 | 0.011721 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.102564 | false | 0 | 0.153846 | 0 | 0.358974 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f2505c47e5671579a2268dd78cbe3bc9dc699e6 | 448 | py | Python | mytest.py | qxcross/tdd-practice | 72d58bf99ab02decc570649fd8995f8a9ccb0d4f | [
"MIT"
] | null | null | null | mytest.py | qxcross/tdd-practice | 72d58bf99ab02decc570649fd8995f8a9ccb0d4f | [
"MIT"
] | null | null | null | mytest.py | qxcross/tdd-practice | 72d58bf99ab02decc570649fd8995f8a9ccb0d4f | [
"MIT"
] | null | null | null | import unittest
from practice import *
class myFirstTests(unittest.TestCase):
    def test_hello(self):
        self.assertEqual(hello_world(), 'hello world')

    def test_sorted(self):
        self.assertEqual(sorted_list(list(range(0, 11))), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])

    def test_even(self):
        self.assertEqual(even_num(4), True)

    def test_sorted_type(self):
        self.assertEqual(type(sorted_list([0, 1, 2, 3])), list)

unittest.main() | 24.888889 | 81 | 0.676339 | 69 | 448 | 4.26087 | 0.478261 | 0.095238 | 0.258503 | 0.027211 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.053619 | 0.167411 | 448 | 18 | 82 | 24.888889 | 0.734584 | 0 | 0 | 0 | 0 | 0 | 0.024499 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | false | 0 | 0.166667 | 0 | 0.583333 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 3 |
8f26c80988f27d122c2a8050884079763f1d447d | 309 | py | Python | 49. matplotlib 6 - Plotting from a csv file.py | JatinR05/Python-3-basics-series | e4b3d8056e2074602c9ed0cd201676484dd0d179 | [
"MIT"
] | 41 | 2015-05-12T12:49:35.000Z | 2021-07-13T11:07:09.000Z | 49. matplotlib 6 - Plotting from a csv file.py | JatinR05/Python-3-basics-series | e4b3d8056e2074602c9ed0cd201676484dd0d179 | [
"MIT"
] | null | null | null | 49. matplotlib 6 - Plotting from a csv file.py | JatinR05/Python-3-basics-series | e4b3d8056e2074602c9ed0cd201676484dd0d179 | [
"MIT"
] | 37 | 2016-10-13T04:02:09.000Z | 2021-12-16T18:28:27.000Z | from matplotlib import pyplot as plt
from matplotlib import style
import numpy as np
style.use('ggplot')
x,y = np.loadtxt('exampleFile.csv',
unpack=True,
delimiter = ',')
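# exampleFile.csv is expected to contain comma-separated x,y pairs, e.g.:
#   1,5
#   2,7
#   3,6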
plt.plot(x,y)
plt.title('Epic Info')
plt.ylabel('Y axis')
plt.xlabel('X axis')
plt.show()
| 13.434783 | 36 | 0.61165 | 44 | 309 | 4.295455 | 0.613636 | 0.148148 | 0.21164 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.239482 | 309 | 22 | 37 | 14.045455 | 0.804255 | 0 | 0 | 0 | 0 | 0 | 0.140984 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.25 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8f272df30ff4e1643b5772b92ea4e650ad48af7e | 473 | py | Python | hw5/close_p_q.py | rocke97/crypto | 89c4e595adf74558e12ceb1762025fd2f0275fec | [
"MIT"
] | null | null | null | hw5/close_p_q.py | rocke97/crypto | 89c4e595adf74558e12ceb1762025fd2f0275fec | [
"MIT"
] | null | null | null | hw5/close_p_q.py | rocke97/crypto | 89c4e595adf74558e12ceb1762025fd2f0275fec | [
"MIT"
] | null | null | null | import math
def find_s_and_t(n):
    s = 0
    t = 0
    for possible_t in range(math.ceil(math.sqrt(n)), n):
        if math.sqrt(pow(possible_t, 2) - n) == math.ceil(math.sqrt(pow(possible_t, 2) - n)):
            t = possible_t
            s_squared = pow(t, 2) - n
            s = math.floor(math.sqrt(s_squared))
            break  # stop at the first (smallest) t, which yields the closest p and q
    return (s, t)
result = find_s_and_t(310485170747)
p = result[0] + result[1]
q = result[1] - result[0]
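# Sanity check (added for illustration, not in the original homework script):
# by construction p*q = (t+s)*(t-s) = t**2 - s**2 = n
assert p * q == 310485170747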
print("p = ", p)
print("q = ", q) | 24.894737 | 97 | 0.547569 | 81 | 473 | 3.049383 | 0.333333 | 0.145749 | 0.036437 | 0.072874 | 0.178138 | 0.178138 | 0.178138 | 0 | 0 | 0 | 0 | 0.061584 | 0.27907 | 473 | 19 | 98 | 24.894737 | 0.662757 | 0 | 0 | 0 | 0 | 0 | 0.016878 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.066667 | 0 | 0.2 | 0.133333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f2a2ed8a1f8b461a2fc0955e0ccc67710df3bfc | 1,051 | py | Python | Week_4/xcoverage.py | actaylor05/learning_python | d8c72fdb7c07bac4176a4418f83d75013db2245a | [
"MIT"
] | null | null | null | Week_4/xcoverage.py | actaylor05/learning_python | d8c72fdb7c07bac4176a4418f83d75013db2245a | [
"MIT"
] | null | null | null | Week_4/xcoverage.py | actaylor05/learning_python | d8c72fdb7c07bac4176a4418f83d75013db2245a | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Write a program that simulates random BAC coverage over a genome
# Command line arguments include
# Genome size (e.g. 1000)
# X coverage (e.g. 5)
# Use assert() to check parameter bounds
# Report min, max, and histogram of coverage
# Note that your output may vary due to random function
import sys
import random
assert(len(sys.argv) == 3)
size = int(sys.argv[1])
coverage = float(sys.argv[2]) #can use float cause 5.5 is ok
assert(size > 0)
assert(coverage > 0)
bacs = int(size * coverage)
genome = [0] * size
for i in range(bacs):
    r = random.randint(0, size - 1)
    genome[r] += 1
genome.sort()
min = genome[0]
max = genome[-1]
hist = [0] * (max + 1)
for v in genome:
    hist[v] += 1
#output
print(f'Size: {size}')
print(f'X: {coverage}')
print(f'BACs: {bacs}')
print(f'Min: {min}')
print(f'Max: {max}')
print(f'Counts:')
for i in range(len(hist)):
    print(i, hist[i])
"""
Size: 1000
X: 5.0
BACs: 5000
Min: 0
Max: 13
Counts:
0 5
1 39
2 88
3 144
4 175
5 150
6 151
7 116
8 59
9 40
10 20
11 5
12 6
13 2
"""
| 16.169231 | 66 | 0.647954 | 199 | 1,051 | 3.422111 | 0.452261 | 0.052863 | 0.017621 | 0.032305 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.097735 | 0.201713 | 1,051 | 64 | 67 | 16.421875 | 0.713945 | 0.317793 | 0 | 0 | 0 | 0 | 0.111693 | 0 | 0 | 0 | 0 | 0 | 0.115385 | 1 | 0 | false | 0 | 0.076923 | 0 | 0.076923 | 0.269231 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f2ab165419c56c1e792ea4658c93240a1bf71e7 | 1,023 | py | Python | GetExcelWorksheetName.py | gnomesoup/pyDynamo | dea046e96f7973fcb6c28a274a3092b246457551 | [
"Unlicense",
"MIT"
] | null | null | null | GetExcelWorksheetName.py | gnomesoup/pyDynamo | dea046e96f7973fcb6c28a274a3092b246457551 | [
"Unlicense",
"MIT"
] | null | null | null | GetExcelWorksheetName.py | gnomesoup/pyDynamo | dea046e96f7973fcb6c28a274a3092b246457551 | [
"Unlicense",
"MIT"
] | null | null | null | import clr
clr.AddReference('Microsoft.Office.Interop.Excel, Version=11.0.0.0, Culture=neutral, PublicKeyToken=71e9bce111e9429c')
from Microsoft.Office.Interop import Excel
from System.Runtime.InteropServices import Marshal
paths = IN[0]
if not isinstance(paths, list):
    paths = [paths]
outList = []
for path in paths:
    try:
        # Instantiate the Excel Application
        ex = Excel.ApplicationClass()
        # Keep Excel hidden while we read
        ex.Visible = False
        # Disable Alerts - Errors Ignore them, they're probably not important
        ex.DisplayAlerts = False
        # Workbook
        workbook = ex.Workbooks.Open(path)
        # Collect the name of every worksheet in the workbook
        # (the original loop was missing its colon and never read the names)
        names = []
        for sheet in workbook.Worksheets:
            names.append(sheet.Name)
        ex.ActiveWorkbook.Close(False)
        Marshal.ReleaseComObject(workbook)
        Marshal.ReleaseComObject(ex)
        outList.append(names)
    except Exception as e:
        outList.append(e)
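
# The IN[0] input above implies a Dynamo Python node; Dynamo hands the node's
# result back through the OUT variable (an assumption about how this script is wired):
OUT = outList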
| 28.416667 | 118 | 0.662757 | 116 | 1,023 | 5.844828 | 0.594828 | 0.10177 | 0.064897 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02108 | 0.258065 | 1,023 | 35 | 119 | 29.228571 | 0.8722 | 0.162268 | 0 | 0 | 0 | 0.043478 | 0.115294 | 0.072941 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.130435 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8f2afa44239c14e6b1bb586457468cf46963a9c2 | 3,352 | py | Python | python/desc/sims_ci_pipe/psf_mag_check.py | jchiang87/sims_ci_pipe | db8f5ba03880c8def4242fc80ab4cfe6e225e72f | [
"BSD-3-Clause"
] | 3 | 2019-12-04T02:47:34.000Z | 2021-07-04T16:25:34.000Z | python/desc/sims_ci_pipe/psf_mag_check.py | jchiang87/sims_ci_pipe | db8f5ba03880c8def4242fc80ab4cfe6e225e72f | [
"BSD-3-Clause"
] | 5 | 2019-12-10T15:54:49.000Z | 2020-07-19T02:25:39.000Z | python/desc/sims_ci_pipe/psf_mag_check.py | jchiang87/sims_ci_pipe | db8f5ba03880c8def4242fc80ab4cfe6e225e72f | [
"BSD-3-Clause"
] | 1 | 2020-07-15T15:41:34.000Z | 2020-07-15T15:41:34.000Z | """
Compute visit-level distributions of psf_mag - calib_mag to check
for biases in photometry.
"""
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import lsst.daf.persistence as dp
from .ellipticity_distributions import get_point_sources
__all__ = ['get_psf_calib_mags', 'psf_mag_check']
def get_psf_calib_mags(butler, visit, sn_min=150):
    """
    Compute psf and calib magnitudes.

    Parameters
    ----------
    butler: lsst.daf.persistence.Butler
        Butler pointing at the data repo with the calexps.
    visit: int
        Visit number to consider.
    sn_min: float [150]
        Minimum signal-to-noise cut on psfFlux/psfFluxErr.

    Returns
    -------
    pandas.DataFrame containing the psf_mag and calib_mag values.
    """
    datarefs = butler.subset('src', visit=visit)
    psf_mags = []
    calib_mags = []
    psf_fluxes = []
    psf_fluxErrs = []
    for dataref in list(datarefs):
        try:
            src = dataref.get('src')
            photoCalib = dataref.get('calexp_photoCalib')
        except Exception:
            continue
        visit = dataref.dataId['visit']
        stars = get_point_sources(src)
        psf_mags.extend(photoCalib.instFluxToMagnitude(
            stars, 'base_PsfFlux').transpose()[0])
        calib_mags.extend(photoCalib.instFluxToMagnitude(
            stars, 'base_CircularApertureFlux_12_0').transpose()[0])
        psf_fluxes.extend(stars['base_PsfFlux_instFlux'])
        psf_fluxErrs.extend(stars['base_PsfFlux_instFluxErr'])
    psf_mags = np.array(psf_mags)
    calib_mags = np.array(calib_mags)
    psf_fluxes = np.array(psf_fluxes)
    psf_fluxErrs = np.array(psf_fluxErrs)
    psf_flux_sn = psf_fluxes/psf_fluxErrs
    # psf_flux_sn == psf_flux_sn filters out NaN entries before applying the S/N cut.
    index = np.where((psf_flux_sn == psf_flux_sn) & (psf_flux_sn > sn_min))
    return pd.DataFrame(data=dict(psf_mag=psf_mags[index],
                                  calib_mag=calib_mags[index]))
def psf_mag_check(repo, visit, dmag_range=(-0.05, 0.05), sn_min=150):
    """
    Plot the distribution of delta_mag = psf_mag - calib_mag values, and
    return an estimate of the delta_mag peak location.

    Parameters
    ----------
    repo: str
        Path to the data repo with the calexps.
    visit: int
        Visit number to consider.
    dmag_range: (float, float) [(-0.05, 0.05)]
        Magnitude range to use for plotting and median estimation.
    sn_min: float [150]
        Minimum signal-to-noise cut on psfFlux/psfFluxErr.

    Returns
    -------
    float: An estimate of the delta_mag peak location.
    """
    butler = dp.Butler(repo)
    df = get_psf_calib_mags(butler, visit, sn_min=sn_min)
    if len(df) == 0:
        return None
    delta_mag = (df['psf_mag'] - df['calib_mag']).to_numpy()
    delta_mag = delta_mag[np.where(delta_mag == delta_mag)]
    index = np.where((dmag_range[0] < delta_mag) & (delta_mag < dmag_range[1]))
    dmag_median = np.median(delta_mag[index])
    plt.hist(delta_mag, range=dmag_range, bins=100, histtype='step')
    plt.axvline(0, linestyle=':')
    plt.axvline(dmag_median, linestyle='--')
    plt.annotate(f'median: {dmag_median*1000:.2f} mmag\n'
                 f'psfFlux/psfFluxErr > {sn_min}', (0.05, 0.95),
                 xycoords='axes fraction', verticalalignment='top')
    plt.xlabel('psf_mag - calib_mag')
    return dmag_median
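
# Usage sketch (repo path and visit number are placeholders):
#   dmag_peak = psf_mag_check('/path/to/repo', visit=123456)
#   # dmag_peak close to zero indicates unbiased PSF photometry for the visit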
| 34.204082 | 79 | 0.656026 | 452 | 3,352 | 4.641593 | 0.294248 | 0.045758 | 0.017159 | 0.020019 | 0.283603 | 0.283603 | 0.224976 | 0.193518 | 0.163966 | 0.163966 | 0 | 0.018133 | 0.22673 | 3,352 | 97 | 80 | 34.556701 | 0.791281 | 0.28401 | 0 | 0 | 0 | 0 | 0.119363 | 0.042882 | 0 | 0 | 0 | 0 | 0 | 1 | 0.039216 | false | 0 | 0.098039 | 0 | 0.196078 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f2d5a928f67b43b78d0330c6a21750fcb1cff1d | 129 | py | Python | webpagecrawl.py | solodom/crawler | fcaf8570b85452194bac18a49f5a61d88ca16a60 | [
"MIT"
] | null | null | null | webpagecrawl.py | solodom/crawler | fcaf8570b85452194bac18a49f5a61d88ca16a60 | [
"MIT"
] | null | null | null | webpagecrawl.py | solodom/crawler | fcaf8570b85452194bac18a49f5a61d88ca16a60 | [
"MIT"
] | null | null | null | from urllib import request
url = input('URL:')
file_name = input('Filename:')
page_file=request.urlretrieve(url,filename=file_name) | 21.5 | 53 | 0.79845 | 19 | 129 | 5.263158 | 0.578947 | 0.16 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.062016 | 129 | 6 | 53 | 21.5 | 0.826446 | 0 | 0 | 0 | 0 | 0 | 0.1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8f30236674d6673760085f111ad109a0c51c2725 | 7,601 | py | Python | job_scheduler_web/scheduler_web/views.py | ewerkema/job-scheduler | ecb2e93cf8eb3d9200f05176b60c3424d70056c3 | [
"MIT"
] | null | null | null | job_scheduler_web/scheduler_web/views.py | ewerkema/job-scheduler | ecb2e93cf8eb3d9200f05176b60c3424d70056c3 | [
"MIT"
] | null | null | null | job_scheduler_web/scheduler_web/views.py | ewerkema/job-scheduler | ecb2e93cf8eb3d9200f05176b60c3424d70056c3 | [
"MIT"
] | 1 | 2018-06-25T19:05:13.000Z | 2018-06-25T19:05:13.000Z | from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from django.template import loader
from django import forms
import logging
from dateutil.parser import parse
from requests.utils import quote
from django.urls import reverse
from time import strftime
import csv
from .models import Server, Job, LoadMeasurement, JobSchedulingEvent
from .forms import AddServerForm, AddJobForm
import rest
import requests
# Create your views here.
from .JSONserializer import ServerSerializer
def index(request):
    fetchAPI()
    job_list = Job.objects.order_by('-priority')
    server_list = Server.objects.order_by('displayName')
    running_job_list = [x for x in list(job_list) if x.schedStatus() != None and x.schedStatus().schedStatus == 2]
    entered_job_list = [x for x in list(job_list) if x.schedStatus() != None and x.schedStatus().schedStatus == 0]
    server_loads = [x.latestLoadMeasurement() for x in server_list if x.latestLoadMeasurement() != None]
    if len(server_loads) > 0:
        avg_cpu_load = sum([x.cpuLoad for x in server_loads]) / len(server_loads) * 100
    else:
        avg_cpu_load = 0
    num_servers = len(server_list)
    context = {'running_job_list': running_job_list,
               'entered_job_list': entered_job_list,
               'server_list': server_list,
               'avg_cpu_load': avg_cpu_load,
               'num_servers': num_servers}
    template = loader.get_template('scheduler_web/index.html')
    return HttpResponse(template.render(context, request))


def servers(request):
    server_list = Server.objects.order_by('hostname')
    context = {'server_list': server_list}
    template = loader.get_template('scheduler_web/servers.html')
    return HttpResponse(template.render(context, request))


def serverDetail(request, serverID):
    server = Server.objects.get(pk=serverID)
    latestLoad = LoadMeasurement.objects.filter(server=server).order_by('-date').first()
    context = {
        'server': server,
        'load': latestLoad,
    }
    template = loader.get_template('scheduler_web/serverDetail.html')
    return HttpResponse(template.render(context, request))


def jobs(request):
    job_list = Job.objects.order_by('-priority')
    context = {'job_list': job_list}
    template = loader.get_template('scheduler_web/jobs.html')
    return HttpResponse(template.render(context, request))


def jobDetail(request, jobID):
    job = Job.objects.get(pk=jobID)
    schedEvents = JobSchedulingEvent.objects.filter(job=job)
    latestEvent = schedEvents.order_by('-eventDate').first()
    context = {
        'job': job,
        'latestEvent': latestEvent,
        'schedEvents': schedEvents,
    }
    template = loader.get_template('scheduler_web/jobDetail.html')
    return HttpResponse(template.render(context, request))
def addServer(request):
    template = loader.get_template('scheduler_web/addServer.html')
    if request.method == 'POST':
        form = AddServerForm(request.POST)
        if form.is_valid():
            # send API request to server
            return HttpResponseRedirect('/servers/')
    else:
        form = AddServerForm()
    context = {'form': form}
    return HttpResponse(template.render(context, request))


def addJob(request):
    template = loader.get_template('scheduler_web/addJob.html')
    if request.method == 'POST':
        form = AddJobForm(request.POST)
        if form.is_valid():
            try:
                requests.post('http://localhost:8080/jobservice/addjob?command=%s&priority=%s&deadline=%d' % (quote(request.POST['command']), request.POST['priority'], 1))
            except Exception as e:
                print e
                context = {'form': form, 'alert': 'Server unreachable.'}
                return HttpResponse(template.render(context, request))
            return HttpResponseRedirect(reverse('index'))
    else:
        form = AddJobForm()
    context = {'form': form}
    return HttpResponse(template.render(context, request))


def loadMeasurementsCSV(request, serverID):
    server = Server.objects.get(pk=serverID)
    loadMeasurements = LoadMeasurement.objects.filter(server=server).order_by('-date')
    response = HttpResponse(content_type='text/html')
    # response['Content-Disposition'] = 'attachment; filename="loadMeasurements_' + server.hostname + '.csv"'
    writer = csv.writer(response)
    writer.writerow(["date", "cpuLoad", "memoryLoad"])
    for meas in loadMeasurements:
        writer.writerow([meas.date.strftime("%Y-%m-%d %H:%M"), meas.cpuLoad, meas.memoryLoad])
    return response
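
# The generated CSV body looks like this (values illustrative):
#   date,cpuLoad,memoryLoad
#   2018-06-25 19:05,0.42,0.31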
def fetchAPI():
    fetchServers()
    fetchJobs()
    fetchServerReports()
    fetchJobSchedules()


def fetchServers():
    Server.objects.all().delete()
    r = requests.get("http://localhost:8080/api/clientservice")
    json = r.json()
    print r.text
    i = 0
    while(True):
        try:
            server = Server(idOnServer = json[str(i)][0],
                            hostname = json[str(i)][6],
                            hostport = json[str(i)][7],
                            address = json[str(i)][1],
                            displayName = json[str(i)][9],
                            cpuName = json[str(i)][2],
                            cpuCores = json[str(i)][3],
                            memoryAmount = json[str(i)][4])
            server.save()
            i += 1
        except:
            break


def fetchJobs():
    Job.objects.all().delete()
    r = requests.get("http://localhost:8080/api/jobservice")
    json = r.json()
    print r.text
    i = 0
    while(True):
        try:
            job = Job(idOnServer = json[str(i)][0],
                      deadline = parse(json[str(i)][1]),
                      command = json[str(i)][2],
                      priority = json[str(i)][3],)
            job.save()
            i += 1
        except Exception as e:
            print e
            break


def fetchServerReports():
    LoadMeasurement.objects.all().delete()
    r = requests.get("http://localhost:8080/api/reportservice")
    json = r.json()
    print r.text
    i = 0
    while(True):
        try:
            server = Server.objects.get(address=json[str(i)][0])
            lm = LoadMeasurement(date = parse(json[str(i)][4]),
                                 cpuLoad = json[str(i)][1],
                                 memoryLoad = json[str(i)][2],
                                 server = server)
            lm.save()
            i += 1
        except Exception as e:
            print e
            break


def fetchJobSchedules():
    JobSchedulingEvent.objects.all().delete()
    for job in Job.objects.all():
        print job.idOnServer
        r = requests.get("http://localhost:8080/api/jobscheduleservice?job=%d" % job.idOnServer)
        print r.text
        try:
            json = r.json()
            i = 0
            while(True):
                try:
                    if int(json[str(i)][3]) > 0:
                        server = Server.objects.get(idOnServer=json[str(i)][3])
                        schedEvent = JobSchedulingEvent(job=job,
                                                        eventDate = parse(json[str(i)][1]),
                                                        schedStatus = json[str(i)][2],
                                                        server = server)
                    else:
                        schedEvent = JobSchedulingEvent(job=job,
                                                        eventDate = parse(json[str(i)][1]),
                                                        schedStatus = json[str(i)][2])
                    i += 1
                    schedEvent.save()
                except Exception as e:
                    print e
                    break
        except Exception as e:
            print e
| 30.526104 | 169 | 0.597553 | 840 | 7,601 | 5.328571 | 0.194048 | 0.034406 | 0.039321 | 0.057194 | 0.451519 | 0.428954 | 0.344951 | 0.273458 | 0.170465 | 0.170465 | 0 | 0.010778 | 0.279832 | 7,601 | 248 | 170 | 30.649194 | 0.806905 | 0.019471 | 0 | 0.374332 | 0 | 0.005348 | 0.096389 | 0.024836 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.080214 | null | null | 0.053476 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8f30b3b7f97b65c7deec8372528b61489b88ad0a | 558 | py | Python | WaybackMedic 2.1/cloudflare.py | greencardamom/WaybackMedic | 4291afb5451e25ec9cb73f207a7da39456440865 | [
"MIT"
] | 8 | 2016-03-16T02:23:09.000Z | 2021-12-21T21:58:59.000Z | WaybackMedic 2.1/cloudflare.py | greencardamom/WaybackMedic | 4291afb5451e25ec9cb73f207a7da39456440865 | [
"MIT"
] | null | null | null | WaybackMedic 2.1/cloudflare.py | greencardamom/WaybackMedic | 4291afb5451e25ec9cb73f207a7da39456440865 | [
"MIT"
] | 1 | 2017-04-29T14:35:08.000Z | 2017-04-29T14:35:08.000Z | #!/usr/bin/python
#
# Bypass Cloudflare DDOS protection with cfscrape library.
# https://github.com/Anorov/cloudflare-scrape
# Note: SSL doesn't work, use HTTP
# If it stops working check for newer version
#
import cfscrape, sys
scraper = cfscrape.create_scraper() # returns a CloudflareScraper instance
# Or: scraper = cfscrape.CloudflareScraper() # CloudflareScraper inherits from requests.Session
if (sys.argv[1] == "content"):
print scraper.get(sys.argv[2]).content
if (sys.argv[1] == "header"):
print scraper.get(sys.argv[2]).headers
| 31 | 96 | 0.729391 | 75 | 558 | 5.413333 | 0.68 | 0.068966 | 0.044335 | 0.049261 | 0.1133 | 0.1133 | 0 | 0 | 0 | 0 | 0 | 0.008386 | 0.145161 | 558 | 17 | 97 | 32.823529 | 0.842767 | 0.587814 | 0 | 0 | 0 | 0 | 0.059361 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.166667 | null | null | 0.333333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8f31782a6012b73d5ada6e6de802c8e5d232912b | 33,930 | py | Python | mapmint-services/datastores/postgis/pgConnection.py | fenilgmehta/mapmint | 7c28c42dbe9b17b11f5f6f080fd2c397f4f6937f | [
"MIT"
] | null | null | null | mapmint-services/datastores/postgis/pgConnection.py | fenilgmehta/mapmint | 7c28c42dbe9b17b11f5f6f080fd2c397f4f6937f | [
"MIT"
] | 2 | 2019-03-31T01:11:06.000Z | 2020-03-15T13:43:16.000Z | mapmint-services/datastores/postgis/pgConnection.py | fenilgmehta/mapmint | 7c28c42dbe9b17b11f5f6f080fd2c397f4f6937f | [
"MIT"
] | null | null | null | import psycopg2
import lxml
# import libxslt
from lxml import etree
import osgeo.ogr
import sys
import zoo
import json
try:
    from manage_users.manage_users import mm_md5
except:
    from manage_users import mm_md5
class pgConnection:
    def __init__(self, conf, dbfile):
        self.dbfile = dbfile
        self.conf = conf

    def parseConf(self):
        # libxml2.initParser()
        # doc = libxml2.parseFile(self.conf["main"]["dataPath"] + "/PostGIS/" + self.dbfile + ".xml")
        doc = etree.parse(self.conf["main"]["dataPath"] + "/PostGIS/" + self.dbfile + ".xml")
        # styledoc = libxml2.parseFile(self.conf["main"]["dataPath"] + "/PostGIS/conn.xsl")
        styledoc = etree.parse(self.conf["main"]["dataPath"] + "/PostGIS/conn.xsl")
        style = etree.XSLT(styledoc)
        res = style(doc)
        self.db_string = str(res).replace("PG: ", "")

    def connect(self):
        try:
            self.conn = psycopg2.connect(self.db_string)
            self.cur = self.conn.cursor()
            return True
        except Exception as e:
            self.conf["lenv"]["message"] = "Unable to connect: " + str(e)
            return False

    def execute(self, req):
        try:
            self.ex = self.cur.execute(req)
            if req.count("SELECT") > 0 or req.count("select") > 0:
                return self.cur.fetchall()
            else:
                return True
        except Exception as e:
            # req is already a str; encoding it to bytes here made the concatenation fail
            self.conf["lenv"]["message"] = "Unable to execute " + req + " due to: " + str(e)
            # print("Unable to execute " + req + str(e), file=sys.stderr)
            return False
def listSchemas(conf, inputs, outputs):
    print(inputs["dataStore"]["value"], file=sys.stderr)
    db = pgConnection(conf, inputs["dataStore"]["value"])
    db.parseConf()
    if db.connect():
        res = db.execute(
            "select nspname as schema from pg_namespace WHERE nspname NOT LIKE 'information_schema' AND nspname NOT LIKE 'pg_%' ORDER BY nspname")
        if res:
            outputs["Result"]["value"] = json.dumps(res)
        return zoo.SERVICE_SUCCEEDED
    else:
        print("Unable to connect", file=sys.stderr)
        return zoo.SERVICE_FAILED
def listTables(conf, inputs, outputs):
    import authenticate.service as auth
    if not (auth.is_ftable(inputs["schema"]["value"])):
        conf["lenv"]["message"] = zoo._("Unable to identify your parameter as table or field name")
        return zoo.SERVICE_FAILED
    db = pgConnection(conf, inputs["dataStore"]["value"])
    db.parseConf()
    if db.connect():
        req = "select schemaname||'.'||tablename as tablename, tablename as display from pg_tables WHERE schemaname NOT LIKE 'information_schema' AND schemaname NOT LIKE 'pg_%' AND tablename NOT LIKE 'spatial_ref_sys' AND tablename NOT LIKE 'geometry_columns' "
        if "schema" in inputs:
            req += "AND schemaname='" + inputs["schema"]["value"] + "'"
        req += " ORDER BY schemaname||'.'||tablename"
        res = db.execute(req)
        outputs["Result"]["value"] = json.dumps(res)
        return zoo.SERVICE_SUCCEEDED
    else:
        print("Unable to connect", file=sys.stderr)
        return zoo.SERVICE_FAILED
def listTablesAndViews(conf, inputs, outputs):
    db = pgConnection(conf, inputs["dataStore"]["value"])
    db.parseConf()
    if db.connect():
        req = "select schemaname||'.'||tablename as tablename, tablename as display from pg_tables WHERE schemaname NOT LIKE 'information_schema' AND schemaname NOT LIKE 'pg_%' AND tablename NOT LIKE 'tmp%' AND tablename NOT LIKE 'spatial_ref_sys' AND tablename NOT LIKE 'geometry_columns' "
        req1 = "select schemaname||'.'||viewname as tablename, viewname as display from pg_views WHERE schemaname NOT LIKE 'information_schema' AND schemaname NOT LIKE 'pg_%' "
        if "schema" in inputs:
            req += " AND schemaname='" + inputs["schema"]["value"] + "'"
            req1 += " AND schemaname='" + inputs["schema"]["value"] + "'"
        res = db.execute("SELECT * from (" + req + ") as foo UNION (" + req1 + ") ORDER BY display")
        if res:
            outputs["Result"]["value"] = json.dumps(res)
        return zoo.SERVICE_SUCCEEDED
    else:
        print("Unable to connect", file=sys.stderr)
        return zoo.SERVICE_FAILED
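
# json.dumps(res) above serializes the fetched rows directly, e.g. (illustrative):
#   [["public.roads", "roads"], ["public.cities", "cities"]]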
def getDesc(cur, table):
    tmp = table.split('.')
    if len(tmp) == 1:
        tmp1 = tmp[0]
        tmp = ["public", tmp1]
    req = "SELECT b.relname as t FROM pg_inherits, pg_class a, pg_class b WHERE inhrelid=a.oid AND inhparent=b.oid AND a.relname = '" + \
tmp[1] + "' AND a.relnamespace=(select oid from pg_namespace where nspname='" + tmp[0] + "')"
    res0 = cur.execute(req)
    res = cur.fetchall()
    if res != False and len(res) > 0:
        return "SELECT * FROM (SELECT DISTINCT ON (\"Pos\",\"Field\") * FROM ((SELECT DISTINCT on (\"Pos\") \"Pos\"-1 as \"Pos\",\"Field\",\"Type\",\"Key\", \"Ref\", \"RefCol\", \"RefCols\",array_upper(\"RefCols\",1) from (SELECT attnum AS \"Pos\", attname AS \"Field\",CASE WHEN atttypmod >0 THEN b.typname || '(' || atttypmod-4 || ')' ELSE b.typname END AS \"Type\" FROM pg_catalog.pg_attribute a, pg_catalog.pg_type b WHERE a.atttypid=b.oid AND a.attrelid = (SELECT pg_class.oid FROM pg_class, pg_namespace WHERE relname='" + \
res[0][0] + "' AND pg_namespace.oid=relnamespace AND nspname='" + tmp[
0] + "') AND a.attnum > 0 AND NOT a.attisdropped ORDER BY attnum) a LEFT JOIN (SELECT conkey,c.conname AS constraint_name, CASE c.contype WHEN 'c' THEN 'CHECK' WHEN 'f' THEN 'FOR' WHEN 'p' THEN 'PRI' WHEN 'u' THEN 'UNIQUE' END AS \"Key\", t3.nspname||'.'||t2.relname AS \"Ref\", (SELECT attname from pg_catalog.pg_attribute WHERE attrelid=c.confrelid AND confkey[1] = attnum) AS \"RefCol\" FROM pg_constraint c LEFT JOIN pg_class t ON c.conrelid = t.oid LEFT JOIN pg_class t2 ON c.confrelid = t2.oid LEFT JOIN pg_namespace t3 ON t2.relnamespace=t3.oid WHERE t.relname = '" + \
res[0][
0] + "') b ON get_nb_of(conkey,\"Pos\")>0 LEFT JOIN (SELECT DISTINCT ON (at2.attnum) c.*, at2.attnum AS \"myid\", ARRAY(SELECT attname AS \"RefCol\" FROM pg_constraint AS c, pg_catalog.pg_attribute, pg_class t, pg_class t2 WHERE c.conrelid = t.oid AND c.confrelid = t2.oid AND t.relname = '" + \
res[0][
0] + "' AND attrelid=confrelid AND get_nb_of(confkey,attnum) > 0) AS \"RefCols\", at2.attnum, at2.attname AS atn, get_index_of(conkey,at2.attnum) AS \"RealOrigColNum\", at1.attnum, at1.attname, get_index_of(confkey,at1.attnum) AS \"RealRefColNum\", t.relname as orig, t2.relname as ref FROM pg_constraint AS c, pg_catalog.pg_attribute AS at1, pg_catalog.pg_attribute AS at2, pg_class t, pg_class t2 WHERE c.conrelid = t.oid AND c.confrelid = t2.oid AND t.relname = '" + \
res[0][
0] + "' AND at1.attrelid=confrelid AND get_nb_of(conkey,at2.attnum) > 0 AND get_nb_of(confkey,at1.attnum) > 0 AND t.relname='" + \
res[0][
0] + "' AND at2.attrelid=t.oid) AS foreigns ON foreigns.myid=a.\"Pos\") UNION (SELECT DISTINCT on (\"Pos\") \"Pos\"-1 as \"Pos\",\"Field\",\"Type\",\"Key\", \"Ref\", \"RefCol\", \"RefCols\",array_upper(\"RefCols\",1) from (SELECT attnum AS \"Pos\", attname AS \"Field\",CASE WHEN atttypmod >0 THEN b.typname || '(' || atttypmod-4 || ')' ELSE b.typname END AS \"Type\" FROM pg_catalog.pg_attribute a, pg_catalog.pg_type b WHERE a.atttypid=b.oid AND a.attrelid = (SELECT pg_class.oid FROM pg_class, pg_namespace WHERE relname='" + \
tmp[1] + "' AND pg_namespace.oid=relnamespace AND nspname='" + tmp[
0] + "') AND a.attnum > 0 AND NOT a.attisdropped ORDER BY attnum) a LEFT JOIN (SELECT conkey,c.conname AS constraint_name, CASE c.contype WHEN 'c' THEN 'CHECK' WHEN 'f' THEN 'FOR' WHEN 'p' THEN 'PRI' WHEN 'u' THEN 'UNIQUE' END AS \"Key\", t3.nspname||'.'||t2.relname AS \"Ref\", (SELECT attname from pg_catalog.pg_attribute WHERE attrelid=c.confrelid AND confkey[1] = attnum) AS \"RefCol\" FROM pg_constraint c LEFT JOIN pg_class t ON c.conrelid = t.oid LEFT JOIN pg_class t2 ON c.confrelid = t2.oid LEFT JOIN pg_namespace t3 ON t2.relnamespace=t3.oid WHERE t.relname = '" + \
tmp[1] + "' and t.relnamespace=(select oid from pg_namespace where nspname='" + tmp[
0] + "') ) b ON get_nb_of(conkey,\"Pos\")>0 LEFT JOIN (SELECT DISTINCT ON (at2.attnum) c.*, at2.attnum AS \"myid\", ARRAY(SELECT attname AS \"RefCol\" FROM pg_constraint AS c, pg_catalog.pg_attribute, pg_class t, pg_class t2 WHERE c.conrelid = t.oid AND c.confrelid = t2.oid AND t.relname = '" + \
tmp[
1] + "' AND attrelid=confrelid AND get_nb_of(confkey,attnum) > 0) AS \"RefCols\", at2.attnum, at2.attname AS atn, get_index_of(conkey,at2.attnum) AS \"RealOrigColNum\", at1.attnum, at1.attname, get_index_of(confkey,at1.attnum) AS \"RealRefColNum\", t.relname as orig, t2.relname as ref FROM pg_constraint AS c, pg_catalog.pg_attribute AS at1, pg_catalog.pg_attribute AS at2, pg_class t, pg_class t2 WHERE c.conrelid = t.oid AND c.confrelid = t2.oid AND t.relname = '" + \
tmp[
1] + "' AND at1.attrelid=confrelid AND get_nb_of(conkey,at2.attnum) > 0 AND get_nb_of(confkey,at1.attnum) > 0 AND t.relname='" + \
tmp[1] + "' and t.relnamespace=(select oid from pg_namespace where nspname='" + tmp[
0] + "') AND at2.attrelid=t.oid) AS foreigns ON foreigns.myid=a.\"Pos\")) As foo) as foo1 ORDER BY \"Pos\",\"Key\""
    else:
# print("SELECT DISTINCT on (\"Pos\") \"Pos\"-1 as \"Pos\",\"Field\",\"Type\",\"Key\", \"Ref\", \"RefCol\", \"RefCols\",array_upper(\"RefCols\",1) from (SELECT attnum AS \"Pos\", attname AS \"Field\",CASE WHEN atttypmod >0 THEN b.typname || '(' || atttypmod-4 || ')' ELSE b.typname END AS \"Type\" FROM pg_catalog.pg_attribute a, pg_catalog.pg_type b WHERE a.atttypid=b.oid AND a.attrelid = (SELECT pg_class.oid FROM pg_class, pg_namespace WHERE relname='"+tmp[1]+"' AND pg_namespace.oid=relnamespace AND nspname='"+tmp[0]+"') AND a.attnum > 0 AND NOT a.attisdropped ORDER BY attnum) a LEFT JOIN (SELECT conkey,c.conname AS constraint_name, CASE c.contype WHEN 'c' THEN 'CHECK' WHEN 'f' THEN 'FOR' WHEN 'p' THEN 'PRI' WHEN 'u' THEN 'UNIQUE' END AS \"Key\", t3.nspname||'.'||t2.relname AS \"Ref\", (SELECT attname from pg_catalog.pg_attribute WHERE attrelid=c.confrelid AND confkey[1] = attnum) AS \"RefCol\" FROM pg_constraint c LEFT JOIN pg_class t ON c.conrelid = t.oid LEFT JOIN pg_class t2 ON c.confrelid = t2.oid LEFT JOIN pg_namespace t3 ON t2.relnamespace=t3.oid WHERE t.relname = '"+tmp[1]+"' and t.relnamespace=(select oid from pg_namespace where nspname='"+tmp[0]+"')) b ON get_nb_of(conkey,\"Pos\")>0 LEFT JOIN (SELECT DISTINCT ON (at2.attnum) c.*, at2.attnum AS \"myid\", ARRAY(SELECT attname AS \"RefCol\" FROM pg_constraint AS c, pg_catalog.pg_attribute, pg_class t, pg_class t2 WHERE c.conrelid = t.oid AND c.confrelid = t2.oid AND t.relname = '"+tmp[1]+"' AND attrelid=confrelid AND get_nb_of(confkey,attnum) > 0 and t.relnamespace=(select oid from pg_namespace where nspname='"+tmp[0]+"')) AS \"RefCols\", at2.attnum, at2.attname AS atn, get_index_of(conkey,at2.attnum) AS \"RealOrigColNum\", at1.attnum, at1.attname, get_index_of(confkey,at1.attnum) AS \"RealRefColNum\", t.relname as orig, t2.relname as ref FROM pg_constraint AS c, pg_catalog.pg_attribute AS at1, pg_catalog.pg_attribute AS at2, pg_class t, pg_class t2 WHERE c.conrelid = t.oid AND c.confrelid = t2.oid AND t.relname = '"+tmp[1]+"' AND at1.attrelid=confrelid AND get_nb_of(conkey,at2.attnum) > 0 AND get_nb_of(confkey,at1.attnum) > 0 AND t.relname='"+tmp[1]+"' AND at2.attrelid=t.oid and t.relnamespace=(select oid from pg_namespace where nspname='"+tmp[0]+"')) AS foreigns ON foreigns.myid=a.\"Pos\"", file=sys.stderr)
return "SELECT DISTINCT on (\"Pos\") \"Pos\"-1 as \"Pos\",\"Field\",\"Type\",\"Key\", \"Ref\", \"RefCol\", \"RefCols\",array_upper(\"RefCols\",1) from (SELECT * FROM (SELECT attnum AS \"Pos\", attname AS \"Field\",CASE WHEN atttypmod >0 THEN b.typname || '(' || atttypmod-4 || ')' ELSE b.typname END AS \"Type\" FROM pg_catalog.pg_attribute a, pg_catalog.pg_type b WHERE a.atttypid=b.oid AND a.attrelid = (SELECT pg_class.oid FROM pg_class, pg_namespace WHERE relname='" + \
tmp[1] + "' AND pg_namespace.oid=relnamespace AND nspname='" + tmp[
0] + "') AND a.attnum > 0 AND NOT a.attisdropped ORDER BY attnum) a LEFT JOIN (SELECT conkey,c.conname AS constraint_name, CASE c.contype WHEN 'c' THEN 'CHECK' WHEN 'f' THEN 'FOR' WHEN 'p' THEN 'PRI' WHEN 'u' THEN 'UNIQUE' END AS \"Key\", t3.nspname||'.'||t2.relname AS \"Ref\", (SELECT attname from pg_catalog.pg_attribute WHERE attrelid=c.confrelid AND confkey[1] = attnum) AS \"RefCol\" FROM pg_constraint c LEFT JOIN pg_class t ON c.conrelid = t.oid LEFT JOIN pg_class t2 ON c.confrelid = t2.oid LEFT JOIN pg_namespace t3 ON t2.relnamespace=t3.oid WHERE t.relname = '" + \
tmp[1] + "' and t.relnamespace=(select oid from pg_namespace where nspname='" + tmp[
0] + "')) b ON get_nb_of(conkey,\"Pos\")>0 LEFT JOIN (SELECT DISTINCT ON (at2.attnum) c.*, at2.attnum AS \"myid\", ARRAY(SELECT attname AS \"RefCol\" FROM pg_constraint AS c, pg_catalog.pg_attribute, pg_class t, pg_class t2 WHERE c.conrelid = t.oid AND c.confrelid = t2.oid AND t.relname = '" + \
tmp[
1] + "' AND attrelid=confrelid AND get_nb_of(confkey,attnum) > 0 and t.relnamespace=(select oid from pg_namespace where nspname='" + \
tmp[
0] + "')) AS \"RefCols\", at2.attnum, at2.attname AS atn, get_index_of(conkey,at2.attnum) AS \"RealOrigColNum\", at1.attnum, at1.attname, get_index_of(confkey,at1.attnum) AS \"RealRefColNum\", t.relname as orig, t2.relname as ref FROM pg_constraint AS c, pg_catalog.pg_attribute AS at1, pg_catalog.pg_attribute AS at2, pg_class t, pg_class t2 WHERE c.conrelid = t.oid AND c.confrelid = t2.oid AND t.relname = '" + \
tmp[
1] + "' AND at1.attrelid=confrelid AND get_nb_of(conkey,at2.attnum) > 0 AND get_nb_of(confkey,at1.attnum) > 0 AND t.relname='" + \
tmp[1] + "' AND at2.attrelid=t.oid and t.relnamespace=(select oid from pg_namespace where nspname='" + \
tmp[0] + "')) AS foreigns ON foreigns.myid=a.\"Pos\" order by \"Key\"='PRI' or \"Key\"='FOR' desc) As f"
def getTableDescription(conf, inputs, outputs):
    import authenticate.service as auth
    # if not (auth.is_ftable(inputs["table"]["value"])):
    #     conf["lenv"]["message"] = zoo._("Unable to identify your parameter as table or field name")
    #     return zoo.SERVICE_FAILED
    db = pgConnection(conf, inputs["dataStore"]["value"])
    db.parseConf()
    if db.connect():
        tmp = inputs["table"]["value"].split('.')
        req = getDesc(db.cur, inputs["table"]["value"])
        # print(req, file=sys.stderr)
        res = db.execute(req)
        if res != False and len(res) > 0:
            outputs["Result"]["value"] = json.dumps(res)
            return zoo.SERVICE_SUCCEEDED
        else:
            print("unable to run request " + req, file=sys.stderr)
            return zoo.SERVICE_FAILED
    else:
        print("Unable to connect", file=sys.stderr)
        return zoo.SERVICE_FAILED
def getTableContent(conf, inputs, outputs):
    import authenticate.service as auth
    # if not (auth.is_ftable(inputs["table"]["value"])):
    #     conf["lenv"]["message"] = zoo._("Unable to identify your parameter as table or field name")
    #     return zoo.SERVICE_FAILED
    db = pgConnection(conf, inputs["dataStore"]["value"])
    db.parseConf()
    getTableDescription(conf, inputs, outputs)
    tmp = eval(outputs["Result"]["value"].replace("null", "None"))
    pkey = 0
    geom = []
    files = []
    fields = ""
    for i in range(0, len(tmp)):
        if tmp[i][3] == "PRI":
            pkey = tmp[i][0]
        if tmp[i][2] == "geometry":
            geom += [i]
        if tmp[i][2] == "bytea":
            files += [i]
        if tmp[i][3] == "FOR" and not ("force" in inputs):
            input1 = inputs
            otbl = inputs["table"]["value"]
            inputs["table"]["value"] = tmp[i][4]
            getTableDescription(conf, inputs, outputs)
            tmp2 = eval(outputs["Result"]["value"].replace("null", "None"))
            pkey1 = 0
            for j in range(0, len(tmp2)):
                if tmp2[j][3] == "PRI":
                    pkey1 = j
                    break
            hasV = False
            for j in range(0, len(tmp2)):
                if not (hasV) and (tmp2[j][2].count("char") > 0 or tmp2[j][2].count("text") > 0):
                    if fields != "":
                        fields += ","
                    hasV = True
                    fields += "(SELECT " + tmp2[j][1] + " FROM " + tmp[i][4] + " as a WHERE a." + tmp2[pkey][1] + "=" + otbl + "." + tmp[i][1] + ")"
            if not (hasV):
                if fields != "":
                    fields += ","
                fields += "(SELECT " + tmp2[0][1] + " FROM " + tmp[i][4] + " as a WHERE a." + tmp2[pkey][1] + "=" + otbl + "." + tmp[i][1] + ")"
            inputs["table"]["value"] = otbl
        else:
            if fields != "":
                fields += ","
            fields += tmp[i][1]
    if db.connect():
        tmp1 = inputs["table"]["value"].split(".")
        tmp1[0] = '"' + tmp1[0] + '"'
        tmp1[1] = '"' + tmp1[1] + '"'
        inputs["table"]["value"] = (".").join(tmp1)
        req = "select count(*) from " + inputs["table"]["value"]
        if "clause" in inputs and inputs["clause"]["value"] != "NULL":
            req += " WHERE " + inputs["clause"]["value"]
        if "search" in inputs and inputs["search"]["value"] != "NULL" and inputs["search"]["value"] != "asc":
            req += " WHERE "
            print(req, file=sys.stderr)
            cnt = 0
            print(req, file=sys.stderr)
            for i in range(0, len(tmp)):
                if cnt > 0:
                    req += " OR "
                req += tmp[i][1] + "::varchar like '%" + inputs["search"]["value"] + "%'"
                cnt += 1
        res = db.execute(req)
        if res != False:
            total = res[0][0]
            req = "select "
            if "cols" in inputs and inputs["cols"]["value"] != "NULL":
                req += inputs["cols"]["value"]
            else:
                req += fields
            req += " from " + inputs["table"]["value"]
            if "clause" in inputs and inputs["clause"]["value"] != "NULL":
                req += " WHERE " + inputs["clause"]["value"]
            if "search" in inputs and inputs["search"]["value"] != "NULL" and inputs["search"]["value"] != "asc":
                req += " WHERE "
                print(req, file=sys.stderr)
                cnt = 0
                print(req, file=sys.stderr)
                for i in range(0, len(tmp)):
                    if cnt > 0:
                        req += " OR "
                    req += tmp[i][1] + "::varchar like '%" + inputs["search"]["value"] + "%'"
                    cnt += 1
            if "sortname" in inputs and inputs["sortname"]["value"] != "NULL":
                req += " ORDER BY " + inputs["sortname"]["value"] + " " + inputs["sortorder"]["value"]
            if "limit" in inputs and inputs["limit"]["value"] != "NULL":
                if "page" in inputs and inputs["page"]["value"] != "":
                    req += " OFFSET " + str((int(inputs["page"]["value"]) - 1) * int(inputs["limit"]["value"]))
                    page = inputs["page"]["value"]
                    req += " LIMIT " + inputs["limit"]["value"]
                else:
                    page = 1
                    req += " LIMIT 10"
            print(req, file=sys.stderr)
            res = db.execute(req)
            if res != False:
                rows = []
                for i in range(0, len(res)):
                    res0 = []
                    for k in range(0, len(res[i])):
                        try:
                            tmp = str(res[i][k].decode('utf-8'))
                            print(dir(tmp), file=sys.stderr)
                        except Exception as e:
                            print(e, file=sys.stderr)
                            tmp = str(res[i][k])
                        res0 += [str(tmp)]
                    if len(geom) > 0:
                        for j in range(0, len(geom)):
                            res0[geom[j]] = "GEOMETRY"
                    if len(files) > 0:
                        for j in range(0, len(files)):
                            res0[files[j]] = "BINARY FILE"
                    rows += [{"id": res[i][pkey], "cell": res0}]
                outputs["Result"]["value"] = json.dumps({"page": page, "total": total, "rows": rows}, ensure_ascii=False)
                return zoo.SERVICE_SUCCEEDED
            else:
                print("unable to run request", file=sys.stderr)
                return zoo.SERVICE_FAILED
    else:
        print("Unable to connect", file=sys.stderr)
        return zoo.SERVICE_FAILED
def getTableContent1(conf, inputs, outputs):
    import authenticate.service as auth
    # if not (auth.is_ftable(inputs["table"]["value"])):
    #     conf["lenv"]["message"] = zoo._("Unable to identify your parameter as table or field name")
    #     return zoo.SERVICE_FAILED
    db = pgConnection(conf, inputs["dataStore"]["value"])
    db.parseConf()
    getTableDescription(conf, inputs, outputs)
    tmp = eval(outputs["Result"]["value"].replace("null", "None"))
    pkey = 0
    geom = []
    files = []
    fields = ""
    for i in range(0, len(tmp)):
        if tmp[i][3] == "PRI":
            pkey = tmp[i][0]
        if tmp[i][2] == "geometry":
            geom += [i]
        if tmp[i][2] == "bytea":
            files += [i]
        if tmp[i][3] == "FOR" and not ("force" in inputs):
            input1 = inputs
            otbl = inputs["table"]["value"]
            inputs["table"]["value"] = tmp[i][4]
            getTableDescription(conf, inputs, outputs)
            tmp2 = eval(outputs["Result"]["value"].replace("null", "None"))
            pkey1 = 0
            for j in range(0, len(tmp2)):
                if tmp2[j][3] == "PRI":
                    pkey1 = j
                    break
            hasV = False
            for j in range(0, len(tmp2)):
                if not (hasV) and (tmp2[j][2].count("char") > 0 or tmp2[j][2].count("text") > 0):
                    if fields != "":
                        fields += ","
                    hasV = True
                    fields += "(SELECT " + tmp2[j][1] + " FROM " + tmp[i][4] + " as a WHERE a." + tmp2[pkey][1] + "=" + otbl + "." + tmp[i][1] + ")"
            if not (hasV):
                if fields != "":
                    fields += ","
                fields += "(SELECT " + tmp2[0][1] + " FROM " + tmp[i][4] + " as a WHERE a." + tmp2[pkey][1] + "=" + otbl + "." + tmp[i][1] + ")"
            inputs["table"]["value"] = otbl
        else:
            if fields != "":
                fields += ","
            fields += tmp[i][1]
    if db.connect():
        tmp1 = inputs["table"]["value"].split(".")
        tmp1[0] = '"' + tmp1[0] + '"'
        tmp1[1] = '"' + tmp1[1] + '"'
        inputs["table"]["value"] = (".").join(tmp1)
        req = "select count(*) from " + inputs["table"]["value"]
        if "clause" in inputs and inputs["clause"]["value"] != "NULL":
            req += " WHERE " + inputs["clause"]["value"]
        if "search" in inputs and inputs["search"]["value"] != "NULL" and inputs["search"]["value"] != "asc":
            req += " WHERE "
            print(req, file=sys.stderr)
            cnt = 0
            print(req, file=sys.stderr)
            for i in range(0, len(tmp)):
                if cnt > 0:
                    req += " OR "
                req += tmp[i][1] + "::varchar like '%" + inputs["search"]["value"] + "%'"
                cnt += 1
        print(req, file=sys.stderr)
        res = db.execute(req)
        if res != False:
            total = res[0][0]
            req = "select "
            if "cols" in inputs and inputs["cols"]["value"] != "NULL":
                req += inputs["cols"]["value"]
            else:
                req += fields
            req += " from " + inputs["table"]["value"]
            if "clause" in inputs and inputs["clause"]["value"] != "NULL":
                req += " WHERE " + inputs["clause"]["value"]
            if "search" in inputs and inputs["search"]["value"] != "NULL" and inputs["search"]["value"] != "asc":
                req += " WHERE "
                print(req, file=sys.stderr)
                cnt = 0
                print(req, file=sys.stderr)
                for i in range(0, len(tmp)):
                    if cnt > 0:
                        req += " OR "
                    req += tmp[i][1] + "::varchar like '%" + inputs["search"]["value"] + "%'"
                    cnt += 1
            print(req, file=sys.stderr)
            if "sortname" in inputs and inputs["sortname"]["value"] != "NULL":
                req += " ORDER BY " + inputs["sortname"]["value"] + " " + inputs["sortorder"]["value"]
            if "limit" in inputs and inputs["limit"]["value"] != "NULL":
                if "page" in inputs and inputs["page"]["value"] != "":
                    req += " OFFSET " + str((int(inputs["page"]["value"]) - 1) * int(inputs["limit"]["value"]))
                    page = inputs["page"]["value"]
                    req += " LIMIT " + inputs["limit"]["value"]
                else:
                    page = 1
                    req += " LIMIT 10"
            print(req, file=sys.stderr)
            res = db.execute(req)
            if res != False:
                rows = []
                for i in range(0, len(res)):
                    res0 = []
                    for k in range(0, len(res[i])):
                        try:
                            tmp = str(res[i][k].decode('utf-8'))
                            # print(dir(tmp), file=sys.stderr)
                        except Exception as e:
                            # print(e, file=sys.stderr)
                            tmp = str(res[i][k])
                        res0 += [str(tmp)]
                    if len(geom) > 0:
                        for j in range(0, len(geom)):
                            res0[geom[j]] = "GEOMETRY"
                    if len(files) > 0:
                        for j in range(0, len(files)):
                            res0[files[j]] = "BINARY FILE"
                    rows += [{"id": res[i][pkey], "cell": res0}]
                outputs["Result"]["value"] = json.dumps({"page": page, "total": total, "rows": rows}, ensure_ascii=False)
                return zoo.SERVICE_SUCCEEDED
            else:
                print("unable to run request", file=sys.stderr)
                return zoo.SERVICE_FAILED
    else:
        print("Unable to connect", file=sys.stderr)
        return zoo.SERVICE_FAILED
def deleteTuple(conf, inputs, outputs):
    db = pgConnection(conf, inputs["dataStore"]["value"])
    db.parseConf()
    if db.connect():
        res = db.execute("DELETE FROM " + inputs["table"]["value"] + " WHERE " + inputs["clause"]["value"])
        if res == False:
            conf = db.conf
            return zoo.SERVICE_FAILED
        else:
            db.conn.commit()
            outputs["Result"]["value"] = "Tuple deleted"
            return zoo.SERVICE_SUCCEEDED
    else:
        conf = db.conf
        return zoo.SERVICE_FAILED
# psycopg2 and json are already imported at the top; only adapt() is needed here
from psycopg2.extensions import adapt
def editTuple(conf, inputs, outputs):
    # TODO: confirm assumption: inputs is a Python 3 dictionary object
    getTableDescription(conf, inputs, outputs)
    desc = eval(outputs["Result"]["value"].replace("null", "None"))
    tmp = json.loads(inputs["obj"]["value"])
    if "clause" in inputs and inputs["clause"]["value"] != "NULL":
        req = "UPDATE " + inputs["table"]["value"] + " set "
        fields = ""
        tkeys = list(tmp.keys())
        for i in tkeys:
            fd = None
            for k in desc:
                if k[1] == i:
                    fd = k[2]
            if fd is not None:
                print(tmp, file=sys.stderr)
                print(fd, file=sys.stderr)
                td = testDesc(tmp[i], fd)
                if td is not None:
                    if fields != "":
                        fields += ", "
                    fields += '"' + i + '"=' + td
        if "content" in inputs:
            if fields != "":
                fields += ","
            print(inputs["content"]["value"], file=sys.stderr)
            tmp1 = inputs["content"]["value"]
            fields += '"content"=%s' % adapt(
                inputs["content"]["value"].replace('<?xml version="1.0" encoding="utf-8"?>\n', ''))
        req += fields + " WHERE " + inputs["clause"]["value"]
        outputs["Result"]["value"] = "Tuple updated"
    else:
        req = "INSERT INTO " + inputs["table"]["value"] + " "
        fields = "("
        values = "("
        cnt = 0
        for i in tmp:
            fd = None
            for k in desc:
                if k[1] == i:
                    fd = k[2]
            td = testDesc(tmp[i], fd)
            if td is not None:
                if fields != "(":
                    fields += ","
                if values != "(":
                    values += ","
                fields += i
                values += td
                cnt += 1
        if list(inputs.keys()).count("content") > 0:
            if fields != "(":
                fields += ","
            if values != "(":
                values += ","
            fields += "content"
            values += '%s' % adapt(inputs["content"]["value"].replace('<?xml version="1.0" encoding="utf-8"?>\n', ''))
        fields += ")"
        values += ")"
        req += fields + " VALUES " + values
        outputs["Result"]["value"] = "Tuple inserted"
    print(req.encode("utf-8"), file=sys.stderr)
    db = pgConnection(conf, inputs["dataStore"]["value"])
    db.parseConf()
    if db.connect():
        try:
            res = db.execute(req)
            if res == False:
                conf["lenv"]["message"] = db.conf["lenv"]["message"]
                return zoo.SERVICE_FAILED
            db.conn.commit()
            # print(res, file=sys.stderr)
            return zoo.SERVICE_SUCCEEDED
        except Exception as e:
            conf["lenv"]["message"] = "Unable to run the request " + str(e)
            return zoo.SERVICE_FAILED
def testDesc(val, desc):
if desc == "bool":
if val == "t" or val:
return "true"
else:
return "false"
if desc.count("char") > 0 or desc.count("text") > 0:
if desc.count("varchar(40)"):
if val != 'NULL':
return "'" + mm_md5(val) + "'"
else:
return None
else:
if val != 'NULL':
tmp = adapt(val)#.encode('utf-8').decode('utf-8'))
tmp.encoding = "utf-8"
return str(tmp)#.decode('utf-8')
else:
return "NULL"
else:
if desc.count("date") > 0:
tmp = val.split("/")
return "'" + tmp[2] + "-" + tmp[1] + "-" + tmp[0] + "'"
else:
if desc.count("geometry") > 0:
if val != 'NULL':
return "'" + val + "'"
else:
return val
else:
return val
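
# Illustrative behaviour of testDesc (hypothetical values, shown as comments only):
#   testDesc("t", "bool")           -> "true"
#   testDesc("25/12/2020", "date")  -> "'2020-12-25'"
#   testDesc("NULL", "text")        -> "NULL"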
def fetchType(conf, ftype):
db = pgConnection(conf, conf["main"]["dbuserName"])
db.parseConf()
if db.connect():
res = db.execute("SELECT code from mm_tables.ftypes where id=" + ftype)
if res:
return str(res[0][0])
return None
def addColumn(conf, inputs, outputs):
print(inputs["dataStore"]["value"], file=sys.stderr)
db = pgConnection(conf, inputs["dataStore"]["value"])
db.parseConf()
req = []
if db.connect():
if inputs["field_type"]["value"] != "18":
req += ["ALTER TABLE quote_ident(" + inputs["table"]["value"] + ") ADD COLUMN " + inputs["field_name"][
"value"] + " " + fetchType(conf, inputs["field_type"]["value"])]
outputs["Result"]["value"] = zoo._("Column added")
else:
tblInfo = inputs["table"]["value"].split(".")
            if len(tblInfo) == 1:
                # No schema prefix given: default to the public schema
                # (the previous code referenced an undefined name `tmpl` and
                # indexed past the end of the one-element list)
                tblInfo = ["public", tblInfo[0]]
req += ["SELECT AddGeometryColumn('" + tblInfo[0] + "','" + tblInfo[
1] + "','wkb_geometry',(select srid from spatial_ref_sys where auth_name||':'||auth_srid = '" +
inputs["proj"]["value"] + "'),'" + inputs["geo_type"]["value"] + "',2)"]
outputs["Result"]["value"] = zoo._("Geometry column added.")
if list(inputs.keys()).count("geo_x") > 0 and list(inputs.keys()).count("geo_y") > 0:
req += ["CREATE TRIGGER mm_tables_" + inputs["table"]["value"].replace(".",
"_") + "_update_geom BEFORE UPDATE OR INSERT ON " +
inputs["table"][
"value"] + " FOR EACH ROW EXECUTE PROCEDURE automatically_update_geom_property('" +
inputs["geo_x"]["value"] + "','" + inputs["geo_y"]["value"] + "','" + inputs["proj"][
"value"] + "')"]
outputs["Result"]["value"] += " " + zoo._("Trigger in place")
print(req, file=sys.stderr)
for i in range(0, len(req)):
if not (db.execute(req[i])):
return zoo.SERVICE_FAILED
db.conn.commit()
return zoo.SERVICE_SUCCEEDED
else:
conf["lenv"]["message"] = zoo._("Unable to connect")
return zoo.SERVICE_FAILED
| 54.725806 | 2,322 | 0.536045 | 4,293 | 33,930 | 4.174237 | 0.073142 | 0.012723 | 0.026116 | 0.022321 | 0.822824 | 0.800837 | 0.794531 | 0.775893 | 0.771429 | 0.768862 | 0 | 0.017056 | 0.300147 | 33,930 | 619 | 2,323 | 54.814216 | 0.737598 | 0.098998 | 0 | 0.721137 | 0 | 0.060391 | 0.337327 | 0.045931 | 0 | 0 | 0 | 0.001616 | 0 | 1 | 0.028419 | false | 0 | 0.026643 | 0 | 0.134991 | 0.053286 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
8f31fe570d9ae4f3cf87da4f3445c66fe7790cc6 | 1,739 | py | Python | tests/test_errors.py | vishalbelsare/cointanalysis | ae21c520dfe500fe535265e93df4a36f4d012069 | [
"BSD-3-Clause"
] | 27 | 2020-01-03T03:36:42.000Z | 2022-03-28T06:47:32.000Z | tests/test_errors.py | simaki/cointanalysis | ae21c520dfe500fe535265e93df4a36f4d012069 | [
"BSD-3-Clause"
] | 26 | 2020-01-03T09:02:21.000Z | 2022-02-22T01:01:48.000Z | tests/test_errors.py | vishalbelsare/cointanalysis | ae21c520dfe500fe535265e93df4a36f4d012069 | [
"BSD-3-Clause"
] | 8 | 2021-02-09T22:19:18.000Z | 2022-02-23T19:45:24.000Z | import numpy as np
import pytest
from cointanalysis import CointAnalysis
from cointanalysis._stat import StationarityTester
from cointanalysis._utils import check_shape
# --------------------------------------------------------------------------------
@pytest.mark.parametrize("n_features", [0, 1, 3])
def test_check_shape(n_features):
with pytest.raises(ValueError):
X = np.random.randn(100, n_features)
check_shape(X, n_features=2)
def test_stat_method():
X = np.random.randn(100)
with pytest.raises(ValueError):
StationarityTester(method="hoge").pvalue(X)
def test_stat_regression():
X = np.random.randn(100)
with pytest.raises(ValueError):
StationarityTester(regression="hoge").pvalue(X)
def test_coint_fit():
X = np.random.randn(100, 2)
with pytest.raises(ValueError):
coint = CointAnalysis(method="hoge")
coint.fit(X)
with pytest.raises(ValueError):
coint = CointAnalysis(axis=2)
coint.fit(X)
with pytest.raises(ValueError):
coint = CointAnalysis(trend="ct")
coint.fit(X)
def test_coint_test():
X = np.random.randn(100, 2)
with pytest.raises(ValueError):
coint = CointAnalysis(method="hoge")
coint.test(X)
with pytest.raises(ValueError):
coint = CointAnalysis(axis=2)
coint.test(X)
with pytest.raises(ValueError):
coint = CointAnalysis(trend="ct")
coint.test(X)
# def test_collinear():
# coint = CointAnalysis(axis='PCA')
# x = np.random.randn(1000).cumsum()
# small_noise = 0.001 * np.random.randn(1000)
# X = np.stack([x, x + small_noise], axis=1)
# with pytest.raises(RuntimeWarning):
# coint.test(X)
| 24.842857 | 82 | 0.626797 | 211 | 1,739 | 5.066351 | 0.222749 | 0.093545 | 0.149673 | 0.218896 | 0.530402 | 0.480823 | 0.480823 | 0.480823 | 0.480823 | 0.467727 | 0 | 0.026049 | 0.20529 | 1,739 | 69 | 83 | 25.202899 | 0.747467 | 0.193214 | 0 | 0.625 | 0 | 0 | 0.021521 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.125 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8f3300062c5c25c6b9220dca88740586126db017 | 575 | py | Python | perma_web/perma/migrations/0004_auto_20160506_1632.py | rachelaus/perma | 36c05080520ea3ffce465dbc383795c060fa4112 | [
"MIT",
"Unlicense"
] | 317 | 2015-02-12T16:53:34.000Z | 2022-03-14T23:38:04.000Z | perma_web/perma/migrations/0004_auto_20160506_1632.py | rachelaus/perma | 36c05080520ea3ffce465dbc383795c060fa4112 | [
"MIT",
"Unlicense"
] | 2,069 | 2015-01-06T20:09:24.000Z | 2022-03-31T15:44:36.000Z | perma_web/perma/migrations/0004_auto_20160506_1632.py | rachelaus/perma | 36c05080520ea3ffce465dbc383795c060fa4112 | [
"MIT",
"Unlicense"
] | 69 | 2015-01-12T18:56:07.000Z | 2022-02-22T19:57:10.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('perma', '0003_capturejob'),
]
operations = [
migrations.AddField(
model_name='historicallink',
name='warc_size',
field=models.IntegerField(null=True, blank=True),
),
migrations.AddField(
model_name='link',
name='warc_size',
field=models.IntegerField(null=True, blank=True),
),
]
| 23 | 61 | 0.582609 | 53 | 575 | 6.132075 | 0.584906 | 0.110769 | 0.141538 | 0.166154 | 0.32 | 0.32 | 0.32 | 0.32 | 0.32 | 0.32 | 0 | 0.012376 | 0.297391 | 575 | 24 | 62 | 23.958333 | 0.792079 | 0.036522 | 0 | 0.444444 | 0 | 0 | 0.101449 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.111111 | 0 | 0.277778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8f332ee1aa858c59191df24f355feb7d4151e658 | 3,052 | py | Python | Baseline/NABA/obs_naba.py | sarthak-chakraborty/PARIMA | c6ceb6e17fc3c934603fa843febc42a8b6ee5bb1 | [
"MIT"
] | 13 | 2021-03-06T16:53:33.000Z | 2022-02-04T20:28:13.000Z | Baseline/NABA/obs_naba.py | sarthak-chakraborty/Adaptive-360-video | c6ceb6e17fc3c934603fa843febc42a8b6ee5bb1 | [
"MIT"
] | 6 | 2021-06-02T08:08:09.000Z | 2022-03-12T00:58:26.000Z | Baseline/NABA/obs_naba.py | sarthak-chakraborty/Adaptive-360-video | c6ceb6e17fc3c934603fa843febc42a8b6ee5bb1 | [
"MIT"
] | 3 | 2021-05-26T03:32:04.000Z | 2021-07-17T14:34:20.000Z | import numpy as np
import math
import pickle
from naba import get_data, tiling, alloc_bitrate, calc_qoe
import argparse
import json
def main():
parser = argparse.ArgumentParser(description='Run NABA algorithm and calculate Average QoE of a video for all users')
parser.add_argument('-D', '--dataset', type=int, required=True, help='Dataset ID (1 or 2)')
parser.add_argument('-T', '--topic', required=True, help='Topic in the particular Dataset (video name)')
parser.add_argument('--fps', type=int, required=True, help='fps of the video')
parser.add_argument('-O', '--offset', type=int, default=0, help='Offset for the start of the video in seconds (when the data was logged in the dataset) [default: 0]')
parser.add_argument('--fpsfrac', type=float, default=1.0, help='Fraction with which fps is to be multiplied to change the chunk size [default: 1.0]')
parser.add_argument('-Q', '--quality', required=True, help='Preferred bitrate quality of the video (360p, 480p, 720p, 1080p, 1440p)')
args = parser.parse_args()
if args.dataset != 1 and args.dataset != 2:
print("Incorrect value of the Dataset ID provided!!...")
print("======= EXIT ===========")
exit()
pred_nframe = args.fps * args.fpsfrac
# Get the necessary information regarding the dimensions of the video
print("Reading JSON...")
    with open('./meta.json') as meta_file:
        jsonRead = json.load(meta_file)
nusers = jsonRead["dataset"][args.dataset-1]["nusers"]
width = jsonRead["dataset"][args.dataset-1]["width"]
height = jsonRead["dataset"][args.dataset-1]["height"]
view_width = jsonRead["dataset"][args.dataset-1]["view_width"]
view_height = jsonRead["dataset"][args.dataset-1]["view_height"]
milisec = jsonRead["dataset"][args.dataset-1]["milisec"]
pref_bitrate = jsonRead["bitrates"][args.quality]
ncol_tiles = jsonRead["ncol_tiles"]
nrow_tiles = jsonRead["nrow_tiles"]
player_width = jsonRead["player_width"]
player_height = jsonRead["player_height"]
final_qoe = []
for usernum in range(nusers):
print('User_{}'.format(usernum))
data, frame_nos = [],[]
data, frame_nos, max_frame = get_data(data, frame_nos, args.dataset, args.topic, usernum+1, args.fps, milisec, width, height, view_width, view_height)
act_tiles, chunk_frames = tiling(data, frame_nos, max_frame, width, height, nrow_tiles, ncol_tiles, args.fps, pred_nframe)
# To be consistent with our model
i = 0
while True:
curr_frame=frame_nos[i]
if curr_frame<5*args.fps:
i += 1
else:
break
frame_nos = frame_nos[i:]
vid_bitrate = alloc_bitrate(frame_nos, chunk_frames, pref_bitrate, nrow_tiles, ncol_tiles)
q = calc_qoe(vid_bitrate, act_tiles, frame_nos, chunk_frames, width, height, nrow_tiles, ncol_tiles, player_width, player_height)
final_qoe.append(q)
print("QoE: {}".format(q))
# Find averaged results
final_qoe.sort()
avg_qoe = np.mean(final_qoe)
# Print averaged results
    print('Topic: ' + args.topic)
print('Qoe NABA')
print('Pred nframe',(args.fps*args.fpsfrac))
print('Avg. QoE: ',avg_qoe)
print('\n\n')
if __name__ == "__main__":
main()
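
# Example invocation (hypothetical dataset/topic values -- substitute your own):
#   python obs_naba.py -D 1 -T paris --fps 30 -Q 720p --fpsfrac 0.5
# This predicts one chunk every 15 frames (fps * fpsfrac) and reports the
# average QoE across all users listed in meta.json.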
| 35.488372 | 167 | 0.713303 | 452 | 3,052 | 4.652655 | 0.311947 | 0.047076 | 0.039943 | 0.07418 | 0.1864 | 0.119829 | 0 | 0 | 0 | 0 | 0 | 0.01402 | 0.135321 | 3,052 | 85 | 168 | 35.905882 | 0.782872 | 0.047182 | 0 | 0 | 0 | 0.016393 | 0.260076 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.016393 | false | 0 | 0.098361 | 0 | 0.114754 | 0.163934 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f391da0df73d8436dc8322ba02d27a6dffd570f | 2,635 | py | Python | tsserver/features/environment.py | m4tx/techswarm-server | a04a3e2a731c3a086aa8476f66adda64973dcd66 | [
"MIT"
] | 1 | 2016-08-12T14:27:31.000Z | 2016-08-12T14:27:31.000Z | tsserver/features/environment.py | TechSwarm/techswarm-server | a04a3e2a731c3a086aa8476f66adda64973dcd66 | [
"MIT"
] | null | null | null | tsserver/features/environment.py | TechSwarm/techswarm-server | a04a3e2a731c3a086aa8476f66adda64973dcd66 | [
"MIT"
] | null | null | null | import base64
import os
import tempfile
from flask import json
from werkzeug.datastructures import Headers
import tsserver
from tsserver import configutils
# If set to True, each time the test is run, new database is created as a
# temporary file. If the value is equal to False, tests will be using SQLite
# in-memory database.
USE_DB_TEMP_FILE = False
def before_scenario(context, scenario):
if USE_DB_TEMP_FILE:
context.db_fd, context.db_url = tempfile.mkstemp()
db_url = 'sqlite:///' + context.db_url
else:
db_url = 'sqlite://'
tsserver.app.config['SQLALCHEMY_DATABASE_URI'] = db_url
# Ensure the tests are actually run in temporary database
assert str(tsserver.db.engine.url) == db_url
tsserver.app.config['TESTING'] = True
tsserver.db.create_all()
context.app = tsserver.app.test_client()
def authenticate(username=None, password=None):
if username is None and password is None:
username = tsserver.app.config['USERNAME']
password = tsserver.app.config['PASSWORD']
h = Headers()
val = 'Basic ' + base64.b64encode(
(username + ':' + password).encode('utf-8')
).decode('utf-8')
h.add('Authorization', val)
context.additional_headers = h
def request(url, method='GET', *args, **kwargs):
"""
Wrapper over Flask.open function that parses returned data as JSON
:param method: HTTP method to be used. GET is used by default
:param url: URL to retrieve
:return: Response object
"""
headers = Headers()
if 'additional_headers' in context:
headers.extend(context.additional_headers)
        if 'headers' in kwargs:
            # pop so 'headers' is not passed twice to app.open below
            headers.extend(kwargs.pop('headers'))
rv = context.app.open(url, method=method, headers=headers, *args,
**kwargs)
rv.json_data = json.loads(rv.data)
return rv
context.authenticate = authenticate
context.request = request
def remove_uploaded(filename):
path = os.path.join(configutils.get_upload_dir(), filename)
if os.path.isfile(path):
os.remove(path)
def after_scenario(context, scenario):
tsserver.db.session.remove()
tsserver.db.drop_all()
if USE_DB_TEMP_FILE:
os.close(context.db_fd)
os.unlink(context.db_url)
# If test photo was uploaded, remove it
if 'test_photo_url' in context:
remove_uploaded(context.test_photo_url)
def after_all(context):
# Remove uploaded images
remove_uploaded('test001.jpg')
remove_uploaded('test002.jpg')
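
# Usage sketch (hypothetical step module, not part of environment.py): the
# helpers bound onto ``context`` in before_scenario are meant to be called
# from step implementations, e.g.:
#
#     @when('the client requests "{url}"')
#     def step_impl(context, url):
#         context.authenticate()
#         context.response = context.request(url)
#         assert context.response.json_data is not None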
| 29.606742 | 76 | 0.656546 | 340 | 2,635 | 4.970588 | 0.367647 | 0.02071 | 0.040237 | 0.023077 | 0.017751 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007025 | 0.243643 | 2,635 | 88 | 77 | 29.943182 | 0.840943 | 0.17723 | 0 | 0.035714 | 0 | 0 | 0.078487 | 0.010875 | 0 | 0 | 0 | 0 | 0.017857 | 1 | 0.107143 | false | 0.071429 | 0.125 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
8f397a404c6c6b7845cf8d3cc2ad927c19c0bc7f | 1,568 | py | Python | tests/time_delay_layers_test.py | veqtor/veqtor_keras | 303f81b7c6aaa7962b288541275fe7ea618804b9 | [
"MIT"
] | 1 | 2020-08-07T14:47:16.000Z | 2020-08-07T14:47:16.000Z | tests/time_delay_layers_test.py | veqtor/veqtor_keras | 303f81b7c6aaa7962b288541275fe7ea618804b9 | [
"MIT"
] | null | null | null | tests/time_delay_layers_test.py | veqtor/veqtor_keras | 303f81b7c6aaa7962b288541275fe7ea618804b9 | [
"MIT"
] | null | null | null | import tensorflow as tf
from tensorflow.keras.utils import custom_object_scope
from tensorflow.python.keras.testing_utils import layer_test
from veqtor_keras.layers.time_delay_layers import TimeDelayLayer1D, DepthGroupwiseTimeDelayLayer1D, \
DepthGroupwiseTimeDelayLayerFake2D, TimeDelayLayerFake2D
class TimeDelayLayer1DTest(tf.test.TestCase):
def test_simple(self):
with custom_object_scope({'TimeDelayLayer1D': TimeDelayLayer1D}):
layer_test(
TimeDelayLayer1D, kwargs={'output_dim': 4}, input_shape=(5, 32, 3))
class SeparableTimeDelayLayer1DTest(tf.test.TestCase):
def test_simple(self):
with custom_object_scope(
{'DepthGroupwiseTimeDelayLayer1D': DepthGroupwiseTimeDelayLayer1D,
'TimeDelayLayer1D': TimeDelayLayer1D}):
layer_test(
DepthGroupwiseTimeDelayLayer1D, kwargs={'output_mul': 2}, input_shape=(5, 32, 3))
class SeparableTimeDelayLayerFake2DTest(tf.test.TestCase):
def test_simple(self):
with custom_object_scope({'DepthGroupwiseTimeDelayLayerFake2D': DepthGroupwiseTimeDelayLayerFake2D}):
layer_test(
DepthGroupwiseTimeDelayLayerFake2D, input_shape=(5, 16, 16, 3))
class TimeDelayLayerFake2DTest(tf.test.TestCase):
def test_simple(self):
with custom_object_scope({'TimeDelayLayerFake2D': TimeDelayLayerFake2D}):
layer_test(
TimeDelayLayerFake2D, kwargs={'output_dim': 4}, input_shape=(5, 16, 16, 3))
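

# A minimal usage sketch outside the test harness. The surrounding Sequential
# model is an illustrative assumption; only TimeDelayLayer1D(output_dim=...)
# is taken from the tests above.
def example_time_delay_model():
    model = tf.keras.Sequential([
        tf.keras.layers.InputLayer(input_shape=(32, 3)),
        TimeDelayLayer1D(output_dim=4),
    ])
    model.compile(optimizer='adam', loss='mse')
    return model
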
if __name__ == '__main__':
tf.test.main()
| 38.243902 | 109 | 0.720026 | 147 | 1,568 | 7.421769 | 0.319728 | 0.054995 | 0.07791 | 0.062328 | 0.284143 | 0.284143 | 0.240147 | 0.190651 | 0.190651 | 0.190651 | 0 | 0.035377 | 0.188776 | 1,568 | 40 | 110 | 39.2 | 0.822327 | 0 | 0 | 0.275862 | 0 | 0 | 0.098214 | 0.040816 | 0 | 0 | 0 | 0 | 0 | 1 | 0.137931 | false | 0 | 0.137931 | 0 | 0.413793 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f39b9e08ffeda9e93fcc277f54d4d6aea6ba49a | 208 | py | Python | Python/factorial.py | tinkpo/Curso-Introducci-n-a-la-Computaci-n-para-Matem-ticos | 72b5db944c759c0d977553fa1bc15a11ac110909 | [
"MIT"
] | null | null | null | Python/factorial.py | tinkpo/Curso-Introducci-n-a-la-Computaci-n-para-Matem-ticos | 72b5db944c759c0d977553fa1bc15a11ac110909 | [
"MIT"
] | null | null | null | Python/factorial.py | tinkpo/Curso-Introducci-n-a-la-Computaci-n-para-Matem-ticos | 72b5db944c759c0d977553fa1bc15a11ac110909 | [
"MIT"
] | null | null | null | def fact(n):
if(n==0):
return 1
else:
return n*fact(n-1)
x=int(input("Ingrese el valor al que le quiere calcular el factorial "))
print('El factorial de ', x, 'es ', fact(x))
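
# Quick self-check (an added sketch, not part of the original exercise): the
# recursive fact() should agree with the standard library for small inputs.
import math
assert all(fact(n) == math.factorial(n) for n in range(10))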
| 23.111111 | 73 | 0.5625 | 35 | 208 | 3.342857 | 0.657143 | 0.08547 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02027 | 0.288462 | 208 | 8 | 74 | 26 | 0.77027 | 0 | 0 | 0 | 0 | 0 | 0.376884 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0 | 0 | 0.428571 | 0.142857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8f3f574c981c25a097dafd8bdfc599460d07e952 | 660 | py | Python | PyTester/data/Root.py | Sildra/PyTester | ebe16dc4dc169416ee839adc03e42806d8d57620 | [
"Apache-2.0"
] | null | null | null | PyTester/data/Root.py | Sildra/PyTester | ebe16dc4dc169416ee839adc03e42806d8d57620 | [
"Apache-2.0"
] | null | null | null | PyTester/data/Root.py | Sildra/PyTester | ebe16dc4dc169416ee839adc03e42806d8d57620 | [
"Apache-2.0"
] | null | null | null | from data.Category import Category
class Root(Category):
"""description of class"""
instance = None
def __init__(self, args, name="Root", depth=-1):
super().__init__(args.path, name, depth)
global instance
instance = self
self.args = args
def accept(self, visitor):
visitor.visit(self)
if len(self.categories) > 0:
for node in self.categories.values():
node.accept(visitor)
visitor.leave(self, node)
    @staticmethod
    def get_root_option(a, b):
        Root.instance.get_option(a, b)

    @staticmethod
    def args():
        return Root.instance.args
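

# A minimal visitor sketch (hypothetical class; it assumes Category nodes
# expose a ``name`` attribute): accept() above walks the tree depth-first and
# calls visit()/leave() around each child, so a printer only needs two hooks.
class PrintVisitor:
    def visit(self, node):
        print("enter", node.name)

    def leave(self, parent, node):
        print("leave", node.name)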
| 23.571429 | 52 | 0.589394 | 77 | 660 | 4.909091 | 0.493506 | 0.042328 | 0.042328 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004348 | 0.30303 | 660 | 27 | 53 | 24.444444 | 0.817391 | 0.030303 | 0 | 0.1 | 0 | 0 | 0.006309 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.05 | 0.05 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f41984526bd0a507c9c13a13aba537060703cb9 | 2,880 | py | Python | predict.py | wangruichens/textcnn | 99dadc2da13d6dff48cc824492788046ceb82031 | [
"Apache-2.0"
] | null | null | null | predict.py | wangruichens/textcnn | 99dadc2da13d6dff48cc824492788046ceb82031 | [
"Apache-2.0"
] | null | null | null | predict.py | wangruichens/textcnn | 99dadc2da13d6dff48cc824492788046ceb82031 | [
"Apache-2.0"
] | null | null | null | # @Time : 18-11-5
# @Author : wangrc
# @Refers :
# @Outputs :
# @Desc :
import tensorflow as tf
import numpy as np
import os
import time
import datetime
import data_helpers
from text_cnn import TextCNN
from tensorflow.contrib import learn
import csv
import read_nlpcc
# Eval Parameters
tf.flags.DEFINE_integer("batch_size", 64, "Batch Size (default: 64)")
tf.flags.DEFINE_string("checkpoint_dir", "./runs/1541395146/checkpoints/", "Checkpoint directory from training run")
tf.flags.DEFINE_boolean("eval_train", False, "Evaluate on all training data")
# Misc Parameters
tf.flags.DEFINE_boolean("allow_soft_placement", True, "Allow device soft device placement")
tf.flags.DEFINE_boolean("log_device_placement", False, "Log placement of ops on devices")
#
FLAGS = tf.flags.FLAGS
x_raw=read_nlpcc.load_app_data()
# Map data into vocabulary
vocab_path = os.path.join(FLAGS.checkpoint_dir, "..", "vocab")
vocab_processor = learn.preprocessing.VocabularyProcessor.restore(vocab_path)
x_test = np.array(list(vocab_processor.transform(x_raw)))
print("\nEvaluating...\n")
# Evaluation
# ==================================================
checkpoint_dir="./runs/1541395146/checkpoints/"
checkpoint_file = tf.train.latest_checkpoint(checkpoint_dir)
graph = tf.Graph()
with graph.as_default():
session_conf = tf.ConfigProto(
allow_soft_placement=FLAGS.allow_soft_placement,
log_device_placement=FLAGS.log_device_placement)
sess = tf.Session(config=session_conf)
with sess.as_default():
# Load the saved meta graph and restore variables
saver = tf.train.import_meta_graph("{}.meta".format(checkpoint_file))
saver.restore(sess, checkpoint_file)
# Get the placeholders from the graph by name
input_x = graph.get_operation_by_name("input_x").outputs[0]
# input_y = graph.get_operation_by_name("input_y").outputs[0]
dropout_keep_prob = graph.get_operation_by_name("dropout_keep_prob").outputs[0]
# Tensors we want to evaluate
predictions = graph.get_operation_by_name("output/predictions").outputs[0]
# Generate batches for one epoch
batches = data_helpers.batch_iter(list(x_test), FLAGS.batch_size, 1, shuffle=False)
# Collect the predictions here
all_predictions = []
for x_test_batch in batches:
print('waiting')
batch_predictions = sess.run(predictions, {input_x: x_test_batch, dropout_keep_prob: 1.0})
all_predictions = np.concatenate([all_predictions, batch_predictions])
# Save the evaluation to a csv
predictions_human_readable = np.column_stack((np.array(x_raw), all_predictions))
out_path = os.path.join(FLAGS.checkpoint_dir, "..", "prediction.csv")
print("Saving evaluation to {0}".format(out_path))
with open(out_path, 'w') as f:
csv.writer(f).writerows(predictions_human_readable)
| 36 | 116 | 0.721875 | 390 | 2,880 | 5.092308 | 0.369231 | 0.021148 | 0.032729 | 0.038268 | 0.131923 | 0.108761 | 0.032226 | 0 | 0 | 0 | 0 | 0.015158 | 0.152431 | 2,880 | 79 | 117 | 36.455696 | 0.798443 | 0.160417 | 0 | 0 | 0 | 0 | 0.171321 | 0.02501 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.23913 | 0 | 0.23913 | 0.065217 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f42142e2fce9843103335bc343e1cb5be87d4ce | 350 | py | Python | pypdfkit/__init__.py | BogdanGrebenuk/pypdfkit | da92782e1cfa3338e096f4a32e48b28a9730c29f | [
"MIT"
] | 11 | 2019-06-12T13:29:10.000Z | 2019-12-19T12:20:41.000Z | pypdfkit/__init__.py | BogdanGrebenuk/pypdfkit | da92782e1cfa3338e096f4a32e48b28a9730c29f | [
"MIT"
] | null | null | null | pypdfkit/__init__.py | BogdanGrebenuk/pypdfkit | da92782e1cfa3338e096f4a32e48b28a9730c29f | [
"MIT"
] | 1 | 2020-02-05T17:48:46.000Z | 2020-02-05T17:48:46.000Z | from .abc_pdf import *
from .datamanager import *
from .entities import *
from .pyobj2html import *
from .pypdfkit import *
from .templatemanager import *
__all__ = (
abc_pdf.__all__
+ datamanager.__all__
+ entities.__all__
+ pyobj2html.__all__
+ pypdfkit.__all__
+ templatemanager.__all__
+ ["name"]
)
name = "pypdfkit" | 19.444444 | 30 | 0.694286 | 36 | 350 | 5.916667 | 0.305556 | 0.234742 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00722 | 0.208571 | 350 | 18 | 31 | 19.444444 | 0.761733 | 0 | 0 | 0 | 0 | 0 | 0.034188 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.375 | 0 | 0.375 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
8f443933ce391a779da9a70c342fe4fdd0a98f7a | 414 | py | Python | rltk/blocking/__init__.py | ckxz105/rltk | 2d08269002c00c0218421c8c2dc0cc7c4f677131 | [
"MIT"
] | null | null | null | rltk/blocking/__init__.py | ckxz105/rltk | 2d08269002c00c0218421c8c2dc0cc7c4f677131 | [
"MIT"
] | null | null | null | rltk/blocking/__init__.py | ckxz105/rltk | 2d08269002c00c0218421c8c2dc0cc7c4f677131 | [
"MIT"
] | null | null | null | from rltk.blocking.block import Block
from rltk.blocking.block_black_list import BlockBlackList
from rltk.blocking.block_generator import BlockGenerator
from rltk.blocking.hash_block_generator import HashBlockGenerator
from rltk.blocking.token_block_generator import TokenBlockGenerator
from rltk.blocking.canopy_block_generator import CanopyBlockGenerator
from rltk.blocking.blocking_helper import BlockingHelper
| 51.75 | 69 | 0.898551 | 52 | 414 | 6.961538 | 0.346154 | 0.154696 | 0.309392 | 0.174033 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.067633 | 414 | 7 | 70 | 59.142857 | 0.937824 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
8f46e0716183980d44e4a86f4c6e12b6c8d6a358 | 1,422 | py | Python | innuendo/core/interface.py | innuendoio/innuendo-agent-python | bcd79ddaf39083fa6498d1c9af2be2d79e495fc2 | [
"MIT"
] | null | null | null | innuendo/core/interface.py | innuendoio/innuendo-agent-python | bcd79ddaf39083fa6498d1c9af2be2d79e495fc2 | [
"MIT"
] | null | null | null | innuendo/core/interface.py | innuendoio/innuendo-agent-python | bcd79ddaf39083fa6498d1c9af2be2d79e495fc2 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Backwards compatibility imports
from __future__ import absolute_import, division, print_function
from builtins import *
# Imports
import sys
import imp
import os
import argparse
import traceback
from innuendo.utils import file_manager as fm, parser
class TerminalInterface():
def __init__(self):
try:
# Private constants for PATHs
self._PATH = os.path.dirname(os.path.abspath(__file__))
self._CONF_FOLDER_PATH = 'config'
self._CONF_FILE_PATH = '{}/../{}/conf.yml'.format(self._PATH, self._CONF_FOLDER_PATH)
# Loads a configuration file
self.conf = fm.load_yaml(self._CONF_FILE_PATH)
self.arguments = self.conf.get('arguments', dict())
        except Exception:
            # IOError is a subclass of Exception, so a single handler suffices
            traceback.print_exc(file=sys.stdout)
def process_args(self):
arg_p = argparse.ArgumentParser()
# Sets the arguments
for k, v in self.arguments.items():
arg_p.add_argument(k, help=v.get(
'help', ''), type=parser.get_value_type(v.get('type', '')))
args = arg_p.parse_args()
print(args)
print(args.command)
def run(self):
try:
self.process_args()
print('Run Forrest')
except Exception as e:
print(e)
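

# A sketch of the conf.yml layout this class reads (hypothetical values; the
# only structure consumed above is a top-level ``arguments`` mapping whose
# entries supply ``help`` and ``type`` strings for argparse):
#
#   arguments:
#     command:
#       help: Command to execute
#       type: str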
| 27.346154 | 97 | 0.613221 | 175 | 1,422 | 4.748571 | 0.445714 | 0.057762 | 0.028881 | 0.043321 | 0.079422 | 0.079422 | 0.079422 | 0.079422 | 0 | 0 | 0 | 0.000978 | 0.280591 | 1,422 | 51 | 98 | 27.882353 | 0.811339 | 0.094937 | 0 | 0.176471 | 0 | 0 | 0.039844 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.088235 | false | 0 | 0.235294 | 0 | 0.352941 | 0.205882 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f483a2d601c9006e62be6731d151449cfbbb0bc | 8,108 | py | Python | IMLearn/learners/gaussian_estimators.py | guymkaplan/IML.HUJI | cd0aac71c3684bca9a64df13b0ba15d42ec88e98 | [
"MIT"
] | null | null | null | IMLearn/learners/gaussian_estimators.py | guymkaplan/IML.HUJI | cd0aac71c3684bca9a64df13b0ba15d42ec88e98 | [
"MIT"
] | null | null | null | IMLearn/learners/gaussian_estimators.py | guymkaplan/IML.HUJI | cd0aac71c3684bca9a64df13b0ba15d42ec88e98 | [
"MIT"
] | null | null | null | from __future__ import annotations
import math
import numpy as np
from numpy.linalg import inv, det, slogdet
class UnivariateGaussian:
"""
Class for univariate Gaussian Distribution Estimator
"""
def __init__(self, biased_var: bool = False) -> UnivariateGaussian:
"""
Estimator for univariate Gaussian mean and variance parameters
Parameters
----------
biased_var : bool, default=False
Should fitted estimator of variance be a biased or unbiased estimator
Attributes
----------
fitted_ : bool
Initialized as false indicating current estimator instance has not been fitted.
To be set as True in `UnivariateGaussian.fit` function.
mu_: float
Estimated expectation initialized as None. To be set in `UnivariateGaussian.fit`
function.
var_: float
Estimated variance initialized as None. To be set in `UnivariateGaussian.fit`
function.
"""
self.biased_ = biased_var
self.fitted_, self.mu_, self.var_ = False, None, None
def fit(self, X: np.ndarray) -> UnivariateGaussian:
"""
Estimate Gaussian expectation and variance from given samples
Parameters
----------
X: ndarray of shape (n_samples, )
Training data
Returns
-------
self : returns an instance of self.
Notes
-----
Sets `self.mu_`, `self.var_` attributes according to calculated estimation (where
estimator is either biased or unbiased). Then sets `self.fitted_` attribute to `True`
"""
self.mu_ = X.mean()
# n-1 is for unbiased, n is for biased
if self.biased_:
self.var_ = X.var()
else:
self.var_ = X.var(ddof=1)
self.fitted_ = True
return self
def pdf(self, X: np.ndarray) -> np.ndarray:
"""
Calculate PDF of observations under Gaussian model with fitted estimators
Parameters
----------
X: ndarray of shape (n_samples, )
Samples to calculate PDF for
Returns
-------
pdfs: ndarray of shape (n_samples, )
Calculated values of given samples for PDF function of N(mu_, var_)
Raises
------
ValueError: In case function was called prior fitting the model
"""
if not self.fitted_:
raise ValueError("Estimator must first be fitted before calling `pdf` function")
pdf_on_vector = np.vectorize(self.probability_density_func_uni)
return pdf_on_vector(self.mu_, self.var_, X)
@staticmethod
def log_likelihood(mu: float, sigma: float, X: np.ndarray) -> float:
"""
Calculate the log-likelihood of the data under a specified Gaussian model
Parameters
----------
mu : float
Expectation of Gaussian
sigma : float
Variance of Gaussian
X : ndarray of shape (n_samples, )
Samples to calculate log-likelihood with
Returns
-------
log_likelihood: float
log-likelihood calculated
"""
        return -(len(X) / 2) * math.log(2 * math.pi * sigma) - \
            np.sum((X - mu) ** 2) / (2 * sigma)
@staticmethod
def probability_density_func_uni(mu: float, sigma: float, sample: float) -> float:
"""
Computes the pdf of a Univariate Guassian Distribution, as defined
in literature.
:param mu: Expectation of Gaussian
:param sigma: Variance of Gaussian
:param sample: a single sample
:return: the PDF of a single sample
"""
coof = 1/math.sqrt(sigma * 2 * math.pi)
exponent = math.exp((-1/(2*sigma))*((sample-mu)**2))
return coof * exponent
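

# A quick usage sketch (illustrative numbers, assumed rather than taken from
# any spec): fit the estimator, then query densities and the log-likelihood.
def _univariate_example():
    samples = np.random.normal(10, 1, size=1000)
    ug = UnivariateGaussian().fit(samples)
    densities = ug.pdf(samples)  # pointwise N(mu_, var_) densities
    ll = UnivariateGaussian.log_likelihood(ug.mu_, ug.var_, samples)
    return ug.mu_, ug.var_, densities, ll
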
class MultivariateGaussian:
"""
Class for multivariate Gaussian Distribution Estimator
"""
def __init__(self):
"""
Initialize an instance of multivariate Gaussian estimator
Attributes
----------
fitted_ : bool
Initialized as false indicating current estimator instance has not been fitted.
To be set as True in `MultivariateGaussian.fit` function.
mu_: ndarray of shape (n_features,)
Estimated expectation initialized as None. To be set in `MultivariateGaussian.fit`
function.
cov_: ndarray of shape (n_features, n_features)
Estimated covariance initialized as None. To be set in `MultivariateGaussian.fit`
function.
"""
self.inv_cov_ = None
self.cov_det_ = 0
self.mu_, self.cov_ = None, None
self.fitted_ = False
def fit(self, X: np.ndarray) -> MultivariateGaussian:
"""
Estimate Gaussian expectation and covariance from given samples
Parameters
----------
X: ndarray of shape (n_samples, n_features)
Training data
Returns
-------
self : returns an instance of self
Notes
-----
Sets `self.mu_`, `self.cov_` attributes according to calculated estimation.
Then sets `self.fitted_` attribute to `True`
"""
self.mu_ = np.mean(X, axis=0)
# no need for ddof as default computes sum(X)/N-1:
self.cov_ = np.cov(X, rowvar=False)
self.cov_det_ = np.linalg.det(self.cov_)
self.inv_cov_ = np.linalg.inv(self.cov_)
self.fitted_ = True
return self
def pdf(self, X: np.ndarray):
"""
Calculate PDF of observations under Gaussian model with fitted estimators
Parameters
----------
X: ndarray of shape (n_samples, n_features)
Samples to calculate PDF for
Returns
-------
pdfs: ndarray of shape (n_samples, )
Calculated values of given samples for PDF function of N(mu_, cov_)
Raises
------
ValueError: In case function was called prior fitting the model
"""
if not self.fitted_:
raise ValueError("Estimator must first be fitted before calling `pdf` function")
        # Each sample is a full feature vector, so apply the density row-wise
        # (np.vectorize would wrongly call the density on scalar entries)
        return np.apply_along_axis(self.probability_density_func_multi, 1, X)
@staticmethod
def log_likelihood(mu: np.ndarray, cov: np.ndarray, X: np.ndarray) -> float:
"""
Calculate the log-likelihood of the data under a specified Gaussian model
Parameters
----------
mu : ndarray of shape (n_features,)
Expectation of Gaussian
cov : ndarray of shape (n_features, n_features)
covariance matrix of Gaussian
X : ndarray of shape (n_samples, n_features)
Samples to calculate log-likelihood with
Returns
-------
log_likelihood: float
log-likelihood calculated over all input data and under given parameters of Gaussian
"""
        A = np.linalg.inv(cov)
        detA = np.linalg.det(A)
        # log|cov| = -log det(A), so the normalisation term carries +(n/2) log det(A)
        coof = -((len(X) * len(A)) / 2) * np.log(2 * np.pi) + (len(X) / 2) * np.log(detA)
        delta = X - mu
        return coof - 0.5 * np.sum((delta @ A) * delta)  # <x_1, Ax_1> + ... + <x_n, Ax_n>
def probability_density_func_multi(self, samples: np.ndarray) -> float:
"""
Computes the pdf of a Multivariate Guassian Distribution, as defined
in literature.
:param mu: Expectation of Gaussian
:param cov: Covariance of Gaussian
:param samples: n samples
:return: the PDF of a multivariate gaussian sample
"""
coof = 1/math.sqrt(((math.pi * 2) ** len(self.cov_)) * self.cov_det_)
delta = samples-self.mu_
exponent = math.exp(-0.5*(np.transpose(delta) @ self.inv_cov_ @ delta))
return coof * exponent
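

# A matching sketch for the multivariate estimator (again with assumed,
# purely illustrative parameters):
def _multivariate_example():
    mu = np.array([0.0, 4.0])
    cov = np.array([[1.0, 0.2], [0.2, 2.0]])
    samples = np.random.multivariate_normal(mu, cov, size=1000)
    mg = MultivariateGaussian().fit(samples)
    densities = mg.pdf(samples)
    ll = MultivariateGaussian.log_likelihood(mg.mu_, mg.cov_, samples)
    return mg.mu_, mg.cov_, densities, ll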
| 32.562249 | 115 | 0.583004 | 958 | 8,108 | 4.818372 | 0.160752 | 0.023397 | 0.036395 | 0.038995 | 0.602686 | 0.560659 | 0.485269 | 0.485269 | 0.465771 | 0.433276 | 0 | 0.005595 | 0.316601 | 8,108 | 248 | 116 | 32.693548 | 0.827468 | 0.519364 | 0 | 0.216667 | 0 | 0 | 0.042857 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.066667 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f499412eff769d36ce3fc8f434016dea692f534 | 1,509 | py | Python | pycqed/analysis/fit_toolbox/init_guess.py | nuttamas/PycQED_py3 | 1ee35c7428d36ed42ba4afb5d4bda98140b2283e | [
"MIT"
] | 60 | 2016-08-03T10:00:18.000Z | 2021-11-10T11:46:16.000Z | pycqed/analysis/fit_toolbox/init_guess.py | nuttamas/PycQED_py3 | 1ee35c7428d36ed42ba4afb5d4bda98140b2283e | [
"MIT"
] | 512 | 2016-08-03T17:10:02.000Z | 2022-03-31T14:03:43.000Z | pycqed/analysis/fit_toolbox/init_guess.py | nuttamas/PycQED_py3 | 1ee35c7428d36ed42ba4afb5d4bda98140b2283e | [
"MIT"
] | 34 | 2016-10-19T12:00:52.000Z | 2022-03-19T04:43:26.000Z | import numpy
def lorentzian(x_data, y_data):
p=4*[0]
y_min = min(y_data)
index_y_min = y_data.tolist().index(y_min)
x_min = x_data[index_y_min]
y_max = max(y_data)
index_y_max = y_data.tolist().index(y_max)
y_mean = y_data.mean()
HM = (y_max - y_min)/2
    HM_index = index_y_min
value_found = False
index_array = numpy.linspace(index_y_min, index_y_max, abs(index_y_max-index_y_min)+1)
    if numpy.sign(index_y_min - index_y_max) > 0:
        index_array_2 = numpy.linspace(index_y_min, len(y_data), abs(index_y_min - len(y_data)) + 1)
    else:
        index_array_2 = numpy.linspace(index_y_min, 0, abs(index_y_min) + 1)
index_i = 0
    while not value_found:
        # linspace yields floats; cast before indexing
        index1 = int(index_array[index_i])
        index2 = int(index_array_2[index_i])
        if y_data[index1] > (y_max - HM):
            HM_index = index1
            value_found = True
        elif y_data[index2] > (y_max - HM):
            HM_index = index2
            value_found = True
        index_i += 1
HWHM = abs(x_data[HM_index] - x_min)
FWHM = 2*HWHM
p[0] = x_min
p[1] = -2*HM
p[2] = FWHM
p[3] = y_max
return p
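

# Usage sketch on synthetic data (the dip shape below is an assumed example;
# only lorentzian()'s (x_data, y_data) -> [centre, amplitude, FWHM, offset]
# contract defined above is relied upon):
def _example_lorentzian_guess():
    x = numpy.linspace(-5, 5, 501)
    y = 1.0 - 0.8 / (1.0 + (x / 0.5) ** 2)  # dip centred at 0, FWHM = 1
    return lorentzian(x, y)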
| 26.473684 | 98 | 0.588469 | 247 | 1,509 | 3.259109 | 0.174089 | 0.126708 | 0.122981 | 0.063354 | 0.445963 | 0.252174 | 0.173913 | 0.173913 | 0.091925 | 0 | 0 | 0.030564 | 0.306163 | 1,509 | 56 | 99 | 26.946429 | 0.7383 | 0.132538 | 0 | 0.055556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.027778 | false | 0 | 0.055556 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f4a9032ca67ddad37ba103c8f41aa58eaf22f85 | 268 | py | Python | src/rez/data/tests/commands/packages/rextest2/2/package.py | alexey-pelykh/rez | ad12105d89d658e4d2ea9249e537b3de90391f0e | [
"Apache-2.0"
] | null | null | null | src/rez/data/tests/commands/packages/rextest2/2/package.py | alexey-pelykh/rez | ad12105d89d658e4d2ea9249e537b3de90391f0e | [
"Apache-2.0"
] | null | null | null | src/rez/data/tests/commands/packages/rextest2/2/package.py | alexey-pelykh/rez | ad12105d89d658e4d2ea9249e537b3de90391f0e | [
"Apache-2.0"
] | 1 | 2020-09-24T08:33:43.000Z | 2020-09-24T08:33:43.000Z | name = 'rextest2'
version = '2'
requires = ["rextest-1.3"]
def commands():
    # append to the existing var
env.REXTEST_DIRS.append('{root}/data2')
setenv("REXTEST2_REXTEST_VER", '{resolve.rextest.version}')
env.REXTEST2_REXTEST_BASE = resolve.rextest.base
| 24.363636 | 63 | 0.697761 | 34 | 268 | 5.352941 | 0.676471 | 0.164835 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.030702 | 0.149254 | 268 | 10 | 64 | 26.8 | 0.767544 | 0.085821 | 0 | 0 | 0 | 0 | 0.316872 | 0.102881 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0 | 0 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f4cb0e01f8ead732bd4879f7c1e12e4253c6239 | 937 | py | Python | examples/CheckFirmwareVersion.py | drizztguen77/PTHat | f46d05054875599e80b396f74bc5a348cfcefbfb | [
"Apache-2.0"
] | 5 | 2021-01-28T13:26:08.000Z | 2022-02-24T08:15:44.000Z | examples/CheckFirmwareVersion.py | drizztguen77/PTHat | f46d05054875599e80b396f74bc5a348cfcefbfb | [
"Apache-2.0"
] | null | null | null | examples/CheckFirmwareVersion.py | drizztguen77/PTHat | f46d05054875599e80b396f74bc5a348cfcefbfb | [
"Apache-2.0"
] | null | null | null | """
This is an example of setting up an Axis and requesting the firmware version from the PT HAT, then
printing the responses that come back.
This example does not auto send the commands. It gets the command and then sends it to the send_command method.
"""
from pthat.pthat import Axis
def wait_for_responses(axis, responses_to_check, msg):
responses = axis.get_all_responses()
while not all(x in responses for x in responses_to_check):
responses = responses + axis.get_all_responses()
# Print the responses
print(msg)
axis.parse_responses(responses)
xaxis = Axis("X", command_id=1, serial_device="/dev/ttyS0")
xaxis.debug = True
# Get the firmware version
firmware_version_cmd = xaxis.get_firmware_version()
xaxis.send_command(firmware_version_cmd)
# Show the responses
wait_for_responses(xaxis, ["RI00FW*", "CI00FW*"], "------- Get firmware version command responses -------")
| 32.310345 | 113 | 0.742796 | 143 | 937 | 4.706294 | 0.454545 | 0.111441 | 0.047548 | 0.056464 | 0.08321 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007643 | 0.16222 | 937 | 28 | 114 | 33.464286 | 0.849682 | 0.358591 | 0 | 0 | 0 | 0 | 0.133672 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.083333 | 0 | 0.166667 | 0.083333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8f51187c7b29e504944fcdf736e1d835859086bf | 392 | py | Python | usaco/january-2021/bronze/problem-1/problem-1.py | Yash-Singh1/competitive-programming | 3b9d278ed8138ab614e2a3d748627db8f4a2cdbd | [
"MIT"
] | 1 | 2021-04-25T21:58:57.000Z | 2021-04-25T21:58:57.000Z | usaco/january-2021/bronze/problem-1/problem-1.py | Yash-Singh1/competitive-programming | 3b9d278ed8138ab614e2a3d748627db8f4a2cdbd | [
"MIT"
] | null | null | null | usaco/january-2021/bronze/problem-1/problem-1.py | Yash-Singh1/competitive-programming | 3b9d278ed8138ab614e2a3d748627db8f4a2cdbd | [
"MIT"
] | null | null | null | cowphabet = raw_input()
duplicated_cowphabet = cowphabet
heard = raw_input()
amount = 1
for char in heard:
if char in duplicated_cowphabet:
duplicated_cowphabet = duplicated_cowphabet[duplicated_cowphabet.find(char) + 1:]
else:
duplicated_cowphabet += cowphabet
amount += 1
duplicated_cowphabet = duplicated_cowphabet[duplicated_cowphabet.find(char) + 1:]
print(amount)
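
# Worked example (made-up input): with cowphabet "abc" and heard "ba",
# 'b' is consumed from the first copy, but 'a' only appears again in a second
# copy, so the program prints 2.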
| 26.133333 | 85 | 0.762755 | 46 | 392 | 6.26087 | 0.304348 | 0.59375 | 0.503472 | 0.659722 | 0.524306 | 0.524306 | 0.458333 | 0.458333 | 0.458333 | 0 | 0 | 0.012048 | 0.153061 | 392 | 14 | 86 | 28 | 0.855422 | 0 | 0 | 0.166667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.083333 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
8f53a5257eab324e5aae41fe7022ee0ccae9f4f5 | 586 | py | Python | setup.py | luismarcanth/google-searchconsole | e51d012b7404146bccf61e3923237e46ef10c15a | [
"MIT"
] | 9 | 2020-04-26T09:27:01.000Z | 2021-06-23T18:10:48.000Z | setup.py | luismarcanth/google-searchconsole | e51d012b7404146bccf61e3923237e46ef10c15a | [
"MIT"
] | null | null | null | setup.py | luismarcanth/google-searchconsole | e51d012b7404146bccf61e3923237e46ef10c15a | [
"MIT"
] | 3 | 2019-10-22T11:46:38.000Z | 2020-07-23T07:39:41.000Z | # encoding: utf-8
from setuptools import find_packages, setup
setup(name='searchconsole',
description='A wrapper for the Google Search Console API.',
author='Josh Carty',
author_email='carty.josh@gmail.com',
version='0.0.3',
license='MIT',
packages=find_packages(),
keywords='data analysis search console google api seo',
install_requires=[
'google-api-python-client>=1.7.3',
'python-dateutil>=2.7.3',
'google-auth>=1.5.0',
'google-auth-oauthlib>=0.2.0'
],
test_suite='tests'
) | 27.904762 | 65 | 0.605802 | 75 | 586 | 4.666667 | 0.653333 | 0.068571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.036364 | 0.249147 | 586 | 21 | 66 | 27.904762 | 0.759091 | 0.025597 | 0 | 0 | 0 | 0 | 0.422807 | 0.140351 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.058824 | 0 | 0.058824 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8f56222a9d4f4dba998acce60d101d7eba94059a | 257 | py | Python | generated-libraries/python/netapp/exports/exportchownmode.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | 2 | 2017-03-28T15:31:26.000Z | 2018-08-16T22:15:18.000Z | generated-libraries/python/netapp/exports/exportchownmode.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | generated-libraries/python/netapp/exports/exportchownmode.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | class Exportchownmode(basestring):
"""
restricted|unrestricted
Possible values:
    - "restricted"
    - "unrestricted"
"""
@staticmethod
def get_api_name():
return "exportchownmode"
| 17.133333 | 34 | 0.560311 | 20 | 257 | 7.1 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.319066 | 257 | 14 | 35 | 18.357143 | 0.811429 | 0.365759 | 0 | 0 | 0 | 0 | 0.11811 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0 | 0.25 | 0.75 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 5 |