hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e61b7f3f98ba6e5bf0ac53a7fa5ab8794bae54e7
| 57
|
py
|
Python
|
trisicell/tl/solver/booster/__init__.py
|
faridrashidi/trisicell
|
4db89edd44c03ccb6c7d3477beff0079c3ff8035
|
[
"BSD-3-Clause"
] | 2
|
2021-07-02T13:53:15.000Z
|
2021-11-16T03:14:36.000Z
|
trisicell/tl/solver/booster/__init__.py
|
faridrashidi/trisicell
|
4db89edd44c03ccb6c7d3477beff0079c3ff8035
|
[
"BSD-3-Clause"
] | 58
|
2021-06-14T17:14:39.000Z
|
2022-03-11T19:32:54.000Z
|
trisicell/tl/solver/booster/__init__.py
|
faridrashidi/trisicell
|
4db89edd44c03ccb6c7d3477beff0079c3ff8035
|
[
"BSD-3-Clause"
] | null | null | null |
from trisicell.tl.solver.booster._booster import booster
| 28.5
| 56
| 0.859649
| 8
| 57
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070175
| 57
| 1
| 57
| 57
| 0.90566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e634fab009295c60875afc18c6afa809cffff39f
| 221
|
py
|
Python
|
robonet/inverse_model/models/graphs/base_graph.py
|
russellmendonca/RoboNet
|
de30fa069dacb2888e62bd239e7a3471ea3aaa9d
|
[
"MIT"
] | 140
|
2019-10-25T03:05:04.000Z
|
2022-03-07T17:41:56.000Z
|
robonet/inverse_model/models/graphs/base_graph.py
|
russellmendonca/RoboNet
|
de30fa069dacb2888e62bd239e7a3471ea3aaa9d
|
[
"MIT"
] | 9
|
2019-12-22T20:52:47.000Z
|
2022-02-22T07:56:43.000Z
|
robonet/inverse_model/models/graphs/base_graph.py
|
russellmendonca/RoboNet
|
de30fa069dacb2888e62bd239e7a3471ea3aaa9d
|
[
"MIT"
] | 26
|
2019-10-21T04:49:55.000Z
|
2021-09-17T15:50:17.000Z
|
from robonet.video_prediction.models.graphs.base_graph import BaseGraph as BaseVpredGraph
import tensorflow as tf
class BaseGraph(BaseVpredGraph):
@staticmethod
def default_hparams():
return {
}
| 22.1
| 89
| 0.742081
| 24
| 221
| 6.708333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.199095
| 221
| 9
| 90
| 24.555556
| 0.909605
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| true
| 0
| 0.285714
| 0.142857
| 0.714286
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
e644b520882c59ddc69bc56f0ff91ce6085ed94e
| 131
|
py
|
Python
|
enthought/numerical_modeling/numeric_context/mapping_context.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/numerical_modeling/numeric_context/mapping_context.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/numerical_modeling/numeric_context/mapping_context.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from __future__ import absolute_import
from blockcanvas.numerical_modeling.numeric_context.mapping_context import *
| 32.75
| 76
| 0.877863
| 16
| 131
| 6.6875
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083969
| 131
| 3
| 77
| 43.666667
| 0.891667
| 0.091603
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e645d439cff55de3835007e171f4528d83b9c3c0
| 1,297
|
py
|
Python
|
resources/dot_PyCharm/system/python_stubs/-762174762/PySide/QtCore/QTemporaryFile.py
|
basepipe/developer_onboarding
|
05b6a776f8974c89517868131b201f11c6c2a5ad
|
[
"MIT"
] | 1
|
2020-04-20T02:27:20.000Z
|
2020-04-20T02:27:20.000Z
|
resources/dot_PyCharm/system/python_stubs/cache/16012662ddca113c1f50140f9e0d3bd290a511015767475cf362e5267760f062/PySide/QtCore/QTemporaryFile.py
|
basepipe/developer_onboarding
|
05b6a776f8974c89517868131b201f11c6c2a5ad
|
[
"MIT"
] | null | null | null |
resources/dot_PyCharm/system/python_stubs/cache/16012662ddca113c1f50140f9e0d3bd290a511015767475cf362e5267760f062/PySide/QtCore/QTemporaryFile.py
|
basepipe/developer_onboarding
|
05b6a776f8974c89517868131b201f11c6c2a5ad
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
# module PySide.QtCore
# from C:\Python27\lib\site-packages\PySide\QtCore.pyd
# by generator 1.147
# no doc
# imports
import Shiboken as __Shiboken
from QFile import QFile
class QTemporaryFile(QFile):
# no doc
def autoRemove(self, *args, **kwargs): # real signature unknown
pass
def createLocalFile(self, *args, **kwargs): # real signature unknown
pass
def fileEngine(self, *args, **kwargs): # real signature unknown
pass
def fileName(self, *args, **kwargs): # real signature unknown
pass
def fileTemplate(self, *args, **kwargs): # real signature unknown
pass
def open(self, *args, **kwargs): # real signature unknown
pass
def setAutoRemove(self, *args, **kwargs): # real signature unknown
pass
def setFileTemplate(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
staticMetaObject = None # (!) real value is '<PySide.QtCore.QMetaObject object at 0x0000000003E7E4C8>'
| 25.94
| 106
| 0.652274
| 156
| 1,297
| 5.282051
| 0.410256
| 0.157767
| 0.242718
| 0.196602
| 0.444175
| 0.444175
| 0.444175
| 0.398058
| 0
| 0
| 0
| 0.02132
| 0.240555
| 1,297
| 49
| 107
| 26.469388
| 0.815228
| 0.420971
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.4
| 0.08
| 0
| 0.56
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
0521eb753e0ca438d3c48c32ff2bfa84e03b5b34
| 63,508
|
py
|
Python
|
PRS/PRS_sumstats.py
|
yochaiedlitz/T2DM_UKB_predictions
|
1e6b22e3d51d515eb065d7d5f46408f86f33d0b8
|
[
"MIT"
] | 1
|
2022-01-17T13:13:02.000Z
|
2022-01-17T13:13:02.000Z
|
PRS/PRS_sumstats.py
|
yochaiedlitz/T2DM_UKB_predictions
|
1e6b22e3d51d515eb065d7d5f46408f86f33d0b8
|
[
"MIT"
] | null | null | null |
PRS/PRS_sumstats.py
|
yochaiedlitz/T2DM_UKB_predictions
|
1e6b22e3d51d515eb065d7d5f46408f86f33d0b8
|
[
"MIT"
] | null | null | null |
import numpy as np
import os.path
import pandas as pd
import sys
import time
import os
from bisect import bisect
import pickle
pd.set_option('display.width', 1000)
np.set_printoptions(precision=4, linewidth=200)
from pysnptools.snpreader.bed import Bed
from sklearn.model_selection import KFold
import scipy.stats as stats
CLEAN_DATA='/net/mraid08/export/jafar/Microbiome/Analyses/PNPChip/cleanData'
TEMP_DATA='/net/mraid08/export/jafar/Microbiome/Analyses/PNPChip/rawData/tmp'
PCA_DIR='/net/mraid08/export/jafar/Microbiome/Analyses/PNPChip/PCA'
RAWDATA_DIR='/net/mraid08/export/jafar/Microbiome/Analyses/PNPChip/rawData'
GCTA_PATH='/net/mraid08/export/jafar/Microbiome/Analyses/PNPChip/Analysis/gcta'
GCTA_SUMSTATS_PATH='/net/mraid08/export/jafar/Microbiome/Analyses/PNPChip/Analysis/gcta/sumstats'
# SUMSTATS_DIR1 = '/net/mraid08/export/jafar/Microbiome/Analyses/PNPChip/sumstats'
SUMSTATS_DIR_New= '/net/mraid08/export/jafar/Yochai/sumstats'
SUMSTATS_DIR = '/net/mraid08/export/jafar/Yochai/Orig_sumstats/'
PRS_P_Sort_Dict='/net/mraid08/export/jafar/Yochai/PRS/PRS_Results/Orig_trait_dict"'
Gen_DIR = "/net/mraid08/export/jafar/Yochai/PRS/PRS_Results/Extract_1K_SNPs_UKBB/Final_Results/"
PKL_PATH = os.path.join(GCTA_PATH, 'df_PRS_NETO_predictions.pkl')
Quant_PATH=os.path.join(GCTA_PATH, 'df_PRS_NETO_quantile.pkl')
if not os.path.exists(GCTA_SUMSTATS_PATH): os.makedirs(GCTA_SUMSTATS_PATH)
PVAL_CUTOFFS = [1.1, 3e-1, 1e-1, 3e-2, 1e-2, 3e-3, 1e-3, 3e-4, 1e-4, 3e-5, 1e-5, 3e-6, 1e-6]
#PVAL_CUTOFFS = [1.1, 1e-1, 1e-2, 1e-3, 1e-4]
def read_bfile_forsumstats(bfile_path):
"""read plink file and allele frequencies from a summary statistics file
merginh SNPs from bed file with the ones fom summary statistics
performing Binomical distibution average, consider using external imputations. There is an imputation file
standardize SNPs using external MAfs
"""
bed = Bed(bfile_path+".bed", count_A1=True) #read plink file and allele frequencies from a summary statistics file
bed_snps = pd.DataFrame(bed.sid, columns=['MarkerName'])
files_dict = get_files_dict()
df_mafs = pd.read_csv(files_dict['height'], delim_whitespace=True, usecols=['MarkerName', 'Freq.Allele1.HapMapCEU'])#Minor allile frequencies
df_mafs = bed_snps.merge(df_mafs, on='MarkerName', how='left')#merginh SNPs from bed file with the ones fom summary statistics
assert (df_mafs['MarkerName'] == bed_snps['MarkerName']).all()
snps_to_keep = df_mafs['Freq.Allele1.HapMapCEU'].notnull()
bed = bed[:, snps_to_keep].read() #Reads the SNP values and returns a .SnpData (with .SnpData.val property containing a new ndarray of the SNP values).
df_mafs = df_mafs.ix[snps_to_keep, :]
allele_freqs = df_mafs['Freq.Allele1.HapMapCEU'].values
#impute SNPs according to external MAFs
print ('imputing SNPs using external MAFs...')
isNan = np.isnan(bed.val)
for i in range(bed.sid.shape[0]):
bed.val[isNan[:,i], i] = 2*allele_freqs[i] #Binomical distibution average, consider using external imputations. There is an imputation file
#standardize SNPs using external MAfs
print ('standardizing SNPs using external MAFs...')
snpsMean = 2*allele_freqs
snpsStd = np.sqrt(2*allele_freqs*(1-allele_freqs))
snpsStd[snpsStd==0] = np.inf #Probably not an SNP
bed.val -= snpsMean
###bed.val /= snps Std #not clear what did the people who calculated the summary statistics did
return bed
def get_files_dict():
"""Dictionary with paths to different PRS summary statistics"""
files_dict = dict([])
files_dict['height'] = os.path.join(SUMSTATS_DIR, 'height',
'GIANT_HEIGHT_Wood_et_al_2014_publicrelease_HapMapCeuFreq.txt')
#For metabolon
files_dict["CARDIoGRAM_GWAS"] = os.path.join(SUMSTATS_DIR, 'CARDIO_Yeela', 'CARDIoGRAM_GWAS_RESULTS.txt')#For Metabolon
files_dict['alzheimer'] = os.path.join(SUMSTATS_DIR, 'Alzheimer',
'IGAP_stage_1_2_combined.txt') # Jean-Charles Lambert et al.
files_dict['bmi'] = os.path.join(SUMSTATS_DIR, 'bmi',
'SNP_gwas_mc_merge_nogc.tbl.uniq') # https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4382211/
files_dict['anorexia'] = os.path.join(SUMSTATS_DIR, 'Anorexia',
'gcan_meta.out') # A genome-wide association study of anorexia nervosa,https://www.nature.com/articles/mp2013187
# TODO: check for Asthma pvalue
# files_dict['ashtma'] = os.path.join(SUMSTATS_DIR, 'Ashtma','gabriel_asthma_meta-analysis_36studies_format_repository_NEJM.txt') # https://www.cnrgh.fr/gabriel/study_description.html
files_dict['t2d_mega_meta'] = os.path.join(SUMSTATS_DIR, 't2d',
'diagram.mega-meta.txt') # FKA iris Trans-ethnic T2D GWAS meta-analysis, http://diagram-consortium.org/downloads.html
files_dict['cardio'] = os.path.join(SUMSTATS_DIR, 'Cardio',
'cardiogramplusc4d_data.txt') # CARDIoGRAMplusC4D Metabochip is a two stage meta-analysis of Metabochip and GWAS studies of European and South Asian descent involving 63,746 cases and 130,681 controls. The CARDIoGRAM GWAS data was used as Stage 1 - data as published in: CARDIoGRAMplusC4D Consortium, Deloukas P, Kanoni S, Willenborg C, Farrall M, Assimes TL, Thompson JR, et al. Large-scale association analysis identifies new risk loci for coronary artery disease. Nat Genet 2013 45:25-33
files_dict['hips'] = os.path.join(SUMSTATS_DIR, 'hips',
'GIANT_2015_HIP_COMBINED_EUR.txt') # https://www.nature.com/articles/nature14132,https://portals.broadinstitute.org/collaboration/giant/index.php/GIANT_consortium_data_files
files_dict['waist'] = os.path.join(SUMSTATS_DIR, 'waist',
'GIANT_2015_WC_COMBINED_EUR2.txt') # https://www.nature.com/articles/nature14132,https://portals.broadinstitute.org/collaboration/giant/index.php/GIANT_consortium_data_files
#TODO:Clean the data below
# files_dict["whr_WHR_COMBINED_EUR2"] = os.path.join(SUMSTATS_DIR_New, 'whr', 'GIANT_2015_WHR_COMBINED_EUR2.txt')
# files_dict["whr_WHRadjBMI_COMB_All"] = os.path.join(SUMSTATS_DIR_New, 'whr', 'GIANT_2015_WHRadjBMI_COMBINED_AllAncestries.txt')
# files_dict["whr_WHRadjBMI_COMB_EUR"] = os.path.join(SUMSTATS_DIR_New, 'whr', 'GIANT_2015_WHRadjBMI_COMBINED_EUR.txt')
# files_dict["whr_WHR_COMBINED_All"] = os.path.join(SUMSTATS_DIR_New, 'whr', 'GIANT_2015_WHR_COMBINED_AllAncestries.txt')
# files_dict["whr_WHR_COMBINED_EUR"] = os.path.join(SUMSTATS_DIR_New, 'whr', 'GIANT_2015_WHR_COMBINED_EUR.txt')
# files_dict["whr_WHR_FEMALES_EUR"] = os.path.join(SUMSTATS_DIR_New, 'whr', 'GIANT_2015_WHR_FEMALES_EUR.txt')
# files_dict["whr_WHR_MALES_EUR"] = os.path.join(SUMSTATS_DIR_New, 'whr', 'GIANT_2015_WHR_MALES_EUR.txt')
# files_dict["whr_WHR_MEN_N"] = os.path.join(SUMSTATS_DIR_New, 'whr', 'GIANT_Randall2013PlosGenet_stage1_publicrelease_HapMapCeuFreq_WHR_MEN_N.txt')
# files_dict["whr_WHR_WOMEN_N"] = os.path.join(SUMSTATS_DIR_New, 'whr', 'GIANT_Randall2013PlosGenet_stage1_publicrelease_HapMapCeuFreq_WHR_WOMEN_N.txt')
files_dict['overweight'] = os.path.join(SUMSTATS_DIR, 'overweight',
'GIANT_OVERWEIGHT_Stage1_Berndt2013_publicrelease_HapMapCeuFreq.txt') # https://portals.broadinstitute.org/collaboration/giant/index.php/Main_Page
files_dict['obesity_class1'] = os.path.join(SUMSTATS_DIR, 'obesity_class1',
'GIANT_OBESITY_CLASS1_Stage1_Berndt2013_publicrelease_HapMapCeuFreq.txt') # https://portals.broadinstitute.org/collaboration/giant/index.php/Main_Page
files_dict['obesity_class2'] = os.path.join(SUMSTATS_DIR, 'obesity_class2',
'GIANT_OBESITY_CLASS2_Stage1_Berndt2013_publicrelease_HapMapCeuFreq.txt') # https://portals.broadinstitute.org/collaboration/giant/index.php/Main_Page
#TODO: Check for hba1c P value
# files_dict['hba1c'] = os.path.join(SUMSTATS_DIR, 'HbA1C','MAGIC_HbA1C.txt') # ftp://ftp.sanger.ac.uk/pub/magic/MAGIC_HbA1C.txt.gz
# files_dict['Non_Diabetic_glucose2'] = os.path.join(SUMSTATS_DIR, 'glucose',
# 'MAGIC_Manning_et_al_FastingGlucose_MainEffect.txt.gz') # ftp://ftp.sanger.ac.uk/pub/magic/MAGIC_HbA1C.txt.gz
# files_dict['Magnetic_glucose'] = os.path.join(SUMSTATS_DIR, 'glucose', 'Summary_statistics_MAGNETIC_Glc.txt.gz') #ftp://ftp.sanger.ac.uk/pub/magic/MAGIC_HbA1C.txt.gz
files_dict['cigs_per_day'] = os.path.join(SUMSTATS_DIR, 'smoke',
'tag.cpd.tbl') # Nature Genetics volume 42, pages 441 447 (2010),http://www.med.unc.edu/pgc/files/resultfiles/readme.tag.txt/view
files_dict['ever_smoked'] = os.path.join(SUMSTATS_DIR, 'smoke',
'tag.evrsmk.tbl') # Nature Genetics volume 42, pages 441 447 (2010),http://www.med.unc.edu/pgc/files/resultfiles/readme.tag.txt/view
files_dict['age_smoke'] = os.path.join(SUMSTATS_DIR, 'smoke',
'tag.logonset.tbl') # Nature Genetics volume 42, pages 441 447 (2010),http://www.med.unc.edu/pgc/files/resultfiles/readme.tag.txt/view
files_dict['hdl'] = os.path.join(SUMSTATS_DIR, 'HDL',
'jointGwasMc_HDL.txt') # https://www.nature.com/articles/ng.2797,https://grasp.nhlbi.nih.gov/FullResults.aspx
files_dict['ldl'] = os.path.join(SUMSTATS_DIR, 'LDL',
'jointGwasMc_LDL.txt') ##https://www.nature.com/articles/ng.2797,https://grasp.nhlbi.nih.gov/FullResults.aspx
files_dict['triglycerides'] = os.path.join(SUMSTATS_DIR, 'triglycerides',
'jointGwasMc_TG.txt') ##https://www.nature.com/articles/ng.2797,https://grasp.nhlbi.nih.gov/FullResults.aspx
files_dict['cholesterol'] = os.path.join(SUMSTATS_DIR, 'cholesterol',
'jointGwasMc_TC.txt') ##https://www.nature.com/articles/ng.2797,https://grasp.nhlbi.nih.gov/FullResults.aspx
files_dict['diabetes_BMI_Unadjusted'] = os.path.join(SUMSTATS_DIR, 'diabetes',
'T2D_TranEthnic.BMIunadjusted.txt') # This file contains association summary statistics for the DIAGRAMv3 GWAS meta-analysis, as published in Morris et al. (2012).
files_dict['diabetes_BMI_Adjusted'] = os.path.join(SUMSTATS_DIR, 'diabetes',
'T2D_TranEthnic.BMIadjusted.txt') # This file contains association summary statistics for the DIAGRAMv3 GWAS meta-analysis, as published in Morris et al. (2012).
# files_dict['Coronary_Artery_Disease'] = os.path.join(SUMSTATS_DIR, 'CAD', 'MICAD.EUR.ExA.Consortium.PublicRelease.310517.txt')#This file contains association summary statistics for the DIAGRAMv3 GWAS meta-analysis, as published in Morris et al. (2012).
# files_dict["diabetes_Saxena"] = os.path.join(SUMSTATS_DIR_New, 'diabetes', 'Saxena-17463246.txt')
# files_dict["diabetes_Fuchsberger2016"] = os.path.join(SUMSTATS_DIR_New, 'diabetes', 'DIAGRAMmeta_Fuchsberger2016.txt')
# files_dict["diabetes_Morris2012.females"] = os.path.join(SUMSTATS_DIR_New, 'diabetes', 'DIAGRAM.Morris2012.females.txt')
# files_dict["diabetes_Morris2012.males"] = os.path.join(SUMSTATS_DIR_New, 'diabetes', 'DIAGRAM.Morris2012.males.txt')
# files_dict["diabetes_metabochip.only"] = os.path.join(SUMSTATS_DIR_New, 'diabetes', 'DIAGRAM.website.metabochip.only.txt')
# files_dict["diabetes_GWAS.metabochip"] = os.path.join(SUMSTATS_DIR_New, 'diabetes', 'DIAGRAM.website.GWAS.metabochip.txt')
# files_dict["diabetes_Gaulton_2015"] = os.path.join(SUMSTATS_DIR_New, 'diabetes', 'DIAGRAM_Gaulton_2015.txt')
# files_dict["diabetes_v3.2012DEC17"] = os.path.join(SUMSTATS_DIR_New, 'diabetes', 'DIAGRAMv3.2012DEC17.txt')
files_dict['FastingGlucose'] = os.path.join(SUMSTATS_DIR, 'Fasting',
'MAGIC_FastingGlucose.txt') # This file contains association summary statistics for the DIAGRAMv3 GWAS meta-analysis, as published in Morris et al. (2012).
files_dict['ln_HOMA-B'] = os.path.join(SUMSTATS_DIR, 'Fasting',
'MAGIC_ln_HOMA-B.txt') # This file contains association summary statistics for the DIAGRAMv3 GWAS meta-analysis, as published in Morris et al. (2012).
files_dict['ln_FastingInsulin'] = os.path.join(SUMSTATS_DIR, 'Fasting',
'MAGIC_ln_FastingInsulin.txt') # This file contains association summary statistics for the DIAGRAMv3 GWAS meta-analysis, as published in Morris et al. (2012).
files_dict['ln_HOMA-IR'] = os.path.join(SUMSTATS_DIR, 'Fasting',
'MAGIC_ln_HOMA-IR.txt') # This file contains association summary statistics for the DIAGRAMv3 GWAS meta-analysis, as published in Morris et al. (2012).
files_dict['Leptin_BMI'] = os.path.join(SUMSTATS_DIR, 'Leptin', 'Leptin_Adjusted_for_BMI.txt')
files_dict['Leptin_Unadjusted_BMI'] = os.path.join(SUMSTATS_DIR, 'Leptin', 'Leptin_Not_Adjusted_for_BMI.txt')
files_dict['Body_fat'] = os.path.join(SUMSTATS_DIR, 'Body_fat',
'body_fat_percentage_GWAS_PLUS_MC_ALL_ancestry_se_Sex_combined_for_locus_zoom_plot.TBL.txt')
files_dict['Heart_Rate'] = os.path.join(SUMSTATS_DIR, 'Heart_rate', 'META_STAGE1_GWASHR_SUMSTATS.txt')#PMID 23583979
files_dict['Magic_2hrGlucose'] = os.path.join(SUMSTATS_DIR, '2hr_Glucose', 'MAGIC_2hrGlucose_AdjustedForBMI.txt')
files_dict['MAGIC_fastingProinsulin'] = os.path.join(SUMSTATS_DIR, 'Pro_Insulin', 'MAGIC_ln_fastingProinsulin.txt')
files_dict['MAGIC_Scott_2hGlu'] = os.path.join(SUMSTATS_DIR, 'Insulin/Magic_Metabochip',
'MAGIC_Scott_et_al_2hGlu_Jan2013.txt')
files_dict['MAGIC_Scott_FG'] = os.path.join(SUMSTATS_DIR, 'Insulin/Magic_Metabochip',
'MAGIC_Scott_et_al_FG_Jan2013.txt')
files_dict['MAGIC_Scott_FI_adjBMI'] = os.path.join(SUMSTATS_DIR, 'Insulin/Magic_Metabochip',
'MAGIC_Scott_et_al_FI_adjBMI_Jan2013.txt')
files_dict['MAGIC_Scott_FI'] = os.path.join(SUMSTATS_DIR, 'Insulin/Magic_Metabochip',
'MAGIC_Scott_et_al_FI_Jan2013.txt')
files_dict['MAGIC_HbA1C'] = os.path.join(SUMSTATS_DIR, 'HbA1C', 'MAGIC_HbA1C.txt') # Fasting Insulin
files_dict['Manning_FG'] = os.path.join(SUMSTATS_DIR, 'Insulin/Manning',
'MAGIC_Manning_et_al_FastingGlucose_MainEffect.txt') # Fasting Glucose
files_dict['Manning_BMI_ADJ_FG'] = os.path.join(SUMSTATS_DIR, 'Insulin/Manning',
'BMI_ADJ_FG_Manning.txt') # Fasting Glucose
files_dict['Manning_Fasting_Insulin'] = os.path.join(SUMSTATS_DIR, 'Insulin/Manning',
'MAGIC_Manning_et_al_lnFastingInsulin_MainEffect.txt') # Fasting Insulin
files_dict['Manning_BMI_ADJ_FI'] = os.path.join(SUMSTATS_DIR, 'Insulin/Manning',
'BMI_ADJ__Manning_Fasting_Insulin.txt') # Fasting Insulin
files_dict['HBA1C_ISI'] = os.path.join(SUMSTATS_DIR, 'HBA1C_ISI',
'MAGIC_ISI_Model_1_AgeSexOnly.txt') # Fasting Insulin
files_dict['HBA1C_ISI'] = os.path.join(SUMSTATS_DIR, 'HBA1C_ISI',
'MAGIC_ISI_Model_2_AgeSexBMI.txt') # Fasting Insulin
files_dict['HBA1C_ISI'] = os.path.join(SUMSTATS_DIR, 'HBA1C_ISI', 'MAGIC_ISI_Model_3_JMA.txt') # Fasting Insulin
files_dict['HbA1c_MANTRA'] = os.path.join(SUMSTATS_DIR, 'HbA1C', 'HbA1c_MANTRA.txt') # Fasting Insulin
# TODO delete
#files_dict['A1C_Mantra'] = os.path.join(SUMSTATS_DIR, 'a1c', 'HbA1c_MANTRA.txt')
#files_dict['Alzheimer_1_2'] = os.path.join(SUMSTATS_DIR, 'Alzheimer', 'IGAP_stage_1_2_combined.txt')
#files_dict['Asthma '] = os.path.join(SUMSTATS_DIR, 'Asthma', 'gabriel_asthma_meta-analysis_36studies_format_repository_NEJM.txt')
#files_dict['bmi'] = os.path.join(SUMSTATS_DIR, 'bmi', 'SNP_gwas_mc_merge_nogc.tbl.uniq')
#files_dict["Body_Fat"] = os.path.join(SUMSTATS_DIR, 'Body_Fat', 'body_fat_percentage_GWAS_PLUS_MC_ALL_ancestry_se_Sex_combined_for_locus_zoom_plot.TBL.txt')
#files_dict["cardiogramplusc4d"] = os.path.join(SUMSTATS_DIR, 'Cardiogram', 'cardiogramplusc4d_data.txt')
#files_dict["MICAD.EUR.ExA.310517"] = os.path.join(SUMSTATS_DIR, 'Cardiogram', 'MICAD.EUR.ExA.Consortium.PublicRelease.310517.txt')
#files_dict["Cholesterol"] = os.path.join(SUMSTATS_DIR, 'cholesterol ', 'jointGwasMc_TC.txt')
# files_dict["diabetes_TranEthnic"] = os.path.join(SUMSTATS_DIR, 'diabetes', 'T2D_TranEthnic.BMIunadjusted.txt')
# files_dict["diabetes_mega-meta"] = os.path.join(SUMSTATS_DIR, 'diabetes', 'diagram.mega-meta.txt')
# files_dict["FastingGlucose"] = os.path.join(SUMSTATS_DIR, 'Glucose', 'MAGIC_FastingGlucose.txt')
# files_dict["2hrGlucose_AdjustedForBMI"] = os.path.join(SUMSTATS_DIR, 'Glucose', 'MAGIC_2hrGlucose_AdjustedForBMI.txt')
# files_dict["LDL_Joint"] = os.path.join(SUMSTATS_DIR, 'LDL ', 'jointGwasMc_LDL.txt')
# files_dict["Heart_rate"] = os.path.join(SUMSTATS_DIR, 'Heart_rate', 'META_STAGE1_GWASHR_SUMSTATS.txt')
# files_dict["HIP_COMBINED_EUR"] = os.path.join(SUMSTATS_DIR, 'HIP', 'GIANT_2015_HIP_COMBINED_EUR.txt')
# files_dict["INSULIN_FastingInsulin"] = os.path.join(SUMSTATS_DIR, 'Insulin', 'MAGIC_ln_FastingInsulin.txt')
# files_dict["INSULIN_fastingProinsulin"] = os.path.join(SUMSTATS_DIR, 'Insulin', 'MAGIC_ln_fastingProinsulin.txt')
# files_dict["INSULIN_HOMA-B"] = os.path.join(SUMSTATS_DIR, 'Insulin', 'MAGIC_ln_HOMA-B.txt')
# files_dict["INSULIN_HOMA-IR"] = os.path.join(SUMSTATS_DIR, 'Insulin', 'MAGIC_ln_HOMA-IR.txt')
# files_dict["Leptin_adj_BMI"] = os.path.join(SUMSTATS_DIR, 'Leptin', 'Leptin_Adjusted_for_BMI.txt')
# files_dict["Leptin_not_adj_bmi"] = os.path.join(SUMSTATS_DIR, 'Leptin', 'Leptin_Not_Adjusted_for_BMI.txt')
# files_dict["Obesity"] = os.path.join(SUMSTATS_DIR, 'Obesity', 'GIANT_OBESITY_CLASS1_Stage1_Berndt2013_publicrelease_HapMapCeuFreq.txt')
# files_dict["smoke_cpd"] = os.path.join(SUMSTATS_DIR, 'smoke', 'tag.cpd.tbl')
# files_dict["smoke_evrsmk"] = os.path.join(SUMSTATS_DIR, 'smoke', 'tag.evrsmk.tbl')
# files_dict["smoke_logonset"] = os.path.join(SUMSTATS_DIR, 'smoke', 'tag.logonset.tbl')
# files_dict["triglycerides_Joint"] = os.path.join(SUMSTATS_DIR, 'triglycerides', 'jointGwasMc_TG.txt')
# files_dict["Waist_EUR2"] = os.path.join(SUMSTATS_DIR, 'waist', 'GIANT_2015_WC_COMBINED_EUR2.txt')
# files_dict["Waist__EUR"] = os.path.join(SUMSTATS_DIR, 'waist', 'GIANT_2015_WC_COMBINED_EUR.txt')
# files_dict["Waist_Fem_Euro"] = os.path.join(SUMSTATS_DIR, 'waist', 'GIANT_2015_WC_FEMALES_EUR.txt')
# files_dict["Waist_Males_Euro"] = os.path.join(SUMSTATS_DIR, 'waist', 'GIANT_2015_WC_MALES_EUR.txt')
# files_dict["Waist_WC_MEN_N"] = os.path.join(SUMSTATS_DIR, 'waist', 'GIANT_Randall2013PlosGenet_stage1_publicrelease_HapMapCeuFreq_WC_MEN_N.txt')
#
# TODO Add to list
#files_dict['A1C_Metal'] = os.path.join(SUMSTATS_DIR, 'a1c', 'HbA1c_METAL_European.txt')
#files_dict['ADHD'] = os.path.join(SUMSTATS_DIR, 'ADHD', 'adhd_jul2017')
#files_dict['Alzheimer_1'] = os.path.join(SUMSTATS_DIR, 'Alzheimer', 'IGAP_stage_1.txt')
#files_dict["Breast_Cancer"] = os.path.join(SUMSTATS_DIR, 'Breast_Cancer', 'icogs_bcac_public_results_euro (1).txt')
#files_dict["cad.add.160614"] = os.path.join(SUMSTATS_DIR, 'Cardiogram', 'cad.add.160614.website.txt')
#files_dict["cad.rec.090715"] = os.path.join(SUMSTATS_DIR, 'Cardiogram', 'cad.rec.090715.web.txt')
#files_dict["CAD_mi.add.030315"] = os.path.join(SUMSTATS_DIR, 'Cardiogram', 'mi.add.030315.website.txt')
#files_dict["CARDIoGRAM_Ia_All"] = os.path.join(SUMSTATS_DIR, 'Cardiogram', 'DataForCARDIoGRAMwebpage_Ia_All_20160105.csv')
#files_dict["CARDIoGRAMIb_All"] = os.path.join(SUMSTATS_DIR, 'Cardiogram', 'DataForCARDIoGRAMwebpage_Ib_All_20160105.csv')
#files_dict["CARDIoGRAMIIa_All"] = os.path.join(SUMSTATS_DIR, 'Cardiogram','DataForCARDIoGRAMwebpage_IIa_All_20160105.csv')
#files_dict["CARDIoGRAM_IIb_All"] = os.path.join(SUMSTATS_DIR, 'Cardiogram', 'DataForCARDIoGRAMwebpage_IIb_All_20160105.csv')
#files_dict["Cognitive"] = os.path.join(SUMSTATS_DIR, 'Cognitive', 'GWAS_CP_10k.txt')
# files_dict["diabetes_Saxena"] = os.path.join(SUMSTATS_DIR, 'diabetes', 'Saxena-17463246.txt')
# files_dict["diabetes_Fuchsberger2016"] = os.path.join(SUMSTATS_DIR, 'diabetes', 'DIAGRAMmeta_Fuchsberger2016.txt')
# files_dict["diabetes_Morris2012.females"] = os.path.join(SUMSTATS_DIR, 'diabetes', 'DIAGRAM.Morris2012.females.txt')
# files_dict["diabetes_Morris2012.males"] = os.path.join(SUMSTATS_DIR, 'diabetes', 'DIAGRAM.Morris2012.males.txt')
# files_dict["diabetes_metabochip.only"] = os.path.join(SUMSTATS_DIR, 'diabetes', 'DIAGRAM.website.metabochip.only.txt')
# files_dict["diabetes_GWAS.metabochip"] = os.path.join(SUMSTATS_DIR, 'diabetes', 'DIAGRAM.website.GWAS.metabochip.txt')
# files_dict["diabetes_Gaulton_2015"] = os.path.join(SUMSTATS_DIR, 'diabetes', 'DIAGRAM_Gaulton_2015.txt')
# files_dict["diabetes_v3.2012DEC17"] = os.path.join(SUMSTATS_DIR, 'diabetes', 'DIAGRAMv3.2012DEC17.txt')
# files_dict["HDL"] = os.path.join(SUMSTATS_DIR, 'HDL', 'AGEN_lipids_hapmap_hdl_m2.txt')
# files_dict["LDL_AGEN"] = os.path.join(SUMSTATS_DIR, 'LDL ', 'AGEN_lipids_hapmap_ldl_m2.txt')
# files_dict["HIPadjBMI_AllAncestries"] = os.path.join(SUMSTATS_DIR, 'HIP', 'GIANT_2015_HIPadjBMI_COMBINED_AllAncestries.txt')
# files_dict["HIPadjBMI_COMBINED_EUR"] = os.path.join(SUMSTATS_DIR, 'HIP', 'GIANT_2015_HIPadjBMI_COMBINED_EUR.txt')
# files_dict["HIP_COMBINED_AllAncestries"] = os.path.join(SUMSTATS_DIR, 'HIP', 'GIANT_2015_HIP_COMBINED_AllAncestries.txt')
# files_dict["HIP_FEMALES_EUR"] = os.path.join(SUMSTATS_DIR, 'HIP', 'GIANT_2015_HIP_FEMALES_EUR.txt')
# files_dict["HIP_MALES_EUR"] = os.path.join(SUMSTATS_DIR, 'HIP', 'GIANT_2015_HIP_MALES_EUR.txt')
# files_dict["HIP_HapMapCeuFreq_MEN"] = os.path.join(SUMSTATS_DIR, 'HIP', 'GIANT_Randall2013PlosGenet_stage1_publicrelease_HapMapCeuFreq_HIP_MEN_N.txt')
# files_dict["HIP_HapMapCeuFreq_WOMEN"] = os.path.join(SUMSTATS_DIR, 'HIP', 'GIANT_Randall2013PlosGenet_stage1_publicrelease_HapMapCeuFreq_HIP_WOMEN_N.txt')
# files_dict["INSULIN_SECRETION_AUCins"] = os.path.join(SUMSTATS_DIR, 'Insulin', 'MAGIC_INSULIN_SECRETION_AUCins_AUCgluc_for_release_HMrel27.txt')
# files_dict["INSULIN_SECRETION_for_release"] = os.path.join(SUMSTATS_DIR, 'Insulin', 'MAGIC_INSULIN_SECRETION_AUCins_for_release_HMrel27.txt')
# files_dict["OCD"] = os.path.join(SUMSTATS_DIR, 'OCD', 'ocd_aug2017')
# files_dict["PTSD"] = os.path.join(SUMSTATS_DIR, 'PTSD', 'SORTED_PTSD_EA9_AA7_LA1_SA2_ALL_study_specific_PCs1.txt')
# files_dict["Psoriasis"] = os.path.join(SUMSTATS_DIR, 'OCD', 'tsoi_2012_23143594_pso_efo0000676_1_ichip.sumstats.tsv')
# files_dict["T1D"] = os.path.join(SUMSTATS_DIR, 'T1D', 'bradfield_2011_21980299_t1d_efo0001359_1_gwas.sumstats.tsv')
# files_dict["Total_Cholesterol_AGEN"] = os.path.join(SUMSTATS_DIR, 'Total_Cholesterol', 'AGEN_lipids_hapmap_tc_m2.txt')
# files_dict["triglycerides_AGEN"] = os.path.join(SUMSTATS_DIR, 'triglycerides', 'AGEN_lipids_hapmap_tg_m2.txt')
# files_dict["Waist_WCadjBMI_ALL"] = os.path.join(SUMSTATS_DIR, 'waist', 'GIANT_2015_WCadjBMI_COMBINED_AllAncestries.txt')
# files_dict["Waist_ALL"] = os.path.join(SUMSTATS_DIR, 'waist', 'GIANT_2015_WC_COMBINED_AllAncestries.txt')
# files_dict["whr_WHRadjBMI_COMB_All"] = os.path.join(SUMSTATS_DIR, 'whr', 'GIANT_2015_WHRadjBMI_COMBINED_AllAncestries.txt')
# files_dict["whr_WHRadjBMI_COMB_EUR"] = os.path.join(SUMSTATS_DIR, 'whr', 'GIANT_2015_WHRadjBMI_COMBINED_EUR.txt')
# files_dict["whr_WHR_COMBINED_All"] = os.path.join(SUMSTATS_DIR, 'whr', 'GIANT_2015_WHR_COMBINED_AllAncestries.txt')
# files_dict["whr_WHR_COMBINED_EUR"] = os.path.join(SUMSTATS_DIR, 'whr', 'GIANT_2015_WHR_COMBINED_EUR.txt')
# files_dict["whr_WHR_FEMALES_EUR"] = os.path.join(SUMSTATS_DIR, 'whr', 'GIANT_2015_WHR_FEMALES_EUR.txt')
# files_dict["whr_WHR_MALES_EUR"] = os.path.join(SUMSTATS_DIR, 'whr', 'GIANT_2015_WHR_MALES_EUR.txt')
# files_dict["whr_WHR_MEN_N"] = os.path.join(SUMSTATS_DIR, 'whr', 'GIANT_Randall2013PlosGenet_stage1_publicrelease_HapMapCeuFreq_WHR_MEN_N.txt')
# files_dict["whr_WHR_WOMEN_N"] = os.path.join(SUMSTATS_DIR, 'whr', 'GIANT_Randall2013PlosGenet_stage1_publicrelease_HapMapCeuFreq_WHR_WOMEN_N.txt')
return files_dict
def get_traits_dict():
    """Map internal trait keys to their phenotype/display column names.

    The keys here must match the keys used by get_files_dict(), where the
    paths to the corresponding summary-statistics files are built.

    Returns:
        dict: trait key -> phenotype name.
    """
    # NOTE: the misspelled keys ('ashtma', 'Cigarretes_per_day') are kept
    # as-is -- callers and data files rely on these exact strings.
    traits_dict = {
        'height': 'Height',
        'diabetes_BMI_Adjusted': 'Diabetes',
        'diabetes_BMI_Unadjusted': 'Diabetes',
        'ADHD': 'ADHD',
        'alzheimer': 'Alzheimer',
        'cognitive': 'Cognitive',  # was assigned twice in the original; duplicate removed
        'anorexia': 'Anorexia',
        'ashtma': 'Ashtma',
        'baldness': 'Baldness',
        'depression': 'Depression',
        # 'crohns': 'Crohns',
        # Don't erase -- used for calibration
        'cardio': 'Cardio',
        'bmi': 'BMI',
        'waist': 'Waist',
        'hips': 'Hips',
        'glucose2': 'WakeupGlucose',
        'glucose_iris': 'median_Without_BMI_ALT_Overall',
        'whr': 'WHR',
        'median_glucose': 'Median_Glucose',
        'hba1c': 'HbA1C%',
        'hdl': 'HDLCholesterol',
        'ldl': 'LDLCholesterol',
        'triglycerides': 'Triglycerides',
        'creatinine': 'Creatinine',
        'albumin': 'Albumin',
        'overweight': 'Overweight',
        'obesity_class1': 'Obesity_class1',
        'obesity_class2': 'Obesity_class2',
        'cholesterol': 'Cholesterol,total',
        'ever_smoked': 'Ever_smoked',
        'age_smoke': 'Start_smoking_age',
        'cigs_per_day': 'Cigarretes_per_day',
        'lactose': 'lactose',
    }
    return traits_dict
def Get_Top_Gen_Dict():
    """Return a dict mapping each trait key to its pre-filtered top-SNP CSV.

    Every active trait follows the same naming convention inside Gen_DIR:
    'Final_SNPs_<trait>.csv', so the paths are generated from a single
    ordered list of trait keys.  The underlying GWAS sources include GIANT
    (anthropometrics), MAGIC (glycemic traits), DIAGRAM (T2D),
    CARDIoGRAMplusC4D (CAD), IGAP (Alzheimer) and the TAG consortium
    (smoking); several traits (e.g. 'ashtma', 'hba1c', 'Body_fat' and the
    extra whr/GIANT files) remain disabled pending data cleaning, as in the
    original listing.

    Returns:
        dict: trait key -> absolute path of its 'Final_SNPs_*.csv' file.
    """
    trait_keys = [
        'height', 'alzheimer', 'bmi', 'anorexia', 't2d_mega_meta', 'cardio',
        'hips', 'waist', 'overweight', 'obesity_class1', 'obesity_class2',
        'cigs_per_day', 'ever_smoked', 'age_smoke', 'hdl', 'ldl',
        'triglycerides', 'cholesterol', 'diabetes_BMI_Unadjusted',
        'diabetes_BMI_Adjusted', 'FastingGlucose', 'ln_HOMA-B',
        'ln_FastingInsulin', 'ln_HOMA-IR', 'Leptin_BMI',
        'Leptin_Unadjusted_BMI', 'Heart_Rate', 'Magic_2hrGlucose',
        'MAGIC_fastingProinsulin', 'MAGIC_Scott_2hGlu', 'MAGIC_Scott_FG',
        'MAGIC_Scott_FI_adjBMI', 'MAGIC_Scott_FI', 'MAGIC_HbA1C',
        'Manning_FG', 'Manning_BMI_ADJ_FG', 'Manning_Fasting_Insulin',
        'Manning_BMI_ADJ_FI', 'HBA1C_ISI', 'HbA1c_MANTRA',
    ]
    return {trait: os.path.join(Gen_DIR, 'Final_SNPs_%s.csv' % trait)
            for trait in trait_keys}
def get_predictions(bfile_path):
    """Compute a polygenic risk score (PRS) per individual for every trait.

    Parameters
    ----------
    bfile_path : str
        Path prefix of the plink bfile (``.bed``/``.bim``/``.fam``) holding
        the genotypes of the individuals to score.

    Returns
    -------
    pd.DataFrame
        One row per individual (indexed by iid), one ``predict_<trait>``
        column per trait in ``get_files_dict()``.
    """
    bed = read_bfile_forsumstats(bfile_path)  # bfile_path for the bed file
    # .bim holds the full annotation for every genotyped variant
    df_bim = pd.read_csv(bfile_path + '.bim', sep=r'\s+', header=None,
                         names=['chr', 'rs', 'cm', 'bp', 'a1', 'a2'])
    df_bed = pd.DataFrame(bed.sid, columns=['rs'])  # SNP names, in bed column order
    df_bed = df_bed.merge(df_bim, how='left', on='rs')
    # BUGFIX: rename() returns a copy and must be assigned back, otherwise
    # the 'a1_bim'/'a2_bim' columns used below never exist.
    df_bed = df_bed.rename(index=str, columns={"a1": "a1_bim", "a2": "a2_bim"})
    files_dict = get_files_dict()
    # np.int was removed from modern numpy; the builtin int is equivalent here
    df_predictions = pd.DataFrame(index=bed.iid[:, 1].astype(int))
    for trait, sumstats_file in files_dict.items():
        print('reading summary statistics and performing prediction for %s...' % (trait))
        # creatinine is the only comma-separated summary-statistics file
        if (trait == 'creatinine'):
            df_sumstats = pd.read_csv(sumstats_file, sep=',')
        else:
            df_sumstats = pd.read_csv(sumstats_file, sep=r'\s+')
        # Locate the SNP-identifier column (naming differs across consortia)
        found_snp_col = False
        for snp_name_col in ['SNP_ID', 'MarkerName', 'SNP', 'rsID', 'snp', 'rsid', 'sid', 'Snp', 'rs', 'Markername', "ID"]:
            if (snp_name_col not in df_sumstats.columns): continue
            found_snp_col = True
            break
        assert found_snp_col, 'No SNP column found'
        df_sumstats.drop_duplicates(subset=snp_name_col, inplace=True)
        df_merge = df_bed.merge(df_sumstats, left_on='rs', right_on=snp_name_col)
        df_merge_snps_set = set(df_merge['rs'])
        is_snp_found = [(s in df_merge_snps_set) for s in bed.sid]
        # Normalize the allele columns into 'A1'/'A2'.  Each candidate pair
        # is applied in order and a later match overwrites an earlier one,
        # exactly as the original chain of try/except blocks did.
        allele_col_pairs = [
            ('Allele1', 'Allele2'),
            ('Allele_1', 'Allele_2'),
            ('allele1', 'allele2'),            # Cardio file
            ('A1', 'A2'),
            ('NEA', 'EA'),                     # EA/NEA deliberately switched: A1 = non-effect
            ('other_allele', 'effect_allele'),
            ('Other_allele', 'Effect_allele'),
            ('OTHER_ALLELE', 'RISK_ALLELE'),
            ('other_allele', 'reference_allele'),   # Cardio file
            ('Non_Effect_allele', 'Effect_allele'),  # Cardio file
        ]
        for other_col, effect_col in allele_col_pairs:
            try:
                df_merge['A1'] = df_merge[other_col].str.upper()
                df_merge['A2'] = df_merge[effect_col].str.upper()
            except Exception:
                pass
        # Strand-flip (complement) the bim alleles so flipped-strand summary
        # statistics can still be recognized downstream.
        complement = {'A': 'T', 'T': 'A', 'C': 'G', 'G': 'C'}
        for src_col, flip_col in [('a1_bim', 'flip_a1'), ('a2_bim', 'flip_a2')]:
            original = df_merge[src_col].values
            flipped = original.copy()
            for base, comp in complement.items():
                flipped[original == base] = comp
            df_merge[flip_col] = flipped
        # Keep SNPs whose alleles match directly or reversed; strand-flip
        # matches are computed via flip_a1/flip_a2 but (as in the original
        # pipeline) not used for filtering.
        is_same = ((df_merge['a1_bim'] == df_merge['A1']) & (df_merge['a2_bim'] == df_merge['A2'])).values
        is_reverse = ((df_merge['a2_bim'] == df_merge['A1']) & (df_merge['a1_bim'] == df_merge['A2'])).values
        # Locate the effect-size column; the last matching name wins, as in
        # the original (the loop deliberately does not break).
        found_effects_col = False
        for effects_col in ['b', 'Beta', 'beta', 'effect', 'OR', 'MainEffects', "log_odds", "OR_fix",
                            "log_odds_(stage2)", "Effect", "log10bf"]:
            if (effects_col not in df_merge.columns): continue
            found_effects_col = True
            if ((effects_col == 'OR') or (effects_col == 'OR_fix')):
                # odds ratios are converted to (log10) effect sizes
                df_merge['Beta'] = np.log10(df_merge[effects_col].values)
                effects_col = 'Beta'
            effects = df_merge[effects_col].values
        assert found_effects_col, 'couldn\'t find a column of effects'
        # SNPs matched with reversed alleles contribute with opposite sign
        effects[is_reverse] *= (-1)
        # BUGFIX: the original indexed bed.val with an undefined name 'ID'
        # (copy-paste from Personal_PRS).  The PRS is the genotype matrix,
        # restricted to the matched SNPs, dotted with the effect sizes --
        # computed for all individuals at once.
        df_predictions['predict_' + trait] = bed.val[:, is_snp_found].dot(effects)
    return df_predictions
def Personal_PRS(bfile_path, ID, full_predictions=None, res=0.025):
    """Compute one person's PRS and its quantile within the PNP cohort.

    Parameters
    ----------
    bfile_path : str
        Path prefix of the plink bfile (.bed/.bim/.fam) with the PNP SNP data.
    ID : int
        iid of the person whose statistics (quantiles) are requested.
    full_predictions : pd.DataFrame, optional
        Whole-cohort scores per phenotype; currently unused -- the cohort
        predictions are read from the pickle at PKL_PATH instead.  Kept for
        backward compatibility of the signature.
    res : float
        Resolution of the quantile grid (default 0.025, i.e. 2.5% steps).

    Returns
    -------
    pd.DataFrame
        One row (index=[ID]) with the person's quantile per trait.
    """
    # Cohort-wide predictions are precomputed and pickled at PKL_PATH
    df_predictions = pd.read_pickle(PKL_PATH)
    df_quantiles = df_predictions.quantile(np.arange(res, 1, res))
    df_quantiles.to_pickle(Quant_PATH)
    bed = read_bfile_forsumstats(bfile_path)
    df_bim = pd.read_csv(bfile_path + '.bim', sep=r'\s+', header=None,
                         names=['chr', 'rs', 'cm', 'bp', 'a1', 'a2'])  # list of all SNPs
    df_bed = pd.DataFrame(bed.sid, columns=['rs'])  # SNP names
    df_bed = df_bed.merge(df_bim, how='left', on='rs')
    files_dict = get_files_dict()
    # np.int was removed from modern numpy; the builtin int is equivalent here
    df_predictions = pd.DataFrame(index=bed.iid[:, 1].astype(int))
    personal_predictions = pd.DataFrame(index=[ID])
    personal_quantiles = pd.DataFrame(index=[ID])
    for trait, sumstats_file in files_dict.items():
        print('reading summary statistics and performing prediction for %s...' % (trait))
        # creatinine is the only comma-separated summary-statistics file
        if (trait == 'creatinine'):
            df_sumstats = pd.read_csv(sumstats_file, sep=',')
        else:
            df_sumstats = pd.read_csv(sumstats_file, sep=r'\s+')
        found_snp_col = False
        # Checking for all possible SNP name versions
        for snp_name_col in ['SNP_ID', 'MarkerName', 'SNP', 'rsID', 'snp', 'rsid', 'sid', 'Snp', 'rs', 'Markername', "ID"]:
            if (snp_name_col not in df_sumstats.columns): continue
            found_snp_col = True
            break
        assert found_snp_col, 'No SNP column found'
        df_sumstats.drop_duplicates(subset=snp_name_col, inplace=True)
        df_merge = df_bed.merge(df_sumstats, left_on='rs', right_on=snp_name_col)
        df_merge_snps_set = set(df_merge['rs'])
        is_snp_found = [(s in df_merge_snps_set) for s in bed.sid]
        # Normalize allele columns: each known naming variant is tried in
        # order and a later match overwrites an earlier one (as before).
        try:
            df_merge['Allele1'] = df_merge['Allele1'].str.upper()
            df_merge['Allele2'] = df_merge['Allele2'].str.upper()
        except Exception:
            pass
        try:
            df_merge['Allele1'] = df_merge['Allele_1'].str.upper()
            df_merge['Allele2'] = df_merge['Allele_2'].str.upper()
        except Exception:
            pass
        try:
            df_merge['A1'] = df_merge['A1'].str.upper()
            df_merge['A2'] = df_merge['A2'].str.upper()
        except Exception:
            pass
        try:
            # EA/NEA deliberately switched: A1 is the non-effect allele
            df_merge['A1'] = df_merge['NEA'].str.upper()
            df_merge['A2'] = df_merge['EA'].str.upper()
        except Exception:
            pass
        try:
            df_merge['A1'] = df_merge['other_allele'].str.upper()
            df_merge['A2'] = df_merge['effect_allele'].str.upper()
        except Exception:
            pass
        try:
            df_merge['A1'] = df_merge['Other_allele'].str.upper()
            df_merge['A2'] = df_merge['Effect_allele'].str.upper()
        except Exception:
            pass
        try:
            df_merge['A1'] = df_merge['OTHER_ALLELE'].str.upper()
            df_merge['A2'] = df_merge['RISK_ALLELE'].str.upper()
        except Exception:
            pass
        try:  # Cardio file variant
            df_merge['A1'] = df_merge['other_allele'].str.upper()
            df_merge['A2'] = df_merge['reference_allele'].str.upper()
        except Exception:
            pass
        try:  # Cardio file variant
            df_merge['A1'] = df_merge['Non_Effect_allele'].str.upper()
            df_merge['A2'] = df_merge['Effect_allele'].str.upper()
        except Exception:
            pass
        # Strand-flip (complement) the bim alleles; 'a1'/'a2' come from the
        # .bim file since df_bed was not renamed in this function.
        a1 = df_merge['a1'].values.copy()
        is_A = (a1 == 'A')
        is_T = (a1 == 'T')
        is_C = (a1 == 'C')
        is_G = (a1 == 'G')
        a1[is_A] = 'T'
        a1[is_T] = 'A'
        a1[is_C] = 'G'
        a1[is_G] = 'C'
        df_merge['flip_a1'] = a1
        # BUGFIX: the original immediately overwrote this copy with
        # df_merge['A2'] (the sumstats allele); the strand flip must be
        # computed from the bim allele 'a2'.
        a2 = df_merge['a2'].values.copy()
        is_A = (a2 == 'A')
        is_T = (a2 == 'T')
        is_C = (a2 == 'C')
        is_G = (a2 == 'G')
        a2[is_A] = 'T'
        a2[is_T] = 'A'
        a2[is_C] = 'G'
        a2[is_G] = 'C'
        df_merge['flip_a2'] = a2
        # Match orientation of sumstats alleles vs bim alleles; fall back to
        # the A1/A2 comparison when Allele1/Allele2 columns are absent.
        try:
            is_same = ((df_merge['A1'] == df_merge['Allele1']) & (df_merge['A2'] == df_merge['Allele2'])).values
            is_reverse = ((df_merge['A2'] == df_merge['Allele1']) & (df_merge['A1'] == df_merge['Allele2'])).values
        except Exception:
            is_same = ((df_merge['a1'] == df_merge['A1']) & (df_merge['a2'] == df_merge['A2'])).values
            is_reverse = ((df_merge['a2'] == df_merge['A1']) & (df_merge['a1'] == df_merge['A2'])).values
        # Locate the effect-size column; the last matching name wins, as in
        # the original (the loop deliberately does not break).
        found_effects_col = False
        for effects_col in ['b', 'Beta', 'beta', 'effect', 'OR', 'MainEffects', "log_odds", "OR_fix",
                            "log_odds_(stage2)", "BETA", "Effect", "BMIadjMainEffects", "log10bf"]:
            if (effects_col not in df_merge.columns): continue
            found_effects_col = True
            if ((effects_col == 'OR') or (effects_col == 'OR_fix')):
                # consistency fix: convert odds ratios to log10 effect sizes,
                # mirroring get_predictions()
                df_merge['Beta'] = np.log10(df_merge[effects_col].values)
                effects_col = 'Beta'
            effects = df_merge[effects_col].values
        assert found_effects_col, 'couldn\'t find a column of effects'
        # reversed-allele SNPs contribute with opposite sign
        effects[is_reverse] *= (-1)
        # personal PRS: this person's genotype row over matched SNPs . effects
        personal_predictions.loc[ID, 'predict_' + trait] = (bed.val[df_predictions.index == ID, is_snp_found]).dot(effects)
        # quantile = insertion point of the personal score in the cohort grid
        personal_quantiles.loc[ID, 'predict_' + trait] = bisect(df_quantiles.loc[:, 'predict_' + trait].values,
                                                                personal_predictions.loc[ID, 'predict_' + trait])
    return personal_quantiles
def compute_prs(bfile_path=None, verbose=False, res=0.025):
    """Load (or compute and cache) the PRS table for the whole cohort.

    Parameters
    ----------
    bfile_path : str or None
        If None, the cached predictions pickle at PKL_PATH is loaded.
        Otherwise predictions are recomputed from this bfile and both the
        predictions and their quantile grid are re-pickled.
    verbose : bool
        Unused; kept for backward compatibility of the signature.
    res : float
        Resolution of the quantile grid (default 0.025, i.e. 2.5% steps).

    Returns
    -------
    pd.DataFrame
        The per-individual PRS predictions.
    """
    if (bfile_path is None):
        df_predictions = pd.read_pickle(PKL_PATH)
    else:
        # compute predictions for a grid of p-values
        df_predictions = get_predictions(bfile_path)
        # BUGFIX: quantile() expects the array of quantile points itself;
        # the original wrapped it in a list, passing a single list-valued
        # "quantile" instead of the grid (cf. Personal_PRS).
        df_quantiles = df_predictions.quantile(np.arange(res, 1, res))
        df_predictions.to_pickle(PKL_PATH)
        df_quantiles.to_pickle(Quant_PATH)
    return df_predictions
def Trait_top_SNPs(PRS_file, trait, n_snps=1000):
    """Return the ``n_snps`` most significant SNPs of one summary-stats file.

    Parameters
    ----------
    PRS_file : str
        Path to the GWAS summary-statistics file of the trait.
    trait : str
        Trait key; 'creatinine' is comma-separated, all others are
        whitespace-delimited.
    n_snps : int
        Number of top (lowest-P) SNPs to return (default 1000, matching the
        original hard-coded behavior).

    Returns
    -------
    pd.DataFrame
        Indexed by SNP id, single column 'P', sorted ascending by p-value.
    """
    sumstats_file = PRS_file
    if (trait == 'creatinine'):
        df_sumstats = pd.read_csv(sumstats_file, sep=',')
    else:
        # sep=r'\s+' is the non-deprecated equivalent of delim_whitespace=True
        df_sumstats = pd.read_csv(sumstats_file, sep=r'\s+')
    # locate the p-value column (names differ across consortia)
    found_P_col = False
    for P_Name in ['P', 'p', 'P_value', 'Pvalue', 'P_VALUE', 'P-value', "MainP", 'pvalue',
                   "Pvalue_Stage2", "P-value", "p_sanger", "P.value"]:
        if (P_Name not in df_sumstats.columns): continue
        found_P_col = True
        break
    assert found_P_col, 'No P column found'
    # locate the SNP-identifier column
    found_snp_col = False
    for snp_name_col in ['rsID', 'rsid', 'rs', 'sid', 'Markername', 'MarkerName', 'SNP', 'Snp', 'snp',
                         'SNP_ID', 'SNPID']:
        if (snp_name_col not in df_sumstats.columns): continue
        found_snp_col = True
        break
    # BUGFIX: the original never asserted this, so a missing SNP column
    # surfaced later as a confusing KeyError on 'SNPID'.
    assert found_snp_col, 'No SNP column found'
    df_sumstats = df_sumstats.loc[:, [snp_name_col, P_Name]]
    df_sumstats.set_index(snp_name_col, inplace=True, drop=True)
    df_sumstats.sort_values(by=P_Name, axis=0, inplace=True)
    # .copy() avoids pandas SettingWithCopy warnings when renaming below
    df_top = df_sumstats.iloc[0:n_snps].copy()
    df_top.columns = ['P']
    return df_top
def All_Traits_Top_SNPs(final_folder, dict_name, n_snps=1000):
    """Collect the top ``n_snps`` SNPs (lowest P) of every trait and pickle them.

    Builds ``{trait: DataFrame}`` -- each DataFrame indexed by SNP id (index
    name 'SNP') with a single column 'P' -- over all summary-statistics files
    returned by get_files_dict(), and pickles the dict to
    ``final_folder + dict_name``.

    Parameters
    ----------
    final_folder : str
        Output folder (concatenated directly with dict_name, as before).
    dict_name : str
        File name for the pickled dict.
    n_snps : int
        Number of top SNPs to keep per trait (default 1000).
    """
    trait_dict = {}
    files_dict = get_files_dict()
    for trait, sumstats_file in files_dict.items():
        # creatinine is the only comma-separated summary-statistics file
        if (trait == 'creatinine'):
            df_sumstats = pd.read_csv(sumstats_file, sep=',')
        else:
            df_sumstats = pd.read_csv(sumstats_file, sep=r'\s+')
        # BUGFIX: reset per file -- originally found_P_col was initialized
        # once before the loop, so a P column found in an earlier file
        # masked a missing P column in a later one.
        found_P_col = False
        for P_Name in ['P', 'p', 'P_value', 'Pvalue', 'P_VALUE', 'P-value', "MainP", 'pvalue',
                       "Pvalue_Stage2", "P-value", "p_sanger", "P.value"]:
            if (P_Name not in df_sumstats.columns): continue
            found_P_col = True
            break
        assert found_P_col, 'No P column found'
        found_snp_col = False
        for snp_name_col in ['rsID', 'rsid', 'rs', 'sid', 'Markername', 'MarkerName', 'SNP', 'Snp', 'snp',
                             'SNP_ID', 'SNPID']:
            if (snp_name_col not in df_sumstats.columns): continue
            found_snp_col = True
            break
        assert found_snp_col, 'No SNP column found'
        print("SNP COL NAME for trait:", trait, ' is:', snp_name_col)
        df_sumstats = df_sumstats.loc[:, [snp_name_col, P_Name]]
        df_sumstats.set_index(snp_name_col, inplace=True, drop=True)
        df_sumstats.sort_values(by=P_Name, axis=0, inplace=True)
        trait_dict[trait] = df_sumstats.iloc[0:n_snps].copy()
        trait_dict[trait].columns = ["P"]
        # BUGFIX: index.name must be the string "SNP" -- the original
        # assigned the list ["SNP"], which is invalid and breaks the
        # reset_index()/merge on 'SNP' in extract_relevant_SNPS.
        trait_dict[trait].index.name = "SNP"
    with open(final_folder + dict_name, 'wb') as fp:
        pickle.dump(trait_dict, fp)
def extract_relevant_SNPS(top_P_dict, bfile_path, Results_Folder, Job_Name, CHR_Num):
    """Dump, per trait, the genotypes of its top SNPs to a CSV file.

    For each trait in ``top_P_dict`` (trait -> DataFrame indexed by SNP id),
    the genotype matrix is restricted to the SNPs present in both the bed
    file and the trait's top-SNP table, then written to
    ``<Results_Folder><trait>_<CHR_Num>_.csv`` with one row per individual
    (column 'eid' holds the family id from the .fam file).
    """
    bed = read_bfile_forsumstats(bfile_path)
    # .bim annotation for every genotyped variant
    snp_annotations = pd.read_csv(bfile_path + '.bim', delim_whitespace=True, header=None,
                                  names=['chr', 'rs', 'cm', 'bp', 'a1', 'a2'])
    fam_table = pd.read_csv(bfile_path + '.fam', delim_whitespace=True, header=None)
    # SNP names in bed column order, annotated and renamed to avoid clashes
    bed_snps = pd.DataFrame(bed.sid, columns=['rs'])
    bed_snps = bed_snps.merge(snp_annotations, how='left', on='rs')
    bed_snps = bed_snps.rename(index=str, columns={"a1": "a1_bim", "a2": "a2_bim"})
    for trait, top_snps in top_P_dict.items():
        matched = bed_snps.merge(top_snps.reset_index(), left_on='rs', right_on='SNP')
        matched = matched.drop_duplicates(subset="rs")
        matched = matched.set_index('rs', drop=True)
        print(matched.head())
        matched_set = set(matched.index.values)
        snp_mask = [(s in matched_set) for s in bed.sid]
        genotypes = pd.DataFrame(data=bed.val[:, snp_mask],
                                 index=fam_table.iloc[:, 0].values,
                                 columns=matched.index.values)
        genotypes.index.name = "eid"
        genotypes = genotypes.reset_index()
        genotypes.to_csv(path_or_buf=Results_Folder + trait + "_" + CHR_Num + "_.csv", index=False)
def get_UKBB_predictions(bfile_path, Results_Folder, Job_Name, CHR_Num):
    """Compute per-trait polygenic risk scores (PRS) for every individual in a bfile.

    For each trait returned by get_files_dict(), the GWAS summary-statistics file
    is loaded, its SNPs are matched to the bfile (by rsID, falling back to CHR:BP
    on HG37 coordinates), effect sizes are sign-aligned to the bim a1 allele, and
    the PRS -- the dot product of the genotype matrix with the effect sizes -- is
    stored as a column 'predict_<trait>'.  The resulting table (indexed by sample
    eid) is written to <Results_Folder><Job_Name>_CHR_<CHR_Num>.csv.

    Parameters
    ----------
    bfile_path : str      path prefix of the PLINK bed/bim/fam triplet
    Results_Folder : str  output directory (must end with a path separator)
    Job_Name : str        used in the output file name
    CHR_Num : str         chromosome label for log messages and the file name
    """
    print("Started CHR#", CHR_Num)
    bed = read_bfile_forsumstats(bfile_path)  # bfile_path for the bed file
    df_bim = pd.read_csv(bfile_path + '.bim', delim_whitespace=True, header=None,
                         names=['chr', 'rs', 'cm', 'bp', 'a1', 'a2'])  # list of all SNPs
    df_bed = pd.DataFrame(bed.sid, columns=['rs'])  # SNP names
    df_bed = df_bed.merge(df_bim, how='left', on='rs')
    df_bed = df_bed.rename(index=str, columns={"a1": "a1_bim", "a2": "a2_bim"})
    files_dict = get_files_dict()
    # BUGFIX: np.int was deprecated in NumPy 1.20 and removed in 1.24 -- use builtin int.
    df_predictions = pd.DataFrame(index=bed.iid[:, 1].astype(int))
    df_predictions.index.name = "eid"
    for f_i, (trait, sumstats_file) in enumerate(files_dict.items()):
        # read summary statistics file
        print('reading summary statistics and performing prediction for', trait, ' at CHR#', str(CHR_Num))
        if (trait == 'creatinine'):
            df_sumstats = pd.read_csv(sumstats_file, sep=',')
        else:
            df_sumstats = pd.read_csv(sumstats_file, delim_whitespace=True)
        # checking for all possible SNP-name column spellings
        snp_name_col = None
        for candidate in ['rsID', 'rsid', 'rs', 'sid', 'Markername', 'MarkerName', 'SNP', 'Snp', 'snp',
                          'SNP_ID', 'SNPID']:
            if candidate in df_sumstats.columns:
                snp_name_col = candidate
                break
        assert snp_name_col is not None, 'No SNP column found'
        print("SNP COL NAME for trait:", trait, ' is:', snp_name_col)
        df_sumstats.drop_duplicates(subset=snp_name_col, inplace=True)
        df_merge = df_bed.merge(df_sumstats, left_on='rs', right_on=snp_name_col)
        print("df_merge.shape[0] according to RSID is: ", df_merge.shape[0], "(i.e. number of recognised SNPS of trarit",
              trait, " of CHR: ", str(CHR_Num), "of Jobname: ", Job_Name, " )")
        if df_merge.shape[0] == 0:
            print("No RS numbers, merging according to CHR:BP using HG37")
            # Each attempt is guarded because column names vary between files.
            # NOTE(review): the 2nd/3rd left_on variants never exist in df_bed
            # (its columns are lower-case 'chr'/'bp'), so they were always no-ops
            # in the original as well -- they probably meant to vary right_on.
            for left_cols in (['chr', 'bp'], ['CHR', 'BP'], ['CHR', 'POS']):
                try:
                    df_merge = df_bed.merge(df_sumstats, left_on=left_cols, right_on=['CHR', 'BP'])
                except (KeyError, ValueError):
                    pass
            if df_merge.shape[0] == 0:
                print("No matching SNPS Found for: ", bfile_path, "for trait:", trait)
        df_merge_snps_set = set(df_merge['rs'])
        is_snp_found = [(s in df_merge_snps_set) for s in bed.sid]
        # Locate the allele columns.  Pairs are (non-effect, effect) spellings seen in
        # the various sumstats files, in the original precedence order (later pairs win).
        # BUGFIX: the original assigned A1 and A2 in separate statements inside a
        # try-block, so a file holding only the first column of a pair got A1
        # overwritten while A2 kept a stale value; both columns are now required.
        allele_pairs = [('Allele1', 'Allele2'),
                        ('Allele_1', 'Allele_2'),
                        ('allele1', 'allele2'),                  # Cardio file
                        ('A1', 'A2'),
                        ('NEA', 'EA'),                           # A1 = non-effect allele
                        ('other_allele', 'effect_allele'),
                        ('Other_allele', 'Effect_allele'),
                        ('OTHER_ALLELE', 'RISK_ALLELE'),
                        ('other_allele', 'reference_allele'),    # Cardio file
                        ('Non_Effect_allele', 'Effect_allele'),  # Cardio file
                        ('OTHER_ALLELE', 'EFFECT_ALLELE'),       # Diabetes file
                        ('Other_all', 'Effect_all')]             # Diabetes file
        for non_eff_col, eff_col in allele_pairs:
            if non_eff_col in df_merge.columns and eff_col in df_merge.columns:
                df_merge['A1'] = df_merge[non_eff_col].str.upper()
                df_merge['A2'] = df_merge[eff_col].str.upper()
        # Sign alignment: the genotype matrix counts bim a1 alleles, so effects reported
        # in the opposite allele orientation must be negated.  (The original also built
        # strand-flip masks and a keep_snps mask that were never used -- dead code dropped.)
        is_reverse = ((df_merge['a2_bim'] == df_merge['A1']) & (df_merge['a1_bim'] == df_merge['A2'])).values
        # find the column of the effect sizes
        effects_col = None
        for candidate in ['b', 'Beta', 'beta', 'effect', 'OR', 'MainEffects', "log_odds", "OR_fix",
                          "log_odds_(stage2)", "BETA", "Effect", "BMIadjMainEffects", "log10bf"]:
            if candidate in df_merge.columns:
                effects_col = candidate
                break
        # BUGFIX: str(...) -- the original concatenated a str with an ndarray here.
        assert effects_col is not None, "couldn't find a column of effects:" + str(df_merge.columns.values)
        if ((effects_col == 'OR') or (effects_col == 'OR_fix')) and (np.min(df_merge[effects_col].values) > 0):
            # Odds ratios are moved to the log scale before summing.
            # NOTE(review): natural log is the conventional OR->beta transform; log10 is
            # kept here to preserve the original per-trait output scale -- confirm.
            df_merge['Beta'] = np.log10(df_merge[effects_col].values)
            effects_col = 'Beta'
        # BUGFIX: read the effect sizes AFTER the optional OR->Beta conversion; the
        # original captured the raw OR values first, so the conversion was never used.
        effects = df_merge[effects_col].values.copy()
        # flip effects if needed
        effects[is_reverse] *= (-1)
        # PRS = genotype matrix (samples x matched SNPs) . effect sizes
        df_predictions.loc[df_predictions.index, 'predict_' + trait] = (bed.val[:, is_snp_found]).dot(
            effects)
        print("Finished trait#", trait, " in chromosom number", CHR_Num, "Which is:", str(f_i), "out of", len(files_dict))
    df_predictions.to_csv(Results_Folder + Job_Name + "_CHR_" + CHR_Num + ".csv")
    print("Finished CHR#", CHR_Num)
def Convert_to_Class(trait, Results_Folder):
    """Recode one trait's raw-SNP CSV into categorical string labels and save it.

    Reads <Results_Folder>Final_Raw_SNPs<trait>.csv, indexes it by 'eid', fills
    missing genotypes with the string "-1", then replaces every numeric value in
    each column with a 1-based label derived from that value's position among the
    column's unique values.  The result is written to
    <Results_Folder>Final_Results/Final_SNPs_<trait>.csv.
    """
    print("Start reading csv:", trait)
    genotypes = pd.read_csv(Results_Folder + "Final_Raw_SNPs" + trait + ".csv")
    print("Finished reading csv:", trait)
    category_values = {}
    print(trait)
    print(genotypes)
    genotypes.set_index("eid", inplace=True, drop=True)
    print("Started filna:", trait)
    genotypes = genotypes.fillna("-1")
    print(genotypes.isnull().sum())
    for column in genotypes.columns.values:
        category_values[column] = genotypes.loc[:, column].unique()
        # Only numeric entries are recoded; the "-1" fill strings stay untouched.
        for label, value in enumerate(category_values[column], start=1):
            if np.issubdtype(type(value), np.number):
                genotypes.loc[genotypes.loc[:, column] == value, column] = str(int(label))
        print(genotypes.loc[:, column].head())
    print("Started saving:", trait)
    genotypes.to_csv(path_or_buf=Results_Folder + "Final_Results/Final_SNPs_" + trait + ".csv", index=True)
    print("finished trait :", trait)
| 62.201763
| 535
| 0.659744
| 8,759
| 63,508
| 4.496518
| 0.082087
| 0.048876
| 0.051289
| 0.06764
| 0.824629
| 0.794693
| 0.76618
| 0.732538
| 0.698261
| 0.654691
| 0
| 0.025465
| 0.198621
| 63,508
| 1,020
| 536
| 62.262745
| 0.748399
| 0.384393
| 0
| 0.54848
| 0
| 0
| 0.217554
| 0.087952
| 0
| 0
| 0
| 0.00098
| 0.014472
| 1
| 0.017366
| false
| 0.049204
| 0.015919
| 0
| 0.044863
| 0.034732
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0525d2c72ac1b6a626a0253c5fac9043c7917e68
| 38,756
|
py
|
Python
|
beehive_job_info_pb2.py
|
wangjie07/grpc_demo
|
b06e66934634e14208d529e0d59928d2cd7eb6a0
|
[
"MIT"
] | null | null | null |
beehive_job_info_pb2.py
|
wangjie07/grpc_demo
|
b06e66934634e14208d529e0d59928d2cd7eb6a0
|
[
"MIT"
] | null | null | null |
beehive_job_info_pb2.py
|
wangjie07/grpc_demo
|
b06e66934634e14208d529e0d59928d2cd7eb6a0
|
[
"MIT"
] | null | null | null |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: beehive-job-info.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='beehive-job-info.proto',
package='beehivejobinfopb',
syntax='proto3',
serialized_options=_b('ZPgit-pd.megvii-inc.com/Data-Core/Platform/Beehive-Job-Info/proto;beehivejobinfopb'),
serialized_pb=_b('\n\x16\x62\x65\x65hive-job-info.proto\x12\x10\x62\x65\x65hivejobinfopb\x1a\x1cgoogle/api/annotations.proto\"\xa3\x02\n\x10\x43reateJobRequest\x12\x0f\n\x07user_id\x18\x01 \x01(\x04\x12\r\n\x05title\x18\x02 \x01(\t\x12\x12\n\nmanager_id\x18\x03 \x01(\x04\x12\x10\n\x08\x64\x65\x61\x64line\x18\x04 \x01(\x04\x12\x10\n\x08supplier\x18\x05 \x01(\t\x12\r\n\x05share\x18\x06 \x01(\t\x12\x10\n\x08ride_job\x18\x07 \x01(\t\x12\x0e\n\x06region\x18\x08 \x01(\x05\x12\r\n\x05ready\x18\t \x01(\x05\x12\x11\n\tsensitive\x18\n \x01(\x08\x12\x13\n\x0b\x63\x61tegory_id\x18\x0b \x01(\x04\x12\x13\n\x0b\x65xpense_man\x18\x0c \x01(\t\x12\x14\n\x0cuploader_ids\x18\r \x03(\x04\x12\x0f\n\x07job_uid\x18\x0e \x01(\t\x12\x13\n\x0btemplate_id\x18\x0f \x01(\x04\"\x1d\n\x0bJobResponse\x12\x0e\n\x06job_id\x18\x01 \x01(\x04\"\xb0\x02\n\rPutJobRequest\x12\x11\n\tpre_state\x18\x01 \x01(\x04\x12\r\n\x05state\x18\x02 \x01(\r\x12\r\n\x05title\x18\x03 \x01(\t\x12\x12\n\nmanager_id\x18\x04 \x01(\x04\x12\x10\n\x08\x64\x65\x61\x64line\x18\x05 \x01(\x04\x12\x10\n\x08supplier\x18\x06 \x01(\t\x12\r\n\x05share\x18\x07 \x01(\t\x12\x10\n\x08ride_job\x18\x08 \x01(\t\x12\x0e\n\x06region\x18\t \x01(\x05\x12\r\n\x05ready\x18\n \x01(\x05\x12\x11\n\tsensitive\x18\x0b \x01(\x08\x12\x13\n\x0b\x63\x61tegory_id\x18\x0c \x01(\r\x12\x13\n\x0b\x65xpense_man\x18\r \x01(\t\x12\x14\n\x0cuploader_ids\x18\x0e \x03(\x04\x12\x0e\n\x06job_id\x18\x0f \x01(\x04\x12\x13\n\x0btemplate_id\x18\x10 \x01(\x04\"\x8d\x02\n\rGetJobRequest\x12\x0c\n\x04page\x18\x01 \x01(\r\x12\x10\n\x08per_page\x18\x02 \x01(\r\x12\x11\n\tasc_sorts\x18\x03 \x03(\t\x12\x12\n\ndesc_sorts\x18\x04 \x03(\t\x12$\n\x04over\x18\x05 \x01(\x0e\x32\x16.beehivejobinfopb.Over\x12\x10\n\x08identity\x18\x06 \x01(\t\x12\x12\n\nmanager_id\x18\x07 \x01(\x04\x12\x0f\n\x07user_id\x18\x08 \x01(\x04\x12\x0e\n\x06states\x18\t \x03(\r\x12\x0f\n\x07job_uid\x18\n \x01(\t\x12\r\n\x05title\x18\x0b \x01(\t\x12\x0f\n\x07job_ids\x18\x0c 
\x03(\x04\x12\x17\n\x0freturn_entities\x18\r \x03(\t\"\x9d\x04\n\x0eGetJobResponse\x12?\n\npagination\x18\x01 \x01(\x0b\x32+.beehivejobinfopb.GetJobResponse.Pagination\x12\x33\n\x05items\x18\x02 \x03(\x0b\x32$.beehivejobinfopb.GetJobResponse.Job\x1ap\n\nPagination\x12\r\n\x05total\x18\x01 \x01(\x04\x12\x10\n\x08per_page\x18\x02 \x01(\x04\x12\x14\n\x0c\x63urrent_page\x18\x03 \x01(\x04\x12\x11\n\tlast_page\x18\x04 \x01(\x04\x12\x0c\n\x04\x66rom\x18\x05 \x01(\x04\x12\n\n\x02to\x18\x06 \x01(\x04\x1a\xa2\x02\n\x03Job\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0f\n\x07job_uid\x18\x02 \x01(\t\x12\x0f\n\x07user_id\x18\x03 \x01(\x04\x12\r\n\x05title\x18\x04 \x01(\t\x12\x12\n\nmanager_id\x18\x05 \x01(\x04\x12\x10\n\x08\x64\x65\x61\x64line\x18\x06 \x01(\x03\x12\x10\n\x08supplier\x18\x07 \x01(\t\x12\r\n\x05share\x18\x08 \x01(\t\x12\x10\n\x08ride_job\x18\t \x01(\t\x12\x0e\n\x06region\x18\n \x01(\x05\x12\r\n\x05ready\x18\x0b \x01(\x05\x12\x11\n\tsensitive\x18\x0c \x01(\x08\x12\x13\n\x0b\x63\x61tegory_id\x18\r \x01(\x04\x12\x13\n\x0b\x65xpense_man\x18\x0e \x01(\t\x12\x14\n\x0cuploader_ids\x18\x0f \x03(\x04\x12\x13\n\x0btemplate_id\x18\x10 \x01(\x04*1\n\x04Over\x12\x0c\n\x08NO_LIMIT\x10\x00\x12\x0b\n\x07\x41RCHIVE\x10\x01\x12\x0e\n\nNO_ARCHIVE\x10\x02\x32\xc7\x02\n\nJobService\x12h\n\tCreateJob\x12\".beehivejobinfopb.CreateJobRequest\x1a\x1d.beehivejobinfopb.JobResponse\"\x18\x82\xd3\xe4\x93\x02\x12\"\r/info/v1/jobs:\x01*\x12k\n\x06PutJob\x12\x1f.beehivejobinfopb.PutJobRequest\x1a\x1d.beehivejobinfopb.JobResponse\"!\x82\xd3\xe4\x93\x02\x1b\x1a\x16/info/v1/jobs/{job_id}:\x01*\x12\x62\n\x06GetJob\x12\x1f.beehivejobinfopb.GetJobRequest\x1a .beehivejobinfopb.GetJobResponse\"\x15\x82\xd3\xe4\x93\x02\x0f\x12\r/info/v1/jobsBRZPgit-pd.megvii-inc.com/Data-Core/Platform/Beehive-Job-Info/proto;beehivejobinfopbb\x06proto3')
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
# Descriptor for the file-level `Over` enum of beehive-job-info.proto
# (generated by protoc -- do not hand-edit).
_OVER = _descriptor.EnumDescriptor(
  name='Over',
  full_name='beehivejobinfopb.Over',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='NO_LIMIT', index=0, number=0,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='ARCHIVE', index=1, number=1,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='NO_ARCHIVE', index=2, number=2,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=1522,
  serialized_end=1571,
)
_sym_db.RegisterEnumDescriptor(_OVER)

# Module-level Python view of the enum and its values.
Over = enum_type_wrapper.EnumTypeWrapper(_OVER)
NO_LIMIT = 0
ARCHIVE = 1
NO_ARCHIVE = 2
# Message descriptor for CreateJobRequest (generated by protoc -- do not hand-edit).
_CREATEJOBREQUEST = _descriptor.Descriptor(
  name='CreateJobRequest',
  full_name='beehivejobinfopb.CreateJobRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_id', full_name='beehivejobinfopb.CreateJobRequest.user_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='title', full_name='beehivejobinfopb.CreateJobRequest.title', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='manager_id', full_name='beehivejobinfopb.CreateJobRequest.manager_id', index=2,
      number=3, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='deadline', full_name='beehivejobinfopb.CreateJobRequest.deadline', index=3,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='supplier', full_name='beehivejobinfopb.CreateJobRequest.supplier', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='share', full_name='beehivejobinfopb.CreateJobRequest.share', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ride_job', full_name='beehivejobinfopb.CreateJobRequest.ride_job', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='region', full_name='beehivejobinfopb.CreateJobRequest.region', index=7,
      number=8, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ready', full_name='beehivejobinfopb.CreateJobRequest.ready', index=8,
      number=9, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sensitive', full_name='beehivejobinfopb.CreateJobRequest.sensitive', index=9,
      number=10, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='category_id', full_name='beehivejobinfopb.CreateJobRequest.category_id', index=10,
      number=11, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='expense_man', full_name='beehivejobinfopb.CreateJobRequest.expense_man', index=11,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='uploader_ids', full_name='beehivejobinfopb.CreateJobRequest.uploader_ids', index=12,
      number=13, type=4, cpp_type=4, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='job_uid', full_name='beehivejobinfopb.CreateJobRequest.job_uid', index=13,
      number=14, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='template_id', full_name='beehivejobinfopb.CreateJobRequest.template_id', index=14,
      number=15, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=75,
  serialized_end=366,
)
# Message descriptor for JobResponse (generated by protoc -- do not hand-edit).
_JOBRESPONSE = _descriptor.Descriptor(
  name='JobResponse',
  full_name='beehivejobinfopb.JobResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='job_id', full_name='beehivejobinfopb.JobResponse.job_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=368,
  serialized_end=397,
)
# Message descriptor for PutJobRequest (generated by protoc -- do not hand-edit).
_PUTJOBREQUEST = _descriptor.Descriptor(
  name='PutJobRequest',
  full_name='beehivejobinfopb.PutJobRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='pre_state', full_name='beehivejobinfopb.PutJobRequest.pre_state', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='state', full_name='beehivejobinfopb.PutJobRequest.state', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='title', full_name='beehivejobinfopb.PutJobRequest.title', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='manager_id', full_name='beehivejobinfopb.PutJobRequest.manager_id', index=3,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='deadline', full_name='beehivejobinfopb.PutJobRequest.deadline', index=4,
      number=5, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='supplier', full_name='beehivejobinfopb.PutJobRequest.supplier', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='share', full_name='beehivejobinfopb.PutJobRequest.share', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ride_job', full_name='beehivejobinfopb.PutJobRequest.ride_job', index=7,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='region', full_name='beehivejobinfopb.PutJobRequest.region', index=8,
      number=9, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ready', full_name='beehivejobinfopb.PutJobRequest.ready', index=9,
      number=10, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sensitive', full_name='beehivejobinfopb.PutJobRequest.sensitive', index=10,
      number=11, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='category_id', full_name='beehivejobinfopb.PutJobRequest.category_id', index=11,
      number=12, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='expense_man', full_name='beehivejobinfopb.PutJobRequest.expense_man', index=12,
      number=13, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='uploader_ids', full_name='beehivejobinfopb.PutJobRequest.uploader_ids', index=13,
      number=14, type=4, cpp_type=4, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='job_id', full_name='beehivejobinfopb.PutJobRequest.job_id', index=14,
      number=15, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='template_id', full_name='beehivejobinfopb.PutJobRequest.template_id', index=15,
      number=16, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=400,
  serialized_end=704,
)
# Message descriptor for GetJobRequest (generated by protoc -- do not hand-edit).
_GETJOBREQUEST = _descriptor.Descriptor(
  name='GetJobRequest',
  full_name='beehivejobinfopb.GetJobRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='page', full_name='beehivejobinfopb.GetJobRequest.page', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='per_page', full_name='beehivejobinfopb.GetJobRequest.per_page', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='asc_sorts', full_name='beehivejobinfopb.GetJobRequest.asc_sorts', index=2,
      number=3, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='desc_sorts', full_name='beehivejobinfopb.GetJobRequest.desc_sorts', index=3,
      number=4, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='over', full_name='beehivejobinfopb.GetJobRequest.over', index=4,
      number=5, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='identity', full_name='beehivejobinfopb.GetJobRequest.identity', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='manager_id', full_name='beehivejobinfopb.GetJobRequest.manager_id', index=6,
      number=7, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='user_id', full_name='beehivejobinfopb.GetJobRequest.user_id', index=7,
      number=8, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='states', full_name='beehivejobinfopb.GetJobRequest.states', index=8,
      number=9, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='job_uid', full_name='beehivejobinfopb.GetJobRequest.job_uid', index=9,
      number=10, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='title', full_name='beehivejobinfopb.GetJobRequest.title', index=10,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='job_ids', full_name='beehivejobinfopb.GetJobRequest.job_ids', index=11,
      number=12, type=4, cpp_type=4, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='return_entities', full_name='beehivejobinfopb.GetJobRequest.return_entities', index=12,
      number=13, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=707,
  serialized_end=976,
)
# Nested message descriptor for GetJobResponse.Pagination
# (generated by protoc -- do not hand-edit).
_GETJOBRESPONSE_PAGINATION = _descriptor.Descriptor(
  name='Pagination',
  full_name='beehivejobinfopb.GetJobResponse.Pagination',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='total', full_name='beehivejobinfopb.GetJobResponse.Pagination.total', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='per_page', full_name='beehivejobinfopb.GetJobResponse.Pagination.per_page', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='current_page', full_name='beehivejobinfopb.GetJobResponse.Pagination.current_page', index=2,
      number=3, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='last_page', full_name='beehivejobinfopb.GetJobResponse.Pagination.last_page', index=3,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='from', full_name='beehivejobinfopb.GetJobResponse.Pagination.from', index=4,
      number=5, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='to', full_name='beehivejobinfopb.GetJobResponse.Pagination.to', index=5,
      number=6, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1115,
  serialized_end=1227,
)
_GETJOBRESPONSE_JOB = _descriptor.Descriptor(
name='Job',
full_name='beehivejobinfopb.GetJobResponse.Job',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='beehivejobinfopb.GetJobResponse.Job.id', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='job_uid', full_name='beehivejobinfopb.GetJobResponse.Job.job_uid', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='beehivejobinfopb.GetJobResponse.Job.user_id', index=2,
number=3, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='title', full_name='beehivejobinfopb.GetJobResponse.Job.title', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='manager_id', full_name='beehivejobinfopb.GetJobResponse.Job.manager_id', index=4,
number=5, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='deadline', full_name='beehivejobinfopb.GetJobResponse.Job.deadline', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='supplier', full_name='beehivejobinfopb.GetJobResponse.Job.supplier', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='share', full_name='beehivejobinfopb.GetJobResponse.Job.share', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ride_job', full_name='beehivejobinfopb.GetJobResponse.Job.ride_job', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='region', full_name='beehivejobinfopb.GetJobResponse.Job.region', index=9,
number=10, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ready', full_name='beehivejobinfopb.GetJobResponse.Job.ready', index=10,
number=11, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sensitive', full_name='beehivejobinfopb.GetJobResponse.Job.sensitive', index=11,
number=12, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='category_id', full_name='beehivejobinfopb.GetJobResponse.Job.category_id', index=12,
number=13, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='expense_man', full_name='beehivejobinfopb.GetJobResponse.Job.expense_man', index=13,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='uploader_ids', full_name='beehivejobinfopb.GetJobResponse.Job.uploader_ids', index=14,
number=15, type=4, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='template_id', full_name='beehivejobinfopb.GetJobResponse.Job.template_id', index=15,
number=16, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1230,
serialized_end=1520,
)
_GETJOBRESPONSE = _descriptor.Descriptor(
name='GetJobResponse',
full_name='beehivejobinfopb.GetJobResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pagination', full_name='beehivejobinfopb.GetJobResponse.pagination', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='items', full_name='beehivejobinfopb.GetJobResponse.items', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_GETJOBRESPONSE_PAGINATION, _GETJOBRESPONSE_JOB, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=979,
serialized_end=1520,
)
_GETJOBREQUEST.fields_by_name['over'].enum_type = _OVER
_GETJOBRESPONSE_PAGINATION.containing_type = _GETJOBRESPONSE
_GETJOBRESPONSE_JOB.containing_type = _GETJOBRESPONSE
_GETJOBRESPONSE.fields_by_name['pagination'].message_type = _GETJOBRESPONSE_PAGINATION
_GETJOBRESPONSE.fields_by_name['items'].message_type = _GETJOBRESPONSE_JOB
DESCRIPTOR.message_types_by_name['CreateJobRequest'] = _CREATEJOBREQUEST
DESCRIPTOR.message_types_by_name['JobResponse'] = _JOBRESPONSE
DESCRIPTOR.message_types_by_name['PutJobRequest'] = _PUTJOBREQUEST
DESCRIPTOR.message_types_by_name['GetJobRequest'] = _GETJOBREQUEST
DESCRIPTOR.message_types_by_name['GetJobResponse'] = _GETJOBRESPONSE
DESCRIPTOR.enum_types_by_name['Over'] = _OVER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
CreateJobRequest = _reflection.GeneratedProtocolMessageType('CreateJobRequest', (_message.Message,), dict(
DESCRIPTOR = _CREATEJOBREQUEST,
__module__ = 'beehive_job_info_pb2'
# @@protoc_insertion_point(class_scope:beehivejobinfopb.CreateJobRequest)
))
_sym_db.RegisterMessage(CreateJobRequest)
JobResponse = _reflection.GeneratedProtocolMessageType('JobResponse', (_message.Message,), dict(
DESCRIPTOR = _JOBRESPONSE,
__module__ = 'beehive_job_info_pb2'
# @@protoc_insertion_point(class_scope:beehivejobinfopb.JobResponse)
))
_sym_db.RegisterMessage(JobResponse)
PutJobRequest = _reflection.GeneratedProtocolMessageType('PutJobRequest', (_message.Message,), dict(
DESCRIPTOR = _PUTJOBREQUEST,
__module__ = 'beehive_job_info_pb2'
# @@protoc_insertion_point(class_scope:beehivejobinfopb.PutJobRequest)
))
_sym_db.RegisterMessage(PutJobRequest)
GetJobRequest = _reflection.GeneratedProtocolMessageType('GetJobRequest', (_message.Message,), dict(
DESCRIPTOR = _GETJOBREQUEST,
__module__ = 'beehive_job_info_pb2'
# @@protoc_insertion_point(class_scope:beehivejobinfopb.GetJobRequest)
))
_sym_db.RegisterMessage(GetJobRequest)
GetJobResponse = _reflection.GeneratedProtocolMessageType('GetJobResponse', (_message.Message,), dict(
Pagination = _reflection.GeneratedProtocolMessageType('Pagination', (_message.Message,), dict(
DESCRIPTOR = _GETJOBRESPONSE_PAGINATION,
__module__ = 'beehive_job_info_pb2'
# @@protoc_insertion_point(class_scope:beehivejobinfopb.GetJobResponse.Pagination)
))
,
Job = _reflection.GeneratedProtocolMessageType('Job', (_message.Message,), dict(
DESCRIPTOR = _GETJOBRESPONSE_JOB,
__module__ = 'beehive_job_info_pb2'
# @@protoc_insertion_point(class_scope:beehivejobinfopb.GetJobResponse.Job)
))
,
DESCRIPTOR = _GETJOBRESPONSE,
__module__ = 'beehive_job_info_pb2'
# @@protoc_insertion_point(class_scope:beehivejobinfopb.GetJobResponse)
))
_sym_db.RegisterMessage(GetJobResponse)
_sym_db.RegisterMessage(GetJobResponse.Pagination)
_sym_db.RegisterMessage(GetJobResponse.Job)
DESCRIPTOR._options = None
_JOBSERVICE = _descriptor.ServiceDescriptor(
name='JobService',
full_name='beehivejobinfopb.JobService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=1574,
serialized_end=1901,
methods=[
_descriptor.MethodDescriptor(
name='CreateJob',
full_name='beehivejobinfopb.JobService.CreateJob',
index=0,
containing_service=None,
input_type=_CREATEJOBREQUEST,
output_type=_JOBRESPONSE,
serialized_options=_b('\202\323\344\223\002\022\"\r/info/v1/jobs:\001*'),
),
_descriptor.MethodDescriptor(
name='PutJob',
full_name='beehivejobinfopb.JobService.PutJob',
index=1,
containing_service=None,
input_type=_PUTJOBREQUEST,
output_type=_JOBRESPONSE,
serialized_options=_b('\202\323\344\223\002\033\032\026/info/v1/jobs/{job_id}:\001*'),
),
_descriptor.MethodDescriptor(
name='GetJob',
full_name='beehivejobinfopb.JobService.GetJob',
index=2,
containing_service=None,
input_type=_GETJOBREQUEST,
output_type=_GETJOBRESPONSE,
serialized_options=_b('\202\323\344\223\002\017\022\r/info/v1/jobs'),
),
])
_sym_db.RegisterServiceDescriptor(_JOBSERVICE)
DESCRIPTOR.services_by_name['JobService'] = _JOBSERVICE
# @@protoc_insertion_point(module_scope)
| 47.378973
| 3,780
| 0.742053
| 5,073
| 38,756
| 5.412576
| 0.060911
| 0.063515
| 0.070799
| 0.056086
| 0.789497
| 0.718333
| 0.700889
| 0.675395
| 0.654126
| 0.651868
| 0
| 0.047934
| 0.131205
| 38,756
| 817
| 3,781
| 47.436965
| 0.767545
| 0.017081
| 0
| 0.695596
| 1
| 0.006477
| 0.221522
| 0.189013
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009067
| 0
| 0.009067
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0599a8ffa812a08745ecef084b15449d8e236e7a
| 9,130
|
py
|
Python
|
Pyto Mac/PyObjC/Photos/_metadata.py
|
cclauss/Pyto
|
1c4ccc47e3a91e996bf6ec38c527d244de2cf7ed
|
[
"MIT"
] | 4
|
2019-03-11T18:05:49.000Z
|
2021-05-22T21:09:09.000Z
|
Pyto Mac/PyObjC/Photos/_metadata.py
|
cclauss/Pyto
|
1c4ccc47e3a91e996bf6ec38c527d244de2cf7ed
|
[
"MIT"
] | null | null | null |
Pyto Mac/PyObjC/Photos/_metadata.py
|
cclauss/Pyto
|
1c4ccc47e3a91e996bf6ec38c527d244de2cf7ed
|
[
"MIT"
] | 1
|
2019-03-18T18:53:36.000Z
|
2019-03-18T18:53:36.000Z
|
# This file is generated by objective.metadata
#
# Last update: Tue Jun 5 13:02:38 2018
import objc, sys
if sys.maxsize > 2 ** 32:
def sel32or64(a, b): return b
else:
def sel32or64(a, b): return a
if sys.byteorder == 'little':
def littleOrBig(a, b): return a
else:
def littleOrBig(a, b): return b
misc = {
}
constants = '''$PHImageCancelledKey$PHImageErrorKey$PHImageManagerMaximumSize@{CGSize=dd}$PHImageResultIsDegradedKey$PHImageResultIsInCloudKey$PHImageResultRequestIDKey$PHLivePhotoEditingErrorDomain$PHLivePhotoShouldRenderAtPlaybackTime$PHLocalIdentifierNotFound$'''
enums = '''$PHAssetBurstSelectionTypeAutoPick@1$PHAssetBurstSelectionTypeNone@0$PHAssetBurstSelectionTypeUserPick@2$PHAssetCollectionSubtypeAlbumCloudShared@101$PHAssetCollectionSubtypeAlbumImported@6$PHAssetCollectionSubtypeAlbumMyPhotoStream@100$PHAssetCollectionSubtypeAlbumRegular@2$PHAssetCollectionSubtypeAlbumSyncedAlbum@5$PHAssetCollectionSubtypeAlbumSyncedEvent@3$PHAssetCollectionSubtypeAlbumSyncedFaces@4$PHAssetCollectionSubtypeSmartAlbumAllHidden@205$PHAssetCollectionSubtypeSmartAlbumBursts@207$PHAssetCollectionSubtypeSmartAlbumDepthEffect@212$PHAssetCollectionSubtypeSmartAlbumFavorites@203$PHAssetCollectionSubtypeSmartAlbumGeneric@200$PHAssetCollectionSubtypeSmartAlbumLivePhotos@213$PHAssetCollectionSubtypeSmartAlbumPanoramas@201$PHAssetCollectionSubtypeSmartAlbumRecentlyAdded@206$PHAssetCollectionSubtypeSmartAlbumScreenshots@211$PHAssetCollectionSubtypeSmartAlbumSelfPortraits@210$PHAssetCollectionSubtypeSmartAlbumSlomoVideos@208$PHAssetCollectionSubtypeSmartAlbumTimelapses@204$PHAssetCollectionSubtypeSmartAlbumUserLibrary@209$PHAssetCollectionSubtypeSmartAlbumVideos@202$PHAssetCollectionTypeAlbum@1$PHAssetCollectionTypeMoment@3$PHAssetCollectionTypeSmartAlbum@2$PHAssetEditOperationContent@2$PHAssetEditOperationDelete@1$PHAssetEditOperationProperties@3$PHAssetMediaSubtypeNone@0$PHAssetMediaSubtypePhotoDepthEffect@16$PHAssetMediaSubtypePhotoHDR@2$PHAssetMediaSubtypePhotoLive@8$PHAssetMediaSubtypePhotoPanorama@1$PHAssetMediaSubtypePhotoScreenshot@4$PHAssetMediaSubtypeVideoHighFrameRate@131072$PHAssetMediaSubtypeVideoStreamed@65536$PHAssetMediaSubtypeVideoTimelapse@262144$PHAssetMediaTypeAudio@3$PHAssetMediaTypeImage@1$PHAssetMediaTypeUnknown@0$PHAssetMediaTypeVideo@2$PHAssetPlaybackStyleImage@1$PHAssetPlaybackStyleImageAnimated@2$PHAssetPlaybackStyleLivePhoto@3$PHAssetPlaybackStyleUnsupported@0$PHAssetPlaybackStyleVideo@4$PHAssetPlaybackStyleVideoLooping@5$PHAssetResourceTypeAdjustmentBasePhoto@8$PHAssetResourceTypeAdjustmentData@7$PHAssetResourceTypeAlterna
tePhoto@4$PHAssetResourceTypeAudio@3$PHAssetResourceTypeFullSizePhoto@5$PHAssetResourceTypeFullSizeVideo@6$PHAssetResourceTypePairedVideo@9$PHAssetResourceTypePhoto@1$PHAssetResourceTypeVideo@2$PHAssetSourceTypeCloudShared@2$PHAssetSourceTypeNone@0$PHAssetSourceTypeUserLibrary@1$PHAssetSourceTypeiTunesSynced@4$PHAuthorizationStatusAuthorized@3$PHAuthorizationStatusDenied@2$PHAuthorizationStatusNotDetermined@0$PHAuthorizationStatusRestricted@1$PHCollectionEditOperationAddContent@3$PHCollectionEditOperationCreateContent@4$PHCollectionEditOperationDelete@6$PHCollectionEditOperationDeleteContent@1$PHCollectionEditOperationRearrangeContent@5$PHCollectionEditOperationRemoveContent@2$PHCollectionEditOperationRename@7$PHCollectionListSubtypeMomentListCluster@1$PHCollectionListSubtypeMomentListYear@2$PHCollectionListSubtypeRegularFolder@100$PHCollectionListSubtypeSmartFolderEvents@200$PHCollectionListSubtypeSmartFolderFaces@201$PHCollectionListTypeFolder@2$PHCollectionListTypeMomentList@1$PHCollectionListTypeSmartFolder@3$PHImageContentModeAspectFill@1$PHImageContentModeAspectFit@0$PHImageRequestOptionsDeliveryModeFastFormat@2$PHImageRequestOptionsDeliveryModeHighQualityFormat@1$PHImageRequestOptionsDeliveryModeOpportunistic@0$PHImageRequestOptionsResizeModeExact@2$PHImageRequestOptionsResizeModeFast@1$PHImageRequestOptionsResizeModeNone@0$PHImageRequestOptionsVersionCurrent@0$PHImageRequestOptionsVersionOriginal@2$PHImageRequestOptionsVersionUnadjusted@1$PHInvalidImageRequestID@0$PHLivePhotoEditingErrorCodeAborted@1$PHLivePhotoEditingErrorCodeUnknown@0$PHLivePhotoFrameTypePhoto@0$PHLivePhotoFrameTypeVideo@1$'''
misc.update({})
aliases = {'PHCollectionListSubtypeAny': 'NSIntegerMax', 'PHAssetCollectionSubtypeAny': 'NSIntegerMax', 'PHImageContentModeDefault': 'PHImageContentModeAspectFit'}
r = objc.registerMetaDataForSelector
objc._updatingMetadata(True)
try:
r(b'NSObject', b'renderScale', {'retval': {'type': sel32or64(b'f', b'd')}})
r(b'NSObject', b'time', {'retval': {'type': '{_CMTime=qiIq}'}})
r(b'NSObject', b'type', {'retval': {'type': sel32or64(b'i', b'q')}})
r(b'PHAsset', b'isFavorite', {'retval': {'type': 'Z'}})
r(b'PHAsset', b'isHidden', {'retval': {'type': 'Z'}})
r(b'PHAsset', b'isSyncFailureHidden', {'retval': {'type': 'Z'}})
r(b'PHFetchOptions', b'includeHiddenAssets', {'retval': {'type': 'Z'}})
r(b'PHFetchOptions', b'setIncludeHiddenAssets:', {'arguments': {2: {'type': 'Z'}}})
r(b'PHFetchOptions', b'setWantsIncrementalChangeDetails:', {'arguments': {2: {'type': 'Z'}}})
r(b'PHFetchOptions', b'wantsIncrementalChangeDetails', {'retval': {'type': 'Z'}})
r(b'PHFetchResult', b'containsObject:', {'retval': {'type': 'Z'}})
r(b'PHFetchResult', b'enumerateObjectsAtIndexes:options:usingBlock:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'o^Z'}}}}}})
r(b'PHFetchResult', b'enumerateObjectsUsingBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'o^Z'}}}}}})
r(b'PHFetchResult', b'enumerateObjectsWithOptions:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'o^Z'}}}}}})
r(b'PHFetchResultChangeDetails', b'hasIncrementalChanges', {'retval': {'type': 'Z'}})
r(b'PHFetchResultChangeDetails', b'hasMoves', {'retval': {'type': 'Z'}})
r(b'PHImageManager', b'requestImageDataForAsset:options:resultHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'I'}, 4: {'type': b'@'}}}}}})
r(b'PHImageManager', b'requestImageForAsset:targetSize:contentMode:options:resultHandler:', {'arguments': {6: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}})
r(b'PHImageRequestOptions', b'isNetworkAccessAllowed', {'retval': {'type': 'Z'}})
r(b'PHImageRequestOptions', b'isSynchronous', {'retval': {'type': 'Z'}})
r(b'PHImageRequestOptions', b'progressHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'd'}, 2: {'type': b'@'}, 3: {'type': b'o^Z'}, 4: {'type': b'@'}}}}})
r(b'PHImageRequestOptions', b'setNetworkAccessAllowed:', {'arguments': {2: {'type': 'Z'}}})
r(b'PHImageRequestOptions', b'setProgressHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'd'}, 2: {'type': b'@'}, 3: {'type': b'o^Z'}, 4: {'type': b'@'}}}}}})
r(b'PHImageRequestOptions', b'setSynchronous:', {'arguments': {2: {'type': 'Z'}}})
r(b'PHLivePhotoEditingContext', b'frameProcessor', {'retval': {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'o^@'}}}}})
r(b'PHLivePhotoEditingContext', b'prepareLivePhotoForPlaybackWithTargetSize:options:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}}}})
r(b'PHLivePhotoEditingContext', b'saveLivePhotoToOutput:options:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}}}})
r(b'PHLivePhotoEditingContext', b'setFrameProcessor:', {'arguments': {2: {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'o^@'}}}}}})
r(b'PHObjectChangeDetails', b'assetContentChanged', {'retval': {'type': 'Z'}})
r(b'PHObjectChangeDetails', b'objectWasDeleted', {'retval': {'type': 'Z'}})
r(b'PHPhotoLibrary', b'performChanges:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}}}})
r(b'PHPhotoLibrary', b'performChangesAndWait:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}})
r(b'PHPhotoLibrary', b'requestAuthorization:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}}}}}})
r(b'PHProject', b'hasProjectPreview', {'retval': {'type': 'Z'}})
finally:
objc._updatingMetadata(False)
expressions = {}
# END OF FILE
| 142.65625
| 3,630
| 0.738773
| 841
| 9,130
| 8.015458
| 0.298454
| 0.041537
| 0.021362
| 0.016615
| 0.240024
| 0.208871
| 0.19463
| 0.161697
| 0.147159
| 0.147159
| 0
| 0.030435
| 0.067908
| 9,130
| 63
| 3,631
| 144.920635
| 0.761692
| 0.010296
| 0
| 0.036364
| 1
| 0.036364
| 0.707674
| 0.538479
| 0
| 0
| 0
| 0
| 0
| 1
| 0.072727
| false
| 0
| 0.036364
| 0.072727
| 0.109091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5525310dc96426c28c66a5a03aa575daa429f84b
| 53
|
py
|
Python
|
verygoodpackage/__init__.py
|
BiAPoL/example-repo-sphinx
|
a3fac607f47f1e68adb35f5409e8477824114224
|
[
"BSD-3-Clause"
] | null | null | null |
verygoodpackage/__init__.py
|
BiAPoL/example-repo-sphinx
|
a3fac607f47f1e68adb35f5409e8477824114224
|
[
"BSD-3-Clause"
] | null | null | null |
verygoodpackage/__init__.py
|
BiAPoL/example-repo-sphinx
|
a3fac607f47f1e68adb35f5409e8477824114224
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from .subpackage_a import *
| 13.25
| 27
| 0.603774
| 7
| 53
| 4.428571
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023256
| 0.188679
| 53
| 3
| 28
| 17.666667
| 0.697674
| 0.396226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
55383bdb199fcf3df80bfadfc39369882e02aa3b
| 114
|
py
|
Python
|
src/pandas_profiling/model/__init__.py
|
anurag-gandhi/pandas-profiling
|
2373f3a299264f7b312dbe4b92edc14d36e8140e
|
[
"MIT"
] | 8,107
|
2018-01-07T23:27:39.000Z
|
2022-02-22T12:57:11.000Z
|
src/pandas_profiling/model/__init__.py
|
anurag-gandhi/pandas-profiling
|
2373f3a299264f7b312dbe4b92edc14d36e8140e
|
[
"MIT"
] | 771
|
2018-01-06T11:33:08.000Z
|
2022-02-21T11:16:02.000Z
|
src/pandas_profiling/model/__init__.py
|
anurag-gandhi/pandas-profiling
|
2373f3a299264f7b312dbe4b92edc14d36e8140e
|
[
"MIT"
] | 1,308
|
2018-01-08T21:22:08.000Z
|
2022-02-21T04:10:21.000Z
|
"""The model module handles all logic/calculations, e.g. calculate statistics, testing for special conditions."""
| 57
| 113
| 0.780702
| 15
| 114
| 5.933333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114035
| 114
| 1
| 114
| 114
| 0.881188
| 0.938596
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
55906f5b5986d0f3290eee5a8df89aa1d2a55aed
| 60
|
py
|
Python
|
test/test_class.py
|
psypersky/gifme
|
213ca63d6af6647413a5f219be07eb00b7659769
|
[
"Unlicense"
] | null | null | null |
test/test_class.py
|
psypersky/gifme
|
213ca63d6af6647413a5f219be07eb00b7659769
|
[
"Unlicense"
] | null | null | null |
test/test_class.py
|
psypersky/gifme
|
213ca63d6af6647413a5f219be07eb00b7659769
|
[
"Unlicense"
] | null | null | null |
class Foo:
def say(self):
return 'bar'
| 12
| 20
| 0.45
| 7
| 60
| 3.857143
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.45
| 60
| 5
| 21
| 12
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0.05
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
55a07fa5cae52f29ccb46fb490ae69749dbc6559
| 48
|
py
|
Python
|
settings.py
|
AlexTexis/platziCoursesPythonApi
|
6f2569d051358c6f6d89558bd5b6541bbd15afe9
|
[
"MIT"
] | 1
|
2019-08-19T03:51:25.000Z
|
2019-08-19T03:51:25.000Z
|
settings.py
|
AlexTexis/platziCoursesPythonApi
|
6f2569d051358c6f6d89558bd5b6541bbd15afe9
|
[
"MIT"
] | null | null | null |
settings.py
|
AlexTexis/platziCoursesPythonApi
|
6f2569d051358c6f6d89558bd5b6541bbd15afe9
|
[
"MIT"
] | null | null | null |
import os
MONGO_URI=os.environ.get('MONGO_URI')
| 16
| 37
| 0.791667
| 9
| 48
| 4
| 0.666667
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 48
| 3
| 37
| 16
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.183673
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e957294f30d7d45dc153a309ec585c693ea3af76
| 111
|
py
|
Python
|
lib/__init__.py
|
WallaceIT/meta-fossology
|
dcd78f157d6c93aa3d1c024fc37569053cc308bf
|
[
"MIT"
] | null | null | null |
lib/__init__.py
|
WallaceIT/meta-fossology
|
dcd78f157d6c93aa3d1c024fc37569053cc308bf
|
[
"MIT"
] | null | null | null |
lib/__init__.py
|
WallaceIT/meta-fossology
|
dcd78f157d6c93aa3d1c024fc37569053cc308bf
|
[
"MIT"
] | null | null | null |
#
# SPDX-License-Identifier: MIT
#
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| 15.857143
| 42
| 0.783784
| 14
| 111
| 5.214286
| 0.714286
| 0.273973
| 0.383562
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126126
| 111
| 6
| 43
| 18.5
| 0.752577
| 0.252252
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e9607d379ed625ed2d3253d4809520bf11dbf831
| 146
|
py
|
Python
|
bot/discord/__init__.py
|
meooow25/cp-discord-bot
|
4d25b51f9dc4dc44105a6cebeeaea9ef1191c8c1
|
[
"MIT"
] | 11
|
2018-09-03T16:50:25.000Z
|
2020-07-17T05:27:25.000Z
|
bot/discord/__init__.py
|
meooow25/cp-discord-bot
|
4d25b51f9dc4dc44105a6cebeeaea9ef1191c8c1
|
[
"MIT"
] | 5
|
2018-10-08T00:18:21.000Z
|
2018-11-26T22:01:40.000Z
|
bot/discord/__init__.py
|
meooow25/cp-discord-bot
|
4d25b51f9dc4dc44105a6cebeeaea9ef1191c8c1
|
[
"MIT"
] | 1
|
2018-10-09T09:30:07.000Z
|
2018-10-09T09:30:07.000Z
|
from .client import Client, EventType
from .models import Channel, Message, User
__all__ = ['Channel', 'Client', 'EventType', 'Message', 'User']
| 29.2
| 63
| 0.719178
| 17
| 146
| 5.941176
| 0.529412
| 0.29703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130137
| 146
| 4
| 64
| 36.5
| 0.795276
| 0
| 0
| 0
| 0
| 0
| 0.226027
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e966f4233fb64d348b6c98c9b1b7124dee16e0a9
| 146
|
py
|
Python
|
docs/new-pandas-doc/generated/pandas-DataFrame-plot-hexbin-1.py
|
maartenbreddels/datapythonista.github.io
|
f78d7b9a8a793fc446c5ba3ee74423433b00fb63
|
[
"Apache-2.0"
] | null | null | null |
docs/new-pandas-doc/generated/pandas-DataFrame-plot-hexbin-1.py
|
maartenbreddels/datapythonista.github.io
|
f78d7b9a8a793fc446c5ba3ee74423433b00fb63
|
[
"Apache-2.0"
] | null | null | null |
docs/new-pandas-doc/generated/pandas-DataFrame-plot-hexbin-1.py
|
maartenbreddels/datapythonista.github.io
|
f78d7b9a8a793fc446c5ba3ee74423433b00fb63
|
[
"Apache-2.0"
] | null | null | null |
n = 10000
df = pd.DataFrame({'x': np.random.randn(n),
'y': np.random.randn(n)})
ax = df.plot.hexbin(x='x', y='y', gridsize=20)
| 29.2
| 46
| 0.527397
| 25
| 146
| 3.08
| 0.6
| 0.207792
| 0.337662
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061947
| 0.226027
| 146
| 4
| 47
| 36.5
| 0.619469
| 0
| 0
| 0
| 0
| 0
| 0.027397
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e97f7dbd696a3f69902eafa7241c0f87a720a812
| 56
|
py
|
Python
|
pretty_html_table/__init__.py
|
LaurentEsingle/ph_table
|
8fedfb030dc52a35836a40785b074994b3d9f0ee
|
[
"MIT"
] | 1
|
2020-12-06T22:21:39.000Z
|
2020-12-06T22:21:39.000Z
|
pretty_html_table/__init__.py
|
Harsh-Git-Hub/ph_table
|
cb60bb2a25296d47d45668854ab259b1394c338c
|
[
"MIT"
] | null | null | null |
pretty_html_table/__init__.py
|
Harsh-Git-Hub/ph_table
|
cb60bb2a25296d47d45668854ab259b1394c338c
|
[
"MIT"
] | null | null | null |
from .pretty_html_table import table_color, build_table
| 28
| 55
| 0.875
| 9
| 56
| 5
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089286
| 56
| 1
| 56
| 56
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7571aa0800e3fef5a12e2ca78b8d348908b45093
| 75
|
py
|
Python
|
app/routes/index.py
|
bhaktijkoli/attendance-system-flask
|
dbe6fff9576e95e19c5ca2881d9e95c1d09e6aab
|
[
"MIT"
] | null | null | null |
app/routes/index.py
|
bhaktijkoli/attendance-system-flask
|
dbe6fff9576e95e19c5ca2881d9e95c1d09e6aab
|
[
"MIT"
] | null | null | null |
app/routes/index.py
|
bhaktijkoli/attendance-system-flask
|
dbe6fff9576e95e19c5ca2881d9e95c1d09e6aab
|
[
"MIT"
] | null | null | null |
from app import app
@app.route('/')
def get():
return "Welcome to API"
| 15
| 27
| 0.64
| 12
| 75
| 4
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 75
| 4
| 28
| 18.75
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
75b5ba44a28f65fcb4490815fb543ade2755ae9c
| 3,663
|
py
|
Python
|
restaurant_project/order/migrations/0006_auto_20210726_1701.py
|
lukart80/restaurant
|
419786cd87a7bd15c82b2fda8ad7c5e3e1f6c9cd
|
[
"MIT"
] | null | null | null |
restaurant_project/order/migrations/0006_auto_20210726_1701.py
|
lukart80/restaurant
|
419786cd87a7bd15c82b2fda8ad7c5e3e1f6c9cd
|
[
"MIT"
] | null | null | null |
restaurant_project/order/migrations/0006_auto_20210726_1701.py
|
lukart80/restaurant
|
419786cd87a7bd15c82b2fda8ad7c5e3e1f6c9cd
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.5 on 2021-07-26 14:01
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (Django 3.2.5): re-declares verbose_name /
    choices / length metadata on the order models; no column data changes.
    Do not hand-edit field definitions here — regenerate instead."""
    # must be applied after these menu/order migrations
    dependencies = [
        ('menu', '0003_auto_20210725_2150'),
        ('order', '0005_auto_20210725_2313'),
    ]
    # AlterField only: field options change, schema stays compatible
    operations = [
        migrations.AlterField(
            model_name='deliveryorder',
            name='code',
            field=models.IntegerField(verbose_name='код'),
        ),
        migrations.AlterField(
            model_name='deliveryorder',
            name='delivery_address',
            field=models.CharField(max_length=100, verbose_name='адресс доставки'),
        ),
        migrations.AlterField(
            model_name='deliveryorder',
            name='first_name',
            field=models.CharField(max_length=50, verbose_name='имя'),
        ),
        migrations.AlterField(
            model_name='deliveryorder',
            name='last_name',
            field=models.CharField(max_length=50, verbose_name='фамилия'),
        ),
        migrations.AlterField(
            model_name='deliveryorder',
            name='payed',
            field=models.BooleanField(default=False, verbose_name='оплачено'),
        ),
        migrations.AlterField(
            model_name='deliveryorder',
            name='price',
            field=models.PositiveIntegerField(default=0, verbose_name='цена'),
        ),
        migrations.AlterField(
            model_name='deliveryorder',
            name='status',
            field=models.CharField(choices=[('unpaid', 'неоплачен'), ('cooking', 'готовится'), ('delivering', 'доставляется'), ('received', 'получен')], default='unpaid', max_length=100, verbose_name='статус'),
        ),
        migrations.AlterField(
            model_name='orderitem',
            name='product',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='orders', to='menu.product', verbose_name='продукт'),
        ),
        migrations.AlterField(
            model_name='orderitem',
            name='quantity',
            field=models.PositiveIntegerField(default=0, verbose_name='количество'),
        ),
        migrations.AlterField(
            model_name='pickuporder',
            name='code',
            field=models.IntegerField(verbose_name='код'),
        ),
        migrations.AlterField(
            model_name='pickuporder',
            name='first_name',
            field=models.CharField(max_length=50, verbose_name='имя'),
        ),
        migrations.AlterField(
            model_name='pickuporder',
            name='last_name',
            field=models.CharField(max_length=50, verbose_name='фамилия'),
        ),
        migrations.AlterField(
            model_name='pickuporder',
            name='payed',
            field=models.BooleanField(default=False, verbose_name='оплачено'),
        ),
        migrations.AlterField(
            model_name='pickuporder',
            name='price',
            field=models.PositiveIntegerField(default=0, verbose_name='цена'),
        ),
        migrations.AlterField(
            model_name='pickuporder',
            name='restaurant',
            field=models.CharField(choices=[('loc1', 'Первый ресторан'), ('loc2', 'Второй ресторан'), ('loc3', 'Третий ресторан')], max_length=100, verbose_name='ресторан'),
        ),
        migrations.AlterField(
            model_name='pickuporder',
            name='status',
            field=models.CharField(choices=[('unpaid', 'неоплачен'), ('cooking', 'готовится'), ('ready', 'готов'), ('received', 'получен')], max_length=100, verbose_name='статус'),
        ),
    ]
| 38.15625
| 210
| 0.58504
| 330
| 3,663
| 6.333333
| 0.284848
| 0.15311
| 0.191388
| 0.22201
| 0.744498
| 0.712919
| 0.505263
| 0.48134
| 0.48134
| 0.48134
| 0
| 0.027631
| 0.278733
| 3,663
| 95
| 211
| 38.557895
| 0.763437
| 0.012285
| 0
| 0.786517
| 1
| 0
| 0.184458
| 0.012721
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.022472
| 0
| 0.05618
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
75f56795514fc634c7207a37aadb7d3ddbb7dda7
| 7,200
|
py
|
Python
|
src/pylib_sakata/plot.py
|
Koichi-Sakata/pylib_sakata
|
4e8fef6d26d5bca079fa0304b37b8103b7ee0c32
|
[
"MIT"
] | 2
|
2021-08-23T06:54:55.000Z
|
2021-09-10T16:17:38.000Z
|
src/pylib_sakata/plot.py
|
Koichi-Sakata/pylib_sakata
|
4e8fef6d26d5bca079fa0304b37b8103b7ee0c32
|
[
"MIT"
] | null | null | null |
src/pylib_sakata/plot.py
|
Koichi-Sakata/pylib_sakata
|
4e8fef6d26d5bca079fa0304b37b8103b7ee0c32
|
[
"MIT"
] | 1
|
2021-09-10T16:29:58.000Z
|
2021-09-10T16:29:58.000Z
|
# Copyright (c) 2021 Koichi Sakata
# plot_xy(ax, x, y, styl='-', col='b', width=1.5, alpha=1.0, xrange=None, yrange=None, xlabel=None, ylabel=None, legend=None, title=None, xscale='linear', yscale='linear', labelouter=True)
# plot_tf(ax_mag, ax_phase, sys, freq, styl='-', col='b', width=1.5, alpha=1.0, freqrange=None, magrange=None, legend=None, title=None, labelouter=True)
# plot_tffrd(ax_mag, ax_phase, freqresp, styl='-', col='b', width=1.5, alpha=1.0, freqrange=None, magrange=None, legend=None, title=None, labelouter=True, ax_coh=None, coh=None)
# plot_nyquist(ax, freqresp, styl='-', col='b', width=1.5, alpha=1.0, xrange=None, yrange=None, legend=None, title=None, labelouter=True)
# plot_nyquist_assistline(ax)
# makefig()
# savefig(figName)
# showfig()
import numpy as np
import matplotlib
from control import matlab
from matplotlib import pyplot as plt
from .fft import FreqResp
def plot_xy(ax, x, y, styl='-', col='b', width=1.5, alpha=1.0, xrange=None, yrange=None, xlabel=None, ylabel=None, legend=None, title=None, xscale='linear', yscale='linear', labelouter=True):
    """Plot y against x on the matplotlib axes *ax*.

    When xrange/yrange are None they are derived from the data (the y
    range is padded by 20% on each side).  Labels, legend and title are
    applied only when given.  labelouter=True hides tick labels that are
    shared with inner subplots.
    """
    ax.set_xscale(xscale)
    ax.set_yscale(yscale)
    # 'is None' instead of '== None': the old comparison misbehaves when a
    # caller passes a numpy array for the range (elementwise comparison)
    if xrange is None:
        xrange = [min(x), max(x)]
    if yrange is None:
        ymin = min(y)
        ymax = max(y)
        # pad the data range by 20% of its span on both sides
        yrange = [ymin - 0.2*(ymax-ymin), ymax + 0.2*(ymax-ymin)]
    ax.set_xlim(xrange)
    ax.set_ylim(yrange)
    if xlabel is not None:
        ax.set_xlabel(xlabel)
    if ylabel is not None:
        ax.set_ylabel(ylabel)
    # NOTE(review): grid(b=...) was renamed to grid(visible=...) in
    # matplotlib 3.5 and removed later — confirm the pinned matplotlib version
    ax.grid(b=True, which='both', axis='both')
    # plot
    ax.plot(x, y, linestyle=styl, color=col, linewidth=width, alpha=alpha)
    # legend and title
    if legend is not None:
        ax.legend(legend, loc='best')
    if title is not None:
        ax.set_title(title)
    if labelouter:
        ax.label_outer()
def plot_tf(ax_mag, ax_phase, sys, freq, styl='-', col='b', width=1.5, alpha=1.0, freqrange=None, magrange=None, legend=None, title=None, labelouter=True):
    """Bode plot of the LTI system *sys* evaluated at frequencies *freq* [Hz].

    Magnitude [dB] is drawn on ax_mag; phase [deg] on ax_phase (pass None
    to skip the phase panel).  Ranges default to the data, with the
    magnitude range padded by 20%.
    """
    if isinstance(freq, list):  # was 'type(freq) == list'
        freq = np.array(freq)
    # matlab.freqresp expects rad/s, freq is given in Hz
    mag, phase, omega = matlab.freqresp(sys, freq*2.0*np.pi)
    magdb = 20.0*np.log10(mag)
    phasedeg = phase*180.0/np.pi
    ax_mag.set_xscale('log')
    # 'is None' instead of '== None' (PEP 8; also safe for array arguments)
    if freqrange is None:
        freqrange = [min(freq), max(freq)]
    if magrange is None:
        magmin = min(magdb)
        magmax = max(magdb)
        # pad by 20% of the magnitude span on both sides
        magrange = [magmin - 0.2*(magmax-magmin), magmax + 0.2*(magmax-magmin)]
    ax_mag.set_xlim(freqrange)
    ax_mag.set_ylim(magrange)
    if ax_phase is None:
        # magnitude-only plot: the magnitude panel carries the x label
        ax_mag.set_xlabel('Frequency [Hz]')
    ax_mag.set_ylabel('Magnitude [dB]')
    # NOTE(review): grid(b=...) became grid(visible=...) in matplotlib >= 3.5
    ax_mag.grid(b=True, which='both', axis='both')
    # mag plot
    ax_mag.plot(freq, magdb, linestyle=styl, color=col, linewidth=width, alpha=alpha)
    # legend and title
    if legend is not None:
        ax_mag.legend(legend, loc='best')
    if title is not None:
        ax_mag.set_title(title)
    if labelouter:
        ax_mag.label_outer()
    if ax_phase is not None:
        ax_phase.set_xscale('log')
        ax_phase.set_xlim(freqrange)
        ax_phase.set_ylim(-200, 200)
        ax_phase.set_xlabel('Frequency [Hz]')
        ax_phase.set_ylabel('Phase [deg]')
        ax_phase.set_yticks([-180, -90, 0, 90, 180])
        ax_phase.grid(b=True, which='both', axis='both')
        # phase plot
        ax_phase.plot(freq, phasedeg, linestyle=styl, color=col, linewidth=width, alpha=alpha)
        if labelouter:
            ax_phase.label_outer()
def plot_tffrd(ax_mag, ax_phase, freqresp, styl='-', col='b', width=1.5, alpha=1.0, freqrange=None, magrange=None, legend=None, title=None, labelouter=True, ax_coh=None, coh=None):
    """Bode plot of a measured frequency response (freqresp.freq / .resp).

    ax_phase and ax_coh are optional extra panels; *coh* supplies the
    coherence data for ax_coh.  Ranges default to the data, with the
    magnitude range padded by 20%.
    """
    mag = np.absolute(freqresp.resp)
    phase = np.angle(freqresp.resp)
    magdb = 20.0*np.log10(mag)
    phasedeg = phase*180.0/np.pi
    ax_mag.set_xscale('log')
    # 'is None' instead of '== None' (PEP 8; also safe for array arguments)
    if freqrange is None:
        freqrange = [min(freqresp.freq), max(freqresp.freq)]
    if magrange is None:
        magmin = min(magdb)
        magmax = max(magdb)
        magrange = [magmin - 0.2*(magmax-magmin), magmax + 0.2*(magmax-magmin)]
    ax_mag.set_xlim(freqrange)
    ax_mag.set_ylim(magrange)
    if ax_phase is None and ax_coh is None:
        # magnitude is the bottom panel: it carries the x label
        ax_mag.set_xlabel('Frequency [Hz]')
    ax_mag.set_ylabel('Magnitude [dB]')
    ax_mag.grid(b=True, which='both', axis='both')
    # mag plot
    ax_mag.plot(freqresp.freq, magdb, linestyle=styl, color=col, linewidth=width, alpha=alpha)
    # legend and title
    if legend is not None:
        ax_mag.legend(legend, loc='best')
    if title is not None:
        ax_mag.set_title(title)
    if labelouter:
        ax_mag.label_outer()
    if ax_phase is not None:
        ax_phase.set_xscale('log')
        ax_phase.set_xlim(freqrange)
        ax_phase.set_ylim(-200, 200)
        if ax_coh is None:
            ax_phase.set_xlabel('Frequency [Hz]')
        ax_phase.set_ylabel('Phase [deg]')
        ax_phase.set_yticks([-180, -90, 0, 90, 180])
        ax_phase.grid(b=True, which='both', axis='both')
        # phase plot
        ax_phase.plot(freqresp.freq, phasedeg, linestyle=styl, color=col, linewidth=width, alpha=alpha)
        if labelouter:
            ax_phase.label_outer()
    if ax_coh is not None:
        ax_coh.set_xscale('log')
        ax_coh.set_xlim(freqrange)
        ax_coh.set_ylim(0, 1.2)
        ax_coh.set_xlabel('Frequency [Hz]')
        ax_coh.set_ylabel('Coherence [.]')
        ax_coh.grid(b=True, which='both', axis='both')
        # coherence plot
        ax_coh.plot(freqresp.freq, coh, linestyle=styl, color=col, linewidth=width, alpha=alpha)
        if labelouter:
            # bug fix: was ax_phase.label_outer(), which raises
            # AttributeError when ax_phase is None and skipped the
            # coherence panel's own label handling
            ax_coh.label_outer()
def plot_nyquist(ax, freqresp, styl='-', col='b', width=1.5, alpha=1.0, xrange=None, yrange=None, legend=None, title=None, labelouter=True):
    """Nyquist plot (real vs. imaginary part) of a measured frequency response.

    Default view window is x in [-2, 1], y in [-1.5, 1.5] around the
    critical point; aspect is forced equal so circles stay circular.
    """
    x = np.real(freqresp.resp)
    y = np.imag(freqresp.resp)
    # 'is None' instead of '== None' (PEP 8; also safe for array arguments)
    if xrange is None:
        xrange = [-2, 1]
    if yrange is None:
        yrange = [-1.5, 1.5]
    ax.set_xlim(xrange)
    ax.set_ylim(yrange)
    ax.set_xlabel('Real')
    ax.set_ylabel('Imaginary')
    ax.set_aspect('equal', adjustable='box')
    ax.grid(b=True, which='both', axis='both')
    # plot
    ax.plot(x, y, linestyle=styl, color=col, linewidth=width, alpha=alpha)
    # legend and title
    if legend is not None:
        ax.legend(legend, loc='best')
    if title is not None:
        ax.set_title(title)
    if labelouter:
        ax.label_outer()
def plot_nyquist_assistline(ax):
    """Draw Nyquist-plot reference guides on *ax*: the unit circle, a
    radius-0.5 circle centred on (-1, 0), and the critical point itself."""
    theta = np.linspace(-np.pi, np.pi)
    unit_x = np.sin(theta)
    unit_y = np.cos(theta)
    # unit circle around the origin
    ax.plot(unit_x, unit_y, linestyle='-', color='gray', linewidth=0.5)
    # half-radius circle centred on the critical point (-1, 0)
    ax.plot(0.5*unit_x-1, 0.5*unit_y, linestyle='-', color='gray', linewidth=0.5)
    # mark the critical point
    ax.plot(-1.0, 0.0, marker='x', color='r')
def makefig(dpi=100, popwin=False):
    """Create and return a new matplotlib figure.

    popwin=True additionally positions the backend window near the
    upper-left corner of the screen.
    """
    fig = plt.figure(dpi=dpi)
    if popwin:  # was 'popwin != False'; popwin is a plain boolean flag
        mngr = plt.get_current_fig_manager()
        # to put it into the upper left corner for example:
        # NOTE(review): setGeometry is Qt-backend specific — confirm backend
        mngr.window.setGeometry(50, 250, 640, 545)
    return fig
def savefig(figName):
    """Save the current matplotlib figure to the file *figName*."""
    plt.savefig(figName)
def showfig():
    """Display all open matplotlib figures (blocks until closed)."""
    plt.show()
| 35.643564
| 191
| 0.621389
| 1,067
| 7,200
| 4.074039
| 0.142455
| 0.043478
| 0.022084
| 0.023925
| 0.739591
| 0.727398
| 0.727398
| 0.721417
| 0.706694
| 0.689671
| 0
| 0.024731
| 0.225
| 7,200
| 201
| 192
| 35.820896
| 0.754301
| 0.129861
| 0
| 0.554839
| 0
| 0
| 0.043736
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051613
| false
| 0
| 0.032258
| 0
| 0.090323
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
2f0106ae7282b224f92b0eca6d8babd8854ed299
| 65
|
py
|
Python
|
5kyu/python/the_hashtag_generator.py
|
jactymilena/Codewars
|
11f5d4b2c3c29ed91c5f83753ea4429df7843e3d
|
[
"MIT"
] | null | null | null |
5kyu/python/the_hashtag_generator.py
|
jactymilena/Codewars
|
11f5d4b2c3c29ed91c5f83753ea4429df7843e3d
|
[
"MIT"
] | null | null | null |
5kyu/python/the_hashtag_generator.py
|
jactymilena/Codewars
|
11f5d4b2c3c29ed91c5f83753ea4429df7843e3d
|
[
"MIT"
] | null | null | null |
# link : https://www.codewars.com/kata/52449b062fb80683ec000024
| 21.666667
| 63
| 0.784615
| 7
| 65
| 7.285714
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.316667
| 0.076923
| 65
| 2
| 64
| 32.5
| 0.533333
| 0.938462
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
dda530adf2087154edd451ff124128fc708549e1
| 227
|
py
|
Python
|
Ch2_Variables/ch2_assignment4.py
|
romitpatel/learn_python
|
42230d04be5af5576ac2cfc4b1d2a9413a1e777a
|
[
"MIT"
] | 1
|
2021-02-24T11:40:05.000Z
|
2021-02-24T11:40:05.000Z
|
Ch2_Variables/ch2_assignment4.py
|
Chatak1/learn_python
|
198333e56557301aeff95af321f4daa29834c61e
|
[
"MIT"
] | null | null | null |
Ch2_Variables/ch2_assignment4.py
|
Chatak1/learn_python
|
198333e56557301aeff95af321f4daa29834c61e
|
[
"MIT"
] | 2
|
2020-10-02T17:08:42.000Z
|
2021-02-24T11:40:12.000Z
|
# Chapter 2 assignment: integer vs. float division and operator precedence.
width = 17
height = 12.0

floor_quotient = width // 2   # integer (floor) division of two ints
print(floor_quotient)

float_quotient = width / 2.0  # true division yields a float
print(float_quotient)

print(height / 3.0)           # true division of a float

# multiplication binds tighter than addition: 1 + (2 * 5) == 11
ans = 1 + 2 * 5
print(ans)
| 28.375
| 55
| 0.718062
| 41
| 227
| 3.97561
| 0.439024
| 0.294479
| 0.276074
| 0.423313
| 0.380368
| 0.380368
| 0.380368
| 0
| 0
| 0
| 0
| 0.080214
| 0.176211
| 227
| 7
| 56
| 32.428571
| 0.791444
| 0.506608
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.571429
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
ddf55667d77ad707d35a96dc372483fe301bc9c1
| 173
|
py
|
Python
|
monitor/monitor/utils/urls_provider/__init__.py
|
reynierg/websites-monitor
|
afa67d65f4a3dcef11ef86b068e885689970cdd1
|
[
"MIT"
] | null | null | null |
monitor/monitor/utils/urls_provider/__init__.py
|
reynierg/websites-monitor
|
afa67d65f4a3dcef11ef86b068e885689970cdd1
|
[
"MIT"
] | null | null | null |
monitor/monitor/utils/urls_provider/__init__.py
|
reynierg/websites-monitor
|
afa67d65f4a3dcef11ef86b068e885689970cdd1
|
[
"MIT"
] | null | null | null |
# from .csv_urls_provider import CsvUrlsProvider
# from .factory import InvalidUrlsProviderException, UrlsProviderFactory
# from .json_urls_provider import JsonUrlsProvider
| 43.25
| 72
| 0.867052
| 17
| 173
| 8.588235
| 0.647059
| 0.164384
| 0.246575
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092486
| 173
| 3
| 73
| 57.666667
| 0.929936
| 0.959538
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ddfab3a0d22318d34ac8be36a1f45d80d50932ff
| 327
|
py
|
Python
|
Unidade/models.py
|
higornobrega/ramais_UNIFIP
|
e022d198ee18dd05f49a202288dea79cea9fdc39
|
[
"MIT"
] | 1
|
2021-06-10T18:03:42.000Z
|
2021-06-10T18:03:42.000Z
|
Unidade/models.py
|
higornobrega/ramais_UNIFIP
|
e022d198ee18dd05f49a202288dea79cea9fdc39
|
[
"MIT"
] | 7
|
2021-06-10T14:21:56.000Z
|
2021-06-11T12:08:09.000Z
|
Unidade/models.py
|
higornobrega/ramais_UNIFIP
|
e022d198ee18dd05f49a202288dea79cea9fdc39
|
[
"MIT"
] | 1
|
2021-06-09T13:33:25.000Z
|
2021-06-09T13:33:25.000Z
|
from django.db import models
# Create your models here.
class Unidade(models.Model):
    """Django model of an organizational unit: name, city, campus and address."""
    # all fields are free text, capped at 255 characters
    nome_unidade = models.CharField(max_length=255)
    cidade = models.CharField(max_length=255)
    campus = models.CharField(max_length=255)
    endereco = models.CharField(max_length=255)
    def __str__(self):
        # human-readable representation: the unit's name
        return str(self.nome_unidade)
| 25.153846
| 49
| 0.764526
| 46
| 327
| 5.217391
| 0.5
| 0.25
| 0.3
| 0.4
| 0.45
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042403
| 0.134557
| 327
| 13
| 50
| 25.153846
| 0.805654
| 0.073395
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.125
| 0.125
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
fb0888c734c7924a7501553937f5ba2c53c9b812
| 61
|
py
|
Python
|
testproject/tests/conftest.py
|
jarshwah/django-hookshot
|
026c95ff9ded8e21300696437311fc0b36e5cfa2
|
[
"BSD-3-Clause"
] | 5
|
2020-02-14T05:36:53.000Z
|
2020-03-03T21:25:30.000Z
|
testproject/tests/conftest.py
|
jarshwah/django-hookshot
|
026c95ff9ded8e21300696437311fc0b36e5cfa2
|
[
"BSD-3-Clause"
] | null | null | null |
testproject/tests/conftest.py
|
jarshwah/django-hookshot
|
026c95ff9ded8e21300696437311fc0b36e5cfa2
|
[
"BSD-3-Clause"
] | null | null | null |
def pytest_configure(config):
    # Pytest start-up hook: importing 'application' here runs its
    # import-time side effects (presumably app/settings setup — confirm).
    import application  # noqa
| 20.333333
| 30
| 0.754098
| 7
| 61
| 6.428571
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.180328
| 61
| 2
| 31
| 30.5
| 0.9
| 0.065574
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fb213f18e737f794caaa0b7ad2c8b4546649681b
| 83
|
py
|
Python
|
pynars/NARS/InferenceEngine/__init__.py
|
AIxer/PyNARS
|
443b6a5e1c9779a1b861df1ca51ce5a190998d2e
|
[
"MIT"
] | null | null | null |
pynars/NARS/InferenceEngine/__init__.py
|
AIxer/PyNARS
|
443b6a5e1c9779a1b861df1ca51ce5a190998d2e
|
[
"MIT"
] | null | null | null |
pynars/NARS/InferenceEngine/__init__.py
|
AIxer/PyNARS
|
443b6a5e1c9779a1b861df1ca51ce5a190998d2e
|
[
"MIT"
] | null | null | null |
from .GeneralEngine import GeneralEngine
from .TemporalEngine import TemporalEngine
| 41.5
| 42
| 0.891566
| 8
| 83
| 9.25
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084337
| 83
| 2
| 42
| 41.5
| 0.973684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fb2744d845cb95d612b692476b82f311a8fedd80
| 102
|
py
|
Python
|
media_guard/backends/__init__.py
|
omni-digital/django-media-guard
|
b2465864bd39663e0ce4712be7bb87b43fe74bcb
|
[
"MIT"
] | null | null | null |
media_guard/backends/__init__.py
|
omni-digital/django-media-guard
|
b2465864bd39663e0ce4712be7bb87b43fe74bcb
|
[
"MIT"
] | null | null | null |
media_guard/backends/__init__.py
|
omni-digital/django-media-guard
|
b2465864bd39663e0ce4712be7bb87b43fe74bcb
|
[
"MIT"
] | null | null | null |
from .django import MediaGuardDjangoBackend # noqa
from .nginx import MediaGuardNginxBackend # noqa
| 34
| 51
| 0.823529
| 10
| 102
| 8.4
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137255
| 102
| 2
| 52
| 51
| 0.954545
| 0.088235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fb339f1afe672330c7ca90d1e654b36d30eea9f3
| 886
|
py
|
Python
|
tests/test_322.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_322.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_322.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import pytest
"""
Test 322. Coin Change
"""
@pytest.fixture(scope="session")
def init_variables_322():
    # Session-scoped factory fixture: builds one shared Solution instance
    # and yields a zero-argument callable that returns it.
    from src.leetcode_322_coin_change import Solution
    solution = Solution()
    def _init_variables_322():
        return solution
    yield _init_variables_322
class TestClass322:
    """Tests for LeetCode 322 (Coin Change), using the session fixture
    init_variables_322 to obtain the shared Solution instance."""

    def test_solution_0(self, init_variables_322):
        solver = init_variables_322()
        assert solver.coinChange([1, 2, 5], 11) == 3

    def test_solution_1(self, init_variables_322):
        solver = init_variables_322()
        assert solver.coinChange([2], 3) == -1

    def test_solution_2(self, init_variables_322):
        solver = init_variables_322()
        assert solver.coinChange([1], 0) == 0

    def test_solution_3(self, init_variables_322):
        solver = init_variables_322()
        assert solver.coinChange([1], 1) == 1

    def test_solution_4(self, init_variables_322):
        solver = init_variables_322()
        assert solver.coinChange([1], 2) == 2
| 23.945946
| 66
| 0.69526
| 122
| 886
| 4.713115
| 0.278689
| 0.293913
| 0.361739
| 0.173913
| 0.462609
| 0.462609
| 0.462609
| 0.462609
| 0.462609
| 0.372174
| 0
| 0.098748
| 0.188488
| 886
| 36
| 67
| 24.611111
| 0.700974
| 0.022573
| 0
| 0
| 0
| 0
| 0.008373
| 0
| 0
| 0
| 0
| 0
| 0.263158
| 1
| 0.368421
| false
| 0
| 0.105263
| 0.052632
| 0.578947
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
fb361b2e83e877fc43d2b34bec7b1cc201628a52
| 195
|
py
|
Python
|
Part_3_advanced/m17_tests_II/unittest_module/homework_1_start/estudent/tests/test_grade.py
|
Mikma03/InfoShareacademy_Python_Courses
|
3df1008c8c92831bebf1625f960f25b39d6987e6
|
[
"MIT"
] | null | null | null |
Part_3_advanced/m17_tests_II/unittest_module/homework_1_start/estudent/tests/test_grade.py
|
Mikma03/InfoShareacademy_Python_Courses
|
3df1008c8c92831bebf1625f960f25b39d6987e6
|
[
"MIT"
] | null | null | null |
Part_3_advanced/m17_tests_II/unittest_module/homework_1_start/estudent/tests/test_grade.py
|
Mikma03/InfoShareacademy_Python_Courses
|
3df1008c8c92831bebf1625f960f25b39d6987e6
|
[
"MIT"
] | null | null | null |
def test_grade_above_1_is_passing(passing_grade):
    """A passing grade fixture must report is_passing() as True."""
    result = passing_grade.is_passing()
    assert result is True
def test_grade_below_2_is_failing(failing_grade):
    """A failing grade fixture must report is_passing() as False."""
    result = failing_grade.is_passing()
    assert result is False
| 27.857143
| 49
| 0.825641
| 32
| 195
| 4.53125
| 0.40625
| 0.186207
| 0.165517
| 0.22069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011561
| 0.112821
| 195
| 6
| 50
| 32.5
| 0.82659
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.5
| false
| 0.75
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
34919b98ee0715999650362c9ab0a6644a91c6e8
| 3,355
|
py
|
Python
|
TestTicTacToeBoard.py
|
dz1domin/tictactoe
|
671eeaecb6e6c8434b2db5595d15fbcd28e90f49
|
[
"MIT"
] | null | null | null |
TestTicTacToeBoard.py
|
dz1domin/tictactoe
|
671eeaecb6e6c8434b2db5595d15fbcd28e90f49
|
[
"MIT"
] | null | null | null |
TestTicTacToeBoard.py
|
dz1domin/tictactoe
|
671eeaecb6e6c8434b2db5595d15fbcd28e90f49
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from TicTacToeBoard import TicTacToeBoard
from ConsoleOutput import ConsoleOutput
dim = 5 # you can change this value to check any board size >= 3
class TestTicTacToeBoard(TestCase):
    """Unit tests for TicTacToeBoard on a board of size 'dim' (module constant)."""

    @staticmethod
    def _fill_board_with_draw(board):
        """Fill every field in an alternating pattern that wins no line."""
        if dim % 2 == 1:
            # odd boards: fill the leftover last row first
            board.set_point(dim - 1, 0, "O")
            for j in range(1, dim):
                board.set_point(dim - 1, j, "X")
        for i in range(0, dim - 1, 2):
            board.set_point(i, 0, "O")
            board.set_point(i + 1, 0, "X")
            for j in range(1, dim):
                board.set_point(i, j, "X")
                board.set_point(i + 1, j, "O")

    def test_set_point_return_false_if_field_is_occupied(self):
        board = TicTacToeBoard(dim)
        board.set_point(0, 0, 'X')
        self.assertFalse(board.set_point(0, 0, "X"))

    def test_set_point_return_true_if_field_is_not_occupied(self):
        board = TicTacToeBoard(dim)
        self.assertTrue(board.set_point(0, 0, "X"))

    def test_is_over_return_true_for_horizontal_win(self):
        board = TicTacToeBoard(dim)
        for i in range(board.get_dim()):
            board.set_point(0, i, 'X')
        self.assertTrue(board.is_over())

    def test_is_over_return_true_for_vertical_win(self):
        board = TicTacToeBoard(dim)
        for i in range(board.get_dim()):
            board.set_point(i, 0, 'X')
        self.assertTrue(board.is_over())

    def test_is_over_return_true_for_diagonal1_win(self):
        board = TicTacToeBoard(dim)
        for i in range(board.get_dim()):
            board.set_point(i, i, 'X')
        self.assertTrue(board.is_over())

    def test_is_over_return_true_for_diagonal2_win(self):
        # bug fix: this was a duplicate of ..._diagonal1_win, so the
        # main-diagonal test above was silently shadowed and never ran
        board = TicTacToeBoard(dim)
        for i in range(board.get_dim()):
            board.set_point(-i, -i, 'X')
        self.assertTrue(board.is_over())

    def test_is_over_return_false_for_draw(self):
        board = TicTacToeBoard(dim)
        self._fill_board_with_draw(board)
        self.assertFalse(board.is_over())

    def test_is_move_available_return_true_for_board_with_empty_fields(self):
        # consistency fix: pass 'dim' like every other test (the old no-arg
        # call relied on a constructor default — confirm one exists)
        board = TicTacToeBoard(dim)
        board.set_point(0, 0, 'X')
        self.assertTrue(board.is_move_available())

    def test_is_move_available_return_false_for_board_without_empty_fields(self):
        board = TicTacToeBoard(dim)
        self._fill_board_with_draw(board)
        self.assertFalse(board.is_move_available())
| 36.075269
| 81
| 0.555589
| 479
| 3,355
| 3.65762
| 0.125261
| 0.136986
| 0.207763
| 0.151826
| 0.815639
| 0.730594
| 0.686073
| 0.652397
| 0.626142
| 0.626142
| 0
| 0.025607
| 0.324888
| 3,355
| 92
| 82
| 36.467391
| 0.747903
| 0.016095
| 0
| 0.710526
| 0
| 0
| 0.008487
| 0
| 0
| 0
| 0
| 0
| 0.118421
| 1
| 0.118421
| false
| 0
| 0.039474
| 0
| 0.171053
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
349413511de660eaf430a8856594968c374a6bf4
| 174
|
py
|
Python
|
apps/users/tests.py
|
Niracler/website_py
|
4c28f82a34122e4a02cc1f940e14f43ee0a4571d
|
[
"MIT"
] | 1
|
2018-11-21T08:31:37.000Z
|
2018-11-21T08:31:37.000Z
|
apps/users/tests.py
|
Niracler/website_py
|
4c28f82a34122e4a02cc1f940e14f43ee0a4571d
|
[
"MIT"
] | 6
|
2018-09-21T12:34:58.000Z
|
2018-09-22T12:05:01.000Z
|
apps/users/tests.py
|
niracler/django-blog
|
4c28f82a34122e4a02cc1f940e14f43ee0a4571d
|
[
"MIT"
] | 1
|
2018-11-14T01:09:46.000Z
|
2018-11-14T01:09:46.000Z
|
from django.test import TestCase
# Create your tests here.
from users.models import VerifyCode
# NOTE(review): this executes a database query at module level — it runs on
# every import of this module, not only when tests execute; consider moving
# it into a TestCase method.
print(VerifyCode.objects.filter(mobile="13427498660").order_by("-add_time"))
| 24.857143
| 76
| 0.798851
| 24
| 174
| 5.708333
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06962
| 0.091954
| 174
| 6
| 77
| 29
| 0.797468
| 0.132184
| 0
| 0
| 0
| 0
| 0.134228
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
34e79fe82185e99876dca017aa6c84156a6b0e48
| 3,652
|
py
|
Python
|
pattern/text/en/wordnet/pywordnet/concordance.py
|
zaffnet/pattern
|
8e7e0f8885d847e5d2d47c2fd0602eb0159c43a2
|
[
"BSD-3-Clause"
] | 67
|
2015-01-04T09:46:50.000Z
|
2020-08-13T14:30:57.000Z
|
pattern/text/en/wordnet/pywordnet/concordance.py
|
zaffnet/pattern
|
8e7e0f8885d847e5d2d47c2fd0602eb0159c43a2
|
[
"BSD-3-Clause"
] | 24
|
2015-01-09T20:35:02.000Z
|
2020-07-21T08:30:51.000Z
|
pattern/text/en/wordnet/pywordnet/concordance.py
|
zaffnet/pattern
|
8e7e0f8885d847e5d2d47c2fd0602eb0159c43a2
|
[
"BSD-3-Clause"
] | 48
|
2015-07-02T23:04:32.000Z
|
2021-11-25T14:48:38.000Z
|
# some accessing of the semantic concordance data for wordnet 1.6
# by Des Berry, berry@ais.it
import string
import os
from wordnet import binarySearchFile
# Sample entries in the 'taglist' file
# ordinary%1:18:01:: 1 br-a01:78,1;86,1;88,4
# ordered%5:00:00:organized:01 2 br-j23:6,14;13,32;66,12
# where the general form is:
# lemma%ss_type:lex_filenum:lex_id:head_word:head_id sense_number
[location_list]
# location_list: filename:sent_num,word_num[;sent_num,word_num...]
ss_type = ("NOUN", "VERB", "ADJECTIVE", "ADVERB", "ADJECTIVE SATELLITE")
# given a sentence number (and the contents of a semantic concordance file)
# return a string of words as the sentence
def find_sentence(snum, msg):
    """Extract sentence number *snum* from the SGML-tagged concordance
    text *msg*.

    Word forms (<wf ...> elements) are joined with single spaces, except
    tokens starting with an apostrophe, which attach to the previous word.
    Returns "<Unknown>" when the sentence is not present.
    """
    # renamed from 'str': don't shadow the builtin
    marker = "<s snum=%s>" % snum
    # str.find method instead of the Python-2-only string.find function
    s = msg.find(marker)
    if s < 0:
        return "<Unknown>"
    s = s + len(marker)
    sentence = ""
    tag = ""
    while 1:
        if msg[s] == '\n':
            s = s + 1
        n = msg.find('<', s)
        if n < 0:
            break
        if n - s != 0:
            # word form: separate from the previous word with a space,
            # unless it starts with an apostrophe (e.g. contractions)
            if tag == "w" and msg[s] != "'" and len(sentence) > 0:
                sentence = sentence + " "
            sentence = sentence + msg[s:n]
        e = msg.find('>', n)
        if e < 0:
            break
        tag = msg[n + 1]
        if tag == "/":  # check for ending sentence
            if msg[n + 2] == 's':
                break
        s = e + 1
    return sentence
# given a taglist sense (one line of the tagfile) and where to find the tagfile (root)
# return a tuple of
# symset type ('1' .. '5')
# sense (numeric character string)
# list of sentences (constructed from the taglist)
def tagsentence(tag, root):
    """Parse one taglist line *tag* and return (ss_type_char, sense_char,
    sentences), where sentences are looked up in the concordance files
    under *root*.

    Taglist line format:
      lemma%ss_type:lex_filenum:lex_id:head_word:head_id sense [loc_list]
      loc_list: filename:sent,word[;sent,word...] separated by spaces
    """
    # str.find methods replace the Python-2-only string.find function
    s = tag.find('%')
    sentence = []
    # synset type character '1'..'5'; renamed from 'type' (shadowed builtin)
    ss = tag[s + 1]
    c = s
    for i in range(0, 4):
        c = tag.find(':', c + 1)
    c = tag.find(' ', c + 1)
    sense = tag[c + 1]
    c = c + 3
    while 1:
        d = tag.find(' ', c)  # file separator
        if d < 0:
            loclist = tag[c:]
        else:
            loclist = tag[c:d]
        c = d + 1
        e = loclist.find(':')
        filename = loclist[:e]
        # 'with' guarantees the concordance file is closed (was left open)
        with open(root + filename, "rb") as fh:
            msg = fh.read()
        while 1:
            e = e + 1
            f = loclist.find(';', e)
            if f < 0:
                sent_word = loclist[e:]
            else:
                sent_word = loclist[e:f]
            e = f
            g = sent_word.find(',')
            sent = sent_word[:g]
            sentence.append(find_sentence(sent, msg))
            if f < 0:
                break
        if d < 0:
            break
    return (ss, sense, sentence)
# given a word to search for and where to find the files (root)
# displays the information
# This could be changed to display in different ways!
def sentences(word, root):
    # Print every tagged sense of 'word' from the taglist index under 'root':
    # for each sense its part of speech, sense number and example sentences.
    # NOTE: Python 2 code ('print' statements, py2 'string' module).
    cache = {}
    # NOTE(review): file handle is never closed; 'file' shadows the py2 builtin
    file = open(root + "taglist", "rb")
    key = word + "%"
    keylen = len(key)
    # seek the sorted taglist to the first line starting with "word%"
    binarySearchFile(file, key + " ", cache, 10)
    print "Word '%s'" % word
    while 1:
        line = file.readline()
        # stop once lines no longer belong to this word
        if line[:keylen] != key:
            break
        type, sense, sentence = tagsentence(line, root + "tagfiles/")
        print ss_type[string.atoi(type) - 1], sense
        for sent in sentence:
            print sent
def _test(word, corpus, base):
    """Print the corpus name, then run a sentences() lookup of *word*
    against that semcor corpus directory."""
    print(corpus)
    # bug fix: previously hard-coded "ordinary" and ignored the 'word'
    # argument (callers happened to always pass "ordinary")
    sentences(word, base + corpus + "/")
if __name__ == '__main__':
    # Demo entry point: look up "ordinary" in the three Brown-corpus
    # sections of semcor at a hard-coded Windows path.
    base = "C:/win16/dict/semcor/"
    word = "ordinary"
    _test(word, "brown1", base)
    _test(word, "brown2", base)
    _test(word, "brownv", base)
| 3,652
| 3,652
| 0.530668
| 498
| 3,652
| 3.827309
| 0.311245
| 0.052466
| 0.027282
| 0.022036
| 0.034627
| 0.016789
| 0
| 0
| 0
| 0
| 0
| 0.030953
| 0.336528
| 3,652
| 1
| 3,652
| 3,652
| 0.755675
| 0.993428
| 0
| 0.170213
| 0
| 0
| 0.063932
| 0.007761
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.031915
| null | null | 0.042553
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
550cddf192def73474974dee49768dff5dea9ad9
| 11,480
|
py
|
Python
|
NeoTrellis_M4_MIDI_Synth/events.py
|
gamblor21/Adafruit_Learning_System_Guides
|
f5dab4a758bc82d0bfc3c299683fe89dc093912a
|
[
"MIT"
] | 665
|
2017-09-27T21:20:14.000Z
|
2022-03-31T09:09:25.000Z
|
NeoTrellis_M4_MIDI_Synth/events.py
|
gamblor21/Adafruit_Learning_System_Guides
|
f5dab4a758bc82d0bfc3c299683fe89dc093912a
|
[
"MIT"
] | 641
|
2017-10-03T19:46:37.000Z
|
2022-03-30T18:28:46.000Z
|
NeoTrellis_M4_MIDI_Synth/events.py
|
gamblor21/Adafruit_Learning_System_Guides
|
f5dab4a758bc82d0bfc3c299683fe89dc093912a
|
[
"MIT"
] | 734
|
2017-10-02T22:47:38.000Z
|
2022-03-30T14:03:51.000Z
|
"""
NeoTrellis M4 Express MIDI synth
Adafruit invests time and resources providing this open source code.
Please support Adafruit and open source hardware by purchasing
products from Adafruit!
Written by Dave Astels for Adafruit Industries
Copyright (c) 2018 Adafruit Industries
Licensed under the MIT license.
All text above must be included in any redistribution.
"""
# Events as defined in http://www.music.mcgill.ca/~ich/classes/mumt306/StandardMIDIfileformat.html
# pylint: disable=unused-argument,no-self-use
class Event(object):
    """Base class for MIDI file events; stores the event's delta time."""

    def __init__(self, delta_time):
        self._delta_time = delta_time

    @property
    def time(self):
        """Delta time of this event (ticks since the previous event)."""
        return self._delta_time

    def execute(self, sequencer):
        """Apply this event to *sequencer*.  The base implementation does
        nothing and returns False (the track has not ended)."""
        return False
class F0SysexEvent(Event):
    """System-exclusive event introduced by an 0xF0 status byte."""

    def __init__(self, delta_time, data):
        super().__init__(delta_time)
        # raw sysex payload
        self._data = data
class F7SysexEvent(Event):
    """System-exclusive (continuation/escape) event, 0xF7 status byte."""

    def __init__(self, delta_time, data):
        super().__init__(delta_time)
        # raw sysex payload
        self._data = data
class MetaEvent(Event):
    """Base class for MIDI meta events."""

    def __init__(self, delta_time):
        super().__init__(delta_time)
class SequenceNumberMetaEvent(MetaEvent):
    """Meta event carrying the track's sequence number."""

    def __init__(self, delta_time, sequence_number):
        super().__init__(delta_time)
        self._sequence_number = sequence_number

    def __str__(self):
        return '%d : Sequence Number : %d' % (self._delta_time, self._sequence_number)
class TextMetaEvent(MetaEvent):
    """Meta event carrying arbitrary text."""

    def __init__(self, delta_time, text):
        super().__init__(delta_time)
        self._text = text

    def __str__(self):
        return '%d : Text : %s' % (self._delta_time, self._text)
class CopyrightMetaEvent(MetaEvent):
def __init__(self, delta_time, copyright_notice):
MetaEvent.__init__(self, delta_time)
self._copyright_notice = copyright_notice
def __str__(self):
return '%d : Copyright : %s' % (self._delta_time, self._copyright_notice)
class TrackNameMetaEvent(MetaEvent):
def __init__(self, delta_time, track_name):
MetaEvent.__init__(self, delta_time)
self._track_name = track_name
def __str__(self):
return '%d : Track Name : %s' % (self._delta_time, self._track_name)
class InstrumentNameMetaEvent(MetaEvent):
def __init__(self, delta_time, instrument_name):
MetaEvent.__init__(self, delta_time)
self._instrument_name = instrument_name
def __str__(self):
return '%d : Instrument Name : %s' % (self._delta_time, self._instrument_name)
class LyricMetaEvent(MetaEvent):
def __init__(self, delta_time, lyric):
MetaEvent.__init__(self, delta_time)
self._lyric = lyric
def __str__(self):
return '%d : Lyric : %s' % (self._delta_time, self._lyric)
class MarkerMetaEvent(MetaEvent):
def __init__(self, delta_time, marker):
MetaEvent.__init__(self, delta_time)
self._marker = marker
def __str__(self):
return '%d : Marker : %s' % (self._delta_time, self._marker)
class CuePointMetaEvent(MetaEvent):
def __init__(self, delta_time, cue):
MetaEvent.__init__(self, delta_time)
self._cue = cue
def __str__(self):
return '%d : Cue : %s' % (self._delta_time, self._cue)
class ChannelPrefixMetaEvent(MetaEvent):
    """Meta event carrying a channel-prefix value."""

    def __init__(self, delta_time, channel):
        super().__init__(delta_time)
        self._channel = channel

    def __str__(self):
        return '%d: Channel Prefix : %d' % (self._delta_time, self._channel)


class EndOfTrackMetaEvent(MetaEvent):
    """Meta event marking the end of the track."""

    def __init__(self, delta_time):
        super().__init__(delta_time)

    def __str__(self):
        return '%d: End Of Track' % (self._delta_time)

    def execute(self, sequencer):
        # Notify the sequencer and return True so processing stops —
        # the only event in this module whose execute() does so.
        sequencer.end_track()
        return True
class SetTempoMetaEvent(MetaEvent):
    """Meta event carrying a new tempo value."""

    def __init__(self, delta_time, tempo):
        super().__init__(delta_time)
        self._tempo = tempo

    def __str__(self):
        return '%d: Set Tempo : %d' % (self._delta_time, self._tempo)

    def execute(self, sequencer):
        # Forward the tempo to the sequencer; processing continues.
        sequencer.set_tempo(self._tempo)
        return False


class SmpteOffsetMetaEvent(MetaEvent):
    """Meta event carrying an SMPTE offset (hh:mm:ss plus fr/rr fields)."""

    def __init__(self, delta_time, hour, minute, second, fr, rr):
        super().__init__(delta_time)
        self._hour = hour
        self._minute = minute
        self._second = second
        self._fr = fr
        self._rr = rr

    def __str__(self):
        return '%d : SMPTE Offset : %02d:%02d:%02d %d %d' % (
            self._delta_time, self._hour, self._minute,
            self._second, self._fr, self._rr)
class TimeSignatureMetaEvent(MetaEvent):
    """Meta event carrying a time signature (raw nn/dd/cc/bb bytes)."""

    def __init__(self, delta_time, nn, dd, cc, bb):
        super().__init__(delta_time)
        self._numerator = nn
        self._denominator = dd
        self._cc = cc
        self._bb = bb

    def __str__(self):
        return '%d : Time Signature : %d %d %d %d' % (
            self._delta_time, self._numerator, self._denominator,
            self._cc, self._bb)

    def execute(self, sequencer):
        # Only numerator, denominator, and cc are forwarded; bb is
        # stored but not used here.
        sequencer.set_time_signature(self._numerator, self._denominator, self._cc)
        return False


class KeySignatureMetaEvent(MetaEvent):
    """Meta event carrying a key signature (raw sf and mi bytes)."""

    def __init__(self, delta_time, sf, mi):
        super().__init__(delta_time)
        self._sf = sf
        self._mi = mi

    def __str__(self):
        return '%d : Key Signature : %d %d' % (self._delta_time, self._sf, self._mi)


class SequencerSpecificMetaEvent(MetaEvent):
    """Sequencer-specific meta event; keeps the raw data bytes."""

    def __init__(self, delta_time, data):
        super().__init__(delta_time)
        self._data = data
class MidiEvent(Event):
    """Base class for MIDI messages; records the channel.

    Subclasses that model channel-less system messages pass None for
    the channel.
    """

    def __init__(self, delta_time, channel):
        super().__init__(delta_time)
        self._channel = channel


class NoteOffEvent(MidiEvent):
    """Note-off message: key released with a given velocity."""

    def __init__(self, delta_time, channel, key, velocity):
        super().__init__(delta_time, channel)
        self._key = key
        self._velocity = velocity

    def __str__(self):
        return '%d : Note Off : key %d, velocity %d' % (
            self._delta_time, self._key, self._velocity)

    def execute(self, sequencer):
        # Forward to the sequencer; processing continues.
        sequencer.note_off(self._key, self._velocity)
        return False
class NoteOnEvent(MidiEvent):
    """Note-on message: key pressed with a given velocity."""

    def __init__(self, delta_time, channel, key, velocity):
        super().__init__(delta_time, channel)
        self._key = key
        self._velocity = velocity

    def __str__(self):
        return '%d : Note On : key %d, velocity %d' % (
            self._delta_time, self._key, self._velocity)

    def execute(self, sequencer):
        # Forward to the sequencer; processing continues.
        sequencer.note_on(self._key, self._velocity)
        return False
class PolyphonicKeyPressureEvent(MidiEvent):
    """Per-key pressure (aftertouch) message."""

    def __init__(self, delta_time, channel, key, pressure):
        MidiEvent.__init__(self, delta_time, channel)
        self._key = key
        self._pressure = pressure

    def __str__(self):
        # Fix: the value shown is the pressure, but the label used to
        # read "velocity" (copy-paste from the note events).
        return '%d : Poly Key Pressure : key %d, pressure %d' % (self._delta_time,
                                                                 self._key,
                                                                 self._pressure)
class ControlChangeEvent(MidiEvent):
    """Control-change message: a controller number and its new value."""

    def __init__(self, delta_time, channel, controller, value):
        super().__init__(delta_time, channel)
        self._controller = controller
        self._value = value

    def __str__(self):
        return '%d : Control Change : controller %d, value %d' % (
            self._delta_time, self._controller, self._value)
class ProgramChangeEvent(MidiEvent):
    """Program-change message: selects a program number."""

    def __init__(self, delta_time, channel, program_number):
        super().__init__(delta_time, channel)
        self._program_number = program_number

    def __str__(self):
        return '%d : Program Change : program %d' % (
            self._delta_time, self._program_number)
class ChannelPressureEvent(MidiEvent):
    """Channel-pressure (aftertouch) message for a whole channel."""

    def __init__(self, delta_time, channel, pressure):
        MidiEvent.__init__(self, delta_time, channel)
        self._pressure = pressure

    def __str__(self):
        # Fix: previously formatted self._channel here, so the printed
        # value was the channel number instead of the pressure the
        # message actually carries.
        return '%d : Channel Pressure : %d' % (self._delta_time, self._pressure)
class PitchWheelChangeEvent(MidiEvent):
    """Pitch-wheel message carrying the wheel's value."""

    def __init__(self, delta_time, channel, value):
        super().__init__(delta_time, channel)
        self._value = value

    def __str__(self):
        return '%d : Pitch Wheel Change : %d' % (self._delta_time, self._value)


class SystemExclusiveEvent(MidiEvent):
    """System-exclusive message; keeps the raw data bytes."""

    def __init__(self, delta_time, channel, data):
        super().__init__(delta_time, channel)
        self._data = data
class SongPositionPointerEvent(MidiEvent):
    """Song-position message (carries no channel)."""

    def __init__(self, delta_time, beats):
        super().__init__(delta_time, None)
        self._beats = beats

    def __str__(self):
        return '%d: SongPositionPointerEvent(beats %d)' % (
            self._delta_time, self._beats)


class SongSelectEvent(MidiEvent):
    """Song-select message (carries no channel)."""

    def __init__(self, delta_time, song):
        super().__init__(delta_time, None)
        self._song = song

    def __str__(self):
        return '%d: SongSelectEvent(song %d)' % (
            self._delta_time, self._song)
class TuneRequestEvent(MidiEvent):
    """Tune-request message (no payload, no channel)."""

    def __init__(self, delta_time):
        super().__init__(delta_time, None)

    def __str__(self):
        return '%d : Tune Request' % (self._delta_time)


class TimingClockEvent(MidiEvent):
    """Timing-clock message (no payload, no channel)."""

    def __init__(self, delta_time):
        super().__init__(delta_time, None)

    def __str__(self):
        return '%d : Timing Clock' % (self._delta_time)


class StartEvent(MidiEvent):
    """Start message (no payload, no channel)."""

    def __init__(self, delta_time):
        super().__init__(delta_time, None)

    def __str__(self):
        return '%d : Start' % (self._delta_time)


class ContinueEvent(MidiEvent):
    """Continue message (no payload, no channel)."""

    def __init__(self, delta_time):
        super().__init__(delta_time, None)

    def __str__(self):
        return '%d : Continue' % (self._delta_time)


class StopEvent(MidiEvent):
    """Stop message (no payload, no channel)."""

    def __init__(self, delta_time):
        super().__init__(delta_time, None)

    def __str__(self):
        return '%d : Stop' % (self._delta_time)


class ActiveSensingEvent(MidiEvent):
    """Active-sensing message (no payload, no channel)."""

    def __init__(self, delta_time):
        super().__init__(delta_time, None)

    def __str__(self):
        return '%d : Active Sensing' % (self._delta_time)


class ResetEvent(MidiEvent):
    """Reset message (no payload, no channel)."""

    def __init__(self, delta_time):
        super().__init__(delta_time, None)

    def __str__(self):
        return '%d : Reset' % (self._delta_time)
| 28
| 98
| 0.603223
| 1,237
| 11,480
| 5.067098
| 0.141471
| 0.152202
| 0.217773
| 0.19799
| 0.651244
| 0.56605
| 0.346522
| 0.240906
| 0.18762
| 0.18762
| 0
| 0.001995
| 0.30122
| 11,480
| 409
| 99
| 28.06846
| 0.779357
| 0.044077
| 0
| 0.456
| 0
| 0
| 0.064569
| 0.002736
| 0
| 0
| 0
| 0
| 0
| 1
| 0.296
| false
| 0
| 0
| 0.128
| 0.592
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
551c4a39689f6f1b76b52bd3914ea77b79316832
| 58
|
py
|
Python
|
SecretPlots/objects/__init__.py
|
secretBiology/SecretPlots
|
eca1d0e0932e605df49d1f958f98a1f41200d589
|
[
"MIT"
] | null | null | null |
SecretPlots/objects/__init__.py
|
secretBiology/SecretPlots
|
eca1d0e0932e605df49d1f958f98a1f41200d589
|
[
"MIT"
] | null | null | null |
SecretPlots/objects/__init__.py
|
secretBiology/SecretPlots
|
eca1d0e0932e605df49d1f958f98a1f41200d589
|
[
"MIT"
] | 1
|
2022-01-14T05:43:49.000Z
|
2022-01-14T05:43:49.000Z
|
from SecretPlots.objects._base import Data, Axis, Element
| 29
| 57
| 0.827586
| 8
| 58
| 5.875
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 58
| 1
| 58
| 58
| 0.903846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9b4bd8ff4fb21dffa8376bfe7c6941cff191d89a
| 435
|
py
|
Python
|
src/darjeeling/transformation/database/__init__.py
|
rshariffdeen/Darjeeling
|
609eb5d4271723d63a1f7053a149fbc0f00edd2f
|
[
"Apache-2.0"
] | 21
|
2018-06-26T18:01:43.000Z
|
2022-03-16T09:51:57.000Z
|
src/darjeeling/transformation/database/__init__.py
|
rshariffdeen/Darjeeling
|
609eb5d4271723d63a1f7053a149fbc0f00edd2f
|
[
"Apache-2.0"
] | 175
|
2018-03-21T03:03:53.000Z
|
2022-03-09T20:36:58.000Z
|
src/darjeeling/transformation/database/__init__.py
|
rshariffdeen/Darjeeling
|
609eb5d4271723d63a1f7053a149fbc0f00edd2f
|
[
"Apache-2.0"
] | 10
|
2018-06-26T18:01:45.000Z
|
2022-03-10T02:37:21.000Z
|
# -*- coding: utf-8 -*-
"""
Transformation databases are a convenient abstraction for storing and querying
transformations to a given program. This module defines a common interface for
interacting with transformation databases as well as reference
implementations of that interface. Developers may extend Darjeeling to add their
own, customized transformation database implementation.
"""
from .base import TransformationDatabase
| 43.5
| 80
| 0.82069
| 54
| 435
| 6.611111
| 0.833333
| 0.128852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002653
| 0.133333
| 435
| 9
| 81
| 48.333333
| 0.944297
| 0.882759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9b766b3c9eb8c0ccd01561da3e4d7919de331551
| 102
|
py
|
Python
|
wandb/sync/__init__.py
|
borisgrafx/client
|
c079f7816947a3092b500751eb920fda3866985f
|
[
"MIT"
] | 3,968
|
2017-08-23T21:27:19.000Z
|
2022-03-31T22:00:19.000Z
|
wandb/sync/__init__.py
|
borisgrafx/client
|
c079f7816947a3092b500751eb920fda3866985f
|
[
"MIT"
] | 2,725
|
2017-04-17T00:29:15.000Z
|
2022-03-31T21:01:53.000Z
|
wandb/sync/__init__.py
|
borisgrafx/client
|
c079f7816947a3092b500751eb920fda3866985f
|
[
"MIT"
] | 351
|
2018-04-08T19:39:34.000Z
|
2022-03-30T19:38:08.000Z
|
"""
module sync
"""
from .sync import get_run_from_path, get_runs, SyncManager, TMPDIR # noqa: F401
| 17
| 80
| 0.72549
| 15
| 102
| 4.666667
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034884
| 0.156863
| 102
| 5
| 81
| 20.4
| 0.77907
| 0.22549
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9bc0b69e05da140f00728ec754eec30fed664970
| 399
|
py
|
Python
|
src/ikazuchi/tests/data/rst/api_call_get_table_column_width.py
|
t2y/ikazuchi
|
7023111e92fa47360c50cfefd1398c554475f2c6
|
[
"Apache-2.0"
] | null | null | null |
src/ikazuchi/tests/data/rst/api_call_get_table_column_width.py
|
t2y/ikazuchi
|
7023111e92fa47360c50cfefd1398c554475f2c6
|
[
"Apache-2.0"
] | null | null | null |
src/ikazuchi/tests/data/rst/api_call_get_table_column_width.py
|
t2y/ikazuchi
|
7023111e92fa47360c50cfefd1398c554475f2c6
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Fixture data for table-column-width tests.
# Each entry is a 3-tuple:
#   (cell texts, candidate widths,
#    (first expected width list, second expected width list)).
# NOTE(review): the first expected list matches the text lengths
# (len(u"123") == 3, len(u"12345") == 5, len(u"lorem ipsum") == 11);
# the exact semantics of the two lists should be confirmed against the
# test module that consumes this data.
DATA_SET = [
    (
        [u"123", u"12345"],
        [1, 3],
        ([3, 5], [3, 5])
    ),
    (
        [u"123", u"12345"],
        [4, 3],
        ([3, 5], [4, 5])
    ),
    (
        [u"123", u"12345"],
        [5, 8],
        ([3, 5], [5, 8])
    ),
    (
        [u"123", u"12345", u"lorem ipsum"],
        [5, 3, 4],
        ([3, 5, 11], [5, 5, 11])
    ),
]
| 13.758621
| 43
| 0.245614
| 51
| 399
| 1.901961
| 0.294118
| 0.103093
| 0.206186
| 0.412371
| 0.226804
| 0
| 0
| 0
| 0
| 0
| 0
| 0.292453
| 0.468672
| 399
| 28
| 44
| 14.25
| 0.165094
| 0.052632
| 0
| 0.318182
| 0
| 0
| 0.114362
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
32f56716f50d4ef8bb6b9070f114c6378e9e2fc4
| 215
|
py
|
Python
|
basic_grammar/indention.py
|
OnoYuta/python_programing
|
5d191bef5666c0a826f6daa0bd45bc9dd6603d59
|
[
"MIT"
] | null | null | null |
basic_grammar/indention.py
|
OnoYuta/python_programing
|
5d191bef5666c0a826f6daa0bd45bc9dd6603d59
|
[
"MIT"
] | null | null | null |
basic_grammar/indention.py
|
OnoYuta/python_programing
|
5d191bef5666c0a826f6daa0bd45bc9dd6603d59
|
[
"MIT"
] | null | null | null |
# Style rule: break any line that would reach 80 characters or more.

# Continuation with a trailing backslash.
s = 'aaaaaaaaa' \
    + 'bbbbbbbb'
print(s)

x = 1 + 1 + 1 + 1 + 1 + 1 + 1 \
    + 1 + 1 + 1 + 1 + 1 + 1 + 1
print(x)

# Implicit continuation inside parentheses (the PEP 8 preferred form).
x = (1 + 1 + 1 + 1 + 1 + 1 + 1
     + 1 + 1 + 1 + 1 + 1 + 1 + 1)
print(x)
| 17.916667
| 32
| 0.404651
| 40
| 215
| 2.175
| 0.175
| 0.597701
| 0.827586
| 1.011494
| 0.482759
| 0.482759
| 0.482759
| 0.482759
| 0.482759
| 0.482759
| 0
| 0.227273
| 0.386047
| 215
| 12
| 33
| 17.916667
| 0.431818
| 0.097674
| 0
| 0.222222
| 0
| 0
| 0.088083
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fd02103552384ff466f7766b423c78ef5b34f7a5
| 126
|
py
|
Python
|
vendors/admin.py
|
matthewgan/xsteam
|
64bc33a15902d15df910c42d82e708b75787c4f0
|
[
"MIT"
] | null | null | null |
vendors/admin.py
|
matthewgan/xsteam
|
64bc33a15902d15df910c42d82e708b75787c4f0
|
[
"MIT"
] | null | null | null |
vendors/admin.py
|
matthewgan/xsteam
|
64bc33a15902d15df910c42d82e708b75787c4f0
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from vendors.models import Vendor

# Expose the Vendor model in the Django admin site.
admin.site.register(Vendor)
| 18
| 33
| 0.809524
| 18
| 126
| 5.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126984
| 126
| 6
| 34
| 21
| 0.927273
| 0.206349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fd5020bd8256243ee94885b87bdfa0ee0c733639
| 150
|
py
|
Python
|
research/simulate.py
|
noe98/Cayley
|
dbd60c6fa04f00aa995094acc76ef0d06a0346b1
|
[
"MIT"
] | 4
|
2018-04-16T18:17:55.000Z
|
2019-05-08T03:11:16.000Z
|
research/simulate.py
|
noe98/Cayley
|
dbd60c6fa04f00aa995094acc76ef0d06a0346b1
|
[
"MIT"
] | 13
|
2018-06-05T17:10:38.000Z
|
2018-10-23T23:39:57.000Z
|
research/simulate.py
|
noe98/Cayley
|
dbd60c6fa04f00aa995094acc76ef0d06a0346b1
|
[
"MIT"
] | 5
|
2018-05-30T16:10:14.000Z
|
2018-06-29T02:29:49.000Z
|
"""
@author: Justin K. Pusztay
Filename: simulate.py
Project: Research for Irina Mazilu, Ph.D.
"""
import Cayley as cy
import Cayley.research as cr
| 13.636364
| 40
| 0.733333
| 23
| 150
| 4.782609
| 0.869565
| 0.218182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 150
| 10
| 41
| 15
| 0.88
| 0.593333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b5caefcaa8e03608aa6faa6f68b2dc2225cddd63
| 2,448
|
py
|
Python
|
example/controller/tests/helper/security/crypto/__init__.py
|
donghak-shin/dp-tornado
|
095bb293661af35cce5f917d8a2228d273489496
|
[
"MIT"
] | 18
|
2015-04-07T14:28:39.000Z
|
2020-02-08T14:03:38.000Z
|
example/controller/tests/helper/security/crypto/__init__.py
|
donghak-shin/dp-tornado
|
095bb293661af35cce5f917d8a2228d273489496
|
[
"MIT"
] | 7
|
2016-10-05T05:14:06.000Z
|
2021-05-20T02:07:22.000Z
|
example/controller/tests/helper/security/crypto/__init__.py
|
donghak-shin/dp-tornado
|
095bb293661af35cce5f917d8a2228d273489496
|
[
"MIT"
] | 11
|
2015-12-15T09:49:39.000Z
|
2021-09-06T18:38:21.000Z
|
# -*- coding: utf-8 -*-
from dp_tornado.engine.controller import Controller
class CryptoController(Controller):
    """Self-checking endpoint exercising the security.crypto helper."""

    def get(self):
        # Fixed key/plaintext with a known ciphertext so the default
        # (deterministic) mode can be compared exactly.
        key = 'CRYPTO-SECRET-KE*'
        plain = 'HELLO.'
        enc = 'v12yDgV7/5cLNMyLM1C2uw=='
        encrypted = self.helper.security.crypto.encrypt(plain, key=key)
        decrypted = self.helper.security.crypto.decrypt(encrypted, key=key)
        assert(encrypted == enc)
        assert(decrypted == plain)

        # randomized=True: ciphertext must differ from the fixed value
        # yet still decrypt to the original plaintext.
        encrypted = self.helper.security.crypto.encrypt(plain, randomized=True, key=key)
        decrypted = self.helper.security.crypto.decrypt(encrypted, key=key)
        assert(encrypted != enc)
        assert(decrypted == plain)

        # expire_in=1: decryption works at first...
        encrypted = self.helper.security.crypto.encrypt(plain, expire_in=1, key=key)
        decrypted = self.helper.security.crypto.decrypt(encrypted, key=key)
        assert(encrypted != enc)
        assert(decrypted == plain)

        # ...and returns False once the 1-second expiry has passed.
        import time
        time.sleep(1.6)
        assert(self.helper.security.crypto.decrypt(encrypted, key=key) is False)

        # RAW mode: key given as a (key string, 16-byte zero IV) tuple.
        # NOTE(review): presumably a raw AES key/IV pair — confirm
        # against the crypto helper's implementation.
        plain = 'HELLO.'
        enc = 'wOlChy1LGjQemn6UBpJrwA=='
        key = ('01234567890123456789012345678901', '\0' * 16)
        encrypted = self.helper.security.crypto.encrypt(plain, key=key, raw=True)
        decrypted = self.helper.security.crypto.decrypt(encrypted, key=key, raw=True)
        assert enc == encrypted
        assert plain == decrypted

        # RAW mode round-trip with non-ASCII (Korean) plaintext.
        plain = '안녕하세요.'
        enc = 'D00uYTgwZBSq1c1wubOY1xMzyJVKHT4X1tj9lHqXu5Y='
        key = ('01234567890123456789012345678901', '\0' * 16)
        encrypted = self.helper.security.crypto.encrypt(plain, key=key, raw=True)
        decrypted = self.helper.security.crypto.decrypt(encrypted, key=key, raw=True)
        assert enc == encrypted
        assert plain == decrypted

        # encode=False: no fixed ciphertext to compare, so only the
        # round-trip is checked.
        encrypted = self.helper.security.crypto.encrypt(plain, key=key, raw=True, encode=False)
        decrypted = self.helper.security.crypto.decrypt(encrypted, key=key, raw=True, encode=False)
        assert plain == decrypted

        # pad=False with a 16-character plaintext ('HELLO PY. WORLD.').
        plain = 'HELLO PY. WORLD.'
        encrypted = self.helper.security.crypto.encrypt(plain, key=key, raw=True, encode=True, pad=False)
        decrypted = self.helper.security.crypto.decrypt(encrypted, key=key, raw=True, encode=True, pad=False)
        assert plain == decrypted
        self.finish('done')
| 30.6
| 109
| 0.645833
| 269
| 2,448
| 5.869888
| 0.208178
| 0.094997
| 0.170994
| 0.227992
| 0.768841
| 0.739709
| 0.736542
| 0.736542
| 0.695377
| 0.666878
| 0
| 0.048533
| 0.234069
| 2,448
| 79
| 110
| 30.987342
| 0.7936
| 0.031454
| 0
| 0.511628
| 0
| 0
| 0.090909
| 0.065962
| 0
| 0
| 0
| 0
| 0.302326
| 1
| 0.023256
| false
| 0
| 0.046512
| 0
| 0.093023
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bd40947f7d1e32062bc4f7dac802c15396eaf1c3
| 130
|
py
|
Python
|
source/cards/__init__.py
|
omerk2511/TakiServer
|
f162d80e1b48f739c77c31a942416bd60b3f5af6
|
[
"MIT"
] | 4
|
2020-10-07T16:50:07.000Z
|
2020-10-30T11:27:53.000Z
|
source/cards/__init__.py
|
omerk2511/TakiServer
|
f162d80e1b48f739c77c31a942416bd60b3f5af6
|
[
"MIT"
] | 2
|
2020-10-14T16:43:48.000Z
|
2020-10-25T12:41:51.000Z
|
source/cards/__init__.py
|
omerk2511/TakiServer
|
f162d80e1b48f739c77c31a942416bd60b3f5af6
|
[
"MIT"
] | 1
|
2020-10-10T14:29:51.000Z
|
2020-10-10T14:29:51.000Z
|
from card import Card
from card_type import CardType
from deck import Deck
from hand import Hand
from validator import valid_move
| 21.666667
| 32
| 0.846154
| 22
| 130
| 4.909091
| 0.454545
| 0.148148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 130
| 5
| 33
| 26
| 0.981818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1ffdc94cba4de60375962f6d3b30e50434bc4025
| 164
|
py
|
Python
|
backend/apps/transfer/admin.py
|
kevindice/cnap-dms
|
edb850412b6f95d1d4e057674e5cd899ee0b444e
|
[
"MIT"
] | 1
|
2018-11-01T22:16:02.000Z
|
2018-11-01T22:16:02.000Z
|
backend/apps/transfer/admin.py
|
kevindice/cnap-dms
|
edb850412b6f95d1d4e057674e5cd899ee0b444e
|
[
"MIT"
] | 128
|
2018-04-19T08:28:03.000Z
|
2018-12-20T19:02:06.000Z
|
backend/apps/transfer/admin.py
|
cnap-cobre/hyperion
|
edb850412b6f95d1d4e057674e5cd899ee0b444e
|
[
"MIT"
] | 2
|
2018-04-24T20:04:55.000Z
|
2018-04-25T12:17:29.000Z
|
from django.contrib import admin
from apps.transfer.models import TransferBatch, TransferFile

# Make both transfer models editable in the Django admin site.
admin.site.register(TransferBatch)
admin.site.register(TransferFile)
| 27.333333
| 60
| 0.853659
| 20
| 164
| 7
| 0.6
| 0.128571
| 0.242857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 164
| 5
| 61
| 32.8
| 0.921053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
9506f14545d31a81faf3279a566c7526eca8993f
| 1,243
|
py
|
Python
|
package/test/test_sandbox_output.py
|
QualiSystemsLab/cloudshell-training-workflow
|
95360acc5a180badc7c46788c2edf4e348b1d2e0
|
[
"Apache-2.0"
] | null | null | null |
package/test/test_sandbox_output.py
|
QualiSystemsLab/cloudshell-training-workflow
|
95360acc5a180badc7c46788c2edf4e348b1d2e0
|
[
"Apache-2.0"
] | null | null | null |
package/test/test_sandbox_output.py
|
QualiSystemsLab/cloudshell-training-workflow
|
95360acc5a180badc7c46788c2edf4e348b1d2e0
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from mock import Mock
from cloudshell.orch.training.services.sandbox_output import SandboxOutputService
class TestSandboxOutput(unittest.TestCase):
    """Unit tests for SandboxOutputService.notify and .debug_print."""

    def _arrange(self, debug_flag):
        """Build a sandbox mock and a service using *debug_flag*.

        Extracts the arrange step that was duplicated verbatim across
        all three tests. Returns (sandbox, service).
        """
        sandbox = Mock(automation_api=Mock())
        return sandbox, SandboxOutputService(sandbox, debug_flag)

    def test_notify(self):
        # arrange
        sandbox, output_service = self._arrange(Mock())
        message = Mock()
        # act
        output_service.notify(message)
        # assert: notify always writes to the reservation output
        sandbox.automation_api.WriteMessageToReservationOutput.assert_called_once_with(sandbox.id, message)

    def test_debug_enable(self):
        # arrange
        sandbox, output_service = self._arrange(True)
        message = Mock()
        # act
        output_service.debug_print(message)
        # assert: debug_print writes when debug is enabled
        sandbox.automation_api.WriteMessageToReservationOutput.assert_called_once_with(sandbox.id, message)

    def test_debug_disabled(self):
        # arrange
        sandbox, output_service = self._arrange(False)
        message = Mock()
        # act
        output_service.debug_print(message)
        # assert: debug_print is a no-op when debug is disabled
        sandbox.automation_api.WriteMessageToReservationOutput.assert_not_called()
| 28.25
| 107
| 0.690265
| 121
| 1,243
| 6.859504
| 0.305785
| 0.093976
| 0.06506
| 0.079518
| 0.761446
| 0.728916
| 0.728916
| 0.728916
| 0.728916
| 0.728916
| 0
| 0
| 0.233307
| 1,243
| 44
| 108
| 28.25
| 0.870934
| 0.045052
| 0
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 1
| 0.136364
| false
| 0
| 0.136364
| 0
| 0.318182
| 0.090909
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
950e268d7fc06ca83edf0c3023fdd567076d83a8
| 110
|
py
|
Python
|
AI/day03/XRAI/Submit/main.py
|
Ersikan/Pool2021
|
cc64658039dee04127a3a641f891781c53647244
|
[
"MIT"
] | 16
|
2021-03-09T10:25:18.000Z
|
2022-02-08T14:29:24.000Z
|
AI/day03/XRAI/Submit/main.py
|
Ersikan/Pool2021
|
cc64658039dee04127a3a641f891781c53647244
|
[
"MIT"
] | null | null | null |
AI/day03/XRAI/Submit/main.py
|
Ersikan/Pool2021
|
cc64658039dee04127a3a641f891781c53647244
|
[
"MIT"
] | 3
|
2021-02-10T09:32:21.000Z
|
2022-02-01T17:07:59.000Z
|
import network
import dataset_loader
import torch
import torch.optim as optim
import matplotlib.pyplot as plt
| 18.333333
| 31
| 0.854545
| 17
| 110
| 5.470588
| 0.588235
| 0.236559
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127273
| 110
| 6
| 31
| 18.333333
| 0.96875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1f0df4c83bac490ed7c132aaf8966c59b53ce9f9
| 101
|
py
|
Python
|
memo/__init__.py
|
changeemma/memo-tree
|
f5332b211aae7719c30f13974cbe02fd720d5275
|
[
"MIT"
] | null | null | null |
memo/__init__.py
|
changeemma/memo-tree
|
f5332b211aae7719c30f13974cbe02fd720d5275
|
[
"MIT"
] | null | null | null |
memo/__init__.py
|
changeemma/memo-tree
|
f5332b211aae7719c30f13974cbe02fd720d5275
|
[
"MIT"
] | null | null | null |
from .memo_leaf import MemoLeaf
from .memo_node import MemoNode
# Names exported by `from memo import *`.
__all__ = ["MemoNode", "MemoLeaf"]
| 16.833333
| 34
| 0.762376
| 13
| 101
| 5.461538
| 0.615385
| 0.225352
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138614
| 101
| 5
| 35
| 20.2
| 0.816092
| 0
| 0
| 0
| 0
| 0
| 0.158416
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1f1f3c5fbe02d977e30181b62c70f0d23cbd1dc7
| 123
|
py
|
Python
|
napari_spacetx_explorer/__init__.py
|
neuromusic/napari-spacetx-explorer
|
d719f291cf65740fbba5128c5acaf8be0a6daa92
|
[
"BSD-3-Clause"
] | 3
|
2021-07-21T13:42:55.000Z
|
2022-03-24T18:24:50.000Z
|
napari_spacetx_explorer/__init__.py
|
neuromusic/napari-spacetx-explorer
|
d719f291cf65740fbba5128c5acaf8be0a6daa92
|
[
"BSD-3-Clause"
] | 2
|
2021-11-10T17:24:52.000Z
|
2022-02-09T16:28:32.000Z
|
napari_spacetx_explorer/__init__.py
|
neuromusic/napari-spacetx-explorer
|
d719f291cf65740fbba5128c5acaf8be0a6daa92
|
[
"BSD-3-Clause"
] | 1
|
2021-12-21T03:38:28.000Z
|
2021-12-21T03:38:28.000Z
|
_version__ = "0.1.8"
from ._reader import napari_get_reader
from ._function import napari_experimental_provide_function
| 17.571429
| 59
| 0.829268
| 17
| 123
| 5.411765
| 0.705882
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027523
| 0.113821
| 123
| 6
| 60
| 20.5
| 0.816514
| 0
| 0
| 0
| 0
| 0
| 0.040984
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1f4c4ffcb4bb4b3670fdab03bc629f8dca7775fb
| 169
|
py
|
Python
|
colibris/test/fixtures.py
|
AMecea/colibris
|
068b7cbc4ed328dd9f3b4c40c5227b026589b028
|
[
"BSD-3-Clause"
] | 6
|
2019-06-22T19:36:10.000Z
|
2021-11-16T08:07:21.000Z
|
colibris/test/fixtures.py
|
AMecea/colibris
|
068b7cbc4ed328dd9f3b4c40c5227b026589b028
|
[
"BSD-3-Clause"
] | 34
|
2019-07-07T18:01:41.000Z
|
2020-11-01T16:14:58.000Z
|
colibris/test/fixtures.py
|
AMecea/colibris
|
068b7cbc4ed328dd9f3b4c40c5227b026589b028
|
[
"BSD-3-Clause"
] | 2
|
2020-09-01T13:07:17.000Z
|
2021-07-29T12:16:29.000Z
|
import pytest
from colibris import app
@pytest.fixture
async def web_app_client(aiohttp_client):
    # Async fixture yielding an aiohttp test client for the colibris
    # web app. NOTE(review): force_create=True presumably rebuilds the
    # app instead of reusing a cached one — confirm in colibris.app.
    return await aiohttp_client(app.get_web_app(force_create=True))
| 16.9
| 67
| 0.810651
| 26
| 169
| 5
| 0.653846
| 0.092308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12426
| 169
| 9
| 68
| 18.777778
| 0.878378
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1f6dea8315eb949bd22dfa8bf323d5c75c56cb56
| 11,131
|
py
|
Python
|
thaisummit/thaisummit/doctype/attendance_dashboard/attendance_dashboard.py
|
thispl/thaisummit
|
697a43068a87916dedf1e8de10249152a9fd2735
|
[
"MIT"
] | null | null | null |
thaisummit/thaisummit/doctype/attendance_dashboard/attendance_dashboard.py
|
thispl/thaisummit
|
697a43068a87916dedf1e8de10249152a9fd2735
|
[
"MIT"
] | null | null | null |
thaisummit/thaisummit/doctype/attendance_dashboard/attendance_dashboard.py
|
thispl/thaisummit
|
697a43068a87916dedf1e8de10249152a9fd2735
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021, TEAMPRO and contributors
# For license information, please see license.txt
import frappe
from frappe.model.document import Document
from datetime import date, timedelta,time
import datetime
from datetime import datetime
from frappe.utils import (getdate, cint, add_months, date_diff, add_days,
nowdate, get_datetime_str, cstr, get_datetime, now_datetime, format_datetime)
class AttendanceDashboard(Document):
    """Frappe DocType controller; the dashboard's behavior lives in the
    module-level whitelisted functions below."""
    pass
@frappe.whitelist()
def get_shift(emp,month,year):
    """Build an HTML attendance-summary table for one employee.

    emp: employee id; month: three-letter month name ('Jan'..'Dec');
    year: calendar year. The reporting window runs from the 26th of the
    previous month through the 25th of the selected month. Returns the
    finished table as an HTML string.
    """
    # Month-abbreviation -> month-number lookup. NOTE(review): a plain
    # dict would suffice; the one-element list only feeds the
    # comprehension below.
    month_dict = [{'Jan':'1','Feb':'2','Mar':'3','Apr':'4','May':'5','Jun':'6','Jul':'7','Aug':'8','Sep':'9','Oct':'10','Nov':'11','Dec':'12'}]
    month_no = [ m[month] for m in month_dict ]
    # First day of the selected month (this rebinding shadows the
    # `date` name imported at module level).
    date = str(year) + "-" + str(month_no[0])+ "-" + str(1)
    date = datetime.strptime(date,'%Y-%m-%d')
    # Window bounds: 26th of the previous month .. 25th of this month.
    previous_month = frappe.utils.add_months(date, -1).strftime("%Y-%m-26")
    current_month = date.strftime("%Y-%m-25")
    date_list = get_dates(previous_month,current_month)
    data = ''
    # The table is rendered as three header/data row pairs of up to 10
    # days each: rh*/rd* accumulate cells, r* hold finished row pairs.
    rh1 = '<tr>'
    rh2 = ''
    rh3 = ''
    rd1 = '<tr>'
    rd2 = ''
    rd3 = ''
    r1 = ''
    r2 = ''
    r3 = ''  # NOTE(review): assigned but never read below
    i = 1
    for date in date_list:
        if frappe.db.exists('Attendance',{'employee':emp,'attendance_date':date,'docstatus':['!=','2']}):
            att = frappe.get_doc('Attendance',{'employee':emp,'attendance_date':date,'docstatus':['!=','2']})
            status = ''
            if att.employee_type != "WC":
                # Non-"WC" employees: build a two-part status from the
                # punch data; 'M' marks a missing punch, 'AA' a day with
                # no usable punches.
                if not att.in_time or not att.out_time:
                    if att.qr_shift:
                        status = "M" + str(att.qr_shift)
                    else:
                        status = "AA"
                if att.in_time and att.out_time:
                    if not att.qr_shift:
                        status = str(att.shift) + "M"
                    elif att.late_entry == '1':
                        # NOTE(review): compares to the string '1' —
                        # confirm late_entry is stored as a string.
                        status = str(att.shift) + 'L' + "M"
                    else:
                        status = str(att.shift) + str(att.qr_shift)
                # Leave and on-duty override whatever was derived above.
                if att.status == 'On Leave':
                    status = att.leave_type
                if att.on_duty_application:
                    status = "OD"
            else:
                # "WC" employees: leave/OD first, then derive the pair
                # from the assigned shift and the available punches.
                if att.status == 'On Leave':
                    status = att.leave_type
                if att.on_duty_application:
                    status = "OD"
                if att.shift:
                    if att.late_entry == '1':
                        status = str(att.shift) + 'L' + str(att.shift)
                    elif att.in_time:
                        if not att.out_time:
                            status = str(att.shift) + 'M'
                        else:
                            status = str(att.shift) + str(att.shift)
                    elif att.out_time:
                        if not att.in_time:
                            status = 'M' + str(att.shift)
                        else:
                            status = str(att.shift) + str(att.shift)
            # Append this day's header/data cell to row 1, 2, or 3.
            # An empty status falls back to the holiday code ('WW'/'HH')
            # or a red 'A' (absent).
            if i <= 10:
                rh1 += """<th style = 'border: 1px solid black;background-color:#ffedcc;'><center>%s</center></th>"""%((datetime.strptime(date, '%Y-%m-%d').date()).strftime("%d-%b"))
                if status:
                    rd1 += """<td style = 'border: 1px solid black'><center>%s</center></td>"""%(status)
                else:
                    holiday = check_holiday(date)
                    if holiday:
                        rd1 += """<td style = 'border: 1px solid black'><center><b>%s</b></center></td>"""%(holiday)
                    else:
                        rd1 += """<td style = 'border: 1px solid black'><center><b><p style="color:red;">A</p></b></center></td>"""
                if i == 10:
                    r1 = rh1 + '</tr>' + rd1 + '</tr>'
            elif 10 < i <= 20:
                rh2 += """<th style = 'border: 1px solid black;background-color:#ffedcc;'><center>%s</center></th>"""%((datetime.strptime(date, '%Y-%m-%d').date()).strftime("%d-%b"))
                if status:
                    rd2 += """<td style = 'border: 1px solid black'><center>%s</center></td>"""%(status)
                else:
                    holiday = check_holiday(date)
                    if holiday:
                        rd2 += """<td style = 'border: 1px solid black'><center><b>%s</b></center></td>"""%(holiday)
                    else:
                        rd2 += """<td style = 'border: 1px solid black'><center><b><p style="color:red;">A</p></b></center></td>"""
                if i == 20:
                    r2 = '<tr>' + rh2 + '</tr><tr>' + rd2 + '</tr>'
            elif 20 < i:
                rh3 += """<th style = 'border: 1px solid black;background-color:#ffedcc;'><center>%s</center></th>"""%((datetime.strptime(date, '%Y-%m-%d').date()).strftime("%d-%b"))
                if status:
                    rd3 += """<td style = 'border: 1px solid black'><center>%s</center></td>"""%(status)
                else:
                    holiday = check_holiday(date)
                    if holiday:
                        rd3 += """<td style = 'border: 1px solid black'><center><b>%s</b></center></td>"""%(holiday)
                    else:
                        rd3 += """<td style = 'border: 1px solid black'><center><b><p style="color:red;">A</p></b></center></td>"""
            i += 1
        else:
            # No attendance record for this date: render a '-' cell.
            if i <= 10:
                rh1 += """<th style = 'border: 1px solid black;background-color:#ffedcc;'><center>%s</center></th>"""%((datetime.strptime(date, '%Y-%m-%d').date()).strftime("%d-%b"))
                rd1 += """<td style = 'border: 1px solid black'><center>-</center></td>"""
                # NOTE(review): unlike the branch above, r1 is rebuilt on
                # every iteration here rather than only when i == 10.
                r1 = rh1 + '</tr>' + rd1 + '</tr>'
            elif 10 < i <= 20:
                rh2 += """<th style = 'border: 1px solid black;background-color:#ffedcc;'><center>%s</center></th>"""%((datetime.strptime(date, '%Y-%m-%d').date()).strftime("%d-%b"))
                rd2 += """<td style = 'border: 1px solid black'><center>-</center></td>"""
                if i == 20:
                    r2 = '<tr>' + rh2 + '</tr><tr>' + rd2 + '</tr>'
            elif 20 < i:
                rh3 += """<th style = 'border: 1px solid black;background-color:#ffedcc;'><center>%s</center></th>"""%((datetime.strptime(date, '%Y-%m-%d').date()).strftime("%d-%b"))
                rd3 += """<td style = 'border: 1px solid black'><center>-</center></td>"""
            i += 1
    # Stitch the three row pairs into the final table (row 3 is closed
    # inline here).
    data = "<h3>Attendance Summary</h3><table border='1px' class='table table-bordered'>" + r1 + r2 + '<tr>' + rh3 + '</tr><tr>' + rd3 + '</tr>' +"</table>"
    return data
def get_dates(previous_month, current_month):
    """Return every date from previous_month through current_month, inclusive."""
    # date_diff of (current + 1 day, previous) counts both endpoints.
    span = date_diff(add_days(current_month, 1), previous_month)
    return [add_days(previous_month, offset) for offset in range(span)]
def check_holiday(date, holiday_list='Holiday List - 2021'):
    """Classify *date* against a Holiday List.

    Returns "WW" for a weekly off, "HH" for any other holiday, and None
    (implicitly) when the date is not in the list.

    The original built the SQL with %-string interpolation of *date*
    (SQL-injection prone) and hard-coded the list name; both are now
    passed as query parameters, and the list name is a backward-compatible
    keyword argument.
    """
    holiday = frappe.db.sql(
        """select `tabHoliday`.holiday_date, `tabHoliday`.weekly_off
           from `tabHoliday List`
           left join `tabHoliday` on `tabHoliday`.parent = `tabHoliday List`.name
           where `tabHoliday List`.name = %s and holiday_date = %s""",
        (holiday_list, date),
        as_dict=True,
    )
    if holiday:
        return "WW" if holiday[0].weekly_off == 1 else "HH"
@frappe.whitelist()
def get_ot(emp, month, year):
    """Build an HTML overtime summary table for employee *emp*.

    The period covered is the payroll cycle: the 26th of the month before
    *month*/*year* through the 25th of *month*/*year*.  *month* is a
    three-letter abbreviation ('Jan'..'Dec'); *year* may be an int or a
    numeric string.  Returns the summary as an HTML string.

    Fixes vs. the original: the month lookup was a one-element list wrapping
    a dict (plus a comprehension to index it), the unused ``r3`` variable is
    gone, and the duplicated <th>/<td> templates are built once per date
    instead of once per branch.
    """
    month_map = {'Jan': 1, 'Feb': 2, 'Mar': 3, 'Apr': 4, 'May': 5, 'Jun': 6,
                 'Jul': 7, 'Aug': 8, 'Sep': 9, 'Oct': 10, 'Nov': 11, 'Dec': 12}
    first_of_month = datetime(int(year), month_map[month], 1)
    previous_month = frappe.utils.add_months(first_of_month, -1).strftime("%Y-%m-26")
    current_month = first_of_month.strftime("%Y-%m-25")
    date_list = get_dates(previous_month, current_month)

    # Header (rh*) / data (rd*) cells are laid out as three table rows of
    # up to 10 columns each; r1/r2 hold the completed first two rows.
    rh1, rd1 = '<tr>', '<tr>'
    rh2 = rd2 = rh3 = rd3 = ''
    r1 = r2 = ''
    total_ot = timedelta(0)

    for i, date in enumerate(date_list, 1):
        th = """<th style = 'border: 1px solid black;background-color:#ffedcc;'><center>%s</center></th>""" % (
            (datetime.strptime(date, '%Y-%m-%d').date()).strftime("%d-%b"))
        ot_filters = {'employee': emp, 'ot_date': date, 'workflow_state': 'Approved'}
        if frappe.db.exists('Overtime Request', ot_filters):
            ot = frappe.db.get_value('Overtime Request', ot_filters, 'ot_hours')
            # ot_hours is added to a timedelta accumulator — assumed to be a
            # timedelta itself (as in the original); TODO confirm field type.
            total_ot = total_ot + ot
            td = """<td style = 'border: 1px solid black'><center>%s</center></td>""" % (ot or 'A')
        else:
            td = """<td style = 'border: 1px solid black'><center>-</center></td>"""

        if i <= 10:
            rh1 += th
            rd1 += td
            r1 = rh1 + '</tr>' + rd1 + '</tr>'
        elif i <= 20:
            rh2 += th
            rd2 += td
            if i == 20:
                r2 = '<tr>' + rh2 + '</tr><tr>' + rd2 + '</tr>'
        else:
            rh3 += th
            rd3 += td

    # Express the accumulated timedelta as whole hours and leftover minutes.
    hours = total_ot.days * 24 + total_ot.seconds // 3600
    minutes = (total_ot.seconds // 60) % 60
    data = ("<h3>Overtime Summary </h3><p style='font-size:25px'> Total OT : "
            + str(hours) + 'hr ' + str(minutes) + 'min'
            + "</p><table border='1' class='table table-bordered'>"
            + r1 + r2 + '<tr>' + rh3 + '</tr><tr>' + rd3 + '</tr>' + "</table>")
    return data
def get_dates(previous_month, current_month):
    """Return the list of dates between previous_month and current_month, inclusive."""
    # Adding one day to the upper bound makes the difference an inclusive count.
    day_count = date_diff(add_days(current_month, 1), previous_month)
    return [add_days(previous_month, n) for n in range(day_count)]
| 53.514423
| 243
| 0.492498
| 1,385
| 11,131
| 3.88231
| 0.131408
| 0.051888
| 0.07811
| 0.106007
| 0.794123
| 0.779803
| 0.773108
| 0.773108
| 0.737586
| 0.710433
| 0
| 0.030989
| 0.301321
| 11,131
| 207
| 244
| 53.772947
| 0.660409
| 0.017519
| 0
| 0.677083
| 0
| 0.098958
| 0.315602
| 0.124702
| 0.010417
| 0
| 0
| 0
| 0
| 1
| 0.026042
| false
| 0.005208
| 0.03125
| 0
| 0.09375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
2f1100606d6f4c01591a7a48e451170f9c8222dd
| 143
|
py
|
Python
|
canvasaio/scope.py
|
spapadim/canvasaio
|
a17e60447acd45cdbd6e4f0f24f3c9ae03a58ca8
|
[
"MIT"
] | null | null | null |
canvasaio/scope.py
|
spapadim/canvasaio
|
a17e60447acd45cdbd6e4f0f24f3c9ae03a58ca8
|
[
"MIT"
] | null | null | null |
canvasaio/scope.py
|
spapadim/canvasaio
|
a17e60447acd45cdbd6e4f0f24f3c9ae03a58ca8
|
[
"MIT"
] | null | null | null |
from canvasaio.canvas_object import CanvasObject
class Scope(CanvasObject):
    """A Canvas object whose string form is its ``resource`` attribute."""

    def __str__(self):
        # Render the scope as its resource field.
        return f"{self.resource}"
| 20.428571
| 48
| 0.727273
| 16
| 143
| 6.1875
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.167832
| 143
| 6
| 49
| 23.833333
| 0.831933
| 0
| 0
| 0
| 0
| 0
| 0.013986
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
2f2294db2aac13a30822a3a89a31a23f23c584a9
| 1,089
|
py
|
Python
|
Yoyo/normalization.py
|
teabao/AI-introduction
|
0a608326f627446011f723201b8f705ad7c77e8c
|
[
"MIT"
] | null | null | null |
Yoyo/normalization.py
|
teabao/AI-introduction
|
0a608326f627446011f723201b8f705ad7c77e8c
|
[
"MIT"
] | 1
|
2021-05-19T08:34:44.000Z
|
2021-05-19T08:34:44.000Z
|
Yoyo/normalization.py
|
teabao/AI-introduction
|
0a608326f627446011f723201b8f705ad7c77e8c
|
[
"MIT"
] | 1
|
2021-05-19T08:23:28.000Z
|
2021-05-19T08:23:28.000Z
|
def normalize(state):
    """Rescale a 19-element raw state vector feature by feature.

    Features 0 and 4-6 are simply scaled down; the rest are shifted and
    divided by fixed constants.  NOTE(review): the constants look like
    per-feature means/std-devs from the training data — unverified.
    """
    return [
        0.001 * state[0],
        (state[1] - 2478330.0752) / 188151.0787,
        (state[2] - 9696.347622) / 14234.47008,
        (state[3] - 2515.821571) / 2615.29795,
        0.01 * state[4],
        0.01 * state[5],
        0.01 * state[6],
        (state[7] - 5177.579363) / 16123.41258,
        (state[8] - 270.7733533) / 1248.568074,
        (state[9] - 35.71445465) / 269.3077132,
        (state[10] - 54.79755959) / 10.66533212,
        (state[11] - 52.87394922) / 11.70261057,
        (state[12] - 52.65588396) / 8.861974927,
        (state[13] - 33.72752522) / 120.876015,
        (state[14] - 25.8489596) / 122.6781467,
        (state[15] - 25.2647541) / 119.2325979,
        (state[16] - 29.77151467) / 21.96257966,
        (state[17] - 30.84369715) / 21.36197042,
        (state[18] - 27.72517914) / 20.37034246,
    ]
| 47.347826
| 58
| 0.716253
| 171
| 1,089
| 4.438596
| 0.473684
| 0.249012
| 0.375494
| 0.395257
| 0.090909
| 0.090909
| 0
| 0
| 0
| 0
| 0
| 0.34525
| 0.10101
| 1,089
| 22
| 59
| 49.5
| 0.430031
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
2f29bb755c59071552e33f7b9ee3337cec9154b3
| 7,476
|
py
|
Python
|
day1.py
|
Lajnold/adventofcode2015
|
cd99969c5701d1afd5ec58afc76d7c03681d0648
|
[
"MIT"
] | null | null | null |
day1.py
|
Lajnold/adventofcode2015
|
cd99969c5701d1afd5ec58afc76d7c03681d0648
|
[
"MIT"
] | null | null | null |
day1.py
|
Lajnold/adventofcode2015
|
cd99969c5701d1afd5ec58afc76d7c03681d0648
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
inp = "(((())))()((((((((())()(()))(()((((()(()(((()((()((()(()()()()()))(((()(()((((((((((())(()()((())()(((())))()(()(()((()(()))(()()()()((()((()(((()()(((((((()()())()((((()()(((((()(())()(())((())()()))()(((((((())(()())(()(((())(()))((())))(()((()())))()())((((())))(()(((((()(())(((()()((()((()((((((((((())(()())))))()))())()()((((()()()()()()((((((())())(((()())()((()()(((()()()))(((((()))(((()(()()()(()(()(((())()))(()(((()((())()(()())())))((()()()(()()(((()))(((()((((()(((((()()(()())((()())())(()((((((()(()()))((((()))))())((())()()((()(()))))((((((((()))(()()(((())())(())()((()()()()((()((()((()()(((())))(()((())()((((((((()((()(()()(((())())())))(())())))()((((()))))))())))()()))()())((()())()((()()()))(()()(((()(())((((())())((((((((()()()()())))()()()((((()()))))))()((((()(((()))(()()())))((()()(((()))()()())())(((())((()()(())()()()(((())))))()())((()))()))((())()()())()())()()(()))())))())()))(())((()(())))(()(())(()))))(()(())())(()(())(()(()))))((()())()))()((((()()))))())))()()())((())()((()()()))()(((()(()))))(())()()))(((()())))))))))(((())))()))())()))))()()(((())))))))()(()()(()))((()))))((())))((()((())))())))()()(()))())()(()((()())(()(()()())())(()()))()))))(()())()()))()()()()))(()(()(()))))))()(()))()))()()(()((())(()(())))()(((())(())())))))()(()(()))))()))(()()()(())()(()(())))()))))()()(((((())))))())()())())())()())()))))()))))))))())()()()()()()())))()))((())()))())))()((())()))))()))())))))))())()()()))()()(()((((()(((((((()(())((()())((()()))()))))(())))()()()(())((())()())))(())))(())))(((()()))()(())(((()(()))((())))())()))((((()))())()))))))))()(())())))(()))()(()()))())()()(())())))())()()(()())))()((()())(()(())(())))))))))))))(()))))()))))))()()())(()(((((()(()())))())()))(()))()))(()()))()())(()))())()(())((()()))))))())))())()(((())))(()(()))()()))()(()))))))((()())(()))))))()())))()()))))))))((((((((()()()(()))))))()())))())))()()((())()))((())(())))())())))()()()((()((()(())))())()(())))))))))()())))()()()()()()))()))(((
))())(()(()))))))(()()))()))(())))()))))))))))))(()))))))))()))))()))()())()))()()))))))()))))((()))))(()))())()(())))(()())((((()())))()))))(()))()(()()(())))))())))))()))))))())))())))))())))())())))())(()))))(())()(())))())()))((()()))))))())))((())))))))())))(())))))()()())))))())))))()))))))()))()()()(()(((()())())())(()))())))))((()(())(()))))))))(())))()()()())())(()))))()()()))()))())())())()(())))()(((()((((())))))))()))))))))))))))))))))((())()())(()))))()()))))))(()()(())())))())))((())))((())))))))))))))()))))()(()))))))())))))()))(()()())(()())))))))))()))))))(())))))()()))()())(((())))()))(()))))))))(())())))())))())())())()()))((())()(())()())()))()())(())(()))))()())))(()(((()))))))()(()())()()()))()))))))))()()()(())()())()(((((()))()())())(()))))()()()(())))())))()((()())))(()))())()(()())())(()))()()))((()()))((()()()()())))(())()))(()(())))((()()))))))))())))))))())()()))))))))))))))))(())()(())(())()())())()))()(()))))())())))))()())()(()))()()(())))(())())))))(()))))))))))))))())())(())(())))(((()))()))))())((())(()))())))))))())))))())))()))()))))))))))))())()))))()))))((()))(())))()(())))(())()))()))())))())))))))()(()())())))()()())))(())))))(()))))))))))))(()))()))()))())))(((()()()(())((()())))()())(((()))(())()))((()()()())))())(())(()))))()(((((())))(()))())())))))))((((()()()))())())()(()(()())))))))))()())())))(())))()())(((()(())())()()))())())))))))((()())((()()(()))(()(())))()))()))(()))(()))()()(()(((())((((()))()(()))((())()(()(()())()(()))()())))))(()))()))())()())))())))(())))((())(()())))))()))(())(()))()())()(()()((()(()))))))()(())(()())(())()))(((())()))(()()(()()()))))(()(())))()))))())))))())(()()()()()()(((())))(()()))()((())(((((()()())))(()))(()))()()))(((())())()(((()()()()))))(()))(())())))()())(()()())())))))))()))))((())))()())(()))(()(()))())))))())(())))))()()())())()))()()(())))(()))(())((((((())(()))(()))())()))(()()(())))()))(()()))()))()(())))(())))((()(()))(())()()())())))(((()()())(())()))))))()(((()(((((()(
)(((())(())))())()((()))))((()())()(())(((())))(((()((()(()(()))(()()))())(()))(())(())))()))))))((((()))()((((()(()))()))()()))))()(()(()))()(()((()(((()(()()(((()))))()(((()(()(()(((()(()())())()()(()(()())())(()((((())(()))()))(((((()()())(())()((()()())))()()(((()()))()((((((((()(())))())((()))))(())))(()))))((()((((()()(())(((((()))(((((((((((((()())))((((()(((()((())())()))((()))()(()()((()()()()(()()(()(()(((())()(()((((((()((()()((())()((((()((()()(()()())((()()()((()((())()(()(((()((())((((())(()))((()(()))(()())()((((((((()(((((((((((()))(()(((()(()()()((((())((())()())()))(())((())(()))(((()((()(())))(()))))((()()))))((((()(()(()())(()(())((((((((()((((()((()(((((()))())()(()))(()()((()(())(((((()(())()(((((()()))))))()(((())()(()()((((())()((())((()(((())(((()))((()()((((()(())))))((()((((()((()((()(((())((()))(((((((()(((()((((((((())()))((((())(((((()((((((((()(((()((()(((()()(((()((((((()()(()((((((((()()(()(()(())((((()())()))))(((()))((((())((((()())((()(())()((()((((((()((((((()(())))()())(((())())())()(())()(()())((()()((((())((((((())(()(((((()((((())()((((()(()(())(()())(((())()((())((((()))()((((((())(()(((()(((()((((((()(((()))(()()())())((()((()())()((((())(((()(()(((((((((())(())))()((()()()()(())((()))(((((((()(((((((((()(()))))(()((((((((()((((()((()()((((((()()(((((((()(()(())()(())((()()()((()(((((()())()(((((()())()()((()(()())(()()()(((()()(((((()((((((()()((()(()()()((((((((((((()((((((((()()(((()())))()(((()()(())())((((()((((()((((()()()(())(())((()(()(((((((((((((((()(())(())))))()()))((()(((()(())((()(((()(()()((((()()(((()(((()(((((()()((()(()(((()))((((((()((((((((()((()((())(((((()(((())(())())((()()))((((())()()((()(((()(((((()()(((()))(((()(()(((((((((((((()))((((((((()(((()))))())((((((((((((())((())((()())(((())((())(()((((((((((()(((())((()()(()((())(((((((((((()))((((((((((((()(()())((()((()((()(()(((()((((((((()()(()((()(()(((()))((()))(((((((((((((()(())((((((())(((()(())(()(()(()((()()))((((()((((()((((())))())((((()((
((()))((((((()((((((()((()(((())))((())(()))(()((()((((()((()(((()()))((((()()()(((((((())(((())(()))())((((()())(((()(((((((((((()(()(()((()(((((((((((((((()()((((()((((((((()(((()()((()((((()))(((()(())((((((()((((())()((((()((()))(())()(()(((()((())())((((((()(()(())())(((())(()(()())(((((()((()((())()())(())))(((()(())))))))(((()(((()))()((()(((()()((()())()()))())))(((()))(()(((()(((((((((()(()(((((()()(((()())()()))))()(((()))(((()(()(()(()(()))()(())()))(()(((())))(()))))))))))(())((()((())((()(())()(())((()()((((()()((()()))((())(((()((()(())(())))()(()(((((()((()))())()(((((()()(((()(()((((((())(()))(())()))((()(()()))(())())()))(((())))(()((()(((())(())())))((()()((((((((((((((()((()(()()(()(((()))())()()((()()()(())(()))(()())(((())((())()(())()()(()()(())))((()(((()))))(((()()(()()))())((()((())()))((((()()()())((())))(((()(())(((((()(((((()((()(()((((()()(((()()()(((()())(((()()((((())(()))(((()))(())())((()))(((()((()))(((()()((())((()(((((()((((()()())((()))()((((()((()(()()()("
def part1():
    """Print the floor Santa ends up on after following the whole input."""
    # Every '(' goes up one floor, every ')' goes down one.
    net = inp.count("(") - inp.count(")")
    print("Final floor: {}".format(net))
def part2():
    """Print the 1-based position where Santa first enters the basement."""
    floor = 0
    for pos, ch in enumerate(inp):
        if ch == "(":
            floor += 1
        elif ch == ")":
            floor -= 1
        # Basement = floor -1; report the 1-based character position.
        if floor < 0:
            print("Reaches basement at position {}".format(pos + 1))
            return
    print("Santa never reached the basement")
# Solve both parts of the puzzle.
part1()
part2()
| 287.538462
| 7,008
| 0.030364
| 58
| 7,476
| 3.913793
| 0.568966
| 0.070485
| 0.061674
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001368
| 0.022204
| 7,476
| 25
| 7,009
| 299.04
| 0.029685
| 0.002809
| 0
| 0
| 0
| 0
| 0.950094
| 0.939093
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0
| 0.166667
| 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
2f2db5cd761702c04060bed8bd1072626eb00342
| 201
|
py
|
Python
|
accounts/admin.py
|
MH-Lee/sunbo_django
|
a95358801cb3ee9a4c4bc16732a2f80312403290
|
[
"MIT"
] | null | null | null |
accounts/admin.py
|
MH-Lee/sunbo_django
|
a95358801cb3ee9a4c4bc16732a2f80312403290
|
[
"MIT"
] | 18
|
2019-11-16T15:50:08.000Z
|
2022-02-10T11:46:51.000Z
|
accounts/admin.py
|
MH-Lee/sunbo_ubuntu
|
27a435838421b4950eed53da3ccbd15cbb501cf2
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import User
# Register your models here.
# Register the custom User model with the default admin site via the
# decorator form (equivalent to admin.site.register(User, UserAdmin)).
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
    # Columns shown on the user change list.
    # NOTE(review): exposing 'password' here shows the stored hash in the
    # admin listing — confirm this is intended.
    list_display = ('email', 'password')
| 25.125
| 40
| 0.766169
| 26
| 201
| 5.884615
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129353
| 201
| 7
| 41
| 28.714286
| 0.874286
| 0.129353
| 0
| 0
| 0
| 0
| 0.075145
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
2f86b4701d2e693eab8f07aa4300a5302c74724c
| 128
|
py
|
Python
|
project_euler/solutions/problem_48.py
|
cryvate/project-euler
|
6ed13880d7916c34554559f5f71662a863735eda
|
[
"MIT"
] | null | null | null |
project_euler/solutions/problem_48.py
|
cryvate/project-euler
|
6ed13880d7916c34554559f5f71662a863735eda
|
[
"MIT"
] | 9
|
2017-02-20T23:41:40.000Z
|
2017-04-16T15:36:54.000Z
|
project_euler/solutions/problem_48.py
|
cryvate/project-euler
|
6ed13880d7916c34554559f5f71662a863735eda
|
[
"MIT"
] | null | null | null |
def solve(bound: int=1000, modulo: int=10_000_000_000):
    """Return sum(i**i for i = 1..bound) reduced modulo *modulo*.

    Project Euler 48: the default modulo keeps the last ten digits.
    """
    total = 0
    for i in range(1, bound + 1):
        # pow with a modulus keeps every intermediate value small.
        total = (total + pow(i, i, modulo)) % modulo
    return total
| 42.666667
| 71
| 0.679688
| 25
| 128
| 3.36
| 0.64
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160377
| 0.171875
| 128
| 2
| 72
| 64
| 0.632075
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
c852ef822e38fa7d193d3ff36c1889eb3a97ee22
| 68
|
py
|
Python
|
wk1/hellosomething.py
|
lokijota/datadrivenastronomymooc
|
175655e5c6450c091534299da6bce6f10a1a3627
|
[
"MIT"
] | 8
|
2018-12-09T18:10:16.000Z
|
2021-03-21T16:38:58.000Z
|
wk1/hellosomething.py
|
lokijota/datadrivenastronomymooc
|
175655e5c6450c091534299da6bce6f10a1a3627
|
[
"MIT"
] | null | null | null |
wk1/hellosomething.py
|
lokijota/datadrivenastronomymooc
|
175655e5c6450c091534299da6bce6f10a1a3627
|
[
"MIT"
] | 5
|
2018-11-09T16:57:17.000Z
|
2020-04-15T09:11:33.000Z
|
def greet(val):
    """Return a greeting string for *val* (must be a str, as in the original)."""
    return "".join(("Hello, ", val, "!"))
print(greet('pah'))
| 11.333333
| 29
| 0.558824
| 9
| 68
| 4.222222
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191176
| 68
| 5
| 30
| 13.6
| 0.690909
| 0
| 0
| 0
| 0
| 0
| 0.164179
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
c070c933aa3e4506060b1a3725d3d699d33e51e1
| 113
|
py
|
Python
|
ComputerConfigurator/configurator/admin.py
|
AndreasBuc/Computer-Configurator
|
e771176dd118d5820fe69d3b534f59ced264295e
|
[
"MIT"
] | null | null | null |
ComputerConfigurator/configurator/admin.py
|
AndreasBuc/Computer-Configurator
|
e771176dd118d5820fe69d3b534f59ced264295e
|
[
"MIT"
] | null | null | null |
ComputerConfigurator/configurator/admin.py
|
AndreasBuc/Computer-Configurator
|
e771176dd118d5820fe69d3b534f59ced264295e
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from configurator.models import Configurator
admin.site.register(Configurator)
| 22.6
| 44
| 0.858407
| 14
| 113
| 6.928571
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088496
| 113
| 4
| 45
| 28.25
| 0.941748
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c09bb18f05512c60145526d97c11106a83dc4aba
| 2,136
|
py
|
Python
|
tests/test_flask_jwt.py
|
manoadamro/flask-jwt
|
dcfc5ea6597079c1f1d0f7db5357ba53e980eac3
|
[
"MIT"
] | 1
|
2018-10-18T15:06:59.000Z
|
2018-10-18T15:06:59.000Z
|
tests/test_flask_jwt.py
|
manoadamro/flask-jwt
|
dcfc5ea6597079c1f1d0f7db5357ba53e980eac3
|
[
"MIT"
] | 1
|
2018-11-03T14:32:39.000Z
|
2018-11-03T14:32:39.000Z
|
tests/test_flask_jwt.py
|
manoadamro/flask-jwt
|
dcfc5ea6597079c1f1d0f7db5357ba53e980eac3
|
[
"MIT"
] | 1
|
2018-10-17T09:14:07.000Z
|
2018-10-17T09:14:07.000Z
|
import unittest
import jwt
import flask
import flask_jwt
from . import mocks
class FlaskJWTTest(unittest.TestCase):
    """Tests for the FlaskJWT request/response callbacks in flask_jwt.handlers."""

    def setUp(self):
        # Fresh Flask app with a FlaskJWT handler ("secret" key, 60 s lifetime,
        # auto_update enabled) wired in via init_app.
        self.app = flask.Flask(__name__)
        self.flaskjwt = flask_jwt.handlers.FlaskJWT("secret", 60, auto_update=True)
        self.flaskjwt.init_app(self.app)

    def test_no_bearer(self):
        # A token sent without the "Bearer " prefix in the Authorization
        # header must be rejected with JWTValidationError.
        token_body = {"thing": True}
        token = jwt.encode(token_body, "secret").decode("utf8")
        mock_store = mocks.MockStore()
        mock_request = mocks.MockRequest(headers={"Authorization": token})
        with mocks.patch_object(flask, "request", mock_request), mocks.patch_object(
            flask_jwt.handlers.FlaskJWT, "store", mock_store
        ):
            self.assertRaises(
                flask_jwt.errors.JWTValidationError, self.flaskjwt._pre_request_callback
            )

    def test_no_token(self):
        # With no Authorization header at all, the pre-request hook leaves the
        # store empty and the post-request hook adds no Authorization header.
        mock_store = mocks.MockStore()
        mock_request = mocks.MockRequest(headers={})
        with mocks.patch_object(flask, "request", mock_request), mocks.patch_object(
            flask_jwt.handlers.FlaskJWT, "store", mock_store
        ):
            self.flaskjwt._pre_request_callback()
            self.assertEqual(mock_store.obj, {})
            response = flask.Response(200)
            self.flaskjwt._post_request_callback(response)
            auth = response.headers.get("Authorization")
            self.assertIsNone(auth)

    def test_with_token(self):
        # A well-formed "Bearer <token>" header: the decoded payload ends up
        # in the store, and the response carries an Authorization header —
        # presumably the auto_update re-issued token; confirm against FlaskJWT.
        token_body = {"thing": True}
        token = jwt.encode(token_body, "secret").decode("utf8")
        mock_store = mocks.MockStore()
        mock_request = mocks.MockRequest(headers={"Authorization": f"Bearer {token}"})
        with mocks.patch_object(flask, "request", mock_request), mocks.patch_object(
            flask_jwt.handlers.FlaskJWT, "store", mock_store
        ):
            self.flaskjwt._pre_request_callback()
            self.assertEqual(mock_store.obj, token_body)
            response = flask.Response(200)
            self.flaskjwt._post_request_callback(response)
            auth = response.headers.get("Authorization")
            self.assertIsNotNone(auth)
| 40.301887
| 88
| 0.649345
| 236
| 2,136
| 5.631356
| 0.237288
| 0.054176
| 0.072235
| 0.094808
| 0.731377
| 0.708804
| 0.708804
| 0.708804
| 0.708804
| 0.665914
| 0
| 0.006165
| 0.240637
| 2,136
| 52
| 89
| 41.076923
| 0.813194
| 0
| 0
| 0.510638
| 0
| 0
| 0.064607
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 1
| 0.085106
| false
| 0
| 0.106383
| 0
| 0.212766
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c0b86c65e559fe1b3c42c1561da2c3872b2ecf16
| 43
|
py
|
Python
|
src/__init__.py
|
andrewnachtigal/wind-forecasting
|
ac3669f10d5709ae202b254eb8519b0730109467
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
andrewnachtigal/wind-forecasting
|
ac3669f10d5709ae202b254eb8519b0730109467
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
andrewnachtigal/wind-forecasting
|
ac3669f10d5709ae202b254eb8519b0730109467
|
[
"MIT"
] | 1
|
2019-10-08T04:18:41.000Z
|
2019-10-08T04:18:41.000Z
|
# Package marker: this file makes Python treat the directory as a package.
| 21.5
| 42
| 0.837209
| 5
| 43
| 7.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139535
| 43
| 1
| 43
| 43
| 0.972973
| 0.930233
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c0e7c02562bbcee46eb3f434b08f661a13320e42
| 95
|
py
|
Python
|
apps/documents/admin.py
|
dnetochaves/repense_rh
|
ba549bdadc90c088f258d9d640bd59fd696bb705
|
[
"MIT"
] | null | null | null |
apps/documents/admin.py
|
dnetochaves/repense_rh
|
ba549bdadc90c088f258d9d640bd59fd696bb705
|
[
"MIT"
] | 3
|
2021-01-22T06:05:42.000Z
|
2021-02-16T10:06:36.000Z
|
apps/documents/admin.py
|
dnetochaves/repense_rh
|
ba549bdadc90c088f258d9d640bd59fd696bb705
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Documents
admin.site.register(Documents)
| 19
| 32
| 0.831579
| 13
| 95
| 6.076923
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 95
| 4
| 33
| 23.75
| 0.929412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
23d5f96a458854c5cdb0a566c8e4a78f09a96f5f
| 117
|
py
|
Python
|
transactions/admin.py
|
Koyel-134/Basic-Banking-System
|
01d2559f28739dfb743140880c43b973ff1ea941
|
[
"MIT"
] | 1
|
2022-03-31T07:35:21.000Z
|
2022-03-31T07:35:21.000Z
|
transactions/admin.py
|
Koyel-134/Basic-Banking-System
|
01d2559f28739dfb743140880c43b973ff1ea941
|
[
"MIT"
] | null | null | null |
transactions/admin.py
|
Koyel-134/Basic-Banking-System
|
01d2559f28739dfb743140880c43b973ff1ea941
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from transactions.models import Transaction
admin.site.register(Transaction)
| 19.5
| 44
| 0.811966
| 14
| 117
| 6.785714
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136752
| 117
| 5
| 45
| 23.4
| 0.940594
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9b21d781841ab481b3775fa73a8027872b9bbd8e
| 7,775
|
py
|
Python
|
15/1_which_floor.py
|
0LL13/advent
|
917a10a43fcdba3bd9ce3140a8b8cb44b1fc8c0f
|
[
"Unlicense"
] | null | null | null |
15/1_which_floor.py
|
0LL13/advent
|
917a10a43fcdba3bd9ce3140a8b8cb44b1fc8c0f
|
[
"Unlicense"
] | null | null | null |
15/1_which_floor.py
|
0LL13/advent
|
917a10a43fcdba3bd9ce3140a8b8cb44b1fc8c0f
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
def find_floor(string: str) -> int:
    """Return the net floor: '(' counts +1, any other character counts -1."""
    return sum(1 if ch == '(' else -1 for ch in string)
def find_position(string: str) -> int:
floor = 0
for i, s in enumerate(string):
floor = floor + find_floor(s)
if floor == -1:
break
return i+1
if __name__ == '__main__':
floors = []
for string in [
'(())',
'()()',
'(((',
'(()(()(',
'))(((((',
'())',
'))(',
')))',
')())())'
]:
floors.append(find_floor(string))
print(floors == [0, 0, 3, 3, 3, -1, -1, -3, -3])
d = '()()(()()()(()()((()((()))((()((((()()((((()))()((((())(((((((()(((((((((()(((())(()()(()((()()(()(())(()((((()((()()()((((())((((((()(()(((()())(()((((()))())(())(()(()()))))))))((((((((((((()())()())())(())))(((()()()((((()(((()(()(()()(()(()()(()(((((((())(())(())())))((()())()((((()()((()))(((()()()())))(())))((((())(((()())(())(()))(()((((()())))())((()(())(((()((((()((()(())())))((()))()()(()(()))))((((((((()())((((()()((((()(()())(((((()(()())()))())(((()))()(()(()(()((((()(())(()))(((((()()(()()()(()(((())())(((()()(()()))(((()()(((())())(()(())())()()(())()()()((()(((()(())((()()((())()))((()()))((()()())((((()(()()(()(((()))()(()))))((()(((()()()))(()(((())()(()((()())(()(()()(()())(())()(((()(()())()((((()((()))))())()))((()()()()(())()())()()()((((()))))(()(((()()(((((((())()))()((((()((())()(()())(())()))(()(()())(((((((())))(((()))())))))()))())((())(()()((())()())()))))()((()()())(())((())((((()())())()()()(((()))())))()()))())(()()()(()((((((()()))())()))()(((()(((())((((()()()(()))())()()))))())()))())((())()())(((((())())((())())))(((())(((())(((((()(((((())(()(()())())(()(())(()))(()((((()))())()))))())))((()(()))))())))(((((())()))())()))))()))))(((()))()))))((()))((()((()(()(())()())))(()()()(())()))()((((())))))))(())(()((()()))(()))(()))(()((()))))))()()((((()()))()())()))))))()()()))(()((())(()))((()()()())()(((()((((())())))()((((()(()))))))())))()()())()))(()))))(()())()))))))((())))))))())()))()((())())))(()((()))()))(())))))(()))()())()()))((()(()))()()()()))))())()()))())(())()()))()))((()))))()()(()())))))()()()))((((()))()))))(()(())))(()())))((())())(()))()))))()())))()())()())))))))))()()))))())))((())((()))))())))(((()())))))))(()))()()))(()))()))))()())))))())((((()())))))))())))()()))))))))()))()))))()))))))(())))))))))())))))))))))))))())())((())))))))))()))((())))()))))))))())()(()))))))())))))()()()())()(()()()(()())(()))()()()(()())))())())))()))))())))))))()()()()())(())())()())()))))(()()()()()))))()))())())))((()())()(
))))()))()))))(()())))()))))))))(((()))()()))))))))))))))))))))(()))(()((()))())))())(()))(()(()(())))))()(()))()))()()))))))))))))()((()())(())())()(())))))())()())((()()))))(()()))))())()(())()))))))))))))))))))))()))(()(()())))))))()()((()))()))))))((())))()))))))))((()))())()()))())()()))((()))())))))))))))(()())()))(())((()(()()))(()())(())))()())(()(())()()))))()))()(()))))))(()))))))))))(()))())))))))))())))))())))(())))))()))))(())())))))))))()(()))))()())))())(()))()())))))))))))))())()()))))()))))))())))))()))))(())(()()()()((())()))())(()))((())()))())())(())(()()))))()))(())()()((())(())))(())))()))())))))))))()(((((())())))(())()))))(())))((()))()(((((((()))))()()))(())))))()(()))))(()()))()))())))))))(()())()))))))))())))(()))())()))(())()((())())()())())(()(()))))()))))))((()())(())()()(()())))()()))(())(())(()))())))()))(()))()()))((((()))))()))((()()()))))()))()))())))(()))()))))(())))()))())()(()))()())))())))))))())))())))()()))))))(()))())())))()))()()())())))))))))))))())))()))(()()))))())))())()(())))())))))))))))))))))()()())())))))()()()((()(()))()()(())()())()))()))))()()()))))))((()))))))))()(()(()((((((()()((()())))))))))))()))())))))((())())(()))())))())))))())()()())(())))())))()())())(())))))))()()(())))()))())))())())())()))))))))()))(()()()())())())))(())())))))))()()())()))))())))())()(())())))))))()())()))(()()(())())))()(()((()()((()()(((((())(()())()))(())()))(())))(())))))))()))()))((()))()))()))))))))()))))))))((()()())(()))(((()))(())))()))((())(((())))()())))())))))((())))))(())())((((((())())()(()))()(()((()())))((())()(()(()))))(())(()()())(())))())((()(((())())))(((()())())))())()(())())((((()()))))())((()))()()()()(())(((((((()()()((()))())(()())))(())())((((()()(()))))()((())))((())()))()(((()))())))()))((()(()))(())(()((((())((((()()(()()))(((())(()))))((((()(()))(())))))((()))(()))((()(((()(()))(()(()((()(())(()(()(()(()()((()))())(((())(()(()))))(()))()()))(())))(())()(((())(()))()((((()()))))())(()))))((())()((((()(((
()))())())(((()))()())((())(())())(())()(())()(()()((((((()()))))()()(((()()))))()())()(((()(()))(()(()())(()(()))))(((((()(((())())))))(((((()((()()((())())((((((()(())(()()((()()()()()()()(()()))()(((()))()))(((((((())(((()((()())()((((())(((()(())))()((()(()()()((())((()())()))()))())))())((((((()))(()(()()()))(()((()(()(()))()((()(((()()()((())(((((())()(()))())())((()(())))(()(()())(())((())())())(((()()()(())))))())(()))))))()))))))())((()()()))((()((((((()))(((()((((()()()(((()))())()(()()(((()((()()()()())()()))()()()(()(())((()))))(()))())))))))()(()()(((((())()(()(((((()((()(()()())(()((((((((()((((((())()((((()()()((()((()((((((()))((())))))))())()))((()(()))()(()()(()((())((()()((((((((((((()())(()()()))((((()((((((())(()))())(()()((()()))()(((((((()((()()((((((()(((())))((())))((((((((()()(((((((())(((((()())(((())((())()((((()(((((((()(()(((()((((((()(((()(((((((((((()()((()()(()))((()()(((()(((())))((((())()(()(((())()(()(((())(((((((((((()))())))((((((())((()()((((()())())((((()()))((())(((((()(()()(()()()((())(()((()()((((()(((((()((()(()((((()())((((((()(((((()()(()(()((((())))(())(())(())((((()(()()((((()((((()()((()((((((())))(((((()))))()))(()((((((((()(((())())(((())))(()(()((())(((()((()()(((((()((()()(((())()(()))(((((((())(()(((((()))((()((()((()))(())())((((()((((())()(()))(((()(((((((((((((((())(((((((((()))(((()(()()()()((((((()((())()((((((((()(())(((((((((((()(()((())()((()()(()(()()((((()()((())(()((()()(()()((((()(((((((())))((((())(())()(((()()((()()((((()((()(((()((())(((()()()((((()((((()()(()(()((((((((())(()(((((())(()())(((((((()())()(()((((()((())(()()())((((()()(((()((((())(())(()()(((((((((()()))()(((())(()(()((((((())(()()())(()))()()(((()(((()((())(()(((((((()(()(()((()(((((()(()((()(()((((((()((((()()((((()(((()((())(()(()((()()((((()()(())()(())(((())(()((((((((()())(((((((((()(())()((((())))()))()()(((((()()((((((())(()()(((()(()(((((((()(()(((((((())(())((((()((()(())))((((()()())(()))((()())((((()(((((()(()(())(()(()()
())(((((()(((((()((((()()((((((((()()))(()((((((())((((())()(()(((()()()(((()(()(())(())(((((()(())())((((())(())(()(((()(((((())((((())())((()(((((((()(((())(()(()))(((((((((()((()((()()(()((((())(((()((())((((())(()(((()(((()(()((((()(((())(()(((()(()()(()(()((()()(()())(())())((()(()(((()(((()(((()()(((((((((()(((((((((()()(((()(((()())((((()(()(((()()()((())((((((((((())(()(((()((((()())((((()((()))(((()()()(((((()(((((((())((()())(()((((())((((((((())(()((()((((((((((()()((()((()()))(((()())()())()(((()())()()(()(()(((((((())()))(())()))())()()((())()((()((((()((()((())(((((()((((((()(())))(()))())(((()))((()()(()(((()))((((())()(((()))))()(()(())()(((((())(()(()(())(())()((()()()((((()(())((()())(()(()))(()(()(()()(())()()(()((())()((()))))()))((()(()()()()((()())(()))())()(()(((((((((())())((()((()((((((())()((((())(((())((()(()()()((())(()((())(((()((((()()((()(()(((((())()))()((((((()))((())(((()()))(((())(())()))(((((((())(())())()(())(((((()))()((()))()(()()((()()()()()())(((((((' # noqa
pos = find_position(d)
print(pos)
| 176.704545
| 7,018
| 0.039871
| 81
| 7,775
| 3.666667
| 0.382716
| 0.090909
| 0.10101
| 0.114478
| 0.141414
| 0.141414
| 0
| 0
| 0
| 0
| 0
| 0.002142
| 0.039486
| 7,775
| 43
| 7,019
| 180.813953
| 0.037627
| 0.003344
| 0
| 0.060606
| 0
| 0
| 0.910147
| 0.903692
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060606
| false
| 0
| 0
| 0
| 0.121212
| 0.060606
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9b2357d8203b41535720ddd5542b7e38c48c733f
| 32
|
py
|
Python
|
nbopen/__main__.py
|
NumesSanguis/nbopen
|
65452bd3ad6a52240d4b6e8a40d95041aa3abc35
|
[
"BSD-3-Clause"
] | 277
|
2015-01-20T19:49:06.000Z
|
2022-03-05T19:09:32.000Z
|
nbopen/__main__.py
|
NumesSanguis/nbopen
|
65452bd3ad6a52240d4b6e8a40d95041aa3abc35
|
[
"BSD-3-Clause"
] | 70
|
2015-03-16T07:35:54.000Z
|
2022-02-01T19:29:56.000Z
|
nbopen/__main__.py
|
NumesSanguis/nbopen
|
65452bd3ad6a52240d4b6e8a40d95041aa3abc35
|
[
"BSD-3-Clause"
] | 71
|
2015-04-16T20:58:52.000Z
|
2022-02-10T01:19:45.000Z
|
from .nbopen import main
main()
| 10.666667
| 24
| 0.75
| 5
| 32
| 4.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15625
| 32
| 2
| 25
| 16
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
f1c0f2c0c98fc3636729766cc0658c1189f4ca73
| 550
|
py
|
Python
|
satori/__init__.py
|
lastmeta/Satori
|
cb321ee53a15fe8cba8fcdd483eeb6acc8dab3ea
|
[
"MIT"
] | 3
|
2022-02-16T17:25:53.000Z
|
2022-02-25T05:24:15.000Z
|
satori/__init__.py
|
lastmeta/Satori
|
cb321ee53a15fe8cba8fcdd483eeb6acc8dab3ea
|
[
"MIT"
] | 9
|
2022-02-16T20:23:55.000Z
|
2022-03-26T17:27:23.000Z
|
satori/__init__.py
|
lastmeta/Satori
|
cb321ee53a15fe8cba8fcdd483eeb6acc8dab3ea
|
[
"MIT"
] | null | null | null |
from satori import config
from satori.lib import engine
from satori.lib import apis
from satori.lib import spoof
from satori.lib import start
from satori.lib import wallet
from satori.lib.apis import disk
from satori.lib.engine import view
from satori.lib.engine import Engine
from satori.lib.engine import DataManager
from satori.lib.engine import ModelManager
from satori.lib.engine import HyperParameter
from satori.lib.engine.view import View
from satori.lib.engine.view import JupyterView
from satori.lib.engine.view import JupyterViewReactive
| 32.352941
| 54
| 0.841818
| 86
| 550
| 5.383721
| 0.197674
| 0.323974
| 0.393089
| 0.328294
| 0.542117
| 0.259179
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110909
| 550
| 16
| 55
| 34.375
| 0.94683
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
f1ec8d842eaea80db127c7bb05dd865194e2b525
| 135
|
py
|
Python
|
ssms/__init__.py
|
AlexanderFengler/ssm_simulators
|
cf650641647b7c049e60c48dde365607c8d3c54a
|
[
"MIT"
] | 1
|
2021-10-31T15:08:11.000Z
|
2021-10-31T15:08:11.000Z
|
ssms/__init__.py
|
AlexanderFengler/ssm_simulators
|
cf650641647b7c049e60c48dde365607c8d3c54a
|
[
"MIT"
] | 3
|
2021-07-30T15:57:56.000Z
|
2022-02-25T02:47:09.000Z
|
ssms/__init__.py
|
AlexanderFengler/ssm_simulators
|
cf650641647b7c049e60c48dde365607c8d3c54a
|
[
"MIT"
] | null | null | null |
__version__ = '0.0.1'
from . import basic_simulators
from . import dataset_generators
from . import config
from . import support_utils
| 22.5
| 32
| 0.792593
| 19
| 135
| 5.263158
| 0.631579
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025862
| 0.140741
| 135
| 6
| 33
| 22.5
| 0.836207
| 0
| 0
| 0
| 0
| 0
| 0.036765
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f1f71980ba5d004659e767af347f1386796c4b2c
| 58
|
py
|
Python
|
ssms/basic_simulators/__init__.py
|
AlexanderFengler/ssm_simulators
|
cf650641647b7c049e60c48dde365607c8d3c54a
|
[
"MIT"
] | 1
|
2021-10-31T15:08:11.000Z
|
2021-10-31T15:08:11.000Z
|
ssms/basic_simulators/__init__.py
|
AlexanderFengler/ssm_simulators
|
cf650641647b7c049e60c48dde365607c8d3c54a
|
[
"MIT"
] | 3
|
2021-07-30T15:57:56.000Z
|
2022-02-25T02:47:09.000Z
|
ssms/basic_simulators/__init__.py
|
AlexanderFengler/ssm_simulators
|
cf650641647b7c049e60c48dde365607c8d3c54a
|
[
"MIT"
] | null | null | null |
from .boundary_functions import *
from .simulator import *
| 29
| 33
| 0.810345
| 7
| 58
| 6.571429
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12069
| 58
| 2
| 34
| 29
| 0.901961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7b08dd1aaa79a9d86c5455701cb4023e50818dbe
| 53
|
py
|
Python
|
datetime/randomize delay.py
|
pydeveloper510/Python
|
2e3cf5f9d132fbc6dd8c41a96166b6e879d86e0d
|
[
"MIT"
] | 3
|
2021-04-23T08:04:14.000Z
|
2021-05-08T01:24:08.000Z
|
datetime/randomize delay.py
|
pydeveloper510/Python
|
2e3cf5f9d132fbc6dd8c41a96166b6e879d86e0d
|
[
"MIT"
] | null | null | null |
datetime/randomize delay.py
|
pydeveloper510/Python
|
2e3cf5f9d132fbc6dd8c41a96166b6e879d86e0d
|
[
"MIT"
] | 1
|
2021-05-08T01:24:46.000Z
|
2021-05-08T01:24:46.000Z
|
import time
import random
print(random.randint(0, 5))
| 17.666667
| 27
| 0.792453
| 9
| 53
| 4.666667
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 0.09434
| 53
| 3
| 27
| 17.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7b2e2c1efaf42c5b895deb72311f9a679556225c
| 619
|
py
|
Python
|
S4/S4 Library/simulation/situations/visiting/visiting_tuning.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | 1
|
2021-05-20T19:33:37.000Z
|
2021-05-20T19:33:37.000Z
|
S4/S4 Library/simulation/situations/visiting/visiting_tuning.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | null | null | null |
S4/S4 Library/simulation/situations/visiting/visiting_tuning.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | null | null | null |
from sims4.tuning.tunable import TunableList, TunableReference
import services
class VisitingTuning:
ALWAYS_WELCOME_TRAITS = TunableList(description='\n Traits that will guarantee that after the Sim is welcomed into a \n household, it will always be automatically welcomed if he/she comes\n back.\n i.e. Vampires are always welcomed after being welcomed once.\n ', tunable=TunableReference(description='\n Trait reference to make the Sim always be welcomed after they \n are welcomed once.\n ', manager=services.trait_manager(), pack_safe=True))
| 103.166667
| 516
| 0.714055
| 82
| 619
| 5.341463
| 0.597561
| 0.054795
| 0.059361
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002079
| 0.22294
| 619
| 5
| 517
| 123.8
| 0.908524
| 0
| 0
| 0
| 0
| 0.25
| 0.596123
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9e2ccb6d1e02252ea348f150bcca1b63b81ccd74
| 1,371
|
py
|
Python
|
sample_pipeline/sample_pipeline/tests_3_fixtures_from_a_cached_pipeline/conftest.py
|
CFMTech/python_pipeline_blog_post
|
21938d3531653af4240b6b229cf649139abeef9d
|
[
"MIT"
] | 1
|
2021-12-12T14:19:10.000Z
|
2021-12-12T14:19:10.000Z
|
sample_pipeline/sample_pipeline/tests_3_fixtures_from_a_cached_pipeline/conftest.py
|
mwouts/python_pipeline_blog_post
|
e4b0e579a568a916d37645cbfa838b21668b2c6c
|
[
"MIT"
] | null | null | null |
sample_pipeline/sample_pipeline/tests_3_fixtures_from_a_cached_pipeline/conftest.py
|
mwouts/python_pipeline_blog_post
|
e4b0e579a568a916d37645cbfa838b21668b2c6c
|
[
"MIT"
] | 1
|
2021-12-12T14:19:54.000Z
|
2021-12-12T14:19:54.000Z
|
"""In this conftest, we load the sample fixtures from a cached pipeline
generated by the test test_generate_cached_pipeline"""
import pytest
from . import get_cached_pipeline_path, load_from_cache
@pytest.fixture(scope="session")
def start_date():
"""A sample start date for the pipeline"""
return "2021-01-04"
@pytest.fixture(scope="session")
def end_date():
"""A sample end date for the pipeline"""
return "2021-01-29"
@pytest.fixture(scope="session")
def tickers():
"""A sample list of tickers"""
return {"AAPL", "MSFT", "AMZN", "GOOGL"}
@pytest.fixture(scope="session")
def cached_pipeline_path(tickers, start_date, end_date, worker_id):
"""This fixture returns the path to the cached pipeline and evaluates the
pipeline if necessary.
worker_id: the id of the worker in pytest-xdist
(remove this argument if you don't use pytest-xdist)
"""
return get_cached_pipeline_path(tickers, start_date, end_date, worker_id)
@pytest.fixture(scope="session")
def yahoo_data(cached_pipeline_path):
return load_from_cache(cached_pipeline_path, "yahoo_data")
@pytest.fixture(scope="session")
def closes(cached_pipeline_path):
return load_from_cache(cached_pipeline_path, "closes")
@pytest.fixture(scope="session")
def volumes(cached_pipeline_path):
return load_from_cache(cached_pipeline_path, "volumes")
| 26.882353
| 77
| 0.742524
| 198
| 1,371
| 4.929293
| 0.30303
| 0.172131
| 0.165984
| 0.179303
| 0.531762
| 0.330943
| 0.330943
| 0.269467
| 0.269467
| 0.269467
| 0
| 0.013652
| 0.14515
| 1,371
| 50
| 78
| 27.42
| 0.819113
| 0.30124
| 0
| 0.304348
| 1
| 0
| 0.119126
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.304348
| false
| 0
| 0.086957
| 0.130435
| 0.695652
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
9e403f9d6179442ab512fd3ee0619dbe8f632703
| 122
|
py
|
Python
|
listings/admin.py
|
toyerovsky/btre-project
|
4a3e401bba765d8964f65642fac40ef6d54ca71b
|
[
"MIT"
] | null | null | null |
listings/admin.py
|
toyerovsky/btre-project
|
4a3e401bba765d8964f65642fac40ef6d54ca71b
|
[
"MIT"
] | null | null | null |
listings/admin.py
|
toyerovsky/btre-project
|
4a3e401bba765d8964f65642fac40ef6d54ca71b
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from .models import Listing
admin.site.register(Listing)
| 15.25
| 32
| 0.795082
| 17
| 122
| 5.705882
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139344
| 122
| 7
| 33
| 17.428571
| 0.92381
| 0.213115
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9e4475c631ee5016d102c12d241189d08b65d83f
| 1,051
|
py
|
Python
|
load_data/ILoadSupervised.py
|
erickfmm/ML-experiments
|
b1e81b8eea976efeda6e4dc70af747628a6eb43a
|
[
"MIT"
] | null | null | null |
load_data/ILoadSupervised.py
|
erickfmm/ML-experiments
|
b1e81b8eea976efeda6e4dc70af747628a6eb43a
|
[
"MIT"
] | null | null | null |
load_data/ILoadSupervised.py
|
erickfmm/ML-experiments
|
b1e81b8eea976efeda6e4dc70af747628a6eb43a
|
[
"MIT"
] | null | null | null |
from abc import ABCMeta, abstractmethod, ABC
from enum import Enum
class SupervisedType(Enum):
Unknown = 0
Classification = 1
Regression = 2
Both = 3
class ILoadSupervised(ABC):
__metaclass__ = ABCMeta
TYPE: SupervisedType = SupervisedType.Unknown
#@classmethod
#def version(self): return "1.0"
@abstractmethod
def get_all(self): raise NotImplementedError
# #@abstractmethod
# def get_all_yielded(self): raise NotImplementedError
@abstractmethod
def get_classes(self): raise NotImplementedError
@abstractmethod
def get_headers(self): raise NotImplementedError
class ISplitted(ABC):
@abstractmethod
def get_splited(self): raise NotImplementedError
#@abstractmethod
def get_train_yielded(self): raise NotImplementedError
#@abstractmethod
def get_test_yielded(self): raise NotImplementedError
#@abstractmethod
def get_train(self): raise NotImplementedError
#@abstractmethod
def get_test(self): raise NotImplementedError
| 23.886364
| 58
| 0.722169
| 106
| 1,051
| 7.009434
| 0.330189
| 0.205922
| 0.242261
| 0.395693
| 0.504711
| 0.504711
| 0.375505
| 0
| 0
| 0
| 0
| 0.007229
| 0.210276
| 1,051
| 44
| 59
| 23.886364
| 0.887952
| 0.169363
| 0
| 0.173913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.347826
| false
| 0
| 0.086957
| 0
| 0.826087
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
9e6ebf2f32f17179dd8e304b70e898fd1202c5c9
| 5,760
|
py
|
Python
|
data/Config.py
|
yunan4nlp/E-NNRSTParser
|
c247685d5e5e0a9b81a8417680e20964570a71c9
|
[
"Apache-2.0"
] | null | null | null |
data/Config.py
|
yunan4nlp/E-NNRSTParser
|
c247685d5e5e0a9b81a8417680e20964570a71c9
|
[
"Apache-2.0"
] | null | null | null |
data/Config.py
|
yunan4nlp/E-NNRSTParser
|
c247685d5e5e0a9b81a8417680e20964570a71c9
|
[
"Apache-2.0"
] | null | null | null |
from configparser import ConfigParser
import sys, os
sys.path.append('..')
#import models
class Configurable(object):
def __init__(self, config_file, extra_args):
config = ConfigParser()
config.read(config_file)
if extra_args:
extra_args = dict([ (k[2:], v) for k, v in zip(extra_args[0::2], extra_args[1::2])])
for section in config.sections():
for k, v in config.items(section):
if k in extra_args:
v = type(v)(extra_args[k])
config.set(section, k, v)
self._config = config
if not os.path.isdir(self.save_dir):
os.mkdir(self.save_dir)
config.write(open(self.config_file,'w'))
print('Loaded config file sucessfully.')
for section in config.sections():
for k, v in config.items(section):
print(k, v)
@property
def pretrained_embeddings_file(self):
return self._config.get('Data','pretrained_embeddings_file')
@property
def xlnet_dir(self):
return self._config.get('Data','xlnet_dir')
@property
def data_dir(self):
return self._config.get('Data','data_dir')
@property
def train_file(self):
return self._config.get('Data','train_file')
@property
def dev_file(self):
return self._config.get('Data','dev_file')
@property
def test_file(self):
return self._config.get('Data','test_file')
@property
def min_occur_count(self):
return self._config.getint('Data','min_occur_count')
@property
def save_dir(self):
return self._config.get('Save','save_dir')
@property
def xlnet_save_dir(self):
return self._config.get('Save','xlnet_save_dir')
@property
def config_file(self):
return self._config.get('Save','config_file')
@property
def save_model_path(self):
return self._config.get('Save','save_model_path')
@property
def save_vocab_path(self):
return self._config.get('Save','save_vocab_path')
@property
def load_dir(self):
return self._config.get('Save','load_dir')
@property
def load_model_path(self):
return self._config.get('Save', 'load_model_path')
@property
def load_vocab_path(self):
return self._config.get('Save', 'load_vocab_path')
@property
def lstm_layers(self):
return self._config.getint('Network','lstm_layers')
@property
def word_dims(self):
return self._config.getint('Network','word_dims')
@property
def edu_type_dims(self):
return self._config.getint('Network','edu_type_dims')
@property
def dropout_emb(self):
return self._config.getfloat('Network','dropout_emb')
@property
def lstm_hiddens(self):
return self._config.getint('Network','lstm_hiddens')
@property
def dropout_lstm_input(self):
return self._config.getfloat('Network','dropout_lstm_input')
@property
def dropout_lstm_hidden(self):
return self._config.getfloat('Network','dropout_lstm_hidden')
@property
def dropout_mlp(self):
return self._config.getfloat('Network','dropout_mlp')
@property
def output_hidden_states(self):
return self._config.getboolean('Network', 'output_hidden_states')
@property
def output_attentions(self):
return self._config.getboolean('Network', 'output_attentions')
@property
def hidden_size(self):
return self._config.getint('Network', 'hidden_size')
@property
def start_layer(self):
return self._config.getint('Network', 'start_layer')
@property
def end_layer(self):
return self._config.getint('Network', 'end_layer')
@property
def tune_start_layer(self):
return self._config.getint('Network', 'tune_start_layer')
@property
def L2_REG(self):
return self._config.getfloat('Optimizer','L2_REG')
@property
def learning_rate(self):
return self._config.getfloat('Optimizer','learning_rate')
@property
def plm_learning_rate(self):
return self._config.getfloat('Optimizer','plm_learning_rate')
@property
def decay(self):
return self._config.getfloat('Optimizer','decay')
@property
def decay_steps(self):
return self._config.getint('Optimizer','decay_steps')
@property
def beta_1(self):
return self._config.getfloat('Optimizer','beta_1')
@property
def beta_2(self):
return self._config.getfloat('Optimizer','beta_2')
@property
def epsilon(self):
return self._config.getfloat('Optimizer','epsilon')
@property
def clip(self):
return self._config.getfloat('Optimizer','clip')
@property
def train_iters(self):
return self._config.getint('Run','train_iters')
@property
def train_batch_size(self):
return self._config.getint('Run','train_batch_size')
@property
def test_batch_size(self):
return self._config.getint('Run','test_batch_size')
@property
def validate_every(self):
return self._config.getint('Run','validate_every')
@property
def save_after(self):
return self._config.getint('Run','save_after')
@property
def update_every(self):
return self._config.getint('Run','update_every')
@property
def max_edu_len(self):
return self._config.getint('Run','max_edu_len')
@property
def max_state_len(self):
return self._config.getint('Run','max_state_len')
@property
def seed(self):
return self._config.getint('Run','seed')
@property
def max_token_len(self):
return self._config.getint('Run','max_token_len')
| 33.294798
| 96
| 0.648611
| 725
| 5,760
| 4.892414
| 0.147586
| 0.143783
| 0.189456
| 0.270651
| 0.530025
| 0.515365
| 0.438117
| 0.221878
| 0.028757
| 0.028757
| 0
| 0.002462
| 0.224306
| 5,760
| 173
| 97
| 33.294798
| 0.791406
| 0.002257
| 0
| 0.313253
| 0
| 0
| 0.151557
| 0.004524
| 0
| 0
| 0
| 0
| 0
| 1
| 0.295181
| false
| 0
| 0.012048
| 0.289157
| 0.60241
| 0.012048
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
7b6173623d4e661de7d69aaa3372db7a756e2928
| 58
|
py
|
Python
|
diffusion/__init__.py
|
janniklasrose/diffusion-models
|
3379e9b0cde59ee068508982cff1999bb53ce054
|
[
"MIT"
] | 1
|
2021-07-09T12:06:42.000Z
|
2021-07-09T12:06:42.000Z
|
diffusion/__init__.py
|
ignasiialemany/diffusion-models
|
3379e9b0cde59ee068508982cff1999bb53ce054
|
[
"MIT"
] | 1
|
2021-01-22T10:58:05.000Z
|
2021-02-02T09:25:33.000Z
|
diffusion/__init__.py
|
ignasiialemany/diffusion-models
|
3379e9b0cde59ee068508982cff1999bb53ce054
|
[
"MIT"
] | 1
|
2021-01-22T10:52:05.000Z
|
2021-01-22T10:52:05.000Z
|
from . import mcrw, analytical
from .domain import Domain
| 19.333333
| 30
| 0.793103
| 8
| 58
| 5.75
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155172
| 58
| 2
| 31
| 29
| 0.938776
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7b6e507d5cebde710f154dd6237160988dcb8de8
| 148
|
py
|
Python
|
examples/TREC/preprocessor.py
|
decile-team/spear
|
8f8641927325090f85af7a86bc8a5795ea4c3da9
|
[
"MIT"
] | 89
|
2021-06-14T17:38:30.000Z
|
2022-03-31T05:16:26.000Z
|
examples/TREC/preprocessor.py
|
harshading/spear
|
7629cc46ce738a4a67e5b4a6ba7d1935c4833421
|
[
"MIT"
] | null | null | null |
examples/TREC/preprocessor.py
|
harshading/spear
|
7629cc46ce738a4a67e5b4a6ba7d1935c4833421
|
[
"MIT"
] | 7
|
2021-06-14T17:38:32.000Z
|
2021-12-25T22:44:45.000Z
|
import sys
sys.path.append('../../')
from spear.labeling import preprocessor
@preprocessor()
def convert_to_lower(x):
return x.lower().strip()
| 18.5
| 39
| 0.716216
| 20
| 148
| 5.2
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114865
| 148
| 8
| 40
| 18.5
| 0.793893
| 0
| 0
| 0
| 0
| 0
| 0.040268
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
7b7ea104cb880af3e0349125c53989bfcd26f1d0
| 83
|
py
|
Python
|
pyweek22/tile.py
|
fpischedda/yaff
|
ddf9cb11cdf979cfcdf4072959e4ad7dee984a8d
|
[
"BSD-3-Clause"
] | 2
|
2015-08-04T09:26:05.000Z
|
2015-08-04T20:33:33.000Z
|
pyweek22/tile.py
|
fpischedda/yaff
|
ddf9cb11cdf979cfcdf4072959e4ad7dee984a8d
|
[
"BSD-3-Clause"
] | 1
|
2015-08-04T10:56:32.000Z
|
2015-08-04T11:09:43.000Z
|
pyweek22/tile.py
|
fpischedda/yaff
|
ddf9cb11cdf979cfcdf4072959e4ad7dee984a8d
|
[
"BSD-3-Clause"
] | 1
|
2015-08-04T10:26:44.000Z
|
2015-08-04T10:26:44.000Z
|
class Tile:
def __init__(self, tile_type):
self.tyle_type = tile_type
| 16.6
| 34
| 0.662651
| 12
| 83
| 4
| 0.583333
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.253012
| 83
| 4
| 35
| 20.75
| 0.774194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
7bc98eb9e0cbba5197989393fb1b5f4672827339
| 75
|
py
|
Python
|
findy/database/plugins/alpaca/__init__.py
|
doncat99/FinanceDataCenter
|
1538c8347ed5bff9a99a3cca07507a7605108124
|
[
"MIT"
] | null | null | null |
findy/database/plugins/alpaca/__init__.py
|
doncat99/FinanceDataCenter
|
1538c8347ed5bff9a99a3cca07507a7605108124
|
[
"MIT"
] | null | null | null |
findy/database/plugins/alpaca/__init__.py
|
doncat99/FinanceDataCenter
|
1538c8347ed5bff9a99a3cca07507a7605108124
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from findy.database.plugins.alpaca.quotes import *
| 25
| 50
| 0.693333
| 10
| 75
| 5.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015152
| 0.12
| 75
| 2
| 51
| 37.5
| 0.772727
| 0.28
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c8a3e9aa068c715b6550551ae5cb89315b64a9b6
| 140
|
py
|
Python
|
public/error.py
|
IcyCC/fly6to4
|
a15a37b1764a4739dd476584b420749864dd7a8a
|
[
"MIT"
] | null | null | null |
public/error.py
|
IcyCC/fly6to4
|
a15a37b1764a4739dd476584b420749864dd7a8a
|
[
"MIT"
] | null | null | null |
public/error.py
|
IcyCC/fly6to4
|
a15a37b1764a4739dd476584b420749864dd7a8a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
class NotRecognizeCommandException(Exception):
pass
class NotRecognizeProtocolException(Exception):
pass
| 15.555556
| 47
| 0.735714
| 11
| 140
| 9.363636
| 0.727273
| 0.252427
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008475
| 0.157143
| 140
| 8
| 48
| 17.5
| 0.864407
| 0.15
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
cda17cfb49853794940282f3f9e3fb217da3a3a0
| 185
|
py
|
Python
|
django-furniture_factory/furniture_factory/table/admin.py
|
Milanmangar/global_logic_furniture_company
|
861f83f1ae9695fb894a04418126962fc39ad6c9
|
[
"MIT"
] | null | null | null |
django-furniture_factory/furniture_factory/table/admin.py
|
Milanmangar/global_logic_furniture_company
|
861f83f1ae9695fb894a04418126962fc39ad6c9
|
[
"MIT"
] | null | null | null |
django-furniture_factory/furniture_factory/table/admin.py
|
Milanmangar/global_logic_furniture_company
|
861f83f1ae9695fb894a04418126962fc39ad6c9
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from table.models import Feet, Leg, Table
# Register your models here.
admin.site.register(Feet)
admin.site.register(Leg)
admin.site.register(Table)
| 18.5
| 41
| 0.789189
| 28
| 185
| 5.214286
| 0.464286
| 0.184932
| 0.349315
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113514
| 185
| 9
| 42
| 20.555556
| 0.890244
| 0.140541
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
cdd3dcbaa80f3aa7b1f49da37905058274d723e9
| 94
|
py
|
Python
|
qcfractal/services/__init__.py
|
MolSSI/dqm_server
|
ceff64fe032590095e0f865bc1d0c2da4684404e
|
[
"BSD-3-Clause"
] | 113
|
2018-08-04T20:33:41.000Z
|
2022-02-08T21:17:52.000Z
|
qcfractal/services/__init__.py
|
doaa-altarawy/QCFractal
|
5f00dd06bb34ca912c4055f0cbac60863ea89c7f
|
[
"BSD-3-Clause"
] | 665
|
2018-08-04T14:16:53.000Z
|
2022-03-25T15:37:41.000Z
|
qcfractal/services/__init__.py
|
doaa-altarawy/QCFractal
|
5f00dd06bb34ca912c4055f0cbac60863ea89c7f
|
[
"BSD-3-Clause"
] | 40
|
2018-08-16T21:41:02.000Z
|
2022-01-26T15:07:06.000Z
|
"""
Base import for services
"""
from .services import construct_service, initialize_service
| 15.666667
| 59
| 0.787234
| 11
| 94
| 6.545455
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12766
| 94
| 5
| 60
| 18.8
| 0.878049
| 0.255319
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b54420c4c4df539aeb4585e33e156a6a1c1e9a29
| 186
|
py
|
Python
|
gibbs/models/__init__.py
|
eladnoor/equilibrator
|
b7b7b1651aa605dd117af7654132cab5f83889da
|
[
"MIT"
] | 12
|
2015-08-05T16:12:29.000Z
|
2021-03-05T11:57:49.000Z
|
gibbs/models/__init__.py
|
eladnoor/equilibrator
|
b7b7b1651aa605dd117af7654132cab5f83889da
|
[
"MIT"
] | 48
|
2016-07-07T13:10:22.000Z
|
2018-05-30T21:38:03.000Z
|
gibbs/models/__init__.py
|
eladnoor/equilibrator
|
b7b7b1651aa605dd117af7654132cab5f83889da
|
[
"MIT"
] | 4
|
2016-01-21T10:45:25.000Z
|
2017-12-14T14:45:18.000Z
|
from .reaction import Reaction, StoredReaction, Enzyme
from .compound import Compound, CommonName, ValueSource, Specie, SpeciesGroup, \
CompoundWithCoeff, Reactant
| 46.5
| 80
| 0.731183
| 16
| 186
| 8.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.209677
| 186
| 3
| 81
| 62
| 0.92517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b59bd9304bdc6fbdf9fbe5fad71116ddc01dfeb0
| 325
|
py
|
Python
|
pds4_tools/utils/exceptions.py
|
Small-Bodies-Node/pds4_tools
|
26864efff1915e16983689324fa8e59ccde409d0
|
[
"BSD-3-Clause"
] | 7
|
2017-11-29T18:28:28.000Z
|
2021-08-06T16:53:39.000Z
|
pds4_tools/utils/exceptions.py
|
LevN0/pds4_tools
|
3d833575b1fe0e0ac35c6e4ecbda1630b884df55
|
[
"BSD-3-Clause"
] | 17
|
2018-05-15T18:31:14.000Z
|
2021-10-30T06:31:38.000Z
|
pds4_tools/utils/exceptions.py
|
LevN0/pds4_tools
|
3d833575b1fe0e0ac35c6e4ecbda1630b884df55
|
[
"BSD-3-Clause"
] | 9
|
2018-06-15T01:00:16.000Z
|
2021-04-29T20:54:54.000Z
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from .deprecation import PDS4ToolsDeprecationWarning
class PDS4StandardsException(Exception):
""" Custom exception thrown when PDS4 Standards are violated. """
pass
| 27.083333
| 69
| 0.830769
| 35
| 325
| 7.171429
| 0.628571
| 0.159363
| 0.25498
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010676
| 0.135385
| 325
| 11
| 70
| 29.545455
| 0.882562
| 0.175385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.142857
| 0.714286
| 0
| 0.857143
| 0.142857
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
a91f752c438b59fe97f552f2f9789773973388c5
| 651
|
py
|
Python
|
lebanese_channels/services/jadeed.py
|
blazeinmedia/Lebanese-Channels
|
f314868ac3da69ce5a27f6f953145096be1c31eb
|
[
"MIT"
] | 1
|
2020-04-09T19:39:35.000Z
|
2020-04-09T19:39:35.000Z
|
lebanese_channels/services/jadeed.py
|
blazeinmedia/Lebanese-Channels
|
f314868ac3da69ce5a27f6f953145096be1c31eb
|
[
"MIT"
] | null | null | null |
lebanese_channels/services/jadeed.py
|
blazeinmedia/Lebanese-Channels
|
f314868ac3da69ce5a27f6f953145096be1c31eb
|
[
"MIT"
] | null | null | null |
from lebanese_channels.channel import CheckedChannel
from lebanese_channels.services.epg_parsers.jadeed_parser import JadeedParser
from lebanese_channels.services.utils import stream
from lebanese_channels.services.utils.epg import fetch_epg
class Jadeed(CheckedChannel):
def get_name(self) -> str:
return 'Al Jadeed'
def get_logo(self) -> str:
return 'http://www.aljadeed.tv/images/logo.png'
def get_stream_url(self) -> str:
return stream.fetch_from('https://www.aljadeed.tv/arabic/live')
def get_epg_data(self):
return fetch_epg('http://www.aljadeed.tv/arabic/programs/schedule', JadeedParser())
| 34.263158
| 91
| 0.74808
| 88
| 651
| 5.363636
| 0.420455
| 0.101695
| 0.169492
| 0.177966
| 0.139831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145929
| 651
| 18
| 92
| 36.166667
| 0.848921
| 0
| 0
| 0
| 0
| 0
| 0.198157
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.307692
| false
| 0
| 0.307692
| 0.307692
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
a94ecfee141411bd53ade9153779c5c0eaa193be
| 28
|
py
|
Python
|
win_unc/internal/loggers.py
|
zo-edv/py_win_unc
|
610b7c9ce4ea17554d04342126169b488c8ccfae
|
[
"MIT"
] | 10
|
2015-08-14T06:34:28.000Z
|
2020-10-03T17:48:09.000Z
|
win_unc/internal/loggers.py
|
zo-edv/py_win_unc
|
610b7c9ce4ea17554d04342126169b488c8ccfae
|
[
"MIT"
] | 11
|
2017-01-12T23:43:56.000Z
|
2020-06-19T18:32:56.000Z
|
win_unc/internal/loggers.py
|
zo-edv/py_win_unc
|
610b7c9ce4ea17554d04342126169b488c8ccfae
|
[
"MIT"
] | 8
|
2015-09-25T20:44:33.000Z
|
2018-10-04T03:19:42.000Z
|
def no_logging(_):
pass
| 9.333333
| 18
| 0.642857
| 4
| 28
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 28
| 2
| 19
| 14
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
a959af040f1acacc7d710918bb752ff7384ed00f
| 46
|
py
|
Python
|
projects/reinforcement learning/causal_reinforcement_learning/src/errors.py
|
amoskowitz14/causalML
|
6c21033b05c82b3ba55efce6258c38669287eaa9
|
[
"MIT"
] | 354
|
2018-12-21T15:20:21.000Z
|
2021-01-02T14:48:51.000Z
|
projects/reinforcement learning/causal_reinforcement_learning/src/errors.py
|
amoskowitz14/causalML
|
6c21033b05c82b3ba55efce6258c38669287eaa9
|
[
"MIT"
] | 5
|
2021-04-15T20:38:12.000Z
|
2022-03-12T00:52:29.000Z
|
projects/reinforcement learning/causal_reinforcement_learning/src/errors.py
|
amoskowitz14/causalML
|
6c21033b05c82b3ba55efce6258c38669287eaa9
|
[
"MIT"
] | 112
|
2019-05-21T22:10:43.000Z
|
2020-12-29T05:52:07.000Z
|
class InternalStateError(Exception):
pass
| 15.333333
| 36
| 0.782609
| 4
| 46
| 9
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152174
| 46
| 2
| 37
| 23
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
a9687b6f01e69939f16f1d6eded85504daefef54
| 146
|
py
|
Python
|
example_call_fetch_current_session_worker.py
|
ATLJoeReed/ga-legislation-scraper
|
7933bb57cb62b0bf5974aff8cd05e1fa9498cae8
|
[
"MIT"
] | null | null | null |
example_call_fetch_current_session_worker.py
|
ATLJoeReed/ga-legislation-scraper
|
7933bb57cb62b0bf5974aff8cd05e1fa9498cae8
|
[
"MIT"
] | null | null | null |
example_call_fetch_current_session_worker.py
|
ATLJoeReed/ga-legislation-scraper
|
7933bb57cb62b0bf5974aff8cd05e1fa9498cae8
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3.9
# -*- coding: utf-8 -*-
from workers import fetch_current_session
results = fetch_current_session.process()
print(results)
| 18.25
| 41
| 0.746575
| 20
| 146
| 5.25
| 0.8
| 0.228571
| 0.361905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023077
| 0.109589
| 146
| 7
| 42
| 20.857143
| 0.784615
| 0.280822
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
a985a42a628c323ba0fc98ca2c15c8630fc5292c
| 66
|
py
|
Python
|
gamelib/__init__.py
|
Alchez/Discord-GameLibrary
|
f5986aa567689854afd75bdb32b151359cca1cee
|
[
"MIT"
] | 2
|
2020-05-17T04:50:27.000Z
|
2021-01-18T10:27:35.000Z
|
gamelib/__init__.py
|
Alchez/Discord-GameLibrary
|
f5986aa567689854afd75bdb32b151359cca1cee
|
[
"MIT"
] | 1
|
2020-02-11T18:11:16.000Z
|
2020-02-12T08:36:30.000Z
|
gamelib/__init__.py
|
Alchez/Discord-GameLibrary
|
f5986aa567689854afd75bdb32b151359cca1cee
|
[
"MIT"
] | 1
|
2020-01-30T16:32:29.000Z
|
2020-01-30T16:32:29.000Z
|
from .game import Game
def setup(bot):
bot.add_cog(Game(bot))
| 16.5
| 26
| 0.69697
| 12
| 66
| 3.75
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 66
| 4
| 26
| 16.5
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a9912c44f1ad32609cc3fc3401666339a01cf371
| 101
|
py
|
Python
|
grizli/version.py
|
gwalth/grizli
|
9d2609027dfef2e4efde87b0e8256a5e4ad36565
|
[
"MIT"
] | null | null | null |
grizli/version.py
|
gwalth/grizli
|
9d2609027dfef2e4efde87b0e8256a5e4ad36565
|
[
"MIT"
] | null | null | null |
grizli/version.py
|
gwalth/grizli
|
9d2609027dfef2e4efde87b0e8256a5e4ad36565
|
[
"MIT"
] | null | null | null |
# git describe --tags
__version__ = "1.3.2"
__long_version__ = "1.3.2"
__version_hash__ = "gb22d0d1"
| 25.25
| 30
| 0.712871
| 15
| 101
| 3.866667
| 0.666667
| 0.275862
| 0.310345
| 0.344828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113636
| 0.128713
| 101
| 4
| 30
| 25.25
| 0.545455
| 0.188119
| 0
| 0
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8d1804abd48e796bf8cf8acc29e30d1dcbbd18c0
| 47
|
py
|
Python
|
private_connect/models.py
|
lpatmo/actionify_the_news
|
998d8ca6b35d0ef1b16efca70f50e59503f5a62d
|
[
"MIT"
] | 66
|
2015-11-30T20:35:38.000Z
|
2019-06-12T17:40:32.000Z
|
private_connect/models.py
|
lpatmo/actionify_the_news
|
998d8ca6b35d0ef1b16efca70f50e59503f5a62d
|
[
"MIT"
] | 18
|
2015-11-30T22:03:05.000Z
|
2019-07-02T00:50:29.000Z
|
private_connect/models.py
|
lpatmo/actionify_the_news
|
998d8ca6b35d0ef1b16efca70f50e59503f5a62d
|
[
"MIT"
] | 11
|
2015-11-30T20:56:01.000Z
|
2019-07-01T17:06:09.000Z
|
"""Models file for local version of Connect"""
| 23.5
| 46
| 0.723404
| 7
| 47
| 4.857143
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148936
| 47
| 1
| 47
| 47
| 0.85
| 0.851064
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8d419324619fed58e55c7860eafc45408abe8b7f
| 55
|
py
|
Python
|
Day2/Q2.py
|
nkem1010/python-challenge-solutions
|
203cedc691094a83b110fc75764aac51dbbc1a03
|
[
"MIT"
] | 1
|
2020-05-24T21:53:59.000Z
|
2020-05-24T21:53:59.000Z
|
Day2/Q2.py
|
nkem1010/python-challenge-solutions
|
203cedc691094a83b110fc75764aac51dbbc1a03
|
[
"MIT"
] | null | null | null |
Day2/Q2.py
|
nkem1010/python-challenge-solutions
|
203cedc691094a83b110fc75764aac51dbbc1a03
|
[
"MIT"
] | null | null | null |
import sys
print('Python version')
print(sys.version)
| 18.333333
| 24
| 0.763636
| 8
| 55
| 5.25
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109091
| 55
| 3
| 25
| 18.333333
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0.259259
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
8d696798b46f1c17b87fb0b5fd88c1cab4469c51
| 80
|
py
|
Python
|
src/compas_rv2/ui/Rhino/RV2/dev/__plugin__.py
|
selinabitting/compas-RV2
|
0884cc00d09c8f4a75eb2b97614105e4c8bfd818
|
[
"MIT"
] | 34
|
2020-04-27T13:54:38.000Z
|
2022-01-17T19:16:27.000Z
|
src/compas_rv2/ui/Rhino/RV2/dev/__plugin__.py
|
selinabitting/compas-RV2
|
0884cc00d09c8f4a75eb2b97614105e4c8bfd818
|
[
"MIT"
] | 306
|
2020-04-27T12:00:54.000Z
|
2022-03-23T22:28:54.000Z
|
src/compas_rv2/ui/Rhino/RV2/dev/__plugin__.py
|
selinabitting/compas-RV2
|
0884cc00d09c8f4a75eb2b97614105e4c8bfd818
|
[
"MIT"
] | 11
|
2020-06-30T08:23:40.000Z
|
2022-02-01T20:47:39.000Z
|
id = "{949ca7a4-7ddf-4939-8a5b-d945d5ac0bc8}"
version = "0.1.0.0"
title = "RV2"
| 20
| 45
| 0.6625
| 13
| 80
| 4.076923
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.323944
| 0.1125
| 80
| 3
| 46
| 26.666667
| 0.422535
| 0
| 0
| 0
| 0
| 0
| 0.6
| 0.475
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8d77649feeda6cae3f6abca7289ec948726f1252
| 176
|
py
|
Python
|
gtr/constants.py
|
allerter/geniustrecommender
|
f903daf06d6d01feb312ff84216e26017bc543c9
|
[
"MIT"
] | null | null | null |
gtr/constants.py
|
allerter/geniustrecommender
|
f903daf06d6d01feb312ff84216e26017bc543c9
|
[
"MIT"
] | null | null | null |
gtr/constants.py
|
allerter/geniustrecommender
|
f903daf06d6d01feb312ff84216e26017bc543c9
|
[
"MIT"
] | null | null | null |
import os
LASTFM_API_KEY: str = os.environ["LASTFM_API_KEY"]
SECRET_KEY: str = os.environ["SECRET_KEY"]
REDIS_URL: str = os.environ["REDIS_URL"]
HASH_ALGORITHM: str = "HS256"
| 25.142857
| 50
| 0.75
| 29
| 176
| 4.241379
| 0.448276
| 0.121951
| 0.292683
| 0.243902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019108
| 0.107955
| 176
| 6
| 51
| 29.333333
| 0.764331
| 0
| 0
| 0
| 0
| 0
| 0.215909
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.2
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8d896a9e935c0c2c22bb024a067c7f4bf5353a8d
| 6,370
|
py
|
Python
|
racoon/inputs/testdata2.py
|
brekkanegg/birdclef-2021
|
2674b60328372ec35e5deac7677eae347f827a04
|
[
"MIT"
] | null | null | null |
racoon/inputs/testdata2.py
|
brekkanegg/birdclef-2021
|
2674b60328372ec35e5deac7677eae347f827a04
|
[
"MIT"
] | null | null | null |
racoon/inputs/testdata2.py
|
brekkanegg/birdclef-2021
|
2674b60328372ec35e5deac7677eae347f827a04
|
[
"MIT"
] | null | null | null |
import torch.utils.data as torchdata
import numpy as np
import pandas as pd
import librosa
from audiomentations import *
# from config import CFG
# Modified
# # TODO: TTA
# class TestDataset(torchdata.Dataset):
# def __init__(self, df: pd.DataFrame, clip: np.ndarray, chunks_len=[5, 30, 20]):
# self.df = df
# self.clip = clip
# self.chunks_len = chunks_len
# def __len__(self):
# return len(self.df)
# def __getitem__(self, idx: int):
# item_dict = {}
# SR = 32000
# sample = self.df.loc[idx, :]
# row_id = sample.row_id
# item_dict["row_id"] = row_id
# for chunk_len in self.chunks_len:
# chunk_h = (chunk_len - 5) // 2
# end_seconds_c = min(int(sample.seconds + chunk_h + 1), 600)
# start_seconds_c = max(int(end_seconds_c - 5 - chunk_h), 0)
# start_index_c = SR * start_seconds_c
# end_index_c = SR * end_seconds_c
# y_c = self.clip[start_index_c:end_index_c].astype(np.float32)
# # if self.wav_transfos is not None:
# # y_c = self.wav_transfos(y_c, 32000)
# melspec_c = self.compute_melspec(y_c)
# melspec_c = (melspec_c - melspec_c.mean()) / (melspec_c.std() + 1e-6)
# melspec_c = (melspec_c - melspec_c.min()) / (
# melspec_c.max() - melspec_c.min() + 1e-6
# )
# image_c = melspec_c[np.newaxis, ...]
# item_dict[f"{chunk_len}sec_mel"] = image_c
# return item_dict
# def compute_melspec(self, y):
# """
# Computes a mel-spectrogram and puts it at decibel scale
# Arguments:
# y {np array} -- signal
# params {AudioParams} -- Parameters to use for the spectrogram. Expected to have the attributes sr, n_mels, f_min, f_max
# Returns:
# np array -- Mel-spectrogram
# """
# melspec = librosa.feature.melspectrogram(
# y, sr=32000, n_mels=128, fmin=20, fmax=16000
# )
# melspec = librosa.power_to_db(melspec).astype(np.float32)
# return melspec
class TestDataset(torchdata.Dataset):
def __init__(
self,
df: pd.DataFrame,
clip: np.ndarray,
chunks_len=[5, 30, 20],
tta=10,
background_datadir="/data2/minki/kaggle/birdclef-2021/background_soundscape",
):
self.df = df
self.clip = clip
self.chunks_len = chunks_len
self.tta = tta
if self.tta > 0:
self.wav_transfos = self.get_wav_transforms(background_datadir)
self.spec_transfos = self.get_specaug_transforms()
def __len__(self):
return len(self.df)
def __getitem__(self, idx: int):
item_dict = {}
SR = 32000
sample = self.df.loc[idx, :]
row_id = sample.row_id
item_dict["row_id"] = row_id
for chunk_len in self.chunks_len:
chunk_h = (chunk_len - 5) // 2
end_seconds_c = min(int(sample.seconds + chunk_h + 1), 600)
start_seconds_c = max(int(end_seconds_c - 5 - chunk_h), 0)
start_index_c = SR * start_seconds_c
end_index_c = SR * end_seconds_c
y_c = self.clip[start_index_c:end_index_c].astype(np.float32)
if self.tta > 0:
image_cs = []
for _ in range(self.tta):
y_c_m = self.wav_transfos(y_c, 32000)
melspec_c = self.compute_melspec(y_c_m)
melspec_c = (melspec_c - melspec_c.mean()) / (
melspec_c.std() + 1e-6
)
melspec_c = (melspec_c - melspec_c.min()) / (
melspec_c.max() - melspec_c.min() + 1e-6
)
melspec_c = self.spec_transfos(melspec_c)
image_c = melspec_c[np.newaxis, ...]
image_cs.append(image_c)
image_cs = np.concatenate(image_cs, axis=0)
item_dict[f"{chunk_len}sec_mel"] = image_cs
else:
melspec_c = self.compute_melspec(y_c)
melspec_c = (melspec_c - melspec_c.mean()) / (melspec_c.std() + 1e-6)
melspec_c = (melspec_c - melspec_c.min()) / (
melspec_c.max() - melspec_c.min() + 1e-6
)
image_c = melspec_c[np.newaxis, ...]
item_dict[f"{chunk_len}sec_mel"] = image_c
return item_dict
def compute_melspec(self, y):
"""
Computes a mel-spectrogram and puts it at decibel scale
Arguments:
y {np array} -- signal
params {AudioParams} -- Parameters to use for the spectrogram. Expected to have the attributes sr, n_mels, f_min, f_max
Returns:
np array -- Mel-spectrogram
"""
melspec = librosa.feature.melspectrogram(
y, sr=32000, n_mels=128, fmin=20, fmax=16000
)
melspec = librosa.power_to_db(melspec).astype(np.float32)
return melspec
def get_wav_transforms(self, background_datadir):
"""
Returns the transformation to apply on waveforms
Returns:
Audiomentations transform -- Transforms
"""
transforms = Compose(
[
AddGaussianNoise(min_amplitude=0.001, max_amplitude=0.015, p=0.5),
AddGaussianSNR(max_SNR=0.5, p=0.5),
AddBackgroundNoise(
sounds_path=background_datadir,
min_snr_in_db=0,
max_snr_in_db=2,
p=0.5,
),
FrequencyMask(min_frequency_band=0.0, max_frequency_band=0.5, p=0.5),
Gain(min_gain_in_db=-15, max_gain_in_db=15, p=0.5),
]
)
return transforms
def get_specaug_transforms(self):
"""
Returns the transformation to apply on waveforms
Returns:
Audiomentations transform -- Transforms
"""
transforms = SpecFrequencyMask(
min_mask_fraction=0.03,
max_mask_fraction=0.25,
fill_mode="constant",
fill_constant=0.0,
p=0.5,
)
return transforms
| 33.177083
| 133
| 0.543485
| 786
| 6,370
| 4.12341
| 0.204835
| 0.086393
| 0.047208
| 0.059241
| 0.742672
| 0.727862
| 0.720765
| 0.720765
| 0.712126
| 0.712126
| 0
| 0.03542
| 0.352904
| 6,370
| 191
| 134
| 33.350785
| 0.750849
| 0.377237
| 0
| 0.130435
| 0
| 0
| 0.027807
| 0.014566
| 0
| 0
| 0
| 0.005236
| 0
| 1
| 0.065217
| false
| 0
| 0.054348
| 0.01087
| 0.184783
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8da6e99e6857ea53e4593a74bf05175c614b76d0
| 410
|
py
|
Python
|
dash/__init__.py
|
pikhovkin/dj-plotly-dash
|
eed3ced1e652510e39d1aeec4e2703ed21e9f752
|
[
"MIT"
] | 39
|
2018-10-07T23:44:51.000Z
|
2022-02-16T18:16:40.000Z
|
dash/__init__.py
|
pikhovkin/dj-plotly-dash
|
eed3ced1e652510e39d1aeec4e2703ed21e9f752
|
[
"MIT"
] | 66
|
2018-10-07T16:57:25.000Z
|
2022-03-17T18:29:47.000Z
|
dash/__init__.py
|
pikhovkin/dj-plotly-dash
|
eed3ced1e652510e39d1aeec4e2703ed21e9f752
|
[
"MIT"
] | 7
|
2019-02-13T14:54:18.000Z
|
2022-02-15T20:03:19.000Z
|
from .dash import Dash, no_update # noqa: F401
from .views import BaseDashView # noqa: F401
from . import dependencies # noqa: F401
from . import development # noqa: F401
from . import exceptions # noqa: F401
from . import resources # noqa: F401
from .version import __version__ # noqa: F401
# from ._callback_context import CallbackContext as _CallbackContext
#
# callback_context = _CallbackContext()
| 37.272727
| 68
| 0.760976
| 50
| 410
| 6.04
| 0.36
| 0.18543
| 0.278146
| 0.238411
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061584
| 0.168293
| 410
| 10
| 69
| 41
| 0.824047
| 0.441463
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
573041cd3ea8cd5fc381b57204bb61241031fa0d
| 202
|
py
|
Python
|
base/config.py
|
yanshicheng/super-ops
|
dd39fe971bfd0f912cab155b82e41a09aaa47892
|
[
"Apache-2.0"
] | null | null | null |
base/config.py
|
yanshicheng/super-ops
|
dd39fe971bfd0f912cab155b82e41a09aaa47892
|
[
"Apache-2.0"
] | 1
|
2022-01-17T09:34:14.000Z
|
2022-01-18T13:32:20.000Z
|
base/config.py
|
yanshicheng/super_ops
|
dd39fe971bfd0f912cab155b82e41a09aaa47892
|
[
"Apache-2.0"
] | null | null | null |
import os
import configparser
from django.conf import settings
config = configparser.ConfigParser()
config.read(r'config/config.ini')
def get_config(section, key):
return config.get(section, key)
| 20.2
| 36
| 0.782178
| 28
| 202
| 5.607143
| 0.571429
| 0.127389
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118812
| 202
| 9
| 37
| 22.444444
| 0.882022
| 0
| 0
| 0
| 0
| 0
| 0.084158
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.428571
| 0.142857
| 0.714286
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
9397283d924162708036ee15845bd86603e089fc
| 492
|
py
|
Python
|
test_iscontained.py
|
sanjioh/weird_tdd
|
b13f9c523f0c33e4b289de2114a3616bbea82d28
|
[
"MIT"
] | null | null | null |
test_iscontained.py
|
sanjioh/weird_tdd
|
b13f9c523f0c33e4b289de2114a3616bbea82d28
|
[
"MIT"
] | null | null | null |
test_iscontained.py
|
sanjioh/weird_tdd
|
b13f9c523f0c33e4b289de2114a3616bbea82d28
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from iscontained import iscontained
def test_step1():
assert iscontained([], []) is True
def test_step2():
assert iscontained([1], []) is False
def test_step3():
assert iscontained([], [1]) is True
def test_step4():
assert iscontained([1], [1]) is True
def test_step5():
assert iscontained([1], [2, 1]) is True
def test_step6():
assert iscontained([1, 3], [2, 1]) is False
def test_step7():
assert iscontained([1, 2], [2, 1]) is True
| 15.870968
| 47
| 0.632114
| 71
| 492
| 4.28169
| 0.309859
| 0.161184
| 0.355263
| 0.171053
| 0.236842
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059126
| 0.20935
| 492
| 30
| 48
| 16.4
| 0.722365
| 0.026423
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.466667
| 1
| 0.466667
| true
| 0
| 0.066667
| 0
| 0.533333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
93cb6c8ea5d240e14bfec70eef070092b0727ac4
| 84
|
py
|
Python
|
solvers/__init__.py
|
oryba/hashcode2020-prep
|
a742773825c420c6f1b9cbc5e39f0f19fcd667e9
|
[
"MIT"
] | 1
|
2020-02-19T21:32:20.000Z
|
2020-02-19T21:32:20.000Z
|
solvers/__init__.py
|
oryba/hashcode2020-prep
|
a742773825c420c6f1b9cbc5e39f0f19fcd667e9
|
[
"MIT"
] | null | null | null |
solvers/__init__.py
|
oryba/hashcode2020-prep
|
a742773825c420c6f1b9cbc5e39f0f19fcd667e9
|
[
"MIT"
] | null | null | null |
from .dynamic import Dynamic
from .genetic import Genetic
from .simple import Simple
| 28
| 28
| 0.833333
| 12
| 84
| 5.833333
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130952
| 84
| 3
| 29
| 28
| 0.958904
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
93d58143f1a1952abb6005aa70f9589588b005d0
| 21
|
py
|
Python
|
happy.py
|
abhishekbodkhe/DMBSPROJ
|
13a71371d259f413ac15783fa86083eb2cfdfdd2
|
[
"Apache-2.0"
] | null | null | null |
happy.py
|
abhishekbodkhe/DMBSPROJ
|
13a71371d259f413ac15783fa86083eb2cfdfdd2
|
[
"Apache-2.0"
] | null | null | null |
happy.py
|
abhishekbodkhe/DMBSPROJ
|
13a71371d259f413ac15783fa86083eb2cfdfdd2
|
[
"Apache-2.0"
] | null | null | null |
print('hello WORLD')
| 10.5
| 20
| 0.714286
| 3
| 21
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 21
| 1
| 21
| 21
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0.52381
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
93fa15705db73688a57c8e2854eafeb6c28e9c51
| 1,243
|
py
|
Python
|
users/migrations/0015_auto_20210305_1702.py
|
OlexandrTopuzov/Data_converter
|
0ac2319ccaae790af35ab2202724c65d83d32ecc
|
[
"MIT"
] | null | null | null |
users/migrations/0015_auto_20210305_1702.py
|
OlexandrTopuzov/Data_converter
|
0ac2319ccaae790af35ab2202724c65d83d32ecc
|
[
"MIT"
] | null | null | null |
users/migrations/0015_auto_20210305_1702.py
|
OlexandrTopuzov/Data_converter
|
0ac2319ccaae790af35ab2202724c65d83d32ecc
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.7 on 2021-03-05 17:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0014_auto_20210304_1001'),
]
operations = [
migrations.AlterField(
model_name='notification',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='notification',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was update. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='question',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='question',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was update. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
]
| 36.558824
| 145
| 0.628319
| 151
| 1,243
| 5.033113
| 0.357616
| 0.105263
| 0.131579
| 0.152632
| 0.805263
| 0.805263
| 0.676316
| 0.676316
| 0.676316
| 0.676316
| 0
| 0.033441
| 0.254224
| 1,243
| 33
| 146
| 37.666667
| 0.786408
| 0.036203
| 0
| 0.740741
| 1
| 0
| 0.312709
| 0.109532
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.037037
| 0
| 0.148148
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f50ea972e3a6760ad73715a5a43144b082e73e95
| 209
|
py
|
Python
|
python/8kyu/super_duper_easy.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | 3
|
2021-06-08T01:57:13.000Z
|
2021-06-26T10:52:47.000Z
|
python/8kyu/super_duper_easy.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | null | null | null |
python/8kyu/super_duper_easy.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | 2
|
2021-06-10T21:20:13.000Z
|
2021-06-30T10:13:26.000Z
|
"""Kata url: https://www.codewars.com/kata/55a5bfaa756cfede78000026."""
from typing import Union
def problem(a: Union[int, str]) -> Union[int, str]:
return 'Error' if isinstance(a, str) else a * 50 + 6
| 26.125
| 71
| 0.684211
| 31
| 209
| 4.612903
| 0.741935
| 0.111888
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096045
| 0.15311
| 209
| 7
| 72
| 29.857143
| 0.711864
| 0.311005
| 0
| 0
| 0
| 0
| 0.036232
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
f56c2a8219ab8fc74b6bab99a90ece2f62dd2e83
| 217
|
py
|
Python
|
platform/core/polyaxon/administration/register/clusters.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
platform/core/polyaxon/administration/register/clusters.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
platform/core/polyaxon/administration/register/clusters.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
from administration.register.utils import DiffModelAdmin
from db.models.clusters import Cluster
class ClusterAdmin(DiffModelAdmin):
pass
def register(admin_register):
admin_register(Cluster, ClusterAdmin)
| 19.727273
| 56
| 0.815668
| 24
| 217
| 7.291667
| 0.625
| 0.148571
| 0.24
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124424
| 217
| 10
| 57
| 21.7
| 0.921053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0.166667
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
f57bb28cde78115a135f4ff24952094b10a91116
| 46
|
py
|
Python
|
mazikeen/GeneratorException.py
|
hanniballar/mazikeen
|
68693a96c69376f18c21576a610470a543a89316
|
[
"MIT"
] | null | null | null |
mazikeen/GeneratorException.py
|
hanniballar/mazikeen
|
68693a96c69376f18c21576a610470a543a89316
|
[
"MIT"
] | 3
|
2021-04-05T17:14:21.000Z
|
2021-04-06T21:49:41.000Z
|
mazikeen/GeneratorException.py
|
hanniballar/mazikeen
|
68693a96c69376f18c21576a610470a543a89316
|
[
"MIT"
] | null | null | null |
class GeneratorException(Exception):
    """Package-specific exception type.

    Carries no extra state or behavior beyond `Exception`; it exists
    only as a distinct type that callers can raise and catch.
    """
| 15.333333
| 36
| 0.782609
| 4
| 46
| 9
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152174
| 46
| 2
| 37
| 23
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
f57d3675a0c41f61754ef0af5e6b5dcfebc89b2e
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/future/backports/urllib/response.py
|
GiulianaPola/select_repeats
|
17a0d053d4f874e42cf654dd142168c2ec8fbd11
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/future/backports/urllib/response.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/future/backports/urllib/response.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/a2/84/32/b30c1b6fef4de88562d4ac23b2cd5a47e2af9bc64d7b3a32544a27a7c7
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.385417
| 0
| 96
| 1
| 96
| 96
| 0.510417
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
19271a41d52ec37514d337dd5f85ce38ccf45b63
| 35
|
py
|
Python
|
page.py
|
Mat001/fs_rest_customer
|
736bd2ca23bba8fcd2d8883c1008e477903632a6
|
[
"Apache-2.0"
] | 1
|
2020-08-04T06:06:50.000Z
|
2020-08-04T06:06:50.000Z
|
page.py
|
Mat001/fs_rest_customer
|
736bd2ca23bba8fcd2d8883c1008e477903632a6
|
[
"Apache-2.0"
] | 1
|
2019-05-09T14:51:28.000Z
|
2019-05-13T12:59:05.000Z
|
page.py
|
Mat001/fs_rest_customer
|
736bd2ca23bba8fcd2d8883c1008e477903632a6
|
[
"Apache-2.0"
] | 3
|
2018-11-29T04:36:17.000Z
|
2021-02-11T10:52:26.000Z
|
# Pages don't work for Full Stack.
| 35
| 35
| 0.714286
| 7
| 35
| 3.571429
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 35
| 1
| 35
| 35
| 0.892857
| 0.914286
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.