blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5be540cb69db94f0013c175a710ce2d735c56c87 | 9f2445e9a00cc34eebcf3d3f60124d0388dcb613 | /2021-10-04-somamulti/CVvsMean_True_channels4.py | 7067411d2e70dcbb16d6d285329b3265919e5e21 | [] | no_license | analkumar2/Thesis-work | 7ee916d71f04a60afbd117325df588908518b7d2 | 75905427c2a78a101b4eed2c27a955867c04465c | refs/heads/master | 2022-01-02T02:33:35.864896 | 2021-12-18T03:34:04 | 2021-12-18T03:34:04 | 201,130,673 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 23,375 | py | ## exec(open('CVvsMean.py').read())
import moose
import rdesigneur as rd
import numpy as np
import matplotlib.pyplot as plt
import xmltodict
import sys
import os
import io
import importlib
import MOOSEModel_17_somamulti as mm
import pickle
# import featuresv26_nonallen as fts
import moose
# import plotexpv2 as pex
from copy import deepcopy
import numpy.random as nr
from multiprocessing import Pool
import time
import argparse
from pprint import pprint
import pickle
from copy import deepcopy
from allensdk.ephys.ephys_extractor import EphysSweepFeatureExtractor
import warnings
warnings.filterwarnings("ignore", category=RuntimeWarning)
from sklearn.linear_model import LinearRegression
import gc
# from sklearn.linear_model import LinearRegression
# --- Simulation / analysis constants ---
elecid_ori = None  # placeholder for the original electrical-compartment id (set elsewhere)
elecPlotDt = 0.0001  # plot sampling time step (s)
elecDt = 0.0001  # electrical solver time step (s)
stim_start = 1  # current-injection onset (s)
stim_end = 1.9  # current-injection offset (s)
totalsec = 2.5  # total simulated duration (s)
samprate = 20000  # sampling rate of the generated stimulus (Hz)
# NOTE(review): misleading name — this is a large sentinel value, not float NaN;
# confirm intended use before relying on it.
nan = 10000
# model = {'Error': 470.08426631177923, 'Parameters': {'notes': '', 'Morphology': {'sm_len': 6.73077545020806e-05, 'sm_diam': 6.73077545020806e-05}, 'Passive': {'Cm': 1.19e-10, 'Rm': 198905511.35013187, 'Em': -0.05608805404962253}, 'Channels': {'Na_Chan': {'gbar': 4.932428436178534e-06, 'Erev': 0.06, 'Kinetics': '../../Compilations/Kinetics/Na_Chan_Custom4', 'KineticVars': {'m_vhalf_inf': -0.0359019320487981, 'm_slope_inf': 0.00788319407034278, 'm_A': -0.044359772011543515, 'm_B': 0.02, 'm_C': 0.0161, 'm_D': 0.0547, 'm_E': 0.0311, 'm_F': 0.00064, 'h_vhalf_inf': -0.050316776512570736, 'h_slope_inf': -0.005219098993311621, 'h_A': -0.04763346729779447, 'h_B': 0.003464, 'h_C': 0.0, 'h_D': 0.0262, 'h_E': 0.00854, 'h_F': 0.3069864140957819, 's_vhalf_inf': -0.04492406530182888, 's_slope_inf': -0.010911310412463028, 's_A': 1, 's_B': 0.001, 's_C': 0, 's_D': 0.6152461928009552, 's_E': 0.001, 's_F': 1}}, 'K_DR_Chan': {'gbar': 7.658644948611434e-07, 'Erev': -0.09, 'Kinetics': '../../Compilations/Kinetics/K_DR_Chan_Custom3', 'KineticVars': {'n_vhalf_inf': 0.01731374470366132, 'n_slope_inf': 0.01471839705516013, 'n_A': 0.014985238813908366, 'n_E': 0.020993521832043036, 'n_F': 0.01380360065197889}}, 'K_A_Chan': {'gbar': 1.8215306367504489e-07, 'Erev': -0.09, 'Kinetics': '../../Compilations/Kinetics/K_A_Chan_Custom3', 'KineticVars': {'n_vhalf_inf': 0.02149776726410334, 'n_slope_inf': 0.012912009256498078, 'n_A': -0.024208971400308105, 'n_B': 0.08860901204015699, 'n_C': 0, 'n_D': 0, 'n_E': 0.0060496419089759845, 'n_F': 0.004765782060316019, 'l_vhalf_inf': -0.04389847493434692, 'l_slope_inf': -0.025778506595107165, 'l_min': 0.002, 'l_m': 0.3418388911022859, 'l_cm': 0.05}}, 'K_M_Chan': {'gbar': 1.6808195467313064e-10, 'Erev': -0.09, 'Kinetics': '../../Compilations/Kinetics/K_M_Chan_Custom1', 'KineticVars': {'factor': 3.3e-05}}, 'K_SK_Chan': {'gbar': 2.056391414204614e-15, 'Erev': -0.09, 'Kinetics': '../../Compilations/Kinetics/K_SK_Chan_Custom3'}, 'Ca_T_Chan': {'gbar': 
1.6721696312505114e-15, 'Erev': 0.12, 'Kinetics': '../../Compilations/Kinetics/Ca_T_Chan_Custom1'}}, 'Ca_Conc': {'Ca_B': 1800000000.0, 'Ca_tau': 0.15, 'Ca_base': 5e-05, 'Kinetics': '../../Compilations/Kinetics/Ca_Conc_(Common)'}}, 'Scores': {}, 'Score': {'E_rest_0': 0, 'Input resistance': -1.9528889095844233, 'Cell capacitance': 0, 'AP1_amp_1.5e-10': 0, 'APp_amp_1.5e-10': 0, 'APavgpratio_amp_1.5e-10': -3.0984125901593194, 'AP1_width_1.5e-10': -3.6253310169109465, 'APp_width_1.5e-10': -2.4509513665268226, 'AP1_thresh_1.5e-10': 0, 'APp_thresh_1.5e-10': 0, 'ISI1_1.5e-10': 5.549977871150843, 'ISIl_1.5e-10': 0, 'ISIavg_1.5e-10': 0, 'freq_1.5e-10': 0, 'Adptn_id_1.5e-10': -6.337688639200551, 'fAHP_AP1_amp_1.5e-10': 0, 'fAHP_APp_amp_1.5e-10': -1.601930907299259, 'mAHP_APp_amp_1.5e-10': -2.7346819933400606, 'mAHP_APp_time_1.5e-10': 0, 'AHP_AP1_amp_1.5e-10': 0, 'AHP_APp_amp_1.5e-10': -2.57430297658575, 'AHP_APp_time_1.5e-10': 0, 'Upstroke_AP1_time_1.5e-10': 1.7639475967188118, 'Upstroke_APp_time_1.5e-10': 4.166816666667142, 'Upstroke_AP1_amp_1.5e-10': -1.103959350109573, 'Upstroke_APp_amp_1.5e-10': -2.960660499806755, 'Upstroke_AP1_value_1.5e-10': 1.4455993645508278, 'Upstroke_APp_value_1.5e-10': 2.9806177283258517, 'Downstroke_AP1_time_1.5e-10': -3.901371573204427, 'Downstroke_APp_time_1.5e-10': -1.166711666665328, 'Downstroke_AP1_amp_1.5e-10': 4.945860117472832, 'Downstroke_APp_amp_1.5e-10': 0, 'Downstroke_AP1_value_1.5e-10': -3.376310966584173, 'Downstroke_APp_value_1.5e-10': -2.2735722411322983, 'UpDn_AP1_ratio_1.5e-10': 0, 'UpThr_AP1_diff_1.5e-10': -2.028035043099396, 'UpThr_APp_diff_1.5e-10': -3.054102872346163, 'offset_1.5e-10': -2.0129256585474393, 'AP1_amp_3e-10': 0, 'APp_amp_3e-10': -3.2455115126952085, 'APavgpratio_amp_3e-10': 0, 'AP1_width_3e-10': -3.742272086154128, 'APp_width_3e-10': -1.368315651114673, 'AP1_thresh_3e-10': 0, 'APp_thresh_3e-10': 0, 'ISI1_3e-10': 6.997038752423039, 'ISIl_3e-10': 0, 'ISIavg_3e-10': 0, 'freq_3e-10': 0, 'Adptn_id_3e-10': 
-5.2066952295950335, 'fAHP_AP1_amp_3e-10': 0, 'fAHP_APp_amp_3e-10': -2.0616055775966013, 'mAHP_APp_amp_3e-10': -1.9164540887580153, 'mAHP_APp_time_3e-10': 0, 'AHP_AP1_amp_3e-10': 0, 'AHP_APp_amp_3e-10': -1.9423259205205055, 'AHP_APp_time_3e-10': 0, 'Upstroke_AP1_time_3e-10': 3.1454726305580336, 'Upstroke_APp_time_3e-10': 0, 'Upstroke_AP1_amp_3e-10': 0, 'Upstroke_APp_amp_3e-10': -6.279148420998243, 'Upstroke_AP1_value_3e-10': 1.2877783413072783, 'Upstroke_APp_value_3e-10': 0, 'Downstroke_AP1_time_3e-10': -4.151434269793974, 'Downstroke_APp_time_3e-10': 0, 'Downstroke_AP1_amp_3e-10': 5.5410446811235525, 'Downstroke_APp_amp_3e-10': 0, 'Downstroke_AP1_value_3e-10': -4.102321125163147, 'Downstroke_APp_value_3e-10': 0, 'UpDn_AP1_ratio_3e-10': 0, 'UpThr_AP1_diff_3e-10': 0, 'UpThr_APp_diff_3e-10': 0, 'offset_3e-10': -1.1159060878515004, 'freq300to150ratio': 1.4756342221364624}, 'Features': {'Sampling rate': 10000.4, 'stim_start': 1, 'stim_end': 1.5, 'E_rest_0': -0.06716766200787411, 'E_rest_m25': -0.06716847233440179, 'E_rest_150': -0.06716847233440967, 'E_rest_300': -0.0671684723344067, 'Input resistance': 99007520.80599822, 'Cell capacitance': 1.189516331831558e-10, 'AP1_amp_1.5e-10': 0.11489228726817852, 'APp_amp_1.5e-10': 0.10309974181042947, 'APavgpratio_amp_1.5e-10': 1.011636128160689, 'AP1_width_1.5e-10': 0.000600000000000156, 'APp_width_1.5e-10': 0.0008000000000001339, 'AP1_thresh_1.5e-10': -0.04792232083616102, 'APp_thresh_1.5e-10': -0.04408274251225343, 'ISI1_1.5e-10': 0.04059999999999997, 'ISIl_1.5e-10': 0.04730000000000012, 'ISIavg_1.5e-10': 0.04504000000000001, 'freq_1.5e-10': 22.0, 'Adptn_id_1.5e-10': 0.14164904862579564, 'fAHP_AP1_amp_1.5e-10': 0.013265121619563294, 'fAHP_APp_amp_1.5e-10': 0.01462801477249473, 'mAHP_APp_amp_1.5e-10': 0.010687823364556537, 'mAHP_APp_time_1.5e-10': 0.01760000000000006, 'AHP_AP1_amp_1.5e-10': 0.010698216494546685, 'AHP_APp_amp_1.5e-10': 0.010687823364556537, 'AHP_APp_time_1.5e-10': 0.017599296028158875, 
'Upstroke_AP1_time_1.5e-10': -0.00019999999999997797, 'Upstroke_APp_time_1.5e-10': -0.00019999999999997797, 'Upstroke_AP1_amp_1.5e-10': 0.052720853802266814, 'Upstroke_APp_amp_1.5e-10': 0.053067399588552736, 'Upstroke_AP1_value_1.5e-10': 458.853813569696, 'Upstroke_APp_value_1.5e-10': 310.57488393468816, 'Downstroke_AP1_time_1.5e-10': 0.0, 'Downstroke_APp_time_1.5e-10': 0.000300000000000189, 'Downstroke_AP1_amp_1.5e-10': 0.11489228726817852, 'Downstroke_APp_amp_1.5e-10': 0.08283788538314664, 'Downstroke_AP1_value_1.5e-10': -150.80847062436462, 'Downstroke_APp_value_1.5e-10': -103.94659286355234, 'UpDn_AP1_ratio_1.5e-10': 2.9878313023917076, 'UpThr_AP1_diff_1.5e-10': 0.03347470230401816, 'UpThr_APp_diff_1.5e-10': 0.029981669766396494, 'offset_1.5e-10': 0.010699343217505, 'AP1_amp_3e-10': 0.11501530256693318, 'APp_amp_3e-10': 0.0771327904013039, 'APavgpratio_amp_3e-10': 1.0320725001695672, 'AP1_width_3e-10': 0.0005999999999999339, 'APp_width_3e-10': 0.0013000000000000789, 'AP1_thresh_3e-10': -0.05174071145709095, 'APp_thresh_3e-10': -0.0420327284835539, 'ISI1_3e-10': 0.01859999999999995, 'ISIl_3e-10': 0.026799999999999935, 'ISIavg_3e-10': 0.024055000000000003, 'freq_3e-10': 42.0, 'Adptn_id_3e-10': 0.30597014925373156, 'fAHP_AP1_amp_3e-10': 0.014689867767231538, 'fAHP_APp_amp_3e-10': 0.018489331086231908, 'mAHP_APp_amp_3e-10': 0.017472600616231806, 'mAHP_APp_time_3e-10': 0.011900000000000022, 'AHP_AP1_amp_3e-10': 0.014395128935768262, 'AHP_APp_amp_3e-10': 0.017472600616231806, 'AHP_APp_time_3e-10': 0.011899524019039239, 'Upstroke_AP1_time_3e-10': -9.999999999998899e-05, 'Upstroke_APp_time_3e-10': -0.000400000000000178, 'Upstroke_AP1_amp_3e-10': 0.05988333479158259, 'Upstroke_APp_amp_3e-10': 0.047241378190381905, 'Upstroke_AP1_value_3e-10': 434.8558465335387, 'Upstroke_APp_value_3e-10': 103.05596623503826, 'Downstroke_AP1_time_3e-10': 9.999999999998899e-05, 'Downstroke_APp_time_3e-10': 0.0004999999999999449, 'Downstroke_AP1_amp_3e-10': 0.11396195824118535, 
'Downstroke_APp_amp_3e-10': 0.06324332216898552, 'Downstroke_AP1_value_3e-10': -159.11017309229854, 'Downstroke_APp_value_3e-10': -45.730396281957546, 'UpDn_AP1_ratio_3e-10': 2.253555066517058, 'UpThr_AP1_diff_3e-10': 0.044455573914266834, 'UpThr_APp_diff_3e-10': 0.0221056343395291, 'offset_3e-10': 0.01756057160002228, 'freq300to150ratio': 1.9090909090909092}}
# mm.plotModel(model)
# exec(open("Combined100models.py").read())
# Load the reference model and run one plot pass with refreshKin=True —
# presumably only for its side effect of regenerating the channel kinetics
# before the sweeps below (TODO confirm); the figure itself is closed.
from Combined100models import Models
modelname = 'Model4'
fullModel = deepcopy(Models[modelname])
mm.plotModel(
    fullModel,
    CurrInjection=150e-12,  # 150 pA test injection
    vClamp=None,
    refreshKin=True,  # regenerate kinetics once per process
    Truntime=0.01,
    syn=True, synwg=0.01, synfq=0.5
)
plt.close('all')  # discard the figure produced by plotModel
def get_Vmvec(fullModel_tI_II):
    """Run one noisy current-injection trial in MOOSE and return spike peak times.

    Parameters are packed into one sequence so the function can be used with
    ``Pool.map``: (model dict, time vector tI, injected-current vector II).

    Returns whatever processVmvec() returns (spike peak times, or [] if no
    spikes were detected).
    """
    fullModel = deepcopy(fullModel_tI_II[0])
    tI = fullModel_tI_II[1]
    II = fullModel_tI_II[2]
    # Build the model tree in MOOSE.  The returned traces of this fixed-current
    # run are discarded — presumably the call is made only to construct the
    # model before the custom stimulus is attached below (TODO confirm).
    tempt, tempv, Ca = mm.runModel(
        fullModel,
        CurrInjection=150e-12,
        vClamp=None,
        refreshKin=False,
        Truntime=0.01,
        syn=True, synwg=0.01, synfq=0.43
    )
    # Replace the default stimulus with a StimulusTable driven by II.
    moose.delete("/model/stims/stim0")
    stimtable = moose.StimulusTable("/model/stims/stim2")
    soma = moose.element("/model/elec/soma")
    moose.connect(stimtable, "output", soma, "setInject")
    stimtable.vector = II
    stimtable.stepSize = (
        0 # This forces use of current time as x value for interpolation
    )
    stimtable.stopTime = tI[-1]
    Tdur = tI[-1]
    moose.reinit()
    moose.start(tI[-1])
    Vmvec = moose.element("/model/graphs/plot0").vector
    # NOTE(review): capitalization differs from "/model/graphs" above —
    # confirm "/Graphs/plott" is the intended time-vector table.
    tvec = moose.element("/Graphs/plott").vector
    spiket = processVmvec(tvec,Vmvec)
    return spiket
def processVmvec(tvec, Vmvec):
    """Extract spike peak times from a membrane-potential trace.

    Builds the injected-current trace (150 pA during the stimulus window)
    and hands everything to the Allen SDK sweep feature extractor.

    Returns the "peak_t" spike feature, or [] when spike detection fails.
    """
    inj_amp = 150e-12
    current = np.zeros(len(tvec))
    in_stim = (tvec >= stim_start) & (tvec <= stim_end)
    current[in_stim] = inj_amp
    extractor = EphysSweepFeatureExtractor(
        t=tvec,
        v=Vmvec * 1e3,        # V  -> mV
        i=current * 1e12,     # A  -> pA
        filter=len(tvec) / tvec[-1] / 2500,
        start=stim_start,
        end=stim_end,
    )
    try:
        extractor.process_spikes()
    except ValueError:
        # No detectable spikes (or malformed trace): report an empty train.
        return []
    return extractor.spike_feature("peak_t")
def calcCV(ISIlist):
    """Coefficient of variation (population std / mean) of inter-spike intervals."""
    intervals = np.asarray(ISIlist)
    return np.std(intervals) / np.mean(intervals)
def calcjit(spiket_list):
    """Return the slope of spike-time jitter versus mean spike time.

    For each spike index, the std across trials (jitter) and the mean across
    trials are computed; the least-squares slope of jitter vs. mean is the
    "jitter slope" reported by the sweep code.

    Parameters
    ----------
    spiket_list : list of per-trial spike-time sequences (lengths may differ).

    Returns
    -------
    float : slope of the fitted line, or ``np.nan`` when there are no trials
        or any trial has fewer than 3 spikes.
    """
    # Robustness fix: the original crashed on an empty trial list.
    if not spiket_list:
        return np.nan
    # Compare only spikes present in every trial; cap at the first 100 spikes
    # as in the original analysis.
    minspikes = min(min(len(s) for s in spiket_list), 100)
    if minspikes < 3:
        print('<3 spikes')
        return np.nan
    # Trim into a local array instead of mutating the caller's list in place.
    trimmed = np.array([np.asarray(s)[:minspikes] for s in spiket_list], dtype=float)
    jitter = np.nanstd(trimmed, 0)
    spikemean = np.nanmean(trimmed, 0)
    # Ordinary least-squares slope with intercept; identical to
    # sklearn LinearRegression().coef_[0] but without the extra dependency.
    return np.polyfit(spikemean, jitter, 1)[0]
def calcCVjit(fullModel):
    """Run 1000 noisy stimulation trials and return [CV at 0.5 s, jitter slope].

    Each trial injects 150 pA plus Gaussian noise (sigma 20 pA) between 1 s and
    1.9 s.  The ISI straddling the 0.5 s-after-onset mark is collected per trial
    for the CV; full spike trains feed calcjit().  Raw spike trains are appended
    to 'CVvsMean_True_channels4_spiket.py'.

    Returns [np.nan, np.nan] when the probe trial spikes too little or when
    fewer than 500 trials yield usable data.
    """
    f = open('CVvsMean_True_channels4_spiket.py', 'a+')
    f.write(f'fullModel = {fullModel} \n \n')
    # Pre-generate 1000 (time, current) stimulus pairs.
    # NOTE(review): the 1 s / 1.9 s window duplicates the module-level
    # stim_start/stim_end constants — keep them in sync.
    tI_list = []
    II_list = []
    for i in range(1000):
        curr = np.zeros(int(samprate*totalsec))
        curr[int(1*samprate):int(1.9*samprate)] = 150e-12
        noise = np.random.normal(0,20e-12,int(samprate*totalsec))
        curr = curr + noise
        t = np.linspace(0,totalsec, int(totalsec*samprate))
        tI_list.append(t)
        II_list.append(curr)
    # Probe trial: bail out early if this model barely spikes at all.
    tempspiket = get_Vmvec([fullModel,tI_list[0],II_list[0]])
    if len(tempspiket)<2:
        print('Too few spikes 1')
        return [np.nan,np.nan]
    spiket_list = []
    ISI500list = []
    # NOTE(review): cpu_count()-10 is non-positive on machines with <=10 cores,
    # which makes Pool raise — confirm this only runs on the intended cluster.
    pool = Pool(processes=os.cpu_count()-10) #opening processes
    # np.repeat on a dict yields an object array of 1000 references to the same
    # dict; each worker deepcopies it inside get_Vmvec, so this is safe here.
    A = pool.map(get_Vmvec, zip(np.repeat(fullModel, 1000), tI_list, II_list))
    for a in A:
        # tvec,Vmvec = a
        # plt.plot(tvec, Vmvec)
        # plt.show()
        # spiket = processVmvec(tvec,Vmvec)
        spiket = a
        if len(spiket)<2:
            print('Too few spikes 2')
            continue
            # return [np.nan,np.nan]
        # Require at least one spike on each side of the 0.5 s-after-onset mark.
        if len([i for i in spiket if i>(stim_start+0.5)])<1 or len([i for i in spiket if i<(stim_start+0.5)])<1:
            print('No spikes around 0.5s mark')
            continue
            # return [np.nan,np.nan]
        # ISI straddling the 0.5 s mark: first spike after minus last spike before.
        ISI500list.append(min([i for i in spiket if i>(stim_start+0.5)]) - max([i for i in spiket if i<(stim_start+0.5)]))
        spiket_list.append(spiket)
    # plt.show()
    pool.terminate()
    CV500 = calcCV(ISI500list)
    jitter = calcjit(spiket_list)
    f.write(f'spiketlist = {spiket_list} \n \n')
    f.close()
    # Require at least half the trials to be usable; otherwise report NaN.
    if len(ISI500list)<500 or len(spiket_list)<500:
        return [np.nan,np.nan]
    else:
        return [CV500, jitter]
def main(Channame):
    """Sweep the conductance (gbar) of one channel over a fixed set of ratios.

    For each ratio the channel's gbar is scaled relative to the reference
    model, CV and jitter are computed via calcCVjit(), and the results are
    appended as python assignments to 'CVvsMean_True_channels4_jitCV.py'.

    Parameters
    ----------
    Channame : str
        One of the supported channel names below.  Anything else is a no-op
        (h_Chan support existed previously but was disabled).
    """
    gbarratio = [0, 0.1, 0.2, 0.5, 0.75, 0.9, 1, 1.1, 1.5, 2, 3, 5, 10]
    supported = ['Na_Chan', 'K_DR_Chan', 'K_A_Chan', 'K_M_Chan', 'K_SK_Chan', 'Ca_L_Chan']
    if Channame in supported:
        _sweep_channel(Channame, gbarratio)


def _sweep_channel(channame, gbarratio):
    """Run the gbar sweep for a single channel and append results to disk.

    Replaces six near-identical copy-pasted per-channel branches; the output
    variable names (e.g. jit_Na, CV500_K_DR) match the original branches.
    """
    label = channame[:-len('_Chan')]  # 'Na_Chan' -> 'Na', 'K_DR_Chan' -> 'K_DR', ...
    CV500_list = []
    jit_list = []
    for ratio in gbarratio:
        print(channame, end='\t')
        print(ratio, end='\t')
        newModel = deepcopy(fullModel)
        # Scale gbar relative to the unmodified reference model.
        newModel["Parameters"]["Channels"][channame]["gbar"] = (
            fullModel["Parameters"]["Channels"][channame]["gbar"] * ratio
        )
        print(newModel["Parameters"]["Channels"][channame]["gbar"])
        CV500, jit = calcCVjit(newModel)
        CV500_list.append(CV500)
        jit_list.append(jit)
    # Append the sweep results as executable python assignments (same format
    # as the original per-channel branches).
    ff = open('CVvsMean_True_channels4_jitCV.py', 'a+')
    ff.write(f'jit_{label} = {jit_list} \n')
    ff.write(f'CV500_{label} = {CV500_list} \n \n')
    ff.close()
if __name__ == '__main__':
    # Usage: python CVvsMean_True_channels4.py <ChannelName>, e.g. Na_Chan
    cli = argparse.ArgumentParser()
    cli.add_argument('Chan', type=str)
    main(cli.parse_args().Chan)
#fig = pickle.load(open('CVvsMean.pkl', 'rb'))
#plt.show()
| [
"noreply@github.com"
] | analkumar2.noreply@github.com |
7117f04d76a66de8c596825a9882b35876b5d25c | f00a32d96b1d13b5c1783a6b55c3843bf8130e3f | /QuestoesPy/venv/Scripts/pip3-script.py | c21117c2f143ad63f3eaf4400e72a55e0747a3b1 | [] | no_license | paulodias99/LapiscoComputerVisionPython | d6e4f215b5b407730339ad2783e5a9b59e156dec | eff6aaeecd99462722e49f04d0732a885bccb01c | refs/heads/main | 2023-03-03T14:24:57.857319 | 2021-02-15T00:23:06 | 2021-02-15T00:23:06 | 301,819,418 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 439 | py | #!C:\Users\User\Desktop\GIT\LapiscoComputerVisionPython\QuestoesPy\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip the "-script.py(w)"/".exe" suffix so pip reports its canonical
    # program name in usage/error messages.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    # Exit with whatever status pip's console-script entry point returns.
    sys.exit(
        load_entry_point('pip==19.0.3', 'console_scripts', 'pip3')()
    )
| [
"paulodiasprivado@gmail.com"
] | paulodiasprivado@gmail.com |
7d7d40936fcdafd873c7e58ac21c52f4cf5597a5 | f4cb3ed05aba5ea834022b48570d18ba83e16af4 | /echo_test.py | c0d028e2e992b618fed67765c896bc5ef9f0d032 | [] | no_license | ProgrammersIT/acelera-devops | 32fdd0be1f53686ffe8ddd56ea4b31fc0a2a7196 | 905a2819c316dd5fd85447548cb345e96db3ef04 | refs/heads/main | 2023-04-14T12:47:39.505155 | 2021-04-23T19:54:32 | 2021-04-23T19:54:32 | 351,758,129 | 0 | 1 | null | 2021-04-23T19:54:32 | 2021-03-26T11:23:50 | Python | UTF-8 | Python | false | false | 204 | py | import unittest
import echo
class EchoTests(unittest.TestCase):
    """Unit tests for the echo module."""

    def testEcho(self):
        # echo() must return its input unchanged.
        message = "TEST MESSAGE"
        self.assertEqual(message, echo.echo(message))
| [
"lazarodm@gmail.com"
] | lazarodm@gmail.com |
1eb89645579300e537725a8b38d2983639985edc | c74035d22a23dc3c3d3a38c9cedee97e31e4d5af | /migrations/versions/b08a27f7d731_.py | db8e7b2d9dd66fa4c672458f0b1cfc90b92f1e84 | [] | no_license | mostafayahia/Fyyur | a817ed25dcecc65eba326c836ca0066a7d3b7ffa | fd4992800499cb744dc818eb1ca7ae9796c3d4fd | refs/heads/master | 2022-08-03T21:46:40.679305 | 2020-05-18T08:42:35 | 2020-05-18T08:42:35 | 258,949,525 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,690 | py | """empty message
Revision ID: b08a27f7d731
Revises:
Create Date: 2020-05-09 21:25:28.329306
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
# This migration ('b08a27f7d731') is the root of the chain: no parent
# revision, no branch labels, no dependencies.
revision = 'b08a27f7d731'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the Artist and Venue tables (auto-generated by Alembic)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('Artist',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('city', sa.String(length=120), nullable=False),
    sa.Column('state', sa.String(length=120), nullable=False),
    sa.Column('phone', sa.String(length=120), nullable=True),
    sa.Column('genres', sa.String(length=120), nullable=False),
    sa.Column('image_link', sa.String(length=500), nullable=True),
    sa.Column('facebook_link', sa.String(length=120), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('Venue',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('city', sa.String(length=120), nullable=False),
    sa.Column('state', sa.String(length=120), nullable=False),
    sa.Column('address', sa.String(length=120), nullable=False),
    sa.Column('phone', sa.String(length=120), nullable=True),
    sa.Column('image_link', sa.String(length=500), nullable=True),
    sa.Column('facebook_link', sa.String(length=120), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the Venue and Artist tables (reverse of upgrade())."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('Venue')
    op.drop_table('Artist')
    # ### end Alembic commands ###
| [
"yahia.eltaib@yahoo.com"
] | yahia.eltaib@yahoo.com |
9dc49b8cb22cb27f43401adb3d65dd39445a4033 | 27a09a56fb6c2308afbc5b2173a26add55ef1e31 | /4.9example/manage.py | 7ede6ab680b0ca0274b9f5a977e8b71fd41378c2 | [] | no_license | mahbubme/Flask-Web-Development | 1529747c5fe89a7e9fb4be3deb4ea00d3f4ffbe3 | 05035ccaec22f3e7d6361cf0629d692be2896d07 | refs/heads/master | 2020-07-26T07:32:18.123536 | 2015-03-23T10:08:51 | 2015-03-23T10:08:51 | 32,164,330 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 711 | py | #!/usr/bin/env python
import os
from app import create_app, db
from app.models import User, Role
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
# Build the Flask app from the FLASK_CONFIG environment variable (falling
# back to the 'default' configuration) and wire up Flask-Script / Flask-Migrate.
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
migrate = Migrate(app, db)
def make_shell_context():
    """Objects pre-imported into the `python manage.py shell` session."""
    return dict(app=app, db=db, User=User, Role=Role)
# Register the interactive shell and the database-migration commands.
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def test():
    """Run the unit tests."""
    # Discover every test module under tests/ and run with verbose output.
    import unittest
    tests = unittest.TestLoader().discover('tests')
    unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == '__main__':
    manager.run()  # dispatch to the Flask-Script command given on the CLI
"mahbuburrahmanmihir@gmail.com"
] | mahbuburrahmanmihir@gmail.com |
d225313155a966ba861e82567bdca6f0f344d5d6 | 2c82be67b4a853178bae96ce1436394802e0915b | /training/trainAI.py | f9184bd27360927d31bc5df0c106ca176bdd6df9 | [] | no_license | willbowden/ai-discord-chatbot | d19a5fa72d229b58feea516bacf5512ef3e6ae16 | 40b21122d09679b1a88035df02783450eafd8db8 | refs/heads/main | 2023-06-03T03:13:49.667753 | 2021-06-23T17:52:22 | 2021-06-23T17:52:22 | 375,110,252 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,380 | py | import os
import importlib
from util import wbjson
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Activation, Dropout
from tensorflow.keras.optimizers import SGD
import numpy as np
from .use import embed
async def train_ai():
    """Train the intent classifier and save it as "david2_model".

    Reads (intent, pattern) pairs from dataset.json, embeds each pattern with
    the sentence encoder from ``use``, trains a small feed-forward classifier
    and returns the fitted Keras model.

    NOTE(review): declared async but contains no await — confirm it really
    needs to be a coroutine (presumably so the bot's event loop can call it).
    """
    data = wbjson.ReadToRaw("dataset.json")
    # Flatten the dataset: one training row per (intent, pattern) pair.
    trainingData = []
    for intent in data.keys():
        for msg in data[intent]["patterns"]:
            trainingData.append({"type": intent, "message": msg})
    # Lowercased patterns become the model inputs via sentence embeddings.
    sentences = list(map(lambda i: i["message"].lower(), trainingData))
    xTrain = embed(sentences)
    # One-hot labels in the fixed order [greeting, goodbye, insult, compliment].
    # NOTE(review): any other intent maps to an all-zero vector — confirm
    # dataset.json only contains these four intents.
    yList = list(map(lambda x: [1 if x["type"] == "greeting" else 0, 1 if x["type"] =="goodbye" else 0, 1 if x["type"] == "insult" else 0, 1 if x["type"] == "compliment" else 0], trainingData))
    yTrain = tf.constant(yList)
    # Small dense classifier over the sentence embeddings, with dropout.
    model = Sequential()
    model.add(Dense(128, input_shape=(len(xTrain[0]),), activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(64, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(len(yTrain[0]), activation='softmax'))
    # NOTE(review): `lr=` was renamed `learning_rate=` in newer Keras —
    # confirm the pinned TensorFlow version still accepts it.
    sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
    model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
    model.fit(np.array(xTrain), np.array(yTrain), epochs=200, batch_size=5, verbose=1)
    model.save("david2_model")
    return model
"willbowden03@yahoo.co.uk"
] | willbowden03@yahoo.co.uk |
f0a99741ea3899f3f2f7366efc0c80b12758e874 | a524901d55a4c0a74a91301d60486aec5aec3cfa | /zu_fang.py | b94d406a495e8491189409fcbc6032954b2c04a5 | [] | no_license | tenlee2012/PlayCollections | 4d32516ce05dc1b72fa43816f7044004d7abd49a | a8b5cf835c54605ccabfb14b2c2926a147c99c60 | refs/heads/master | 2021-09-10T15:14:24.339577 | 2018-03-28T09:51:40 | 2018-03-28T09:51:40 | 104,848,122 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,531 | py | # coding: utf-8
import requests
from pymongo import MongoClient
import json
import time
from bs4 import BeautifulSoup
from raven import Client
import re
# Browser-like User-Agent so the target sites do not reject the scraper outright.
headers = {
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36',
}
# Local MongoDB: database "house", collection "room" stores every listing.
client = MongoClient()
db = client.house
collection = db.room
def get_mogo():
    """Scrape rental listings from Mogoroom (蘑菇公寓) and upsert them into MongoDB.

    Pages through the JSON list endpoint until an empty page is returned.
    """
    url = 'http://www.mogoroom.com/list'
    page = 1
    id = 1  # running counter, used only for the progress log; NOTE: shadows builtin id()
    # NOTE(review): hardcoded session cookies committed to source — these
    # expire and leak session identifiers; move to configuration.  Also, the
    # standard HTTP request header is 'Cookie', not a 'cookies' key — confirm
    # the server actually honors this.
    headers['cookies'] = 'gr_user_id=b9aa0347-8938-4de8-9ebc-c97c8d21b7fe; UM_distinctid=15f6baa20f659-07fcf34b9d40b5-31657c00-13c680-15f6baa20f723a; hadoop_renter_key=c8e1d53f-d676-4d3b-b9fc-d42cb09870ae; sajssdk_2015_cross_new_user=1; nice_id9d030e80-e73e-11e5-b771-11c8f335ec09=c5bb52b3-bd2f-11e7-b1e7-c31ba49b12da; qimo_seosource_9d030e80-e73e-11e5-b771-11c8f335ec09=%E7%AB%99%E5%86%85; qimo_seokeywords_9d030e80-e73e-11e5-b771-11c8f335ec09=; accessId=9d030e80-e73e-11e5-b771-11c8f335ec09; JSESSIONID=61296B508A6847287558EAB51544BBBF-n1; CNZZDATA1253147438=957311629-1509334538-null%7C1509361566; sensorsdata2015jssdkcross=%7B%22distinct_id%22%3A%2215f6baa8dc78da-037606a33820c2-31657c00-1296000-15f6baa8dc89ba%22%2C%22%24device_id%22%3A%2215f6baa8dc78da-037606a33820c2-31657c00-1296000-15f6baa8dc89ba%22%2C%22props%22%3A%7B%22%24latest_referrer%22%3A%22%22%2C%22%24latest_referrer_host%22%3A%22%22%7D%7D; gr_session_id_aca7dc2ea0f02f49=44df7ed7-429b-4494-a71a-7a8f6ae2a98e; JSESSIONID=61296B508A6847287558EAB51544BBBF-n1'
    while True:
        # time.sleep(3)
        params = {"page": page}
        response = requests.post(url, headers=headers, timeout=100, params=params)
        print("url", response.url)
        d = json.loads(response.text, encoding="UTF-8")
        # Stop once the API returns no more rooms.
        if d is None or (not d.get('roomInfos')) or len(d.get('roomInfos', [])) == 0:
            break
        for room in d['roomInfos']:
            room['origin'] = 'mogo'  # tag the source site
            room['rentType'] = room['rentType']['value']
            # Upsert keyed on roomId so re-runs update rather than duplicate.
            collection.update({'roomId': room['roomId']}, room, True)
            id += 1
        page += 1
        print("id = {}, page = {}".format(id, page))
def get_ziroom():
    """Scrape rental listings from Ziroom (Shanghai) and upsert them into MongoDB.

    Walks result pages 1..49 (hard page cap), parses each listing card with
    BeautifulSoup and upserts one document per room keyed on roomId.
    Raises RuntimeError on a non-200 response or an empty body.
    """
    url = 'http://sh.ziroom.com/z/nl/z2.html'
    page = 1
    while True:
        # Hard cap to avoid walking the site forever.
        if page >= 50:
            break
        params = {'p': page}
        resp = requests.get(url, headers=headers, timeout=100, params=params)
        print("url", resp.url)
        if resp.status_code != 200 or not resp.text:
            raise RuntimeError('返回数据失败,page={},status={},html={}'.format(page, resp.status_code, resp.text))
        # Dump the last fetched page for offline debugging.
        with open('a.html', 'w') as f:
            f.write(resp.text)
        soup = BeautifulSoup(resp.text, 'html.parser')
        house_list = soup.select("ul#houseList > li.clearfix")
        for house in house_list:
            # Skip promotional/ad cards (marked with the 'zry' class).
            if house.find(class_='clearfix zry'):
                continue
            room = {
                'roomId': re.findall(r'(\d+)', house.select_one('.txt h3 a')['href'])[0],
                'title': house.select_one('.txt h3 a').text,
                'detail': {
                    "area": house.select('.txt .detail span')[0].text,
                    'floor': house.select('.txt .detail span')[1].text,
                    'houseType': house.select('.txt .detail span')[2].text,
                },
                'rentType': house.select('.txt .detail span')[3].text,
                'metroInfo': [house.select('.txt .detail span')[4].text],
                'showPrice': re.findall(r'(\d+)', house.select_one('.priceDetail .price').text)[0],
                'districtName': re.findall(r'\[(\S+)\]', house.select_one('.txt h4').text)[0],
                'image': house.select_one('.img.pr a img')['_src'],
                'origin': 'ziroom',
            }
            # Upsert keyed on roomId so re-runs update rather than duplicate.
            collection.update({'roomId': room['roomId']}, room, True)
        time.sleep(3)  # be polite between page fetches
        page += 1
def main():
    """Run both scrapers repeatedly until the collection stops growing
    (a full pass finds no new rooms) or exceeds 100,000 documents."""
    total = collection.count()
    print("total", total)
    while True:
        get_ziroom()
        get_mogo()
        count = collection.count()
        # NOTE(review): this prints the *previous* total, not the fresh
        # ``count`` -- possibly a bug; confirm the intended output.
        print("total", total)
        if total == count or count > 100000:
            break
        total = count
def update():
    """One-off migration: flatten ziroom ``rentType`` from a one-element
    list to a plain string so it matches the mogo records."""
    rooms = collection.find({'origin': 'ziroom'})
    for room in rooms:
        room['rentType'] = room['rentType'][0]
        collection.save(room)
if __name__ == '__main__':
    # NOTE(review): the Sentry DSN below is a hard-coded credential -- it
    # should be moved to configuration / an environment variable.
    client = Client('https://409c169cc67c461cbdcc7bafa2655446:e1d36cde7b6341448d2dbe06dca64b18@sentry.io/237753')
    main()
    # update()
| [
"jiahao.li@petkit.com"
] | jiahao.li@petkit.com |
1d4ff2b7d90b456b3b501666a6872a740150bf38 | e0b9075283490d189caac87645d7542706883e37 | /user/forms.py | 2a98a938fda6abaf1bbf06a5cb5d496dad6bde1e | [] | no_license | Ciriosss/MongoDb-Project | b1f217f43637802e0519a3b55402ec3ce2a4e539 | 1a57ac392eaeea90aa3606730be112c8b3d1347e | refs/heads/master | 2023-03-22T03:35:39.840217 | 2021-03-17T10:50:39 | 2021-03-17T10:50:39 | 345,783,063 | 0 | 2 | null | 2021-03-12T18:30:39 | 2021-03-08T20:17:02 | Python | UTF-8 | Python | false | false | 1,712 | py | from django import forms
from app.models import BuyOrder, SellOrder, Profile
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class UserRegisterForm(UserCreationForm):
    """Registration form: the stock ``UserCreationForm`` (username and two
    password fields) extended with first/last name and email."""
    first_name = forms.CharField(max_length=30)
    last_name = forms.CharField(max_length=30)
    email = forms.EmailField()

    class Meta:
        model = User
        fields = ['first_name', 'last_name', 'username', 'email', 'password1', 'password2']
class NewBuyOrder(forms.ModelForm):
    """Form for placing a BTC buy order (price and quantity as strings)."""
    price = forms.CharField(max_length=10)
    quantity = forms.CharField(max_length=10)

    class Meta:
        model = BuyOrder
        fields = ['price', 'quantity']

    def checkBalance(self, request):
        """Return ``(price, quantity)`` (raw cleaned strings) if the order
        total fits within the user's cash balance, otherwise ``None``.

        Must be called after ``is_valid()`` -- relies on ``cleaned_data``.
        """
        user = User.objects.get(username=request.user)
        profile = Profile.objects.get(user=user)
        balance = profile.balance
        price = self.cleaned_data.get('price')
        quantity = self.cleaned_data.get('quantity')
        total = float(price) * float(quantity)
        if (total > balance) :
            return None
        return price, quantity
class NewSellOrder(forms.ModelForm):
    """Form for placing a BTC sell order (price and quantity as strings)."""
    price = forms.CharField(max_length=10)
    quantity = forms.CharField(max_length=10)

    class Meta:
        model = SellOrder
        fields = ['price', 'quantity']

    def checkBTC(self, request):
        """Return ``(price, quantity)`` if the user holds enough BTC to
        cover the sale, otherwise ``None``.

        NOTE(review): ``quantity`` is returned as a float here, whereas
        ``NewBuyOrder.checkBalance`` returns the raw string -- confirm the
        callers expect this inconsistency.
        """
        user = User.objects.get(username=request.user)
        profile = Profile.objects.get(user=user)
        BTC = profile.BTC
        quantity = self.cleaned_data.get('quantity')
        price = self.cleaned_data.get('price')
        quantity = float(quantity)
        BTC = float(BTC)
        if (quantity > BTC) :
            return None
        return price, quantity
"domenicociriello2000@gmail.com"
] | domenicociriello2000@gmail.com |
c117af46846c1174ea7008f8552fa52b1505a9b8 | ebbfc69c9b14b8fd0e8b51abb0ba4f767abce6e9 | /sandbook/base/models/novel.py | 3e4e8fdb929f8cfbc346753c49db9354d87a73f8 | [] | no_license | lwaxx/novel | 0a882347dffaafb35f061eaf6301abe32254b54f | 5b538e85606cd22c34ac10f53438fc0d3ff131a0 | refs/heads/master | 2022-06-30T03:21:52.673001 | 2020-05-09T15:26:58 | 2020-05-09T15:26:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,965 | py | import os
import re
import time
from django.core.files.storage import FileSystemStorage
from django.db import models
from base.constants.novel import (
DEFAULT_COVER, NOVEL_STATUS_UNAPPROVED, NOVEL_STATUS_ACTIVE, NOVEL_STATUS_FINISHED,
NOVEL_STATUS_BLOCKED
)
from django.core.cache import cache
from general.utils.text import get_filename_extension, calc_word_count
class CategoryMixin:
    """Mixin giving a category model a cached per-category novel counter."""

    @property
    def novel_count_key(self):
        # Concrete categories must supply their cache-key format.
        raise NotImplementedError

    def novel_count(self):
        """Return the cached novel count for this category (may be None
        if the key has never been populated)."""
        return cache.get(self.novel_count_key)
class Category(CategoryMixin, models.Model):
    """
    Top-level novel category.
    """
    name = models.CharField('名称', max_length=32)
    description = models.CharField('描述', max_length=255)

    class Meta:
        db_table = 'base_novel_category'

    def __str__(self):
        return self.name

    @property
    def novel_count_key(self):
        # NOTE(review): prefix 'sc_' here vs 'c_' on SubCategory looks
        # swapped relative to the class names -- confirm the cache-key
        # convention before relying on it.
        return 'sc_%d_count' % self.id
class SubCategory(CategoryMixin, models.Model):
    """
    Second-level novel category, owned by a top-level Category.
    """
    name = models.CharField('名称', max_length=32)
    category = models.ForeignKey(Category, on_delete=models.CASCADE, verbose_name='一级分类')
    description = models.CharField('描述', max_length=255)

    class Meta:
        db_table = 'base_novel_sub_category'
        default_permissions = ()

    def __str__(self):
        return self.name

    @property
    def novel_count_key(self):
        return 'c_%d_count' % self.id

    def incr_novel_count(self, count: int):
        """
        Adjust the cached novel counter; ``count`` may be a positive or
        negative integer.
        """
        cache.incr(self.novel_count_key, count)
def cover_path(instance, filename):
    """``upload_to`` callback for ``Novel.cover``: store covers under
    novel/cover/<author_id>/<unix-timestamp>.<original extension>."""
    extension = get_filename_extension(filename)
    stamped_name = '{}.{}'.format(int(time.time()), extension)
    return os.path.join('novel', 'cover', str(instance.author_id), stamped_name)
class Novel(models.Model):
    """
    A novel: title, author, category pair, cover image, status and
    denormalized word count.
    """
    # Symbolic names for the status codes imported from base.constants.novel.
    STATUS = {
        'unapproved': NOVEL_STATUS_UNAPPROVED,
        'active': NOVEL_STATUS_ACTIVE,
        'finished': NOVEL_STATUS_FINISHED,
        'blocked': NOVEL_STATUS_BLOCKED,
    }
    STATUS_CHOICES = (
        (STATUS['unapproved'], '未审核'),
        (STATUS['active'], '连载中'),
        (STATUS['finished'], '已完结'),
        (STATUS['blocked'], '已屏蔽')
    )
    name = models.CharField('书名', unique=True, max_length=64)  # TODO: validate the book name
    author = models.ForeignKey('base.User', on_delete=models.SET_NULL, null=True, verbose_name='作者')
    intro = models.TextField('简介', max_length=1024)
    status = models.SmallIntegerField('状态', choices=STATUS_CHOICES, default=NOVEL_STATUS_UNAPPROVED)
    category = models.ForeignKey(Category, on_delete=models.SET_NULL, null=True, verbose_name='一级分类')
    sub_category = models.ForeignKey(SubCategory, on_delete=models.SET_NULL, null=True, verbose_name='二级分类')
    cover = models.ImageField(
        '封面', storage=FileSystemStorage(), default=DEFAULT_COVER,
        upload_to=cover_path, blank=True
    )
    # Denormalized total word count, presumably maintained as chapters are
    # saved -- confirm where it is updated.
    word_count = models.PositiveIntegerField('字数', default=0)
    created_at = models.DateTimeField('创建于', auto_now_add=True)
    updated_at = models.DateTimeField('更新于', auto_now=True)

    class Meta:
        db_table = 'base_novel'
        ordering = ('-id',)
        default_permissions = ()
        permissions = (
            ('view_novel', '查看小说'),
            ('create_novel', '创建小说'),
            ('change_novel', '更改小说'),
            ('delete_novel', '删除小说'),
            ('finish_novel', '完结小说'),
            ('block_novel', '屏蔽小说'),
            ('verify_novel', '审核小说')
        )
class NovelComment(models.Model):
    """
    A review/comment on a whole novel.
    """
    novel = models.ForeignKey(Novel, on_delete=models.CASCADE, verbose_name='小说', related_name='comments', )
    user = models.ForeignKey('base.User', on_delete=models.SET_NULL, null=True, verbose_name='用户',
                             related_name='nc_user')
    title = models.CharField('标题', max_length=32, blank=True)  # title is optional
    content = models.CharField('内容', max_length=4096)
    created_at = models.DateTimeField('创建于', auto_now_add=True)

    class Meta:
        db_table = 'base_novel_comment'
        default_permissions = ()
class NovelCommentReply(models.Model):
    """
    A reply to a novel comment.
    """
    comment = models.ForeignKey(NovelComment, on_delete=models.CASCADE, related_name='replies',
                                verbose_name='书评')
    content = models.CharField('内容', max_length=1024)
    user = models.ForeignKey('base.User', on_delete=models.SET_NULL, null=True,
                             related_name='nc_reply_user', verbose_name='回复用户')
    created_at = models.DateTimeField('创建于', auto_now_add=True)

    class Meta:
        db_table = 'base_novel_comment_reply'
        default_permissions = ()
class Volume(models.Model):
    """
    A volume: a named group of chapters within a novel.
    """
    name = models.CharField('卷名', max_length=32, default='正文卷')
    novel = models.ForeignKey(Novel, on_delete=models.CASCADE, verbose_name='小说')
    created_at = models.DateTimeField('创建于', auto_now_add=True)

    class Meta:
        db_table = 'base_novel_volume'
        default_permissions = ()
class Chapter(models.Model):
    """
    A chapter, owned by a Volume; carries the editing/moderation status.
    """
    STATUS = {
        'saved': 0,
        'submitted': 1,
        'blocked': 2,
        'approved': 3  # not used yet
    }
    STATUS_CHOICES = (
        (STATUS['saved'], '已保存'),
        (STATUS['submitted'], '已提交'),
        (STATUS['blocked'], '已屏蔽'),
        (STATUS['approved'], '已审核')
    )
    title = models.CharField('标题', max_length=32, blank=True, default='新章节')  # TODO: validate the chapter title
    content = models.TextField('内容', max_length=65535, blank=True)
    volume = models.ForeignKey(Volume, on_delete=models.CASCADE, verbose_name='卷')
    # Stored word count (contrast with Paragraph.word_count, which is computed).
    word_count = models.PositiveIntegerField('字数', default=0)
    is_free = models.BooleanField('免费', default=True)
    status = models.IntegerField('状态', choices=STATUS_CHOICES, default=STATUS['saved'])
    created_at = models.DateTimeField('创建于', auto_now_add=True)
    updated_at = models.DateTimeField('更新于', auto_now=True)

    class Meta:
        db_table = 'base_novel_chapter'
        default_permissions = ()
# class ChapterUpdated(models.Model):
# ...
class Paragraph(models.Model):
    """
    A paragraph within a chapter, ordered by ``serial``.
    """
    chapter = models.ForeignKey(Chapter, on_delete=models.CASCADE, verbose_name='章节')
    content = models.TextField('内容', max_length=65535)  # TODO: paragraph length limit
    serial = models.PositiveIntegerField('序号', default=1)

    class Meta:
        db_table = 'base_novel_paragraph'
        default_permissions = ()
        unique_together = (('chapter', 'serial'),)

    @property
    def word_count(self):
        # Computed on the fly from content (not stored, unlike Chapter.word_count).
        return calc_word_count(self.content)
class ParagraphComment(models.Model):
    """
    A comment attached to a single paragraph.
    """
    paragraph = models.ForeignKey(Paragraph, on_delete=models.CASCADE, verbose_name='段落')
    user = models.ForeignKey('base.User', on_delete=models.SET_NULL, null=True, verbose_name='用户')
    content = models.CharField('内容', max_length=1024)
    created_at = models.DateTimeField('创建于', auto_now_add=True)

    class Meta:
        db_table = 'base_novel_paragraph_comment'
        default_permissions = ()
class ParagraphCommentReply(models.Model):
    """
    A reply to a paragraph comment.

    NOTE(review): unlike NovelCommentReply, this model has no ``content``
    field -- looks like an omission; confirm before use.
    """
    paragraph_comment = models.ForeignKey(ParagraphComment, on_delete=models.CASCADE, verbose_name='段评')
    user = models.ForeignKey('base.User', on_delete=models.SET_NULL, null=True,
                             related_name='pc_reply_user', verbose_name='回复用户')
    created_at = models.DateTimeField('创建于', auto_now_add=True)

    class Meta:
        db_table = 'base_novel_paragraph_comment_reply'
        default_permissions = ()
| [
"1225191678@qq.com"
] | 1225191678@qq.com |
7d1fe42854606c1607dcd4ebd85f703029d3f4f5 | 1f0ca601fd0d2ed26f0c88cb4f4f15eea7eeb7c7 | /m8/GCD using Iteration/gcd_iter.py | 1f8839b2814ef260e59b4d075019ef2aa7245154 | [] | no_license | krishnaja625/CSPP-1-assignments | b4cf03bb98fd8ddc28585c3da298d5db0c5bc09d | 65d446699f78c1617a25218fbe51db8dbe459af0 | refs/heads/master | 2020-03-25T01:57:03.217813 | 2018-08-26T14:19:50 | 2018-08-26T14:19:50 | 143,266,316 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 357 | py | """gcd"""
def gcd_iter(a_1, b_1):
    """Return the greatest common divisor of two non-negative integers.

    Iterative Euclidean algorithm.  Fixes two defects in the original:
    an unreachable ``break`` placed after a ``return``, and the edge case
    ``gcd(a, 0)`` which wrongly returned 0 instead of ``a``.
    """
    while b_1 > 0:
        # Classic Euclid step: (a, b) -> (b, a mod b).
        a_1, b_1 = b_1, a_1 % b_1
    return a_1
def main():
    """Read two space-separated integers from stdin and print their GCD."""
    data = input()
    data = data.split()
    print(gcd_iter(int(data[0]), int(data[1])))


if __name__ == "__main__":
    main()
| [
"krishnaja625@msitprogram.net"
] | krishnaja625@msitprogram.net |
f7541cbe6adab2b6a82fea79e573be2aa729e114 | 81b6c2e94de8ef96cc6564f4ad886ec11f783a59 | /Rota_System/UI/Appointments/__init__.py | edd89599326fc881301a3976d50dd7790a7b0cdf | [
"MIT"
] | permissive | ergoregion/Rota-Program | a1a465a1b848f2790ea064d502ab74e14e7e611c | 44dab4cb11add184619d88aa0fcab61532d128e6 | refs/heads/master | 2016-08-11T07:18:15.257077 | 2016-03-11T21:28:25 | 2016-03-11T21:28:25 | 47,653,697 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 205 | py | __author__ = 'Neil Butcher'
from widget_appointment_list import AppointmentsListWidget
from widget_appointment import SingleAppointmentWidget
from widget_appointment_tree import AppointmentsTreeListWidget | [
"nbutcher@fmail.co.uk"
] | nbutcher@fmail.co.uk |
1ed6492de665c9e09dab9d960c2503737654ef9e | ab7ceae9debb5400ed6b3313e78e23b8bcadd5f0 | /app/recipe/tests/test_recipe_api.py | d14347b1b5a1e19053c169ed52dbec6422aa4ccb | [
"MIT"
] | permissive | Hassan-gholipoor/recipe-app-api | 3ff8c35be69443ec015b1a5d2ac7d7d7fa84ffbd | 36fd66e6918d36056a480cefb3983b0c8ab13f39 | refs/heads/main | 2023-05-13T12:56:34.064656 | 2021-06-03T20:21:22 | 2021-06-03T20:21:22 | 337,067,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,906 | py | import tempfile
import os
from PIL import Image
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from core.models import Recipe, Tag, Ingredient
from recipe.serializers import RecipeSerializer, RecipeDetailSerializer
RECIPES_URL = reverse('recipe:recipe-list')
def image_upload_url(recipe_id):
    """Return the URL of the image-upload endpoint for the given recipe."""
    return reverse('recipe:recipe-upload-image', args=[recipe_id])
def detail_url(recipe_id):
    """Return the detail URL for the given recipe."""
    return reverse('recipe:recipe-detail', args=[recipe_id])
def sample_tag(user, name='Main course'):
    """Create and return a sample tag owned by ``user``."""
    return Tag.objects.create(user=user, name=name)
def sample_ingredient(user, name='Cinnamon'):
    """Create and return a sample ingredient owned by ``user``."""
    return Ingredient.objects.create(user=user, name=name)
def sample_recipe(user, **params):
    """Create and return a sample recipe; ``params`` override the defaults."""
    fields = {
        'title': 'sample recipe',
        'time_minutes': 10,
        'price': 5.00,
    }
    fields.update(params)
    return Recipe.objects.create(user=user, **fields)
class PublicRecipeApiTests(TestCase):
    """Test unauthenticated recipe API access (all requests must be rejected)."""

    def setUp(self):
        self.client = APIClient()

    def test_login_required(self):
        """Test that authentication is required"""
        res = self.client.get(RECIPES_URL)

        self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateRecipeApiTests(TestCase):
    """Test authenticated recipe API access: list, filter-by-owner, detail,
    create (with tags/ingredients), and partial/full updates."""

    def setUp(self):
        self.client = APIClient()
        self.user = get_user_model().objects.create_user(
            'test@gmail.com',
            'testpass'
        )
        self.client.force_authenticate(self.user)

    def test_retrieve_recipes(self):
        """Test retrieving a list of recipes"""
        sample_recipe(user=self.user)
        sample_recipe(user=self.user)

        res = self.client.get(RECIPES_URL)

        recipes = Recipe.objects.all().order_by('-id')
        serializer = RecipeSerializer(recipes, many=True)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(res.data, serializer.data)

    def test_recipes_limited_to_user(self):
        """Test retrieving recipes for the authenticated user only"""
        user2 = get_user_model().objects.create_user(
            'other@gmail.com',
            'testpass'
        )
        sample_recipe(user=user2)
        sample_recipe(user=self.user)

        res = self.client.get(RECIPES_URL)

        recipes = Recipe.objects.filter(user=self.user)
        serializer = RecipeSerializer(recipes, many=True)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(len(res.data), 1)
        self.assertEqual(res.data, serializer.data)

    def test_view_recipe_detail(self):
        """Test viewing a recipe detail"""
        recipe = sample_recipe(user=self.user)
        recipe.tags.add(sample_tag(user=self.user))
        recipe.ingredients.add(sample_ingredient(user=self.user))

        url = detail_url(recipe.id)
        res = self.client.get(url)

        serializer = RecipeDetailSerializer(recipe)
        self.assertEqual(res.data, serializer.data)

    def test_create_basic_recipe(self):
        """Test creating a recipe"""
        payload = {
            'title': 'Chocolate Cheesecake',
            'time_minutes': 10,
            'price': 5.00
        }
        res = self.client.post(RECIPES_URL, payload)

        self.assertEqual(res.status_code, status.HTTP_201_CREATED)
        recipe = Recipe.objects.get(id=res.data['id'])
        # Every posted field must round-trip onto the created model.
        for key in payload.keys():
            self.assertEqual(payload[key], getattr(recipe, key))

    def test_create_recipe_with_tags(self):
        """Test creating a recipe with tags"""
        tag1 = sample_tag(user=self.user, name='Vegan')
        tag2 = sample_tag(user=self.user, name='Dessert')
        payload = {
            'title': 'Avocado lime cheesecake',
            'tags': [tag1.id, tag2.id],
            'time_minutes': 60,
            'price': 10.99
        }
        res = self.client.post(RECIPES_URL, payload)

        self.assertEqual(res.status_code, status.HTTP_201_CREATED)
        recipe = Recipe.objects.get(id=res.data['id'])
        tags = recipe.tags.all()
        self.assertEqual(tags.count(), 2)
        self.assertIn(tag1, tags)
        self.assertIn(tag2, tags)

    def test_create_recipe_with_ingredient(self):
        """Test creating a recipe with ingredients"""
        ingredient1 = sample_ingredient(user=self.user, name='Prawns')
        ingredient2 = sample_ingredient(user=self.user, name='Ginger')
        payload = {
            'title': 'Thai prawn red curry',
            'ingredients': [ingredient1.id, ingredient2.id],
            'time_minutes': 20,
            'price': 40.99
        }
        res = self.client.post(RECIPES_URL, payload)

        self.assertEqual(res.status_code, status.HTTP_201_CREATED)
        recipe = Recipe.objects.get(id=res.data['id'])
        ingredients = recipe.ingredients.all()
        self.assertEqual(ingredients.count(), 2)
        self.assertIn(ingredient1, ingredients)
        self.assertIn(ingredient2, ingredients)

    def test_partial_update_recipe(self):
        """Test updating a recipe with PATCH"""
        recipe = sample_recipe(user=self.user)
        recipe.tags.add(sample_tag(user=self.user))
        new_tag = sample_tag(user=self.user, name='Curry')
        payload = {
            'title': 'Chicken Tikka',
            'tags': [new_tag.id]
        }
        url = detail_url(recipe.id)
        self.client.patch(url, payload)

        recipe.refresh_from_db()
        self.assertEqual(recipe.title, payload['title'])
        tags = recipe.tags.all()
        # PATCH replaces the tag set with the one provided.
        self.assertEqual(len(tags), 1)
        self.assertIn(new_tag, tags)

    def test_full_update_recipe(self):
        """Test updating a recipe with PUT"""
        recipe = sample_recipe(user=self.user)
        recipe.tags.add(sample_tag(user=self.user))
        payload = {
            'title': 'Spaghetti carbonara',
            'time_minutes': 20,
            'price': 5.00
        }
        url = detail_url(recipe.id)
        self.client.put(url, payload)

        recipe.refresh_from_db()
        self.assertEqual(recipe.title, payload['title'])
        self.assertEqual(recipe.time_minutes, payload['time_minutes'])
        self.assertEqual(recipe.price, payload['price'])
        tags = recipe.tags.all()
        # PUT with no tags clears the existing tag set.
        self.assertEqual(len(tags), 0)
class RecipeImageUploadTest(TestCase):
    """Tests for the recipe image-upload endpoint."""

    def setUp(self):
        self.client = APIClient()
        self.user = get_user_model().objects.create_user('test@gamil.com', 'testpass')
        self.client.force_authenticate(self.user)
        self.recipe = sample_recipe(user=self.user)

    def tearDown(self):
        # Remove the uploaded file so test artifacts don't accumulate on disk.
        self.recipe.image.delete()

    def test_upload_image_to_recipe(self):
        """Test uploading an image to a recipe"""
        url = image_upload_url(self.recipe.id)
        with tempfile.NamedTemporaryFile(suffix='.jpg') as ntf:
            img = Image.new('RGB', (10, 10))
            # Fix: pass the format as a keyword argument.  The original
            # ``img.save(ntf, format('JPEG'))`` invoked the *builtin*
            # ``format()`` and only worked because its return value landed
            # in the positional ``format`` slot of ``Image.save``.
            img.save(ntf, format='JPEG')
            ntf.seek(0)
            res = self.client.post(url, {'image': ntf}, format='multipart')

        self.recipe.refresh_from_db()
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertIn('image', res.data)
        self.assertTrue(os.path.exists(self.recipe.image.path))

    def test_upload_image_bad_request(self):
        """Test uploading an invalid image"""
        url = image_upload_url(self.recipe.id)
        res = self.client.post(url, {'image': 'notimage'}, format='multipart')

        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
| [
"hassangpoor@gmail.com"
] | hassangpoor@gmail.com |
4190d8cc4a61ecb396bf01fa403b2e4eae589e17 | 973c5e4a2941e817f43c773dfca0278125f87142 | /app.py | 04d6bde404c38eeb06d5f87e5a1f62d421adbba8 | [] | no_license | mokshm/herokuTest | a3c656a79f6a2a4c47e5f21e3d40679ace55fe4a | 35ca9865930896af2ef0a381dd210dbde3bf81d5 | refs/heads/master | 2022-12-19T09:48:40.023064 | 2017-06-23T22:09:08 | 2017-06-23T22:09:08 | 95,259,134 | 0 | 0 | null | 2022-12-07T23:58:48 | 2017-06-23T21:53:23 | Python | UTF-8 | Python | false | false | 185 | py | from flask import Flask
app = Flask(__name__)


@app.route('/', methods=['GET', 'POST'])
def index():
    """Serve a static greeting at the site root."""
    return "<h2>Hello World</h2>"


if __name__ == '__main__':
    # Debug mode: auto-reload and in-browser tracebacks (development only).
    app.run(debug=True)
| [
"moksh.mcs.du.2015@gmail.com"
] | moksh.mcs.du.2015@gmail.com |
dcd9d496ee0141ea04f9a8c83b711f5ce5252089 | ffc1cc3bb7b68335b115122fdc7924fc4e31d528 | /pro38.py | fd23c4fc45f9509da980b95618a4cae2a5d6442b | [] | no_license | Rihanashariff/swathi24 | dba1dd3c3d2ff583ae431b432e0ef262bfeb3ac3 | 2b0d21f2febdd2a563e8f0affeebd5ca7a5821b8 | refs/heads/master | 2020-07-02T05:28:32.199982 | 2019-06-29T08:22:10 | 2019-06-29T08:22:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 132 | py | #s
# Read n (element count -- unused below) and k (an offset added to each value).
n,k = map(int,input().split())
l = list(map(int,input().split()))
c= 0
# Count elements that, after adding k, still do not exceed 5.
# NOTE(review): presumably a contest problem where items <= 5 are usable
# and every 3 usable items form one group -- confirm against the problem
# statement.
for i in l:
    if(i+k <=5):
        c+=1
g=c//3
print(g)
| [
"noreply@github.com"
] | Rihanashariff.noreply@github.com |
71e010a0ee8bd0c039bbfee90d3832742d34e2d3 | 300439863212e53a2698f1475df6c8772ef6de52 | /leetcode/字符串String/exam028.py | bc4badc5453283d195b50a5d56f3e957d3d7f1ec | [] | no_license | MingduDing/A-plan | e2a12a0ea7ede5ad21bcd117b9919744c3376a27 | c510ac359d99562185691bb0f5437c8494cf8387 | refs/heads/master | 2020-08-10T01:00:54.721253 | 2019-10-14T14:55:31 | 2019-10-14T14:55:31 | 214,216,056 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 582 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time: 2019/8/21 14:03
# @Author: Domi
# @File: exam028.py
# @Software: PyCharm
"""
028.《实现strStr()》(easy)
题目:给定一个haystack字符串和一个needle字符串,在haystack字符串中找出needle字符串
出现的第一个位置(从0开始)。如果不存在,则返回-1
思路:遍历一遍
"""
def str_str(haystack, needle):
    """Return the index of the first occurrence of ``needle`` in
    ``haystack``, or -1 if it never occurs.  An empty needle matches at 0.
    """
    window = len(needle)
    last_start = len(haystack) - window
    start = 0
    while start <= last_start:
        if haystack[start:start + window] == needle:
            return start
        start += 1
    return -1
print(str_str('hello', 'll'))
| [
"dingmingdugithub@163.com"
] | dingmingdugithub@163.com |
b872742d718ed260cb05469ace92ca4d09643aa2 | b041bacd728e5600ae34447593e1d9e90ce99547 | /Data Stitching/Sup_Unsup.py | 9e655417d71d191e98a82769fb9eba20ffdfd5c2 | [] | no_license | ElMehdiBen/BenRepository | b27d8ef417d9f63a6f5964706f05ad179f840715 | a5c76187f97e9c4364e1d3ebd6700c205b675891 | refs/heads/master | 2021-05-04T10:00:10.810990 | 2020-09-25T13:49:55 | 2020-09-25T13:49:55 | 49,004,990 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,873 | py | # Blocking : multiple column (first_name[0]+last_name[0]+postal_code)
# The script MUST contain a function named azureml_main
# which is the entry point for this module.
# imports up here can be used to
import pandas as pd
# The entry point function can contain up to two input arguments:
# Param<dataframe1>: a pandas.DataFrame
# Param<dataframe2>: a pandas.DataFrame
def azureml_main(dataframe1):
    """Azure ML module entry point (blocking step): add a ``blocking_key``
    column of first_name[0] + last_name[0] + str(postal_code).

    NOTE(review): the lambda indexes each row Series positionally via
    ``x[1]``/``x[2]``/``x[4]`` -- this relies on pandas' deprecated
    integer-position fallback on label-indexed Series; confirm the pandas
    version in the Azure ML runtime still supports it.
    """
    lam=lambda x:x[1][0][0]+x[2][0][0]+str(x[4])
    dataframe1["blocking_key"]=dataframe1.apply(lam,axis=1)
    #dataframe1["blocking_key"]=dataframe1["first_name"].values[0][0]
    return dataframe1,
# Generation of the candidate pairs
# The script MUST contain a function named azureml_main
# which is the entry point for this module.
# imports up here can be used to
import pandas as pd
# The entry point function can contain up to two input arguments:
# Param<dataframe1>: a pandas.DataFrame
# Param<dataframe2>: a pandas.DataFrame
def azureml_main(dataframe1):
    """Azure ML module entry point (candidate-pair generation): self-join
    the records on ``blocking_key``, then keep each unordered pair exactly
    once by comparing a concatenated string key of the left record
    (``check``) against the right record (``check1``).
    """
    dataframe1=dataframe1.merge(dataframe1,on="blocking_key")
    # Build a sortable string key from the first seven columns of each side.
    lam=lambda x:str(x[0])+x[1]+x[2]+x[3]+str(x[4])+x[5]+str(x[6])
    lam1=lambda x:str(x[8])+x[9]+x[10]+x[11]+str(x[12])+x[13]+str(x[14])
    # check2 == 1 marks the canonical ordering of the pair (left key < right key).
    lam2=lambda x:1 if x["check"]<x["check1"] else 2
    dataframe1["check"]=dataframe1.apply(lam,axis=1)
    dataframe1["check1"]=dataframe1.apply(lam1,axis=1)
    dataframe1["check2"]=dataframe1.apply(lam2,axis=1)
    dataframe1=dataframe1.loc[dataframe1["check2"]==1,]
    # Drop the helper columns before returning.
    dataframe1=dataframe1.drop("check",axis=1)
    dataframe1=dataframe1.drop("check1",axis=1)
    dataframe1=dataframe1.drop("check2",axis=1)
    return dataframe1,
# Calculation of the similarity scores
# The script MUST contain a function named azureml_main
# which is the entry point for this module.
# imports up here can be used to
import pandas as pd
import jellyfish as jf
# The entry point function can contain up to two input arguments:
# Param<dataframe1>: a pandas.DataFrame
# Param<dataframe2>: a pandas.DataFrame
def azureml_main(dataframe1):
    """Azure ML module entry point (similarity scoring): compute
    Jaro-Winkler distances (via jellyfish) on name/address/email fields and
    an exact-match indicator on device_id for each candidate pair.
    """
    dataframe2=pd.DataFrame()
    jaro_udf_first_name = lambda x: round(jf.jaro_winkler(x["first_name_x"],x["first_name_y"]),7)#approximate comparison
    jaro_udf_last_name = lambda x: round(jf.jaro_winkler(x["last_name_x"],x["last_name_y"]),7)#approximate comparison
    jaro_udf_address_line1 = lambda x: round(jf.jaro_winkler(x["address_line1_x"],x["address_line1_y"]),7)#approximate comparison
    jaro_udf_email = lambda x: round(jf.jaro_winkler(x["email_x"],x["email_y"]),7)#approximate comparison
    int_udf_deviceid = lambda x:1 if x["device_id_x"]==x["device_id_y"] else 0  # exact comparison
    dataframe2["cosmos_customerid_x"]=dataframe1["cosmos_customerid_x"]
    dataframe2["cosmos_customerid_y"]=dataframe1["cosmos_customerid_y"]
    dataframe2["first_name_dist"]=dataframe1.apply(jaro_udf_first_name,axis=1)
    dataframe2["last_name_dist"]=dataframe1.apply(jaro_udf_last_name,axis=1)
    dataframe2["address_dist"]=dataframe1.apply(jaro_udf_address_line1,axis=1)
    dataframe2["email_dist"]=dataframe1.apply(jaro_udf_email,axis=1)
    dataframe2["device_id_dist"]=dataframe1.apply(int_udf_deviceid,axis=1)
    return dataframe2,
# Labelization using K-Means
# The script MUST contain a function named azureml_main
# which is the entry point for this module.
# imports up here can be used to
import pandas as pd
from sklearn.cluster import KMeans
import numpy as np
# The entry point function can contain up to two input arguments:
# Param<dataframe1>: a pandas.DataFrame
# Param<dataframe2>: a pandas.DataFrame
def azureml_main(dataframe1):
    """Azure ML module entry point (labelling): run 2-cluster KMeans over
    the five similarity columns, with centroids seeded at all-zeros and
    all-ones so cluster 1 corresponds to high-similarity (match) pairs.
    """
    # Columns 2..6 are the five *_dist similarity scores; 0..1 are the ids.
    X1=dataframe1[dataframe1.columns[2:7]].values
    initModel = np.array([[0,0,0,0,0],
                          [1, 1, 1, 1,1]],np.float64)
    model = KMeans(n_clusters=2,init=initModel)
    model.fit(X1)
    dataframe1["label"]=model.labels_
    return dataframe1,
| [
"elmehdi.benyoussef@publicisgroupe.com"
] | elmehdi.benyoussef@publicisgroupe.com |
2c0019e0a6ec232a3119ef7e4b4ba87bc62ae2f0 | baadd7f2299123a2055cb8177a54cab33cbf2088 | /flags.py | fe30aaddaa3c529e881be076d61745cbc7a71c5e | [] | no_license | leapis/circ-gen | 99bb69e49d53013827f1e7f8175f6cdb958dd2f7 | 78260a27b2e505a09d13a4e23bedcf2d759d79ba | refs/heads/master | 2020-06-11T23:43:43.826173 | 2019-08-07T18:55:24 | 2019-08-07T18:55:24 | 194,125,443 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 100 | py | from enum import Enum
class flags(Enum):
    """Outcome flags for question generation.

    NOTE(review): member semantics inferred from their names -- confirm
    against the call sites.
    """
    NONE = 0
    ERROR = 1
    NO_ANSWER_AS_DISTRACTOR = 2
| [
"eric@leapis.dev"
] | eric@leapis.dev |
cc09ec3dc544f923a01256d80c96928a1ec33d28 | 0dc24a6e729a4b438fbcd9cfb72da3b6ee716d77 | /ksiazka_zrob_to_sam/my_electric_car.py | 10fbec7652bb7144eb706ddd63de707e5df80507 | [] | no_license | Licho59/learning_python_eric_matthes_book | fca84a2bff207c10dec20c7fea9aeacf05d6a101 | 969f95132822d8bd21c30403d8e0bf6aadb9914f | refs/heads/master | 2021-09-01T03:41:37.281741 | 2017-12-24T15:12:13 | 2017-12-24T15:12:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 784 | py | # -*- coding: utf-8 -*-
"""
Created on Mon May 8 21:20:17 2017
@author: Leszek
"""
# Exercise: different ways of importing classes from the local ``car`` module.
from car import ElectricCar

my_tesla = ElectricCar('tesla', 'model s', 2016)
print(my_tesla.get_descriptive_name())
my_tesla.battery.describe_battery()
my_tesla.battery.get_range()

# Importing multiple classes from a module
from car import Car, ElectricCar

my_beetle = Car('volkswagen', 'beetle', 2016)
print(my_beetle.get_descriptive_name())
my_tesla = ElectricCar('tesla', 'model s', 2016)
print(my_tesla.get_descriptive_name())

# Importing the whole module
import car

my_beetle = car.Car('volkswagen', 'beetle', 2016)
print(my_beetle.get_descriptive_name())
my_tesla = car.ElectricCar('tesla', 'model s', 2016)
print(my_tesla.get_descriptive_name())

# Importing every class from the module (discouraged in real code)
from car import *
| [
"lestlalka@gmail.com"
] | lestlalka@gmail.com |
551b03dfde66b66b84800cf4f0dfbe11000e2d7c | 5d03ee41677bbe4c37a873932f4e2ca63cb50df1 | /gae.sonstige.d/gae.wikilicious2.d/facebook.py | ae2b15f1cd0eabe2702fefe101a65d35e32a9f6c | [] | no_license | wolfhesse/saikogallery | 159acc1bab431070e8156da8d355e9e51ec0d4ac | f719f29be54d1e2190f3c841ddeeb58997aa555a | refs/heads/master | 2016-09-06T00:41:58.012920 | 2013-05-23T22:11:13 | 2013-05-23T22:11:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,554 | py | #!/usr/bin/env python
#
# Copyright 2010 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Python client library for the Facebook Platform.
This client library is designed to support the Graph API and the official
Facebook JavaScript SDK, which is the canonical way to implement
Facebook authentication. Read more about the Graph API at
http://developers.facebook.com/docs/api. You can download the Facebook
JavaScript SDK at http://github.com/facebook/connect-js/.
If your application is using Google AppEngine's webapp framework, your
usage of this module might look like this:
user = facebook.get_user_from_cookie(self.request.cookies, key, secret)
if user:
graph = facebook.GraphAPI(user["access_token"])
profile = graph.get_object("me")
friends = graph.get_connections("me", "friends")
"""
import cgi
import hashlib
import time
import urllib
import urllib2
# Find a JSON parser: prefer the stdlib ``json``, fall back to the
# third-party ``simplejson``, and finally to Django's bundled copy
# (available on Google AppEngine).
try:
    import json
    _parse_json = lambda s: json.loads(s)
except ImportError:
    try:
        import simplejson
        _parse_json = lambda s: simplejson.loads(s)
    except ImportError:
        # For Google AppEngine
        from django.utils import simplejson
        _parse_json = lambda s: simplejson.loads(s)
class GraphAPI(object):
    """A client for the Facebook Graph API.

    See http://developers.facebook.com/docs/api for complete documentation
    for the API.

    The Graph API is made up of the objects in Facebook (e.g., people, pages,
    events, photos) and the connections between them (e.g., friends,
    photo tags, and event RSVPs). This client provides access to those
    primitive types in a generic way. For example, given an OAuth access
    token, this will fetch the profile of the active user and the list
    of the user's friends:

       graph = facebook.GraphAPI(access_token)
       user = graph.get_object("me")
       friends = graph.get_connections(user["id"], "friends")

    You can see a list of all of the objects and connections supported
    by the API at http://developers.facebook.com/docs/reference/api/.

    You can obtain an access token via OAuth or by using the Facebook
    JavaScript SDK. See http://developers.facebook.com/docs/authentication/
    for details.

    If you are using the JavaScript SDK, you can use the
    get_user_from_cookie() method below to get the OAuth access token
    for the active user from the cookie saved by the SDK.

    NOTE: this is Python 2 code (urllib/urllib2).
    """
    def __init__(self, access_token=None):
        # The token is optional: unauthenticated requests can still read
        # public objects; write operations require it (asserted below).
        self.access_token = access_token

    def get_object(self, id, **args):
        """Fetches the given object from the graph."""
        return self.request(id, args)

    def get_objects(self, ids, **args):
        """Fetches all of the given objects from the graph.

        We return a map from ID to object. If any of the IDs are invalid,
        we raise an exception.
        """
        args["ids"] = ",".join(ids)
        return self.request("", args)

    def get_connections(self, id, connection_name, **args):
        """Fetches the connections for the given object."""
        return self.request(id + "/" + connection_name, args)

    def put_object(self, parent_object, connection_name, **data):
        """Writes the given object to the graph, connected to the given parent.

        For example,

            graph.put_object("me", "feed", message="Hello, world")

        writes "Hello, world" to the active user's wall. Likewise, this
        will comment on the first post of the active user's feed:

            feed = graph.get_connections("me", "feed")
            post = feed["data"][0]
            graph.put_object(post["id"], "comments", message="First!")

        See http://developers.facebook.com/docs/api#publishing for all of
        the supported writeable objects.

        Most write operations require extended permissions. For example,
        publishing wall posts requires the "publish_stream" permission. See
        http://developers.facebook.com/docs/authentication/ for details about
        extended permissions.
        """
        assert self.access_token, "Write operations require an access token"
        return self.request(parent_object + "/" + connection_name, post_args=data)

    def put_wall_post(self, message, attachment={}, profile_id="me"):
        """Writes a wall post to the given profile's wall.

        We default to writing to the authenticated user's wall if no
        profile_id is specified.

        attachment adds a structured attachment to the status message being
        posted to the Wall. It should be a dictionary of the form:

            {"name": "Link name"
             "link": "http://www.example.com/",
             "caption": "{*actor*} posted a new review",
             "description": "This is a longer description of the attachment",
             "picture": "http://www.example.com/thumbnail.jpg"}

        NOTE(review): mutable default argument for ``attachment`` -- safe
        only because it is never mutated here.
        """
        return self.put_object(profile_id, "feed", message=message, **attachment)

    def put_comment(self, object_id, message):
        """Writes the given comment on the given post."""
        return self.put_object(object_id, "comments", message=message)

    def put_like(self, object_id):
        """Likes the given post."""
        return self.put_object(object_id, "likes")

    def delete_object(self, id):
        """Deletes the object with the given ID from the graph."""
        self.request(id, post_args={"method": "delete"})

    def request(self, path, args=None, post_args=None):
        """Fetches the given path in the Graph API.

        We translate args to a valid query string. If post_args is given,
        we send a POST request to the given path with the given arguments.

        Raises GraphAPIError if the response carries an "error" payload.
        """
        if not args: args = {}
        if self.access_token:
            # Attach the token to whichever argument set actually travels
            # with the request (POST body wins if present).
            if post_args is not None:
                post_args["access_token"] = self.access_token
            else:
                args["access_token"] = self.access_token
        post_data = None if post_args is None else urllib.urlencode(post_args)
        file = urllib2.urlopen("https://graph.facebook.com/" + path + "?" +
                               urllib.urlencode(args), post_data)
        try:
            response = _parse_json(file.read())
        finally:
            file.close()
        if response.get("error"):
            raise GraphAPIError(response["error"]["type"],
                                response["error"]["message"])
        return response
class GraphAPIError(Exception):
def __init__(self, type, message):
Exception.__init__(self, message)
self.type = type
def get_user_from_cookie(cookies, app_id, app_secret):
"""Parses the cookie set by the official Facebook JavaScript SDK.
cookies should be a dictionary-like object mapping cookie names to
cookie values.
If the user is logged in via Facebook, we return a dictionary with the
keys "uid" and "access_token". The former is the user's Facebook ID,
and the latter can be used to make authenticated requests to the Graph API.
If the user is not logged in, we return None.
Download the official Facebook JavaScript SDK at
http://github.com/facebook/connect-js/.
* r20121121 chgd to https://github.com/facebook/facebook-js-sdk/
Read more about Facebook authentication
at http://developers.facebook.com/docs/authentication/.
"""
cookie = cookies.get("fbs_" + app_id, "")
if not cookie: return None
args = dict((k, v[-1]) for k, v in cgi.parse_qs(cookie.strip('"')).items())
payload = "".join(k + "=" + args[k] for k in sorted(args.keys())
if k != "sig")
sig = hashlib.md5(payload + app_secret).hexdigest()
expires = int(args["expires"])
if sig == args.get("sig") and (expires == 0 or time.time() < expires):
return args
else:
return None
| [
"b.stoy@yasssu.com"
] | b.stoy@yasssu.com |
5dfd868351ef627575b6d34e46ff7559626dae70 | a83b78c9a98efea351331e6a8b2d76c73bcbc3ca | /setup.py | 35aa745586f865221c1867755a08f2dfeac6a7e1 | [
"BSD-2-Clause"
] | permissive | sjl421/django-micro | 3856ada57b17f3cefa8b9271cddc8a3d79263457 | 84b0a6cf550e9c5d490e59c585d78daf5e5632b8 | refs/heads/master | 2020-03-26T08:25:57.257788 | 2018-06-30T20:32:54 | 2018-06-30T20:32:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 825 | py | import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-micro',
description='Django as a microframework',
long_description=read('README.rst'),
keywords='django microframework',
py_modules=['django_micro'],
version='1.7.1',
author='Max Poletaev',
author_email='max.poletaev@gmail.com',
url='https://github.com/zenwalker/django-micro',
license='BSD',
install_requires=[
'django>=2.0,<2.1',
],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Framework :: Django',
'Framework :: Django :: 2.0',
],
)
| [
"max.poletaev@gmail.com"
] | max.poletaev@gmail.com |
5487773f243f788ebc92256016ebad447a41750c | d5f75adf5603927396bdecf3e4afae292143ddf9 | /python/paddle/fluid/tests/unittests/test_kthvalue_op.py | 66eb8ab4f31fba1ef8cb7eee8a8896077b683a1f | [
"Apache-2.0"
] | permissive | jiweibo/Paddle | 8faaaa1ff0beaf97ef7fb367f6c9fcc065f42fc4 | 605a2f0052e0ffb2fab3a4cf4f3bf1965aa7eb74 | refs/heads/develop | 2023-07-21T03:36:05.367977 | 2022-06-24T02:31:11 | 2022-06-24T02:31:11 | 196,316,126 | 3 | 2 | Apache-2.0 | 2023-04-04T02:42:53 | 2019-07-11T03:51:12 | Python | UTF-8 | Python | false | false | 6,945 | py | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
import paddle
import paddle.fluid as fluid
def cal_kthvalue(x, k, axis, keepdim=False):
if axis < 0:
axis = len(x.shape) + axis
indices = np.argsort(x, axis=axis)
value = np.sort(x, axis=axis)
indices = indices.take(indices=k - 1, axis=axis)
value = value.take(indices=k - 1, axis=axis)
if keepdim:
indices = np.expand_dims(indices, axis)
value = np.expand_dims(value, axis)
return value, indices
class TestKthvalueOp(OpTest):
def init_args(self):
self.k = 5
self.axis = -1
def setUp(self):
self.op_type = "kthvalue"
self.python_api = paddle.kthvalue
self.dtype = np.float64
self.input_data = np.random.random((2, 1, 2, 4, 10))
self.init_args()
self.inputs = {'X': self.input_data}
self.attrs = {'k': self.k, 'axis': self.axis}
output, indices = cal_kthvalue(self.input_data,
k=self.k,
axis=self.axis)
self.outputs = {'Out': output, 'Indices': indices}
def test_check_output(self):
paddle.enable_static()
self.check_output(check_eager=True)
def test_check_grad(self):
paddle.enable_static()
self.check_grad(set(['X']), 'Out', check_eager=True)
class TestKthvalueOpWithKeepdim(OpTest):
def init_args(self):
self.k = 2
self.axis = 1
def setUp(self):
self.init_args()
self.op_type = "kthvalue"
self.python_api = paddle.kthvalue
self.dtype = np.float64
self.input_data = np.random.random((1, 3, 2, 4, 10))
self.inputs = {'X': self.input_data}
self.attrs = {'k': self.k, 'axis': self.axis, 'keepdim': True}
output, indices = cal_kthvalue(self.input_data,
k=self.k,
axis=self.axis,
keepdim=True)
self.outputs = {'Out': output, 'Indices': indices}
def test_check_output(self):
paddle.enable_static()
self.check_output(check_eager=True)
def test_check_grad(self):
paddle.enable_static()
self.check_grad(set(['X']), 'Out', check_eager=True)
class TestKthvalueOpKernels(unittest.TestCase):
def setUp(self):
self.axises = [2, -1]
def test_kthvalue_op(self):
paddle.disable_static()
def test_cpu_kernel():
shape = (2, 128, 10)
k = 2
paddle.set_device('cpu')
inputs = np.random.random(shape)
tensor = paddle.to_tensor(inputs)
for axis in self.axises:
value_expect, indice_expect = cal_kthvalue(inputs, k, axis)
v, inds = paddle.kthvalue(tensor, k, axis)
self.assertTrue(np.allclose(v.numpy(), value_expect))
self.assertTrue(np.allclose(inds.numpy(), indice_expect))
def test_gpu_kernel():
shape = (2, 30, 250)
k = 244
paddle.set_device('gpu')
inputs = np.random.random(shape)
tensor = paddle.to_tensor(inputs)
for axis in self.axises:
value_expect, indice_expect = cal_kthvalue(inputs, k, axis)
v, inds = paddle.kthvalue(tensor, k, axis)
self.assertTrue(np.allclose(v.numpy(), value_expect))
self.assertTrue(np.allclose(inds.numpy(), indice_expect))
test_cpu_kernel()
if fluid.core.is_compiled_with_cuda():
test_gpu_kernel()
class TestKthvalueOpWithNaN(unittest.TestCase):
def setUp(self):
paddle.disable_static()
self.x = paddle.uniform([2, 200, 10], dtype='float32')
def test_errors(self):
def test_nan_in_cpu_kernel():
paddle.set_device('cpu')
nan_position = 100
self.x[0, nan_position, 2] = float('nan')
v, inds = self.x.kthvalue(k=200, axis=1)
self.assertTrue(np.isnan(v[0, 2].numpy()[0]))
self.assertEqual(inds[0, 2].numpy()[0], nan_position)
def test_nan_in_gpu_kernel():
paddle.set_device('gpu')
nan_position = 100
self.x[0, nan_position, 2] = float('nan')
v, inds = self.x.kthvalue(k=200, axis=1)
self.assertTrue(np.isnan(v[0, 2].numpy()[0]))
self.assertEqual(inds[0, 2].numpy()[0], nan_position)
test_nan_in_cpu_kernel()
if fluid.core.is_compiled_with_cuda():
test_nan_in_gpu_kernel()
class TestKthvalueOpErrors(unittest.TestCase):
def setUp(self):
self.x = paddle.uniform([2, 10, 20, 25], dtype='float32')
def test_errors(self):
paddle.disable_static()
def test_k_lowrange_error():
self.x.kthvalue(k=0, axis=2)
self.assertRaises(ValueError, test_k_lowrange_error)
def test_k_uprange_error():
self.x.kthvalue(k=500, axis=2)
self.assertRaises(ValueError, test_k_uprange_error)
def test_dim_range_error():
self.x.kthvalue(k=10, axis=5)
self.assertRaises(ValueError, test_dim_range_error)
class TestModeOpInStatic(unittest.TestCase):
def setUp(self):
np.random.seed(666)
self.input_data = np.random.random((2, 20, 1, 2, 80)).astype(np.float64)
self.k = 10
def test_run_static(self):
paddle.enable_static()
with paddle.static.program_guard(paddle.static.Program(),
paddle.static.Program()):
input_tensor = paddle.static.data(name="x",
shape=[2, 20, 1, 2, 80],
dtype="float64")
result = paddle.kthvalue(input_tensor, self.k, axis=1)
expect_value = cal_kthvalue(self.input_data, self.k, axis=1)[0]
exe = paddle.static.Executor(paddle.CPUPlace())
paddle_result = exe.run(feed={"x": self.input_data},
fetch_list=[result])[0]
self.assertTrue(np.allclose(paddle_result, expect_value))
if __name__ == '__main__':
unittest.main()
| [
"noreply@github.com"
] | jiweibo.noreply@github.com |
2c7a69f9ebfd2de68ab4f751f65e7e08002705e9 | 9e50467d6417e79e66ad087365be7b2a3da79f74 | /extract_features.py | 8099ff5b479120012b9a8d4776c30a6cf1901792 | [] | no_license | yancarm/Plant-Recognition | 8e5264cec417191297d3045323966a2cbe5aa2d0 | 2f500539c6e7f6c25e364073dc7ebec021ad2148 | refs/heads/master | 2023-02-25T21:28:34.597939 | 2021-02-01T15:34:34 | 2021-02-01T15:34:34 | 334,981,920 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,780 | py | # filter warnings / filtre Uyarısı
import warnings
warnings.simplefilter(action="ignore", category=FutureWarning)
# keras imports important libraries / gerekli Keras Kütüphanelerinin içe aktarımı
from keras.applications.vgg16 import VGG16, preprocess_input
from keras.applications.vgg19 import VGG19, preprocess_input
from keras.applications.xception import Xception, preprocess_input
from keras.applications.resnet50 import ResNet50, preprocess_input
from keras.applications.inception_resnet_v2 import InceptionResNetV2, preprocess_input
from keras.applications.mobilenet import MobileNet, preprocess_input
from keras.applications.inception_v3 import InceptionV3, preprocess_input
from keras.preprocessing import image
from keras.models import Model
from keras.models import model_from_json
from keras.layers import Input
# other imports libraries / diğer kütüphanelerin içe aktarımı
from sklearn.preprocessing import LabelEncoder
import numpy as np
import glob
import cv2
import h5py
import os
import json
import datetime
import time
# load the user configs file which is present in conf folder / conf'ta bulunan kullanıcı dosyasının yüklenmesi
with open('conf/conf.json') as f:
config = json.load(f)
# config variables / Yapılandırma değişkenleri
model_name = config["model"]
weights = config["weights"]
include_top = config["include_top"]
train_path = config["train_path"]
features_path = config["features_path"]
labels_path = config["labels_path"]
test_size = config["test_size"]
results = config["results"]
model_path = config["model_path"]
# start time / başlama zamanı
print ("[STATUS] start time - {}".format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
start = time.time()
# create the pretrained models / önceden eğitilmiş modellerin oluşturulması
# check for pretrained weight usage or not / önceden eğitilmiş ağırlık kullanımı olup olmadığının kontrolü
# check for top layers to be included or not / üst katmanların dahil edilip edilmeyeceğinin kararının verilmesi
if model_name == "vgg16":
base_model = VGG16(weights=weights)
model = Model(input=base_model.input, output=base_model.get_layer('fc1').output)
image_size = (224, 224)
elif model_name == "vgg19":
base_model = VGG19(weights=weights)
model = Model(input=base_model.input, output=base_model.get_layer('fc1').output)
image_size = (224, 224)
elif model_name == "xception":
base_model = Xception(weights=weights)
model = Model(input=base_model.input, output=base_model.get_layer('avg_pool').output)
image_size = (299, 299)
else:
base_model = None
print ("[INFO] successfully loaded base model and model...")
# path to training dataset / eğitim veriseti yolu
train_labels = os.listdir(train_path)
# encode the labels / etiketlerin kodlanması
print ("[INFO] encoding labels...")
le = LabelEncoder()
le.fit([tl for tl in train_labels])
# variables to hold features and labels / özellik ve etiketleri tutması için değişkenlerin tanımlanması
features = []
labels = []
# loop over all the labels in the folder / klasördeki tüm etiketlerin kullanılması
count = 1
for i, label in enumerate(train_labels):
cur_path = train_path + "/" + label
count = 1
for image_path in glob.glob(cur_path + "/*.jpg"):
img = image.load_img(image_path, target_size=image_size)
x = image.img_to_array(img)
x = np.expand_dims(x, axis=0)
x = preprocess_input(x)
feature = model.predict(x)
flat = feature.flatten()
features.append(flat)
labels.append(label)
print ("[INFO] processed - " + str(count))
count += 1
print ("[INFO] completed label - " + label)
# encode the labels using LabelEncoder / LabelEncoder ile etiketlerin kodlanması -
le = LabelEncoder()
le_labels = le.fit_transform(labels)
# get the shape of training labels / eğitim etiketlerinin alınması
print ("[STATUS] training labels: {}".format(le_labels))
print ("[STATUS] training labels shape: {}".format(le_labels.shape))
# save features and labels / özellik ve etiketlerin kayıt edilmesi
h5f_data = h5py.File(features_path, 'w')
h5f_data.create_dataset('dataset_1', data=np.array(features))
h5f_label = h5py.File(labels_path, 'w')
h5f_label.create_dataset('dataset_1', data=np.array(le_labels))
h5f_data.close()
h5f_label.close()
# save model and weights / modeli ve ağırlıklarının kaydedilmesi
model_json = model.to_json()
with open(model_path + str(test_size) + ".json", "w") as json_file:
json_file.write(model_json)
model.save_weights(model_path + str(test_size) + ".h5")
print("[STATUS] saved model and weights to disk..")
print ("[STATUS] features and labels saved..")
# end time / bitiş zamanı
end = time.time()
print ("[STATUS] end time - {}".format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M"))) | [
"295092@ogr.ktu.edu.tr"
] | 295092@ogr.ktu.edu.tr |
61a718dd238b2f241619b76b104c65f04600c81c | 62bacd2f6bdeb9825aa25c1d7e4855bd1e605de0 | /apprise/plugins/NotifyMQTT.py | 48094e5fcf9b013e1e67c4aefb320c80365b4fe9 | [
"BSD-3-Clause"
] | permissive | mdedonno1337/apprise | 33a73bb092c4cab87923803b65e53a420c818577 | 76d44a317975a0f53899bf8bb558470045c0faec | refs/heads/master | 2023-04-07T10:22:52.638464 | 2023-03-24T10:02:21 | 2023-03-24T10:02:21 | 169,271,540 | 0 | 0 | MIT | 2019-02-05T16:19:52 | 2019-02-05T16:19:51 | null | UTF-8 | Python | false | false | 19,480 | py | # -*- coding: utf-8 -*-
# BSD 3-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# PAHO MQTT Documentation:
# https://www.eclipse.org/paho/index.php?page=clients/python/docs/index.php
#
# Looking at the PAHO MQTT Source can help shed light on what's going on too
# as their inline documentation is pretty good!
# https://github.com/eclipse/paho.mqtt.python\
# /blob/master/src/paho/mqtt/client.py
import ssl
import re
from time import sleep
from datetime import datetime
from os.path import isfile
from .NotifyBase import NotifyBase
from ..URLBase import PrivacyMode
from ..common import NotifyType
from ..utils import parse_list
from ..utils import parse_bool
from ..AppriseLocale import gettext_lazy as _
# Default our global support flag
NOTIFY_MQTT_SUPPORT_ENABLED = False
try:
# 3rd party modules
import paho.mqtt.client as mqtt
# We're good to go!
NOTIFY_MQTT_SUPPORT_ENABLED = True
MQTT_PROTOCOL_MAP = {
# v3.1.1
"311": mqtt.MQTTv311,
# v3.1
"31": mqtt.MQTTv31,
# v5.0
"5": mqtt.MQTTv5,
# v5.0 (alias)
"50": mqtt.MQTTv5,
}
except ImportError:
# No problem; we just simply can't support this plugin because we're
# either using Linux, or simply do not have pywin32 installed.
MQTT_PROTOCOL_MAP = {}
# A lookup map for relaying version to user
HUMAN_MQTT_PROTOCOL_MAP = {
"v3.1.1": "311",
"v3.1": "31",
"v5.0": "5",
}
class NotifyMQTT(NotifyBase):
"""
A wrapper for MQTT Notifications
"""
# Set our global enabled flag
enabled = NOTIFY_MQTT_SUPPORT_ENABLED
requirements = {
# Define our required packaging in order to work
'packages_required': 'paho-mqtt'
}
# The default descriptive name associated with the Notification
service_name = 'MQTT Notification'
# The default protocol
protocol = 'mqtt'
# Secure protocol
secure_protocol = 'mqtts'
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_mqtt'
# MQTT does not have a title
title_maxlen = 0
# The maximum length a body can be set to
body_maxlen = 268435455
# Use a throttle; but it doesn't need to be so strict since most
# MQTT server hostings can handle the small bursts of packets and are
# locally hosted anyway
request_rate_per_sec = 0.5
# Port Defaults (unless otherwise specified)
mqtt_insecure_port = 1883
# The default secure port to use (if mqtts://)
mqtt_secure_port = 8883
# The default mqtt keepalive value
mqtt_keepalive = 30
# The default mqtt transport
mqtt_transport = "tcp"
# The number of seconds to wait for a publish to occur at before
# checking to see if it's been sent yet.
mqtt_block_time_sec = 0.2
# Set the maximum number of messages with QoS>0 that can be part way
# through their network flow at once.
mqtt_inflight_messages = 200
# Define object templates
templates = (
'{schema}://{user}@{host}/{topic}',
'{schema}://{user}@{host}:{port}/{topic}',
'{schema}://{user}:{password}@{host}/{topic}',
'{schema}://{user}:{password}@{host}:{port}/{topic}',
)
template_tokens = dict(NotifyBase.template_tokens, **{
'host': {
'name': _('Hostname'),
'type': 'string',
'required': True,
},
'port': {
'name': _('Port'),
'type': 'int',
'min': 1,
'max': 65535,
},
'user': {
'name': _('User Name'),
'type': 'string',
'required': True,
},
'password': {
'name': _('Password'),
'type': 'string',
'private': True,
'required': True,
},
'topic': {
'name': _('Target Queue'),
'type': 'string',
'map_to': 'targets',
},
'targets': {
'name': _('Targets'),
'type': 'list:string',
},
})
# Define our template arguments
template_args = dict(NotifyBase.template_args, **{
'to': {
'alias_of': 'targets',
},
'qos': {
'name': _('QOS'),
'type': 'int',
'default': 0,
'min': 0,
'max': 2,
},
'version': {
'name': _('Version'),
'type': 'choice:string',
'values': HUMAN_MQTT_PROTOCOL_MAP,
'default': "v3.1.1",
},
'client_id': {
'name': _('Client ID'),
'type': 'string',
},
'session': {
'name': _('Use Session'),
'type': 'bool',
'default': False,
},
})
def __init__(self, targets=None, version=None, qos=None,
client_id=None, session=None, **kwargs):
"""
Initialize MQTT Object
"""
super().__init__(**kwargs)
# Initialize topics
self.topics = parse_list(targets)
if version is None:
self.version = self.template_args['version']['default']
else:
self.version = version
# Save our client id if specified
self.client_id = client_id
# Maintain our session (associated with our user id if set)
self.session = self.template_args['session']['default'] \
if session is None or not self.client_id \
else parse_bool(session)
# Set up our Quality of Service (QoS)
try:
self.qos = self.template_args['qos']['default'] \
if qos is None else int(qos)
if self.qos < self.template_args['qos']['min'] \
or self.qos > self.template_args['qos']['max']:
# Let error get handle on exceptio higher up
raise ValueError("")
except (ValueError, TypeError):
msg = 'An invalid MQTT QOS ({}) was specified.'.format(qos)
self.logger.warning(msg)
raise TypeError(msg)
if not self.port:
# Assign port (if not otherwise set)
self.port = self.mqtt_secure_port \
if self.secure else self.mqtt_insecure_port
self.ca_certs = None
if self.secure:
# verify SSL key or abort
# TODO: There is no error reporting or aborting here?
# It could be useful to inform the user _where_ Apprise
# tried to find the root CA certificates file.
self.ca_certs = next(
(cert for cert in self.CA_CERTIFICATE_FILE_LOCATIONS
if isfile(cert)), None)
# Set up our MQTT Publisher
try:
# Get our protocol
self.mqtt_protocol = \
MQTT_PROTOCOL_MAP[re.sub(r'[^0-9]+', '', self.version)]
except (KeyError):
msg = 'An invalid MQTT Protocol version ' \
'({}) was specified.'.format(version)
self.logger.warning(msg)
raise TypeError(msg)
# Our MQTT Client Object
self.client = mqtt.Client(
client_id=self.client_id,
clean_session=not self.session, userdata=None,
protocol=self.mqtt_protocol, transport=self.mqtt_transport,
)
# Our maximum number of in-flight messages
self.client.max_inflight_messages_set(self.mqtt_inflight_messages)
# Toggled to False once our connection has been established at least
# once
self.__initial_connect = True
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
"""
Perform MQTT Notification
"""
if len(self.topics) == 0:
# There were no services to notify
self.logger.warning('There were no MQTT topics to notify.')
return False
# For logging:
url = '{host}:{port}'.format(host=self.host, port=self.port)
try:
if self.__initial_connect:
# Our initial connection
if self.user:
self.client.username_pw_set(
self.user, password=self.password)
if self.secure:
if self.ca_certs is None:
self.logger.error(
'MQTT secure communication can not be verified, '
'CA certificates file missing')
return False
self.client.tls_set(
ca_certs=self.ca_certs, certfile=None, keyfile=None,
cert_reqs=ssl.CERT_REQUIRED,
tls_version=ssl.PROTOCOL_TLS,
ciphers=None)
# Set our TLS Verify Flag
self.client.tls_insecure_set(not self.verify_certificate)
# Establish our connection
if self.client.connect(
self.host, port=self.port,
keepalive=self.mqtt_keepalive) \
!= mqtt.MQTT_ERR_SUCCESS:
self.logger.warning(
'An MQTT connection could not be established for {}'.
format(url))
return False
# Start our client loop
self.client.loop_start()
# Throttle our start otherwise the starting handshaking doesnt
# work. I'm not sure if this is a bug or not, but with qos=0,
# and without this sleep(), the messages randomly fails to be
# delivered.
sleep(0.01)
# Toggle our flag since we never need to enter this area again
self.__initial_connect = False
# Create a copy of the subreddits list
topics = list(self.topics)
has_error = False
while len(topics) > 0 and not has_error:
# Retrieve our subreddit
topic = topics.pop()
# For logging:
url = '{host}:{port}/{topic}'.format(
host=self.host,
port=self.port,
topic=topic)
# Always call throttle before any remote server i/o is made
self.throttle()
# handle a re-connection
if not self.client.is_connected() and \
self.client.reconnect() != mqtt.MQTT_ERR_SUCCESS:
self.logger.warning(
'An MQTT connection could not be sustained for {}'.
format(url))
has_error = True
break
# Some Debug Logging
self.logger.debug('MQTT POST URL: {} (cert_verify={})'.format(
url, self.verify_certificate))
self.logger.debug('MQTT Payload: %s' % str(body))
result = self.client.publish(
topic, payload=body, qos=self.qos, retain=False)
if result.rc != mqtt.MQTT_ERR_SUCCESS:
# Toggle our status
self.logger.warning(
'An error (rc={}) occured when sending MQTT to {}'.
format(result.rc, url))
has_error = True
break
elif not result.is_published():
self.logger.debug(
'Blocking until MQTT payload is published...')
reference = datetime.now()
while not has_error and not result.is_published():
# Throttle
sleep(self.mqtt_block_time_sec)
# Our own throttle so we can abort eventually....
elapsed = (datetime.now() - reference).total_seconds()
if elapsed >= self.socket_read_timeout:
self.logger.warning(
'The MQTT message could not be delivered')
has_error = True
# if we reach here; we're at the bottom of our loop
# we loop around and do the next topic now
except ConnectionError as e:
self.logger.warning(
'MQTT Connection Error received from {}'.format(url))
self.logger.debug('Socket Exception: %s' % str(e))
return False
except ssl.CertificateError as e:
self.logger.warning(
'MQTT SSL Certificate Error received from {}'.format(url))
self.logger.debug('Socket Exception: %s' % str(e))
return False
except ValueError as e:
# ValueError's are thrown from publish() call if there is a problem
self.logger.warning(
'MQTT Publishing error received: from {}'.format(url))
self.logger.debug('Socket Exception: %s' % str(e))
return False
return not has_error
def url(self, privacy=False, *args, **kwargs):
"""
Returns the URL built dynamically based on specified arguments.
"""
# Define any URL parameters
params = {
'version': self.version,
'qos': str(self.qos),
'session': 'yes' if self.session else 'no',
}
if self.client_id:
# Our client id is set if specified
params['client_id'] = self.client_id
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
# Determine Authentication
auth = ''
if self.user and self.password:
auth = '{user}:{password}@'.format(
user=NotifyMQTT.quote(self.user, safe=''),
password=self.pprint(
self.password, privacy, mode=PrivacyMode.Secret, safe=''),
)
elif self.user:
auth = '{user}@'.format(
user=NotifyMQTT.quote(self.user, safe=''),
)
default_port = self.mqtt_secure_port \
if self.secure else self.mqtt_insecure_port
return '{schema}://{auth}{hostname}{port}/{targets}?{params}'.format(
schema=self.secure_protocol if self.secure else self.protocol,
auth=auth,
# never encode hostname since we're expecting it to be a valid one
hostname=self.host,
port='' if self.port is None or self.port == default_port
else ':{}'.format(self.port),
targets=','.join(
[NotifyMQTT.quote(x, safe='/') for x in self.topics]),
params=NotifyMQTT.urlencode(params),
)
@staticmethod
def parse_url(url):
"""
There are no parameters nessisary for this protocol; simply having
windows:// is all you need. This function just makes sure that
is in place.
"""
results = NotifyBase.parse_url(url)
if not results:
# We're done early as we couldn't load the results
return results
try:
# Acquire topic(s)
results['targets'] = parse_list(
NotifyMQTT.unquote(results['fullpath'].lstrip('/')))
except AttributeError:
# No 'fullpath' specified
results['targets'] = []
# The MQTT protocol version to use
if 'version' in results['qsd'] and len(results['qsd']['version']):
results['version'] = \
NotifyMQTT.unquote(results['qsd']['version'])
# The MQTT Client ID
if 'client_id' in results['qsd'] and len(results['qsd']['client_id']):
results['client_id'] = \
NotifyMQTT.unquote(results['qsd']['client_id'])
if 'session' in results['qsd'] and len(results['qsd']['session']):
results['session'] = parse_bool(results['qsd']['session'])
# The MQTT Quality of Service to use
if 'qos' in results['qsd'] and len(results['qsd']['qos']):
results['qos'] = \
NotifyMQTT.unquote(results['qsd']['qos'])
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'].extend(
NotifyMQTT.parse_list(results['qsd']['to']))
# return results
return results
@property
def CA_CERTIFICATE_FILE_LOCATIONS(self):
"""
Return possible locations to root certificate authority (CA) bundles.
Taken from https://golang.org/src/crypto/x509/root_linux.go
TODO: Maybe refactor to a general utility function?
"""
candidates = [
# Debian/Ubuntu/Gentoo etc.
"/etc/ssl/certs/ca-certificates.crt",
# Fedora/RHEL 6
"/etc/pki/tls/certs/ca-bundle.crt",
# OpenSUSE
"/etc/ssl/ca-bundle.pem",
# OpenELEC
"/etc/pki/tls/cacert.pem",
# CentOS/RHEL 7
"/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",
# macOS Homebrew; brew install ca-certificates
"/usr/local/etc/ca-certificates/cert.pem",
]
# Certifi provides Mozilla’s carefully curated collection of Root
# Certificates for validating the trustworthiness of SSL certificates
# while verifying the identity of TLS hosts. It has been extracted from
# the Requests project.
try:
import certifi
candidates.append(certifi.where())
except ImportError: # pragma: no cover
pass
return candidates
| [
"noreply@github.com"
] | mdedonno1337.noreply@github.com |
b52b54ba5c3e9ba3ebba9288d1f5f7c30510f82f | 9db93c221c7f5dd81a9645f23f46e1404d51deca | /post/migrations/0004_auto_20200520_0935.py | c5959a60f38c5bf9ef9cfd7bc1bcf5c59bfc3611 | [] | no_license | m3h-D/NojavanC | 0f4b6377b76ff0b81532117d578aa6a149c6f0ad | 9fc1ebbac55797c583de2d4bca416e5c210bf0e8 | refs/heads/master | 2022-11-05T04:16:41.802241 | 2020-06-08T17:13:48 | 2020-06-08T17:13:48 | 269,400,006 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 593 | py | # Generated by Django 2.2.8 on 2020-05-20 05:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('post', '0003_auto_20200516_1439'),
]
operations = [
migrations.AlterField(
model_name='post',
name='category',
field=models.ManyToManyField(blank=True, related_name='post', to='category.Category'),
),
migrations.AlterField(
model_name='post',
name='slug',
field=models.SlugField(blank=True, max_length=120),
),
]
| [
"huskar_assassin@yahoo.com.com"
] | huskar_assassin@yahoo.com.com |
a272e1b11c4ec4f975a6dd241d020af9876ef059 | 6d8ed9e06e7783443fac3d100a4fdea304d5d64e | /dashboard/internet_nl_dashboard/migrations/0036_urllistreport_average_internet_nl_score.py | de64746ac2b34bee044538ef4f70302d905e030b | [
"Apache-2.0"
] | permissive | internetstandards/Internet.nl-dashboard | 399c6d13d66bbc56b1a5b964a727cc299d351bd8 | f1f68352a173689e2386d790f69bd28640a75e09 | refs/heads/main | 2023-08-31T21:01:42.739287 | 2023-07-12T10:51:16 | 2023-07-12T10:51:16 | 175,843,928 | 7 | 8 | Apache-2.0 | 2023-09-13T08:45:51 | 2019-03-15T15:16:49 | JavaScript | UTF-8 | Python | false | false | 667 | py | # Generated by Django 2.2.2 on 2019-06-28 07:51
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: store the internet.nl average score on UrlListReport."""

    dependencies = [
        ('internet_nl_dashboard', '0035_auto_20190624_0712'),
    ]
    operations = [
        migrations.AddField(
            model_name='urllistreport',
            name='average_internet_nl_score',
            field=models.FloatField(
                default=0, help_text='Internet.nl scores are retrieved in point. The calculation done for that is complex and subject to change over time. Therefore it is impossible to re-calculate that score here.Instead the score is stored as a given.'),
        ),
    ]
| [
"elger.jonker@gmail.com"
] | elger.jonker@gmail.com |
b85c5ff231152a9ea612584b993319d81f6a6d6a | f3b806c680a07a1fb62f6ca7be6995df41b6b59d | /test.py | dfe8ebdf54f04a69624372c37e3f62e36eb64fbb | [] | no_license | huqamp/proekt | 6bdf0998133b35198c0b61be4c62ccbfb120a518 | 799bafb386c5f654bdefc07eef118f70e063164c | refs/heads/master | 2022-06-01T08:34:32.601822 | 2020-03-03T12:44:13 | 2020-03-03T12:44:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,213 | py | import os
import sys
import requests
from PyQt5.QtGui import QPixmap
from PyQt5.QtWidgets import QApplication, QWidget, QLabel, QPushButton, QLineEdit
from PyQt5.QtCore import Qt
# Main window size in pixels: [width, height].
SCREEN_SIZE = [900, 450]
class Example(QWidget):
    """Interactive Yandex static-map viewer: arrow keys pan, PageUp/Down zoom,
    and a button cycles the map layer style."""
    def __init__(self):
        super().__init__()
        # Map center (self.x = longitude, self.y = latitude) and the requested
        # span in degrees (self.m horizontal, self.n vertical).
        self.x, self.y = 37.530887, 55.703118
        self.m, self.n = 0.002, 0.002
        self.style_map = 'map'
        self.getImage()
        self.initUI()
    def getImage(self):
        """Fetch the current map tile from the Yandex static-maps API; return raw bytes.
        Exits the process with status 1 if the HTTP request fails."""
        map_request = f"http://static-maps.yandex.ru/1.x/?ll={self.x},{self.y}&spn={self.m},{self.n}&l={self.style_map}"
        response = requests.get(map_request)
        if not response:
            print("Ошибка выполнения запроса:")
            print(map_request)
            print("Http статус:", response.status_code, "(", response.reason, ")")
            sys.exit(1)
        return response.content
    def initUI(self):
        """Build the fixed-size window: a style-toggle button and the map label."""
        self.setGeometry(100, 100, *SCREEN_SIZE)
        self.setFixedSize(900, 450)
        self.setWindowTitle('Отображение карты')
        #########
        # Style-toggle button (NoFocus so arrow keys keep reaching keyPressEvent).
        self.chg_map = QPushButton(f'Сменить стиль: Схема', self)
        self.chg_map.move(620, 15)
        self.chg_map.resize(260, 50)
        self.chg_map.setFocusPolicy(Qt.NoFocus)
        self.chg_map.clicked.connect(self.change_style_map)
        #########
        ## Map image label showing the downloaded tile.
        self.pixmap = QPixmap()
        self.pixmap.loadFromData(self.getImage())
        self.image = QLabel(self)
        self.image.move(0, 0)
        self.image.resize(600, 450)
        self.image.setPixmap(self.pixmap)
    def change_style_map(self):
        """Cycle layer style map -> sat -> sat,skl (hybrid) -> map, then refresh."""
        if self.style_map == 'map':
            self.style_map = 'sat'
            self.chg_map.setText('Сменить стиль: Спутник')
        elif self.style_map == 'sat':
            self.chg_map.setText('Сменить стиль: Гибрид')
            self.style_map = 'sat,skl'
        else:
            self.chg_map.setText('Сменить стиль: Схема')
            self.style_map = 'map'
        self.update_map()
    def update_map(self):
        """Re-download the tile for the current center/span/style and display it."""
        self.pixmap.loadFromData(self.getImage())
        self.image.setPixmap(self.pixmap)
    def keyPressEvent(self, event):
        """PageUp/PageDown halve/double the span (clamped to API limits); arrows pan."""
        if event.key() == Qt.Key_PageUp:
            self.m /= 2
            self.n /= 2
            self.n = max(self.n, 0.0005)
            self.m = max(self.m, 0.0005)
            self.update_map()
        elif event.key() == Qt.Key_PageDown:
            self.m *= 2
            self.n *= 2
            self.m = min(self.m, 65.536)
            self.n = min(self.n, 65.536)
            self.update_map()
        elif event.key() == Qt.Key_Up:
            self.y += self.n
            self.update_map()
        elif event.key() == Qt.Key_Down:
            self.y -= self.n
            self.update_map()
        # NOTE(review): horizontal panning steps by self.n (the vertical span)
        # rather than self.m; the two stay equal in this program, but confirm
        # before letting them diverge.
        elif event.key() == Qt.Key_Left:
            self.x -= self.n
            self.update_map()
        elif event.key() == Qt.Key_Right:
            self.x += self.n
            self.update_map()
# Script entry point: create the Qt application, show the window, and hand
# control to the Qt event loop until the window is closed.
if __name__ == '__main__':
    app = QApplication(sys.argv)
    ex = Example()
    ex.show()
    sys.exit(app.exec())
| [
"mantonov16@mail.ru"
] | mantonov16@mail.ru |
c1c208b7df225d208337baa30f65ea71849170b4 | 3d16b0256661a0d8a5f35f257776db57f0ea2f4d | /models/telegram_token.py | 495ee7cae4decfc034bbe86591bac4b12460e13d | [
"LicenseRef-scancode-public-domain"
] | permissive | DigitalSenses/Farm-Bot | d589400dab0151296f225c87d711c2c06be72d71 | aa9b9eac4edbf3dd828c92425c2d8d2e608fda28 | refs/heads/master | 2021-01-20T15:23:58.444848 | 2017-05-10T15:40:05 | 2017-05-10T15:40:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 89 | py | # -*- coding: utf-8 -*-
# Telegram bot API token used by the bot framework.
# NOTE(review): committing a live token to source control is a security risk —
# it should be revoked and loaded from an environment variable or secret store.
TELEGRAM_TOKEN = "179454195:AAEHc6i1u9wMiAsTYVot3cgu2Fl1LssBQsc"
| [
"noreply@github.com"
] | DigitalSenses.noreply@github.com |
cdaa1fe4a0bff64677bc0a27acb19048df67624c | 936993be076c228c3bc68d78177289e2b98847b4 | /deploy/views.py | a7ee8a2ad419f14b51781751513f2b54f5384585 | [] | no_license | qyt2018/lyanadmin | d1bb6995e5addf220f427664fe8008e4b5e11d87 | 896db17535e11a5ba152f570afc692b0e867568d | refs/heads/master | 2021-05-07T14:00:07.498450 | 2017-09-08T07:55:22 | 2017-09-08T07:55:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,675 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render, HttpResponse, HttpResponseRedirect
from lyanadmin import settings
from deploy.api.saltapihttps import SaltAPI
from code_pub import Code_Work
from build_data import BuildData
import time
from deploy import models
from asset import models as asset_models
def batchcmd(request):
    '''Run an ad-hoc shell command on one or more salt minions via cmd.run.'''
    if request.method == 'POST':
        # Target minions are submitted as one comma-separated string.
        tgtli = request.POST.get('tgt').split(",")
        arg = request.POST.get('arg')
        sapi = SaltAPI(url=settings.SALT_API['url'], username=settings.SALT_API['user'],
                       password=settings.SALT_API['password'])
        result = []
        # One synchronous salt-api call per target; all results are rendered together.
        for tgt in tgtli:
            params = {'client': 'local', 'fun': 'cmd.run', 'tgt': tgt, 'arg': arg}
            result.append(sapi.saltCmd(params))
        print(result)
        return render(request, "deploy/batch_cmd.html", {'result': result})
    else:
        return render(request, "deploy/batch_cmd.html")
def salt_list(request):
    """List all accepted and pending (pre) salt minion keys."""
    # user = request.user
    sapi = SaltAPI(url=settings.SALT_API['url'], username=settings.SALT_API['user'],
                   password=settings.SALT_API['password'])
    minions, minions_pre = sapi.list_all_key()
    return render(request, 'deploy/salt_key_list.html', {'all_minions': minions, 'all_minions_pre': minions_pre})
def key_accept(request):
    '''Accept the pending salt key named by ?node_name=..., log it, and redirect to the key list.'''
    node_name = request.GET.get('node_name')
    sapi = SaltAPI(url=settings.SALT_API['url'], username=settings.SALT_API['user'],
                   password=settings.SALT_API['password'])
    ret = sapi.accept_key(node_name)
    # Audit-log the action (the stored content text is displayed verbatim).
    models.Message.objects.create(type='salt', action='accept_key', action_ip=node_name, content='saltstack 接收key')
    return HttpResponseRedirect('/deploy/salt_list/')
def key_delete(request):
    '''Delete the salt key named by ?node_name=..., log it, and redirect to the key list.'''
    node_name = request.GET.get('node_name')
    sapi = SaltAPI(url=settings.SALT_API['url'], username=settings.SALT_API['user'],
                   password=settings.SALT_API['password'])
    ret = sapi.delete_key(node_name)
    # Audit-log the action (the stored content text is displayed verbatim).
    models.Message.objects.create(type='salt', action='delete_key', action_ip=node_name, content='saltstack 删除key')
    return HttpResponseRedirect('/deploy/salt_list/')
def module_deploy(request):
    '''Deploy one or more salt state modules asynchronously to a target host.'''
    ret = ''
    if request.method == 'POST':
        print (request.POST)
        print (request.GET)
        ret = []
        action = request.GET.get('action')
        if action == 'deploy':
            tgt = request.POST.get('tgt')  # target host name
            arg = request.POST.getlist('module')  # list of state modules to deploy
            tgtcheck = asset_models.NIC.objects.filter(name=tgt)  # validate target against known NIC names
            print(tgt, arg, tgtcheck)
            if tgtcheck:
                # Audit-log the deploy request before dispatching.
                models.Message.objects.create(type='salt', action='deploy', action_ip=tgt,
                                              content='saltstack %s 模块部署' % arg)  # deploy audit entry
                sapi = SaltAPI(url=settings.SALT_API['url'], username=settings.SALT_API['user'],
                               password=settings.SALT_API['password'])
                if 'sysinit' in arg:
                    # Run the system-init module first, then the rest in any order.
                    # NOTE(review): this dispatches arg[-1] first rather than the
                    # 'sysinit' entry itself — confirm the submitted list ordering.
                    obj = sapi.async_deploy(tgt, arg[-1])
                    ret.append(obj)
                    arg.remove('sysinit')
                    if arg:
                        for i in arg:
                            obj = sapi.async_deploy(tgt, i)
                            ret.append(obj)
                else:
                    for i in arg:
                        obj = sapi.async_deploy(tgt, i)
                        ret.append(obj)
                # sapi.async_deploy('test-01','zabbix.api')  # invoke zabbix.api to add module monitoring
            else:
                # Unknown target host: render an error message instead of results.
                ret = '目标主机不正确,请重新输入'
    return render(request, 'deploy/salt_module_deploy.html', {'ret': ret})
# ////////////
def code_distribution(request):
    """Build an RPM for the requested project and roll it out via salt 'deploy' states."""
    ret = ''
    # Maps the deploy environment name to its salt target host.
    host = {'ga': 'test-01', 'beta': 'localhost.localdomain'}
    user = request.user
    if request.method == 'POST':
        action = request.GET.get('action')
        if action == 'push':
            pro = request.POST.get('project')
            url = request.POST.get('url')
            version = request.POST.get('version')
            env = request.POST.get('env')
            print(pro, url, version, env)
            capi = Code_Work(pro=pro, url=url, ver=version)
            data = {pro: {'ver': version}}
            obj = capi.work()  # build the rpm package
            # Proceed only if all three build steps reported success.
            if obj['comment'][0]['result'] and obj['comment'][1]['result'] and obj['comment'][2]['result']:
                json_api = BuildData()
                # Refresh pillar data, then push the deploy SLS to perform the release.
                json_api.build_data(host[env], data)
                sapi = SaltAPI(url=settings.SALT_API['url'], username=settings.SALT_API['user'],
                               password=settings.SALT_API['password'])
                if env == 'beta':
                    jid = sapi.target_deploy('beta', 'deploy.' + pro)
                elif env == 'ga':
                    jid = sapi.target_deploy('tg', 'deploy.' + pro)
                else:
                    jid = sapi.target_deploy('beta', 'deploy.' + pro)
                time.sleep(8)
                # db = db_operate()
                # sql = 'select returns from salt_returns where jid=%s'
                # ret=db.select_table(settings.RETURNS_MYSQL,sql,str(jid))  # fetch execution result by jid
    return render(request, 'deploy/code_distribution.html')
| [
"wangchao@lyancoffee.com"
] | wangchao@lyancoffee.com |
9935bf17348ac4e6657c7fd6366d8f018c376ffd | ed6236094e9ad2e87556c1db72ccb6e1c8c53c18 | /fbank/transfer.py | 48f0e5aeaa2a51127b38285abac23c9c0d9ecd5c | [] | no_license | petersvec/bispp_lab_01 | c5a4935d88e9c00896b084d12addef67d4e4b51c | 1949f3e3dc9dd1ea4bbad76b59b63eaadcc2562d | refs/heads/master | 2020-12-30T06:53:11.272997 | 2020-02-24T12:54:15 | 2020-02-24T12:54:15 | 238,899,863 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 777 | py | from flask import g, render_template, request
from login import requirelogin
from fbankdb import *
from debug import *
import bank
import traceback
@catch_err
@requirelogin
def transfer():
    """Handle the fcoin transfer form.

    On POST with a 'recipient' field, parses the 'fcoins' amount and moves it
    from the logged-in user (g.user) to the recipient via bank.transfer().
    Always renders the transfer page, with a success or failure message.
    """
    warning = None
    try:
        if 'recipient' in request.form:
            if request.form['fcoins'] == "":
                raise ValueError('Error!')
            # SECURITY FIX: the amount was previously parsed with eval(),
            # which executes arbitrary attacker-supplied Python. int() raises
            # ValueError on malformed input, which is handled below.
            fcoins = int(request.form['fcoins'])
            bank.transfer(g.user.person.username,
                          request.form['recipient'], fcoins)
            warning = "Sent %d fcoins" % fcoins
    except (KeyError, ValueError, AttributeError):
        traceback.print_exc()
        # BUG FIX: request.form['recipient'] could raise KeyError inside this
        # handler and escape it; .get() guarantees the error page still renders.
        warning = "Transfer to %s failed" % request.form.get('recipient', '?')
    return render_template('transfer.html', warning=warning)
| [
"peter.svec1992@gmail.com"
] | peter.svec1992@gmail.com |
6a485494fc182180fa4f12af5523d12255954d1a | f68508c09c8bff6b6e1aafac70fc03650b4bee60 | /backend/customer/api/serializers.py | c779ad53ac7564b02bd414f27d8d1a7655676749 | [
"MIT"
] | permissive | alyhoop/natic | 1f9dde9da313c0c58d16ad208682a08966c42929 | 61ee58ebdb7355cbc65521c9aca508437ed7fe31 | refs/heads/main | 2023-01-11T14:40:39.728948 | 2020-11-02T15:54:57 | 2020-11-02T15:54:57 | 307,185,374 | 0 | 0 | MIT | 2020-10-25T20:23:04 | 2020-10-25T20:23:04 | null | UTF-8 | Python | false | false | 202 | py | from rest_framework import serializers
from customer.models import Customer
class CustomerSerializer(serializers.ModelSerializer):
    """DRF model serializer exposing every field of the Customer model."""
    class Meta:
        model = Customer
        fields = "__all__"
| [
"vramirez209@gmail.com"
] | vramirez209@gmail.com |
c0c53b7f7ca65437a140ea2d2f6cacf5941d7868 | 544ec67ca2fdf4931130d9c87e395f031d7174bb | /logical_noofpalindromes.py | d0030bbc5660ae1e9c87d899af2aa94e1d2826e4 | [] | no_license | satnam1999/freshgrad | b8eb754637184ee6c63967c0cb296cc9a232a0c0 | 0e6cc6fb933a8aa188e47331e78acb190890252a | refs/heads/master | 2020-12-14T10:29:27.269975 | 2020-01-18T10:11:57 | 2020-01-18T10:11:57 | 234,712,358 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 179 | py | n=int(input())
# Count palindromic numbers with at most n digits (n read on the previous line).
# A palindrome of odd length 2k+1 is determined by its first k+1 digits
# (9 * 10**k choices); one of even length 2h+2 by its first h+1 digits
# (9 * 10**h choices). The loop sums these contributions per digit length.
# NOTE(review): `sum` shadows the builtin; left unchanged here.
k=0
h=0
sum=0
for i in range(1,n+1):
    if i%2==1:
        sum=sum+(9*(10**k))
        k=k+1
    elif i%2==0:
        sum=sum+(9*(10**h))
        h=h+1
print(sum)
| [
"noreply@github.com"
] | satnam1999.noreply@github.com |
b363e6a7cb06107e6b57f522269db06f2372e699 | 7be4f595d555614a28f708c1ba7edda321f0cf30 | /practice/algorithms/implementation/find_digits/find_digits.py | 944aa7f96b379dc5bf1efe15f35da50bb098ef74 | [] | no_license | orel1108/hackerrank | de31a2d31aaf8aeb58477d1f2738744bfe492555 | 55da1f3a94e8c28ed0f0dea3103e51774f0047de | refs/heads/master | 2021-04-09T17:38:25.112356 | 2017-01-22T11:21:19 | 2017-01-22T11:21:19 | 50,198,159 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 153 | py | t = int(raw_input())
# Python 2 script (raw_input / print statement); `t` test cases were read on
# the preceding line. For each case, count the digits d of n with d > 0 that
# divide int(n) evenly.
for _ in range(t) :
    n = raw_input()
    digits = map(int, n)
    print len(filter(lambda x: x > 0 and int(n) % x == 0, digits))
| [
"r.orlovskyi@gmail.com"
] | r.orlovskyi@gmail.com |
0439f06409ab6778d84756e8fa98bb4cb7bd9058 | cb20ef5b4048457a2e6dca4a4cb45c53c9843744 | /test/rosapi/1.0/actions/data.logrotate.py | c1b93af96b263f644f14def92db35a78ef176b57 | [] | no_license | rudecs/openvcloud | 5001b77e8d943427c1bed563f3dcc6b9467936e2 | 12ccce2a54034f5bf5842e000c2cc3d7e22836d8 | refs/heads/master | 2020-03-24T00:00:10.422677 | 2018-11-22T13:41:17 | 2018-11-22T13:41:17 | 142,267,808 | 2 | 1 | null | 2018-07-25T08:02:37 | 2018-07-25T08:02:36 | null | UTF-8 | Python | false | false | 54 | py | def main(j,jp):
#remove old logs
pass
| [
"devnull@localhost"
] | devnull@localhost |
e2892e68abe73168a69990de89742320450cbdd9 | 15ea28b856acee1a6bb3f841f880fc2ecdf66e1c | /tests/annotator/test_resolved_keyword_annotator.py | 2f02d4f34b574882518e279ead6af8efe75ec9cd | [
"Apache-2.0",
"LicenseRef-scancode-public-domain"
] | permissive | dsidavis/EpiTator | 0db265ff10b3590ee5e368cfa3aaf890eac5e407 | e532af7cb9bb6157f5109b53451f22c74604f3b9 | refs/heads/master | 2021-09-08T05:35:53.719215 | 2018-03-07T17:41:49 | 2018-03-07T17:41:49 | 110,786,677 | 0 | 0 | null | 2017-11-15T05:13:53 | 2017-11-15T05:13:53 | null | UTF-8 | Python | false | false | 3,648 | py | #!/usr/bin/env python
from __future__ import absolute_import
import os
import unittest
from . import test_utils
from epitator.annotator import AnnoDoc
from epitator.resolved_keyword_annotator import ResolvedKeywordAnnotator
from six.moves import zip
class ResolvedKeywordAnnotatorTest(unittest.TestCase):
    """Tests that ResolvedKeywordAnnotator links text spans to ontology URIs
    (disease DOIDs, Wikidata entities, ITIS species ids)."""
    def setUp(self):
        self.annotator = ResolvedKeywordAnnotator()
    def test_contained_name_resolution(self):
        # Both the short and the longer containing mention resolve to the same DOID.
        doc = AnnoDoc(
            "hepatitis B is also referred to as hepatitis B infection")
        doc.add_tier(self.annotator)
        expected_spans = [
            dict(textOffsets=[0, 11],
                 uris=['http://purl.obolibrary.org/obo/DOID_2043']),
            dict(textOffsets=[35, 56],
                 uris=['http://purl.obolibrary.org/obo/DOID_2043'])]
        spans = doc.tiers['resolved_keywords'].spans
        self.assertEqual(len(spans), len(expected_spans))
        for span, expected_span in zip(spans, expected_spans):
            self.assertEqual([r['entity_id'] for r in span.resolutions],
                             expected_span['uris'])
            self.assertEqual([span.start, span.end],
                             expected_span['textOffsets'])
    def test_capitalization_variations(self):
        # Capitalized and lowercase forms resolve to the same entity.
        doc = AnnoDoc("Mumps is mumps")
        doc.add_tier(self.annotator)
        expected_uris = [
            'http://purl.obolibrary.org/obo/DOID_10264',
            'http://purl.obolibrary.org/obo/DOID_10264']
        for span, expected_uri in zip(doc.tiers['resolved_keywords'].spans,
                                      expected_uris):
            self.assertEqual(span.resolutions[0]['entity_id'], expected_uri)
    def test_MERS(self):
        doc = AnnoDoc('There have been 6 new cases of MERS since last week.')
        doc.add_tier(self.annotator)
        first_span = doc.tiers['resolved_keywords'].spans[0]
        self.assertEqual(first_span.resolutions[0]['entity_id'],
                         'https://www.wikidata.org/wiki/Q16654806')
    def test_acroynms(self):
        # Acronyms resolve, but only when used as nouns ("aids" the verb must not).
        doc = AnnoDoc("Ebola Virus disease is EVD")
        doc.add_tier(self.annotator)
        resolved_keyword = doc.tiers['resolved_keywords'].spans[-1].to_dict()
        test_utils.assertHasProps(
            resolved_keyword, {'textOffsets': [[23, 26]]})
        test_utils.assertHasProps(resolved_keyword['resolutions'][0], {
            'entity_id': 'http://purl.obolibrary.org/obo/DOID_4325'
        })
        doc = AnnoDoc('AIDS as in the disease, not as in "he aids his boss"')
        doc.add_tier(self.annotator)
        resolved_keyword = doc.tiers['resolved_keywords'].spans[-1].to_dict()
        test_utils.assertHasProps(
            resolved_keyword, dict(
                textOffsets=[[0, 4]]))
        test_utils.assertHasProps(
            resolved_keyword['resolutions'][0]['entity'],
            {'id': 'http://purl.obolibrary.org/obo/DOID_635'})
    def test_very_long_article(self):
        # Smoke test: annotating a large document must not raise.
        path = os.path.dirname(__file__) + "/resources/WhereToItaly.txt"
        with open(path) as file:
            doc = AnnoDoc(file.read())
        doc.add_tier(self.annotator)
    def test_species(self):
        doc = AnnoDoc("His illness was caused by cattle")
        doc.add_tier(self.annotator)
        resolved_keyword = doc.tiers['resolved_keywords'].spans[-1].to_dict()
        test_utils.assertHasProps(resolved_keyword['resolutions'][0], {
            'entity_id': 'tsn:180704',
            'entity': {
                'type': 'species',
                'id': 'tsn:180704',
                'label': 'Bovidae'}
        })
# Allow running this test module directly with `python`.
if __name__ == '__main__':
    unittest.main()
| [
"nath@nathanathan.com"
] | nath@nathanathan.com |
2445946f533ac0b091b9795e89cd5e5078fbae0c | a6be7a9e7c9b35c4f665e05a1f31764a103f8258 | /updater.py | a5a7913586cb80ce002ddb349fd92750b49605bf | [] | no_license | fedeb95/trainstats | 6f67b963069152f28ef75de58bbd23033baae024 | b81711f1ea68f7c8f07a879860679ac2de0fb462 | refs/heads/master | 2020-03-27T18:12:03.236281 | 2018-12-04T17:32:00 | 2018-12-04T17:32:00 | 146,904,987 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,279 | py | from urllib import request,parse
from bs4 import BeautifulSoup
from config_manager import ConfigManager
from dbmanager import DBManager
import datetime
import re
# Endpoint of the my-link mobile station page (queried via POST by Crawler).
URL='http://mobile.my-link.it/mylink/mobile/stazione'
class Treno:
    """Plain record for one train entry scraped from the station page:
    direction label ("Per"/"Da"), destination, scheduled time, delay in
    minutes, and the raw HTML block it was parsed from."""

    def __init__(self, direzione, destinazione, ora_arrivo, minuti_ritardo, blocco):
        fields = (
            ("direzione", direzione),
            ("destinazione", destinazione),
            ("ora_arrivo", ora_arrivo),
            ("minuti_ritardo", minuti_ritardo),
            ("blocco", blocco),
        )
        for attr, value in fields:
            setattr(self, attr, value)
class Crawler:
    """Scrapes the my-link mobile station page and parses its train entries."""

    _TEMPO_REGEX='<img[ ]+src=\"[\./a-zA-Z0-9 ]+\"\/>([a-zA-Z0-9 ]+)</div>'
    _MINUTI_REGEX='ritardo[ ]+([0-9]+)[ ]+minuti'
    _DESTINAZIONE_REGEX='(Per|Da)[ ]+<strong>([\.A-Z0-9 ]+)<\/strong>'
    _ORA_ARRIVO_REGEX='Delle ore[ ]+<strong>([0-9]+:[0-9]+)<\/strong>'

    def __init__(self, url, stazione):
        self.stazione = stazione
        self.url = url

    def get_treni(self):
        """POST the station name to self.url and return a list of Treno objects,
        one per 'bloccotreno' div found in the response."""
        data = parse.urlencode({'stazione': self.stazione}).encode()
        req = request.Request(self.url, data=data)
        resp = request.urlopen(req)
        page = resp.read()
        soup = BeautifulSoup(page, 'html.parser')
        blocchi = soup.find_all('div', class_="bloccotreno")
        # Compile the patterns once instead of on every train block.
        re_tempo = re.compile(Crawler._TEMPO_REGEX)
        re_minuti = re.compile(Crawler._MINUTI_REGEX)
        re_destinazione = re.compile(Crawler._DESTINAZIONE_REGEX)
        re_ora_arrivo = re.compile(Crawler._ORA_ARRIVO_REGEX)
        treni = []
        for b in blocchi:
            b = str(b).replace('\r\n', '').replace('\t', '')
            match = re_destinazione.search(b)
            if match is not None:
                dest = match.group(2).strip()
                direzione = match.group(1).strip()
            else:
                dest = ''
                direzione = ''
            match = re_ora_arrivo.search(b)
            if match is not None:
                ora_arrivo = match.group(1)
            else:
                ora_arrivo = ''
            # BUG FIX: minuti_ritardo was only assigned when the "tempo"
            # pattern matched, so a non-matching first block raised NameError
            # and later blocks silently reused a stale value from a previous
            # iteration. Default to 0 (no delay) before matching.
            minuti_ritardo = 0
            match = re_tempo.search(b)
            if match is not None:
                ritardo = match.group(1).strip()
                match = re_minuti.search(ritardo)
                if match is not None:
                    minuti_ritardo = int(match.group(1))
            treni.append(Treno(direzione, dest, ora_arrivo, minuti_ritardo, b))
        return treni
class Updater:
    """Scrapes current train delays for a station and upserts them into the
    'delays' MongoDB collection via DBManager."""
    def __init__(self,path):
        # path: location of the config file holding the Mongo connection string.
        config=ConfigManager.get_instance(path)
        self.dbman=DBManager('delays','all',conn_string=config.config['conn_string'])
    def update(self,stazione):
        """Fetch the trains currently listed for `stazione` and store one
        document per (direction, destination, arrival time, station, date),
        updating only the delay when the document already exists."""
        c=Crawler(URL,stazione)
        treni=c.get_treni()
        now=datetime.datetime.now()
        for t in treni:
            # Identity key: same train (direction/destination/time) at this
            # station on today's date.
            res=self.dbman.collection.find_one({"direzione":t.direzione,"destinazione":t.destinazione,"ora_arrivo":t.ora_arrivo,'stazione':stazione,'year':now.year,'month':now.month,'day':now.day})
            if res is not None:
                # Existing record for today: refresh only the delay field.
                self.dbman.collection.update_one({'_id':res['_id']}, {"$set":{"ritardo":t.minuti_ritardo}}, upsert=False)
            else:
                self.dbman.collection.insert_one({"direzione":t.direzione,"destinazione":t.destinazione,"ora_arrivo":t.ora_arrivo,'stazione':stazione,'ritardo':t.minuti_ritardo,'year':now.year,'month':now.month,'day':now.day})
| [
"fedeb@fold.ml"
] | fedeb@fold.ml |
c8c9d6651f1979a2c8078b24776cc50b01228ba1 | 7eca6bdc4dbc379e37e031fbb546665e9b175f4d | /appcapstone/core/migrations/0001_initial.py | 83008272d2bd2aa8d3611c0473d8645d21ce130a | [] | no_license | JoseMBruce/app-capstone | 8f57af82919a796dfb2f413f893dabd7e65dd570 | 617e543bea6c7e90204ae3652fd424c88f559626 | refs/heads/main | 2023-08-20T06:21:55.742292 | 2021-10-19T22:54:01 | 2021-10-19T22:54:01 | 419,097,188 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 748 | py | # Generated by Django 3.2.7 on 2021-09-11 19:49
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial auto-generated migration: create the Auto (car) table with
    license plate, state, remaining time, and created/updated timestamps."""

    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Auto',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('patente', models.CharField(max_length=8)),
                ('estado', models.CharField(max_length=20)),
                ('tiempo_restante', models.IntegerField()),
                ('creado', models.DateTimeField(auto_now_add=True)),
                ('actualizado', models.DateTimeField(auto_now=True)),
            ],
        ),
    ]
| [
"josem.bruce@gmail.com"
] | josem.bruce@gmail.com |
5f201d618fe43493b22f504567a2e912669eac34 | 5f7de1bc63d34e7b42fe39d1300b8f59bafb407b | /src/pgbdd.py | 196fa38dc62b6d926cfe43211bc91166051e9630 | [
"MIT"
] | permissive | rebryant/pgpbs-artifact | 9d5f210bb59e9fcdde89de50fdfd226d52633946 | 15e66b90873061e80cbb1c908a38b511c49f71e8 | refs/heads/master | 2023-04-06T21:46:21.191507 | 2022-11-28T23:24:56 | 2022-11-28T23:24:56 | 413,060,756 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,502 | py | #!/usr/bin/python
# Simple, proof-generating SAT solver based on BDDs and pseudo-Boolean reasoning
#####################################################################################
# Copyright (c) 2021 Marijn Heule, Randal E. Bryant, Carnegie Mellon University
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
# OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
########################################################################################
import sys
import getopt
import datetime
import stream
import pseudoboolean
import solver
# Increase maximum recursion depth: the BDD routines recurse with the depth of
# the BDD/problem structure, which can exceed CPython's default limit.
sys.setrecursionlimit(10 * sys.getrecursionlimit())
def usage(name):
    """Write the command-line help text for this solver to stderr.

    name: the program name to show in the Usage line.
    """
    sys.stderr.write("Usage: %s [-h] [-b] [-v LEVEL] [-i CNF] [-o file.{proof,lrat,lratb}] [-p PERMUTE] [-s SCHEDULE] [-L logfile]\n" % name)
    sys.stderr.write("  -h           Print this message\n")
    sys.stderr.write("  -b           Process terms via bucket elimination ordered by variable levels\n")
    sys.stderr.write("  -v LEVEL     Set verbosity level\n")
    sys.stderr.write("  -i CNF       Name of CNF input file\n")
    sys.stderr.write("  -o pfile     Name of proof output file (.proof = tracecheck, .lrat = LRAT text, .lratb = LRAT binary)\n")
    sys.stderr.write("  -p PERMUTE   Name of file specifying mapping from CNF variable to BDD level\n")
    sys.stderr.write("  -s SCHEDULE  Name of action schedule file\n")
    sys.stderr.write("  -L logfile   Append standard error output to logfile\n")
# Verbosity levels
# 0: Totally silent
# 1: Key statistics only
# 2: Summary information
# 3: Proof information
# 4: ?
# 5: Tree generation information
def run(name, args):
    """Parse command-line options, build the prover and solver, and run the solve.

    name: program name (used by usage() and error messages).
    args: argv-style option list; see usage() for the supported flags.
    Returns None; all failures are reported via the writer/stderr and abort early.
    """
    cnfName = None
    proofName = None
    doLrat = False
    doBinary = False
    permuter = None
    bpermuter = None
    doBucket = False
    scheduler = None
    verbLevel = 1
    logName = None
    # Note that some deprecated options (-B, -M) are still implemented but not
    # advertised in the usage text.
    optlist, args = getopt.getopt(args, "hbv:i:o:p:s:L:")
    for (opt, val) in optlist:
        if opt == '-h':
            usage(name)
            return
        if opt == '-b':
            doBucket = True
        elif opt == '-B':
            bpermuter = solver.readPermutation(val)
            if bpermuter is None:
                return
        elif opt == '-v':
            verbLevel = int(val)
        elif opt == '-i':
            cnfName = val
        elif opt == '-o':
            proofName = val
            # Proof format is inferred from the output file extension.
            extension = proofName.split('.')[-1]
            if extension == 'lrat' or extension == 'lratb':
                doLrat = True
                doBinary = extension[-1] == 'b'
        elif opt == '-M':
            proofName = None
            if val == 'b':
                doLrat = True
                doBinary = True
            elif val == 't':
                doLrat = True
        elif opt == '-p':
            permuter = solver.readPermutation(val)
            if permuter is None:
                return
        elif opt == '-s':
            scheduler = solver.readScheduler(val)
            if scheduler is None:
                return
        elif opt == '-L':
            logName = val
        else:
            sys.stderr.write("Unknown option '%s'\n" % opt)
            usage(name)
            return
    writer = stream.Logger(logName)
    # Bucket elimination and an explicit schedule are mutually exclusive, as
    # are level-ordered buckets and a bucket permutation.
    if (doBucket or bpermuter is not None) and scheduler is not None:
        writer.write("Cannot have both bucket scheduling and defined scheduler\n")
        return
    if (doBucket and bpermuter is not None):
        writer.write("Cannot do bucket scheduling on levels and with defined permutation\n")
        return
    try:
        prover = solver.Prover(proofName, writer = writer, verbLevel = verbLevel, doLrat = doLrat, doBinary = doBinary)
    except Exception as ex:
        writer.write("Couldn't create prover (%s)\n" % str(ex))
        return
    start = datetime.datetime.now()
    solve = solver.Solver(cnfName, prover = prover, permuter = permuter, verbLevel = verbLevel)
    # Dispatch to the selected term-processing strategy.
    if doBucket:
        solve.runBucketSchedule()
    elif bpermuter is not None:
        solve.runBucketSchedulePerm(bpermuter)
    elif scheduler is not None:
        solve.runSchedule(scheduler, None)
    else:
        solve.runNoSchedule()
    delta = datetime.datetime.now() - start
    seconds = delta.seconds + 1e-6 * delta.microseconds
    if verbLevel > 0:
        writer.write("Elapsed time for SAT: %.2f seconds\n" % seconds)
    if writer != sys.stderr:
        writer.close()
# Script entry point: argv[0] is the program name used in help/error output.
if __name__ == "__main__":
    run(sys.argv[0], sys.argv[1:])
| [
"bryant@BRYANT-MC2.VLSI.CS.CMU.EDU"
] | bryant@BRYANT-MC2.VLSI.CS.CMU.EDU |
53aa261bea547ff98b2e9322162afed7164fdebc | 61e6887400391d5e15c83afa7d31b57291b0d8f5 | /manager/migrations/0023_auto_20210325_1651.py | 6cbfe28b59d594c0ae51bf66dcd2bba18565fb3d | [] | no_license | ShevAlexa/ShevtsovAlexei | f0d8f53313c933dbf9cf45428f57f99289faa708 | b1540111072f8eb9b254636eda27cdcb0116ad2e | refs/heads/main | 2023-03-27T19:02:03.268220 | 2021-03-28T15:04:19 | 2021-03-28T15:04:19 | 344,165,896 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,180 | py | # Generated by Django 3.1.7 on 2021-03-25 13:51
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: recreate TestTale, add the TMPBook model, and
    link Comment and LikeBookUser rows to TMPBook."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('manager', '0022_delete_testtale'),
    ]
    operations = [
        migrations.CreateModel(
            name='TestTale',
            fields=[
                ('title', models.CharField(max_length=50, primary_key=True, serialize=False)),
            ],
        ),
        migrations.CreateModel(
            name='TMPBook',
            fields=[
                ('title', models.CharField(db_index=True, help_text='ну эт тип погоняло', max_length=50, verbose_name='название')),
                ('date', models.DateTimeField(auto_now_add=True, null=True)),
                ('text', models.TextField(null=True)),
                ('rate', models.DecimalField(decimal_places=2, default=0.0, max_digits=3)),
                ('count_rated_users', models.PositiveIntegerField(default=0)),
                ('count_all_stars', models.PositiveIntegerField(default=0)),
                ('slug', models.SlugField(primary_key=True, serialize=False)),
                ('authors', models.ManyToManyField(related_name='tmp_books', to=settings.AUTH_USER_MODEL)),
                ('users_like', models.ManyToManyField(related_name='tmp_Liked_books', through='manager.LikeBookUser', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Книга',
                'verbose_name_plural': 'Книги',
            },
        ),
        migrations.AddField(
            model_name='comment',
            name='tmp_book',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='manager.tmpbook'),
        ),
        migrations.AddField(
            model_name='likebookuser',
            name='tmp_book',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='liked_user_table', to='manager.tmpbook'),
        ),
    ]
| [
"zzxalex-chertiakazzx@mail.ru"
] | zzxalex-chertiakazzx@mail.ru |
2fb4bc7f4f907b31d83c97078f40552abbe1e256 | aab4b499a63c17139360d642513bce6419507fb3 | /game.py | 6e01f8b3af21239a0026d05351adbb3ddff2c495 | [] | no_license | amanda-bmelo/si-lovers | e596f83ac03c18592b2cccac86fa4a2daca4f8d6 | 5fc71f3828be67e748ff310a04813254470630af | refs/heads/master | 2023-07-16T16:23:01.580894 | 2021-08-31T11:19:54 | 2021-08-31T11:19:54 | 376,057,975 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,013 | py | from PPlay.window import *
from PPlay.gameimage import *
from PPlay.sprite import *
from PPlay.keyboard import *
from random import randint
from winner import winner_screen
from aliens import aliens, alien_heart, sleep, lose
from gameover import gameover
def moveMatriz(array, position):
    """Shift every sprite in the 2-D alien grid by `position` on the x axis
    and by a fixed 10 pixels downward on the y axis."""
    for row in array:
        for sprite in row:
            sprite.move_x(position)
            sprite.move_y(10)
def game(level, points=0, nome=''):
    """Run the main gameplay loop for one difficulty level.

    level: 1..3 picks the speed multiplier; 0 returns immediately (back out).
    points: score carried over between levels (game() recurses on level-up).
    nome: unused in this function; kept for caller compatibility.
    Returns 0 (ESC/quit), 4 (aliens reached the ship row) or gameover()'s result.
    """
    janela = Window(500,720)
    janela.set_title("Space Lovers")
    background = GameImage("images/game.png")
    teclado = Window.get_keyboard()
    spaceship = GameImage("images/ship.png")
    spaceship.set_position(200, 580)
    all_hearts = []
    # time: seconds since the ship was last hit (it blinks while time <= 2);
    # time_player: shot cooldown timer.
    time, time_player = 2, 0
    lives = 3
    loser = False
    blink = False
    # DIFFICULTY: map the level to the global speed multiplier `a`.
    if level == 0: return 0
    elif level == 1: a = 0.7
    elif level == 2: a = 1
    else: a = 1.5
    # ALIENS
    all_aliens = aliens()
    all_aliens_hearts = []
    alienSpeed = a
    time_alien = 0
    cd = 0
    while True:
        background.draw()
        # Draw the ship; while recently hit it "blinks" by being drawn only on
        # a random subset of frames.
        if not loser and time > 2:
            spaceship.draw()
            blink = False
        elif randint(0,1) > 0:
            spaceship.draw()
            blink = True
        heartSpeed = -250*janela.delta_time()*a
        time += janela.delta_time()
        time_player += janela.delta_time()
        time_alien += janela.delta_time()
        cd += janela.delta_time()
        # PLAYER input
        if teclado.key_pressed("ESC"):
            return 0
        if teclado.key_pressed("LEFT") and spaceship.x > 0:
            spaceship.x -= 2
        if teclado.key_pressed("RIGHT") and spaceship.x < 400:
            spaceship.x += 2
        if teclado.key_pressed("SPACE") and time_player > 0.5:
            hearts = Sprite("images/shot.png", 1)
            # Holding SPACE with a full charge (cd > 10) upgrades to a super shot.
            while teclado.key_pressed("SPACE") and cd > 10:
                time_player += janela.delta_time()
                if time_player > 3:
                    hearts = Sprite("images/super_heart.png", 1)
                    cd = 0
                    break
            time_player = 0
            hearts.set_position(spaceship.x, spaceship.y-30)
            all_hearts.append(hearts)
        # COLLISIONS
        # Player shots
        for heart in all_hearts:
            if heart.y > 0:
                heart.move_y(heartSpeed)
                # Drop the bottom row once emptied; no rows left means level won.
                if not all_aliens[-1]:
                    all_aliens.pop(-1)
                if not all_aliens:
                    level = winner_screen(level, points)
                    # NOTE(review): the recursive call's return value is
                    # discarded and this loop keeps running afterwards —
                    # confirm this is intentional.
                    game(level, points)
                slept = False
                normal_heart = True
                # Only test collisions once the shot reaches the lowest alien row.
                if all_aliens[-1][0].y + all_aliens[-1][0].height >= heart.y:
                    all_aliens, slept, normal_heart = sleep(heart, all_aliens)
                if not slept and normal_heart:
                    heart.draw()
                else:
                    points += 25*level
                    if normal_heart:
                        all_hearts.pop(all_hearts.index(heart))
            else:
                # Shot left the top of the screen: discard it.
                all_hearts.pop(all_hearts.index(heart))
        # Alien shots
        for heart in all_aliens_hearts:
            if heart.y < 720:
                heart.move_y(-heartSpeed)
                loser = False
                if heart.y + heart.height >= spaceship.y:
                    all_aliens_hearts, loser, lives = lose(all_aliens_hearts, heart, spaceship, lives)
                if not loser:
                    heart.draw()
                elif blink:
                    # NOTE(review): while blinking, the life apparently deducted
                    # inside lose() is restored — i.e. the ship is invulnerable
                    # during the blink phase. Confirm against lose().
                    lives += 1
                    loser = False
                else:
                    # Hit registered: restart the blink timer and reset position.
                    time = 0
                    spaceship.set_position(200, 580)
            else:
                all_aliens_hearts.pop(all_aliens_hearts.index(heart))
        # ALIENS
        for line in all_aliens:
            if line:
                # Reverse direction and step the whole grid down at the screen edges.
                if line[0].x < 0 or line[-1].x >= 500-line[-1].width:
                    alienSpeed *= -1
                    moveMatriz(all_aliens, alienSpeed)
                # Spawn alien shots at random intervals (more frequent on higher levels).
                if time_alien > randint(1, 3//level):
                    time_alien = 0
                    all_aliens_hearts = alien_heart(all_aliens, all_aliens_hearts, level)
                for alien in line:
                    # Aliens reaching the ship's row ends the run with code 4.
                    if alien.y+alien.height >= 580: return 4
                    alien.move_x(alienSpeed)
                    alien.draw()
            else:
                all_aliens.pop(all_aliens.index(line))
        # GAMEOVER // WINNER
        if lives == 0:
            return gameover(points)
        elif not all_aliens:
            level = winner_screen(level, points)
            game(level, points)
        janela.draw_text(f"Pontos: {points}", 30, 20, size=30, color=(240,240,240), font_name="Computer_says_no")
        janela.draw_text(f"Vidas: {lives}", 400, 20, size=30, color=(240,240,240), font_name="Computer_says_no")
        janela.update()
| [
"amanda.melo@injunior.com.br"
] | amanda.melo@injunior.com.br |
cd83ea2627ef6176db3686837284b051c47b6707 | 18a61a2c47ffd164e33497757390cb2714c53b2e | /codeforces/Weird_Rounding.py | 624382aec20abe31b648db2d59329521c313d5dd | [] | no_license | abhinashjain/codes | c2cb84816602c696b5021e1d69d5b4342b8b5f2a | 5bc0403125e69bc4d0eb7a0620525cc84384f4f3 | refs/heads/main | 2022-12-20T10:03:58.278660 | 2020-10-03T12:10:58 | 2020-10-03T12:10:58 | 300,869,756 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,822 | py | #!/usr/bin/python
# coding: utf-8
# Python 2 solution for Codeforces "Weird Rounding" (problem statement in the
# docstring below). Greedy from the least-significant digit: keeping the k
# rightmost zeros and deleting every non-zero digit between/after them is
# optimal, because divisibility by 10^k only requires k trailing zeros.
(n,k)=raw_input().split(' ')
k=int(k)
arr=[]
# Split the number into its individual digits.
for i in n:
    arr.append(int(i))
length=len(arr)
ind=length-1
tmp=cnt=0
# Scan right-to-left: tmp counts zeros kept, cnt counts digits deleted.
while(ind>=0 and tmp<k):
    if(arr[ind]==0):
        tmp+=1
    else:
        cnt+=1
    ind-=1
if(tmp==k):
    print cnt
else:
    # Fewer than k zeros overall: since an answer is guaranteed, the only
    # valid result is the single digit 0, so delete all but one digit.
    print length-1
'''
Polycarp is crazy about round numbers. He especially likes the numbers divisible by 10^k.
In the given number of n Polycarp wants to remove the least number of digits to get a number that is divisible by 10^k. For example, if k = 3, in the number 30020 it is enough to
delete a single digit (2). In this case, the result is 3000 that is divisible by 10^3 = 1000.
Write a program that prints the minimum number of digits to be deleted from the given integer number n, so that the result is divisible by 10^k. The result should not start with the
unnecessary leading zero (i.e., zero can start only the number 0, which is required to be written as exactly one digit).
It is guaranteed that the answer exists.
Input
The only line of the input contains two integer numbers n and k (0 ≤ n ≤ 2000000000, 1 ≤ k ≤ 9).
It is guaranteed that the answer exists. All numbers in the input are written in traditional notation of integers, that is, without any extra leading zeros.
Output
Print w — the required minimal number of digits to erase. After removing the appropriate w digits from the number n, the result should have a value that is divisible by 10^k. The
result can start with digit 0 in the single case (the result is zero and written by exactly the only digit 0).
Examples
Input
30020 3
Output
1
Input
100 9
Output
2
Input
10203049 2
Output
3
Note
In the example 2 you can remove two digits: 1 and any 0. The result is number 0 which is divisible by any number.
'''
| [
"labor.omnia.vincit18@gmail.com"
] | labor.omnia.vincit18@gmail.com |
3c658f2be08a799408b2af832e442f8d9a5dbe98 | b095173b2dbc77c8ad61c42403258c76169b7a63 | /tests/integ/sagemaker/jumpstart/retrieve_uri/training.py | 7e62aa5e54c9bcdb6b42347e861e3db45e6f59e6 | [
"Apache-2.0"
] | permissive | aws/sagemaker-python-sdk | 666665e717cfb76698ba3ea7563b45344634264d | 8d5d7fd8ae1a917ed3e2b988d5e533bce244fd85 | refs/heads/master | 2023-09-04T01:00:20.663626 | 2023-08-31T15:29:19 | 2023-08-31T15:29:19 | 110,621,895 | 2,050 | 1,255 | Apache-2.0 | 2023-09-14T17:37:15 | 2017-11-14T01:03:33 | Python | UTF-8 | Python | false | false | 5,679 | py | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import absolute_import
import os
import time
import boto3
from botocore.config import Config
from sagemaker.jumpstart.constants import JUMPSTART_DEFAULT_REGION_NAME
from tests.integ.sagemaker.jumpstart.utils import (
get_test_artifact_bucket,
get_sm_session,
)
from tests.integ.sagemaker.jumpstart.retrieve_uri.utils import (
get_full_hyperparameters,
)
from sagemaker.jumpstart.utils import get_jumpstart_content_bucket
from tests.integ.sagemaker.jumpstart.constants import (
ENV_VAR_JUMPSTART_SDK_TEST_SUITE_ID,
)
class TrainingJobLauncher:
def __init__(
self,
image_uri,
script_uri,
model_uri,
hyperparameters,
instance_type,
training_dataset_s3_key,
suffix=time.strftime("%Y-%m-%d-%H-%M-%S", time.gmtime()),
region=JUMPSTART_DEFAULT_REGION_NAME,
boto_config=Config(retries={"max_attempts": 10, "mode": "standard"}),
base_name="jumpstart-training-job",
execution_role=None,
) -> None:
self.account_id = boto3.client("sts").get_caller_identity()["Account"]
self.suffix = suffix
self.test_suite_id = os.environ[ENV_VAR_JUMPSTART_SDK_TEST_SUITE_ID]
self.region = region
self.config = boto_config
self.base_name = base_name
self.execution_role = execution_role or get_sm_session().get_caller_identity_arn()
self.image_uri = image_uri
self.script_uri = script_uri
self.model_uri = model_uri
self.hyperparameters = hyperparameters
self.instance_type = instance_type
self.training_dataset_s3_key = training_dataset_s3_key
self.sagemaker_client = self.get_sagemaker_client()
def get_sagemaker_client(self) -> boto3.client:
return boto3.client(service_name="sagemaker", config=self.config, region_name=self.region)
def get_training_job_name(self) -> str:
timestamp_length = len(self.suffix)
non_timestamped_name = f"{self.base_name}-training-job-"
if len(non_timestamped_name) > 63 - timestamp_length:
non_timestamped_name = non_timestamped_name[: 63 - timestamp_length]
return f"{non_timestamped_name}{self.suffix}"
def wait_until_training_job_complete(self):
print("Waiting for training job to complete...")
self.sagemaker_client.get_waiter("training_job_completed_or_stopped").wait(
TrainingJobName=self.training_job_name
)
def create_training_job(self) -> None:
self.training_job_name = self.get_training_job_name()
self.output_tarball_base_path = (
f"s3://{get_test_artifact_bucket()}/{self.test_suite_id}/training_model_tarballs"
)
training_params = {
"AlgorithmSpecification": {
"TrainingImage": self.image_uri,
"TrainingInputMode": "File",
},
"RoleArn": self.execution_role,
"OutputDataConfig": {
"S3OutputPath": self.output_tarball_base_path,
},
"ResourceConfig": {
"InstanceCount": 1,
"InstanceType": self.instance_type,
"VolumeSizeInGB": 50,
},
"TrainingJobName": self.training_job_name,
"EnableNetworkIsolation": True,
"HyperParameters": get_full_hyperparameters(
self.hyperparameters, self.training_job_name, self.model_uri
),
"StoppingCondition": {"MaxRuntimeInSeconds": 86400},
"InputDataConfig": [
{
"ChannelName": "training",
"DataSource": {
"S3DataSource": {
"S3DataType": "S3Prefix",
"S3Uri": f"s3://{get_jumpstart_content_bucket(self.region)}/{self.training_dataset_s3_key}",
"S3DataDistributionType": "FullyReplicated",
}
},
"CompressionType": "None",
},
{
"ChannelName": "model",
"DataSource": {
"S3DataSource": {
"S3DataType": "S3Prefix",
"S3Uri": self.model_uri,
"S3DataDistributionType": "FullyReplicated",
}
},
"CompressionType": "None",
},
{
"ChannelName": "code",
"DataSource": {
"S3DataSource": {
"S3DataType": "S3Prefix",
"S3Uri": self.script_uri,
"S3DataDistributionType": "FullyReplicated",
}
},
"CompressionType": "None",
},
],
}
print("Creating training job...")
self.sagemaker_client.create_training_job(
**training_params,
)
| [
"89424143+mufaddal-rohawala@users.noreply.github.com"
] | 89424143+mufaddal-rohawala@users.noreply.github.com |
6b4c8e128ddc051f119d3d5b18cbe1bec186b048 | 8279b30a91f32365349717e61788f4189017751e | /ownerapp/urls.py | c263b3484fe9ad0c3ccd7d512e7c111d500b7852 | [] | no_license | Sindhu-Chinnaswamy31/appsproject | bff184dd46de2986defc8139af60ac1f517e584e | 239c2baa4841fade106b9299497dc8fde495831f | refs/heads/master | 2023-04-22T18:54:24.815564 | 2021-05-10T06:29:17 | 2021-05-10T06:29:17 | 365,939,199 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 195 | py | from django.contrib import admin
from django.urls import path
from ownerapp import views
urlpatterns = [
# path('admin/', admin.site.urls),
path('owner',views.ownerapp,name='ownerapp'),
]
| [
"sindhucswamy@gmail.com"
] | sindhucswamy@gmail.com |
8d8dcabafa79ff4bd5c2aa8af5390710acb2a4bd | 0d5db8c704c691429fffd36b9ba6f3f9b30c0fc9 | /MoviePro/movie/migrations/0004_customer_profile_pic.py | bc964ba4018e24bab71aaabcd535afe27834993e | [] | no_license | eliferyurek/django-MovieProject | 7a9c6dd64e1cd0b69d1b442e0fae245aa434cee4 | b4632a060939583b47a451a221a8bd6ef7747fed | refs/heads/master | 2022-09-25T13:23:07.200865 | 2020-06-05T20:29:32 | 2020-06-05T20:29:32 | 269,766,347 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 407 | py | # Generated by Django 3.0.5 on 2020-05-02 17:38
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('movie', '0003_customer_user'),
]
operations = [
migrations.AddField(
model_name='customer',
name='profile_pic',
field=models.ImageField(blank=True, null=True, upload_to=''),
),
]
| [
"43632577+eliferyurek@users.noreply.github.com"
] | 43632577+eliferyurek@users.noreply.github.com |
8089a80bc987b55952b1111437312a6348d155f9 | ea907f98d2601891f2e00fb30441b60e3bfbf504 | /BTP code/Paper Code/PAPERunknownMain.py | cbcc7d5bb266700d4bf4365e716cf0fe00a60b1d | [] | no_license | vivien98/MultiArmedBandit-simulations | 67f342a5de5d7fca4d4175fd3dc099d487dc6900 | a0b6b59312b302495c8de1f073f73f142307309c | refs/heads/master | 2021-07-13T03:12:32.572800 | 2020-07-10T08:54:11 | 2020-07-10T08:54:11 | 174,494,281 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,389 | py | # BTP code for TWO ARMS ONLY
import numpy as np
import math
import matplotlib.pyplot as pl
import scipy as sp
import matplotlib as mpl
def simulateOPT(nSim,nTime,B,initUrn,initProp):
randomArrPref = np.random.rand(nSim,nTime);
randomArrRec = np.random.rand(nSim,nTime);
randomArrRew = np.random.rand(nSim,nTime);
avgProp = np.zeros(nTime)
for i in range(nSim):
print(str(i))
urn = np.array([initUrn*initProp,initUrn*(1-initProp)])
propT = np.zeros(nTime)
p = float(B[0,0]+B[0,1]-1 > 0)
q = float(B[1,1]+B[1,0]-1 < 0)
for j in range(nTime):
prop = urn/(initUrn+j)
if prop[0] > randomArrPref[i,j]: # generating user with some preference
armPref = 0
else:
armPref = 1
if armPref==0:
if p > randomArrRec[i,j]: # generating arm to be shown
armChosen = 0
else:
armChosen = 1
else:
if q > randomArrRec[i,j]: # generating arm to be shown
armChosen = 1
else:
armChosen = 0
if B[armPref,armChosen] > randomArrRew[i,j]: # generating reward
rew = 1
else:
rew = 0
urn[armChosen] += rew
urn[1-armChosen] += 1-rew
propT[j] = prop[0]
avgProp += propT
avgProp = avgProp/nSim
return avgProp
def simulateETC(nSim,nTime,B,initUrn,initProp,tThresh):
randomArrPref = np.random.rand(nSim,nTime);
randomArrRec = np.random.rand(nSim,nTime);
randomArrRew = np.random.rand(nSim,nTime);
randomArrReci = np.random.rand(nSim,nTime);
randomArrRewi = np.random.rand(nSim,nTime);
avgProp = np.zeros(nTime)
avgReg = np.zeros(nTime)
for i in range(nSim):
print(str(i))
urn = np.array([initUrn*initProp,initUrn*(1-initProp)])
propT = np.zeros(nTime)
regT = np.zeros(nTime)
prevReg = 0
p = 0.5
q = 0.5
pi = float(B[0,0]+B[0,1]-1 > 0) # p and q were the matrix known
qi = float(B[1,1]+B[1,0]-1 < 0)
bEst = np.zeros((2,2),dtype="float")
cnt = np.zeros((2,2),dtype="float")
for j in range(nTime):
prop = urn/(initUrn+j)
if prop[0] > randomArrPref[i,j]: # generating user with some preference
armPref = 0
else:
armPref = 1
if j < tThresh: # explore part
p = 0.5
q = 0.5
else:
if j==tThresh:
bEst = np.divide(bEst,cnt)
p = float(bEst[0,0]+bEst[0,1]-1 > 0)
q = float(bEst[1,1]+bEst[1,0]-1 < 0)
if armPref==0:
if p > randomArrRec[i,j]: # generating arm to be shown
armChosen = 0
else:
armChosen = 1
if pi > randomArrReci[i,j]: # generating arm to be shown for known B case
armChoseni = 0
else:
armChoseni = 1
else:
if q > randomArrRec[i,j]: # generating arm to be shown
armChosen = 1
else:
armChosen = 0
if qi > randomArrReci[i,j]: # generating arm to be shown for known B case
armChoseni = 1
else:
armChoseni = 0
if B[armPref,armChosen] > randomArrRew[i,j]: # generating reward
rew = 1
else:
rew = 0
if B[armPref,armChoseni] > randomArrRewi[i,j]: # generating reward
rewi = 1
else:
rewi = 0
if j<tThresh:
bEst[armPref,armChosen] += rew
cnt[armPref,armChosen] += 1
urni = [0.0,0.0]
prevZ = urn[0]
prevZi = urni[0]
urn[armChosen] += rew
urn[1-armChosen] += 1-rew
urni[armChoseni] += rewi
urni[1-armChoseni] += 1-rewi
delZ = urn[0] - prevZ
delZi = urni[0] - prevZi
propT[j] = prop[0]
regT[j] = prevReg + delZi - delZ
prevReg = regT[j]
avgProp += propT
avgReg += regT
avgProp = avgProp/nSim
avgReg = avgReg/nSim
return avgProp,avgReg
def simulateUCB(nSim,nTime,B,initUrn,initProp,polyAnn):
randomArrPref = np.random.rand(nSim,nTime);
randomArrRec = np.random.rand(nSim,nTime);
randomArrRew = np.random.rand(nSim,nTime);
avgProp = np.zeros(nTime)
for i in range(nSim):
print(str(i))
urn = np.array([initUrn*initProp,initUrn*(1-initProp)])
propT = np.zeros(nTime)
bEst = np.zeros((2,2),dtype="float")
bSum = np.zeros((2,2),dtype="float")
bUCB = np.zeros((2,2),dtype="float")
cnt = np.zeros((2,2),dtype="float") + 0.01
for j in range(nTime):
prop = urn/(initUrn+j)
p = 0.5
q = 0.5
if polyAnn == 0:
bUCB = np.sqrt(np.divide(np.log(j+2),32*cnt))
bEst = np.divide(bSum,cnt)
if prop[0] > randomArrPref[i,j]: # generating user with some preference
armPref = 0
else:
armPref = 1
if bEst[0,0]+bUCB[0,0]+bEst[0,1]+bUCB[0,1] < 1: # UCB rules to choose arm recommended
p = 0
if bEst[0,0]-bUCB[0,0]+bEst[0,1]-bUCB[0,1] > 1:
p = 1
if bEst[1,1]-bUCB[1,1]+bEst[1,0]-bUCB[1,0] > 1:
q = 0
if bEst[1,1]+bUCB[1,1]+bEst[1,0]+bUCB[1,0] < 1:
q = 1
if armPref==0:
if p > randomArrRec[i,j]: # generating arm to be shown
armChosen = 0
else:
armChosen = 1
else:
if q > randomArrRec[i,j]: # generating arm to be shown
armChosen = 1
else:
armChosen = 0
if B[armPref,armChosen] > randomArrRew[i,j]: # generating reward
rew = 1
else:
rew = 0
bSum[armPref,armChosen] += rew
cnt[armPref,armChosen] += 1
urn[armChosen] += rew
urn[1-armChosen] += 1-rew
propT[j] = prop[0]
avgProp += propT
avgProp = avgProp/nSim
return avgProp
def simulateTHO(nSim,nTime,B,initUrn,initProp):
randomArrPref = np.random.rand(nSim,nTime);
randomArrRec = np.random.rand(nSim,nTime);
randomArrRew = np.random.rand(nSim,nTime);
randomArrReci = np.random.rand(nSim,nTime);
randomArrRewi = np.random.rand(nSim,nTime);
avgProp = np.zeros(nTime)
avgReg = np.zeros(nTime)
for i in range(nSim):
print(str(i))
urn = np.array([initUrn*initProp,initUrn*(1-initProp)])
propT = np.zeros(nTime)
regT = np.zeros(nTime)
prevReg = 0
p = 0.5
q = 0.5
pi = float(B[0,0]+B[0,1]-1 > 0) # p and q were the matrix known
qi = float(B[1,1]+B[1,0]-1 < 0)
alpha = np.ones((2,2))
beta = np.ones((2,2))
for j in range(nTime):
prop = urn/(initUrn+j)
if prop[0] > randomArrPref[i,j]: # generating user with some preference
armPref = 0
else:
armPref = 1
sampleMat = np.random.beta(alpha,beta) # sampling a matrix
if(sampleMat[0,0] + sampleMat[0,1] - 1 > 0):
p = 1
else:
p = 0
if(sampleMat[1,1] + sampleMat[1,0] - 1 < 0):
q = 1
else:
q = 0
if armPref==0:
if p > randomArrRec[i,j]: # generating arm to be shown
armChosen = 0
else:
armChosen = 1
if pi > randomArrReci[i,j]: # generating arm to be shown for known B case
armChoseni = 0
else:
armChoseni = 1
else:
if q > randomArrRec[i,j]: # generating arm to be shown
armChosen = 1
else:
armChosen = 0
if qi > randomArrReci[i,j]: # generating arm to be shown for known B case
armChoseni = 1
else:
armChoseni = 0
if B[armPref,armChosen] > randomArrRew[i,j]: # generating reward
rew = 1
else:
rew = 0
if B[armPref,armChoseni] > randomArrRewi[i,j]: # generating reward
rewi = 1
else:
rewi = 0
thompsonFactor = 2
alpha[armPref,armChosen] += thompsonFactor*rew
beta[armPref,armChosen] += thompsonFactor*(1 - rew)
urni = [0.0,0.0]
prevZ = urn[0]
prevZi = urni[0]
urn[armChosen] += rew
urn[1-armChosen] += 1-rew
urni[armChoseni] += rewi
urni[1-armChoseni] += 1-rewi
delZ = urn[0] - prevZ
delZi = urni[0] - prevZi
propT[j] = prop[0]
regT[j] = prevReg + delZi - delZ
prevReg = regT[j]
avgProp += propT
avgReg += regT
avgProp = avgProp/nSim
avgReg = avgReg/nSim
return avgProp,avgReg
#___________________________________________________________MAIN___________________________________________________________#
b00 = 0.9 # initialise bernoulli reward matrix #
b01 = 0.7
b10 = 0.7
b11 = 0.9
B = np.matrix([[b00,b01],[b10,b11]])
nSim = 1000
nTime = 1000
initUrn = 20
initProp = 0.5
tThresh = 150
out1 = simulateOPT(nSim,nTime,B,initUrn,initProp)
out2,reg2 = simulateETC(nSim,nTime,B,initUrn,initProp,tThresh)
out3,reg3 = simulateTHO(nSim,nTime,B,initUrn,initProp)
pl.subplot(1,2,1)
pl.plot(out1 , 'r-',label='Optimal policy',linewidth = 2.5) # POPULATION PROPORTION PLOTTING # NORMAl
pl.plot(out2 ,'b--',label='ETC policy',linewidth = 2.5)
pl.plot(out3 ,'g-.',label='TS policy',linewidth = 2.5)
pl.legend(loc='lower right',frameon=True,prop={"size":20})
pl.xlabel('Time',fontsize=20)
pl.ylabel('Proportion of Type 1 users',fontsize=20)
pl.tick_params(labelsize=20);
pl.subplot(1,2,2)
pl.plot(reg2 ,'b-',label='Regret for ETC policy',linewidth = 2.5) # REGRET PLOTTING
pl.plot(reg3 ,'g--',label='Regret for TS policy',linewidth = 2.5)
pl.legend(loc='lower right',frameon=True,prop={"size":20})
pl.xlabel('Time',fontsize=20)
pl.ylabel('Cumulative Regret',fontsize=20)
pl.tick_params(labelsize=20);
pl.show()
pl.subplot(1,2,1)
pl.plot(out1 , 'r-',label='Optimal policy',linewidth = 2.5) # POPULATION PROPORTION PLOTTING #LOG
pl.plot(out2 ,'b--',label='ETC policy',linewidth = 2.5)
pl.plot(out3 ,'g-.',label='TS policy',linewidth = 2.5)
pl.legend(loc='lower right',frameon=True,prop={"size":20})
pl.xlabel('Time',fontsize=20)
pl.ylabel('Proportion of Type 1 users',fontsize=20)
pl.tick_params(labelsize=20);
pl.subplot(1,2,2)
pl.plot(reg2 ,'b-',label='Regret for ETC policy',linewidth = 2.5) # REGRET PLOTTING LOG
pl.plot(reg3 ,'g--',label='Regret for TS policy',linewidth = 2.5)
pl.legend(loc='upper left',frameon=True,prop={"size":20})
pl.xscale("log")
pl.xlabel('Time',fontsize=20)
pl.ylabel('Cumulative Regret',fontsize=20)
pl.tick_params(labelsize=20);
pl.show()
| [
"nadkarniv4198@gmail.com"
] | nadkarniv4198@gmail.com |
6bb6dc5c72e6398b4458c6e340d841392a36f3d5 | 6f6ef83e18315952cdbe919d421cd151a775ced9 | /guess_word/models.py | 88917b2509c11cc3d40fd5e491542c9ffb9e80fe | [] | no_license | wolf427/whoisundercover | f2803e407d7212816c1d2a1d8ad1ee7086770d37 | 5498317cd7ee679b0e82bce13329987615535c27 | refs/heads/master | 2021-01-10T01:53:53.422100 | 2016-03-19T14:53:27 | 2016-03-19T14:53:27 | 53,417,587 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | from __future__ import unicode_literals
from django.db import models
# Create your models here.
class Word(models.Model):
word_content = models.CharField(max_length=20)
word_type = models.CharField(max_length=10)
def __unicode__(self):
return self.word | [
"wolf_fei@foxmail.com"
] | wolf_fei@foxmail.com |
3758100b3550dd8e259883e5fd420c5270aced17 | 54c07f4e781838f50c8220c275d94e69cec40483 | /tabel_competition/pipline/regression/lib/create_features.py | 484ef57e21229aab9ea9ec301763265bd5416293 | [] | no_license | hirayukis/kaggle-my-favorite-technique | b39f102dd008731c6516ac8b2c43e1e5ec2b4db7 | 9128d5a098116eb23a39e08d3ede45802c87b89d | refs/heads/master | 2023-04-20T18:43:11.462673 | 2021-05-13T07:38:20 | 2021-05-13T07:38:20 | 261,166,945 | 6 | 1 | null | null | null | null | UTF-8 | Python | false | false | 99 | py | def create_features(data, num_features, cat_features):
return data, num_features, cat_features
| [
"ykbhiralmec@gmail.com"
] | ykbhiralmec@gmail.com |
9437d331df6bf3f964edbc04126396de14dffc46 | e04311a545baa4672f170ee471ed47f67c383d73 | /QSP/JSPtoQSP/02_Area_MethodWithParameter_NoReturnType/Area03.py | e22684b4137a83f81ddd2272488e35f7cbcb26d6 | [] | no_license | chetanDN/Python | 55fa39cd2e15396d0db66f01b08e574bf5387699 | 35a4461b8673e94189199859b4a5776035232629 | refs/heads/master | 2021-04-29T11:05:31.963179 | 2019-02-27T08:58:20 | 2019-02-27T08:58:20 | 77,855,793 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 268 | py | def find_area(x, y, d):
area = ((x + y) / 2) * d # Area_of_trapezoid
print("area of trapezoid of side {} , {} and height {} is {}".format(a, b, h, area))
a = 10 # parallel_side_a
b = 20 # parallel_side_b
h = 5 # distance_between_them
find_area(a, b, h)
| [
"chetandevanaik@gmail.com"
] | chetandevanaik@gmail.com |
40e066ab1ac58766f4ce42f987ad1239ce7e4926 | 01715399a1d93914f61a34bd45f2d20485086a35 | /tempview.py | c91bb3d8ff86c6fa8f07832651c865743421bd50 | [] | no_license | lessthan41/Protect-me-Diana | 2746a1c80a0886094cd3eed40edb545b04ffcad2 | 469e525cc40b825aaa2e68552645be340d2d622f | refs/heads/master | 2022-12-13T11:13:55.452223 | 2018-12-03T01:36:38 | 2018-12-03T01:36:38 | 152,830,138 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,674 | py | from linebot.models import (
MessageEvent, TextMessage, TextSendMessage,
TemplateSendMessage, ConfirmTemplate, MessageAction,
ButtonsTemplate, ImageCarouselTemplate, ImageCarouselColumn, URIAction,
PostbackAction, DatetimePickerAction, PostbackTemplateAction,
CameraAction, CameraRollAction, LocationAction,
CarouselTemplate, CarouselColumn, PostbackEvent,
StickerMessage, StickerSendMessage, LocationMessage, LocationSendMessage,
ImageMessage, VideoMessage, AudioMessage, FileMessage,
UnfollowEvent, FollowEvent, JoinEvent, LeaveEvent, BeaconEvent,
FlexSendMessage, BubbleContainer, ImageComponent, BoxComponent,
TextComponent, SpacerComponent, IconComponent, ButtonComponent,
SeparatorComponent, QuickReply, QuickReplyButton
)
from class_DB import DB #DB抓問題
from class_DB import DB #DB抓問題
def takeFirst(elem):
return elem[0]
def tempview(output):
db = DB()
questions = db.get_all()
render = []
output.sort(key=takeFirst)
for i, value in output:
i -= 1 #題號校正
display = """題目:{} Q{}({})
回覆:{}""".format(questions[i][2], str(questions[i][3]), questions[i][1], value)
render.append(display)
return """您好,您的回覆如下:
{}
未顯示之題目為『沒問題』
【注意】:當您填寫快速檢核時,不能修改其他四類問題;反之亦然。""".format('\n\n'.join(render))
# 功能:給他暫時看看他剛剛到底說了什麼要待改進的東西
# 輸入:output = feedback[userid]
# 輸出:str (他所回覆要待改進的內容)
def tempview_confirm(output):
ret = [
StickerSendMessage(package_id=2,sticker_id=150),
TextSendMessage(text=tempview(output)),
TemplateSendMessage(
alt_text='Confirm template',
template=ConfirmTemplate(
text = '請問您要修改您的回答嗎?',
actions=[
PostbackTemplateAction(
label='要',
text='我要修改我的答案', #給使用者看相對題號
data='edit=OK' #questions是整份問卷第幾題 絕對題號
),
PostbackTemplateAction(
label='不要',
text='我已確認沒問題', #給使用者看相對題號
data='edit=NO'
)
]
))
]
return ret
| [
"noreply@github.com"
] | lessthan41.noreply@github.com |
0dd2f1d472eca5dfb2e04203f1bdd94d4d358772 | 8ef5a09d76a11c56963f18e6a08474a1a8bafe3c | /leet_code/496. Next Greater Element I.py | e9bb3f4763ac15426b6e2d62b8c5d8fb410258cb | [] | no_license | roiei/algo | 32c4677649c7666db148f6183fbfbf66c8b1969f | ae8bb8bf4ae4026ccaf1dce323b4098547dd35ec | refs/heads/master | 2022-04-01T19:21:27.768675 | 2022-02-19T06:15:29 | 2022-02-19T06:15:29 | 169,021,154 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 916 | py | import time
from util.util_list import *
from util.util_tree import *
import copy
import collections
import heapq
from typing import List
class Solution:
def nextGreaterElement(self, nums1: List[int], nums2: List[int]) -> List[int]:
stk = []
right_bigger = collections.defaultdict(int)
for i, num in enumerate(nums2):
while stk and stk[-1] < num:
val = stk.pop()
right_bigger[val] = num
stk += num,
while stk:
val = stk.pop()
right_bigger[val] = -1
res = []
for num in nums1:
res += right_bigger[num],
return res
stime = time.time()
print([-1,3,-1] == Solution().nextGreaterElement(nums1 = [4,1,2], nums2 = [1,3,4,2]))
print('elapse time: {} sec'.format(time.time() - stime))
| [
"roiei.sw@gmail.com"
] | roiei.sw@gmail.com |
0475ba553988902407c1ad6cae683e37b085efb6 | 80f2a1e37f5b5cabcc3357f2747bd44b6696541f | /scripts/cv_detector_logo.py | 880d6ac8b64208e052508b934b2ff054cfe6fdfd | [] | no_license | LeekaiDel/opencv_drone | 35b4c98c7e8d73ade46f2ce0db67b41f59c42b40 | 2ec7bf912fd099d883f8e91d19d637b94fbae874 | refs/heads/master | 2023-01-13T07:35:36.949823 | 2020-07-28T12:37:46 | 2020-07-28T12:37:46 | 274,360,935 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,726 | py | #!/usr/bin/env python
#coding=utf8
import rospy
import cv2 as cv
import numpy as np
import math
import tf
from cv_bridge import CvBridge
from std_msgs.msg import Float32
from geometry_msgs.msg import PoseStamped, Quaternion
from drone_msgs.msg import Goal
from sensor_msgs.msg import Image
# класс хранящий основные параметры найденных контуров
class contour_obj:
# конструктор
def __init__(self):
self.name = None
self.cords = []
self.mask = []
# флаги
view_window_flag = False # фдаг отображения окон с результатами обработки изображений сделано для отладки
# переменные
drone_alt = 0.0 # текущая высота дрона
drone_pose = PoseStamped() # текущая позиция дрона в глобальной системе координат
goal_point = Goal() # целевая точка, в которую должен лететь дрон
max_resize = (64, 64) # задаем максимальный размер кадра для "ресайза" выделенных контуров
# названия путей
logo_of_object = 'land_point_blue.png'
camera_file_port = "/dev/video2" # stereo elp >> /dev/video2, /dev/video4
# topics
alt_topic = "/drone/alt" # топик текущей высоты
drone_pose_topic = "/mavros/local_position/pose" # топик текущей позиции
drone_goal_pose = "/goal_pose" # топик целевой точки
camera_server_topic = "/camera_server" # топик передачи картинки на сервер просмотра(для удаленного отображения картинки на ПК управления)
# делаем захват видео с камеры в переменную cap
cap = cv.VideoCapture(camera_file_port)
cap.set(cv.CAP_PROP_FPS, 24) # Частота кадров
cap.set(cv.CAP_PROP_FRAME_WIDTH, 1280) # Ширина кадров в видеопотоке.
cap.set(cv.CAP_PROP_FRAME_HEIGHT, 720) # Высота кадров в видеопотоке.
# функция считывания текущего положения дрона
def call_back_Drone_Pose(data):
global drone_pose, quaternion
drone_pose = data
quaternion = (
data.pose.orientation.x,
data.pose.orientation.y,
data.pose.orientation.z,
data.pose.orientation.w)
# функция считывания текущей высоты
def call_back_Drone_Alt(data):
global drone_alt
drone_alt = data.data
# функция определяющая какой маркер обнаружен
def detect_marker(cut_frame, origin_frame_bin):
difference_val = 0
similarity_val = 0
try:
for i in range(64):
for j in range(64):
if cut_frame[i][j] == origin_frame_bin[i][j]:
similarity_val += 1
elif cut_frame[i][j] != origin_frame_bin[i][j]:
difference_val += 1
except:
similarity_val = 0
difference_val = 0
return similarity_val, difference_val
# функция вырезает детектируемый контур из кадра и возвращает его в бинаризованном виде с фиксированным размером кадра
def cut_contour(frame, cords, minVal, maxVal):
try:
# print(cords)
cut_contour_frame = frame[cords[1]: (cords[1] + cords[3]) + 1, cords[0]: (cords[0] + cords[2]) + 1]
# делаем фиксированный размер картинки 64 x 64
cut_contour_frame = cv.resize(cut_contour_frame, max_resize)
hsv_local = cv.cvtColor(cut_contour_frame, cv.COLOR_BGR2HSV)
cut_contour_frame = cv.inRange(hsv_local, minVal, maxVal)
except:
cut_contour_frame = None
return cut_contour_frame
# функция выделения контуров
def contour_finder(frame, ValMinBGR, ValMaxBGR):
# создаём объект хранящий в себе основные параметры детектируемого объекта
detect_obj = contour_obj()
# переводим картинку с камеры из формата BGR в HSV
hsv = cv.cvtColor(frame, cv.COLOR_BGR2HSV)
# делаем размытие картинки HSV
hsv = cv.blur(hsv, (4, 4))
if view_window_flag:
cv.imshow('Blur', hsv)
# делаем бинаризацию картинки и пихаем её в переменную mask
detect_obj.mask = cv.inRange(hsv, ValMinBGR, ValMaxBGR) #OrangeMinBGR, OrangeMaxBGR
# cv.imshow('mask', mask)
# Уменьшаем контуры белых объектов - делаем две итерации
detect_obj.mask = cv.erode(detect_obj.mask, None, iterations = 3)
# cv.imshow("Erode", mask)
# Увеличиваем контуры белых объектов (Делаем противоположность функции erode) - делаем две итерации
detect_obj.mask = cv.dilate(detect_obj.mask, None, iterations = 3)
if view_window_flag:
cv.imshow('Dilate', detect_obj.mask)
# ищем контуры в результирующем кадре
contours = cv.findContours(detect_obj.mask, cv.RETR_TREE , cv.CHAIN_APPROX_NONE) # cv.RETR_TREE
# вычленяем массив контуров из переменной contours и переинициализируем переменную contours
contours = contours[1]
# проверяем найдены ли контуры в кадре
if contours:
# сортируем элементы массива контуров по площади по убыванию
contours = sorted(contours, key = cv.contourArea, reverse = True)
# выводим все контуры на изображении
# cv.drawContours(frame, contours, -1, (0, 180, 255), 1) # cv.drawContours(кадр, массив с контурами, индекс контура, цветовой диапазон контура, толщина контура)
# получаем координаты прямоугольника описанного относительно контура
detect_obj.cords = cv.boundingRect(contours[0]) # возвращает кортеж в формате (x, y, w, h)
return detect_obj
else:
return detect_obj
| [
"kir.surov@gmail.com"
] | kir.surov@gmail.com |
f6b97b2d6c1f5a295b1b5294ad9f786a89c890ae | b353f7aa481f8b8f7ca4215edf16c810b3720bdd | /oop/1.py | 52d6587ae4a920394bbb0ec3af861b268a4f9a20 | [] | no_license | Lev1sDev/webinar_oop_2sprint | 6908864b3a4a53989aa610616244d6d82dcf3d03 | ebf6bacb40eff8c32e39b8bfb37ae3e7e3fce94c | refs/heads/main | 2022-12-30T09:58:44.026399 | 2020-10-18T11:09:47 | 2020-10-18T11:09:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 881 | py | # Калькулятор стрельбы
# начальные условия
SHELL_VELOCITY: float = 1000 # Скорость снаряда м/с
G: float = 9.81 # Ускорение свободного падения м/c**2
# где-то хранить историю???
class Shoot:
# Определяем название орудия
# Определяем время выстрела
# Определяем угол
# Определяем полетное время
# Добавляем в историю
def fire(self):
"""Печатает параметры выстрела"""
print('Стреляет орудие: \n'
'Время выстрела: \n'
'Угол: \n'
'Время полета: \n'
)
if __name__ == "__main__":
shoot1 = Shoot()
shoot1.fire()
| [
"noreply@github.com"
] | Lev1sDev.noreply@github.com |
bc81b8cbfed2f62d5eb6de9e415066e3957ec536 | 813a13a3c4146a60db1fd7af22d8334880c32acf | /check/check/settings.py | 247b9197b76077a10a57a9ed1aa57b01b0c6be90 | [] | no_license | vahgar/checksmartselect | 63f74b477799f25addcca1a9beb1b205b69eda68 | df122f508b084b6776ba6fc99bdddbef5d31fb27 | refs/heads/master | 2021-01-09T21:58:15.413346 | 2016-04-01T17:14:29 | 2016-04-01T17:14:29 | 54,667,418 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,880 | py | """
Django settings for check project.
Generated by 'django-admin startproject' using Django 1.8.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'd8jl5*ui7le(o!$^gkpb&m2+)f2o44d&ov03&)bq221=fw#_@n'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'smart_selects',
'selects',
'jquery',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'check.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR,'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'check.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'check_final',
'HOST': 'localhost',
'USER': 'root',
'PASSWORD': '1470',
'PORT': 3306
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
STATICFILES_DIRS = [ os.path.join(BASE_DIR, 'static') ]
| [
"vahgarkhurana@gmail.com"
] | vahgarkhurana@gmail.com |
8bb06ad6057d6414e13d1c58626606491602b408 | 93fc0dbbaa7ea32fb10788d5180905b24a2d152a | /tests/test_core.py | ad5d1dd574f677b43d22ea202ba9b0b383d25225 | [] | no_license | DanielAndreasen/varcomb | a97e57d21bdbe32f5f0edc5bcd2f11d99a315af5 | 9576649173d0addcb73de01b4f99c64ea1204160 | refs/heads/master | 2022-12-19T18:57:28.207364 | 2020-10-15T12:44:04 | 2020-10-15T12:44:04 | 292,203,368 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,864 | py | import unittest
from varcomb.core import VCF, Info, Location, VCFrow
from varcomb.exceptions import LocationShiftError
class TestCoreLocation(unittest.TestCase):
    """Unit tests for the Location value object: construction, distance,
    shifting, and chromosome-aware ordering."""

    def test_location(self):
        """Location stores the chromosome and position it was built with."""
        loc = Location(chrom='chr1', pos=42)
        self.assertEqual(loc.chrom, 'chr1')
        self.assertEqual(loc.pos, 42)

    def test_locations_distance_same_chrom(self):
        """Subtracting Locations on the same chromosome gives the signed position delta."""
        loc1 = Location(chrom='chr7', pos=1337)
        loc2 = Location(chrom='chr7', pos=1001)
        distance = loc1 - loc2
        inverse_distance = loc2 - loc1
        self.assertEqual(distance, 1337 - 1001)
        self.assertEqual(inverse_distance, 1001 - 1337)

    def test_locations_distance_different_chrom(self):
        """Distance between Locations on different chromosomes is undefined (None)."""
        loc1 = Location(chrom='chr4', pos=1337)
        loc2 = Location(chrom='chr7', pos=1001)
        # assertIsNone is the idiomatic (and identity-based) check for None.
        self.assertIsNone(loc1 - loc2)
        self.assertIsNone(loc2 - loc1)

    def test_location_shift(self):
        """Both `loc + n` and `loc.shift(n)` return a new, equally shifted Location."""
        loc = Location(chrom='chr1', pos=42)
        shift = -2
        shifted_loc1 = loc + shift
        shifted_loc2 = loc.shift(shift)
        self.assertIsInstance(shifted_loc1, Location)
        self.assertEqual(shifted_loc1.pos, loc.pos + shift)
        self.assertEqual(shifted_loc1, shifted_loc2)

    def test_location_shift_error(self):
        """Shifting by a non-numeric value raises LocationShiftError."""
        loc = Location(chrom='chr1', pos=42)
        with self.assertRaises(LocationShiftError):
            loc + 'not_valid'

    def test_location_less_than_same_chrom(self):
        """On the same chromosome, ordering follows the position."""
        loc1 = Location(chrom='chr7', pos=1337)
        loc2 = Location(chrom='chr7', pos=1001)
        # assertTrue replaces the assertEqual(..., True) anti-idiom.
        self.assertTrue(loc2 < loc1)
        self.assertTrue(loc1 > loc2)

    def test_location_less_than_different_chrom(self):
        """Chromosomes compare numerically (chr7 < chr20), not lexically."""
        loc1 = Location(chrom='chr7', pos=42)
        loc2 = Location(chrom='chr20', pos=1001)
        self.assertTrue(loc1 < loc2)

    def test_location_less_than_sex_chrom(self):
        """Sex chromosomes sort after autosomes (7 < X)."""
        loc1 = Location(chrom='7', pos=42)
        loc2 = Location(chrom='X', pos=1001)
        self.assertTrue(loc1 < loc2)

    def test_location_greater_than_sex_chrom(self):
        """An autosome is not greater than a sex chromosome (4 sorts before Y)."""
        loc1 = Location(chrom='Y', pos=42)
        loc2 = Location(chrom='4', pos=1001)
        self.assertFalse(loc2 > loc1)
class TestCoreInfo(unittest.TestCase):
    """Tests for the dict-like Info field attached to a VCF row."""

    def setUp(self):
        # Fresh row with a three-entry INFO field before every test.
        self.loc = Location(chrom='chr1', pos=42)
        self.info = Info('k1=v1;k2=v2;k3=v3')
        self.vcfrow = VCFrow(
            loc=self.loc,
            id='1234',
            ref='A',
            alt='G',
            qual='.',
            filter='PASS',
            info=self.info,
            format='format',
            samples=['sample1', 'sample2'],
        )

    def test_infofield(self):
        """A parsed INFO field exposes length, keys and values like a mapping."""
        info = self.vcfrow.info
        self.assertIsInstance(info, Info)
        self.assertEqual(len(info), 3)
        self.assertIn('k1', info.keys())
        self.assertIn('v2', info.values())

    def test_infofield_add(self):
        """Assigning a new key grows the INFO field."""
        info = self.vcfrow.info
        info['k4'] = 'v4'
        self.assertEqual(len(info), 4)
        self.assertIn('k4', info.keys())
        self.assertIn('v4', info.values())

    def test_infofield_get(self):
        """Each key resolves to its parsed value."""
        for key, expected in (('k1', 'v1'), ('k2', 'v2'), ('k3', 'v3')):
            self.assertEqual(self.vcfrow.info[key], expected)

    def test_infofield_to_str(self):
        """str() round-trips back to the semicolon-separated source text."""
        rendered = str(self.vcfrow.info)
        self.assertIsInstance(rendered, str)
        self.assertEqual(rendered, 'k1=v1;k2=v2;k3=v3')

    def test_infofield_no_value(self):
        """Bare flags (entries without '=value') parse to True."""
        flags_info = Info('k1=v1;k2=v2;k3=v3;field1;field2')
        self.assertEqual(len(flags_info), 5)
        self.assertEqual(flags_info['field1'], True)
        self.assertEqual(flags_info['field2'], True)
class TestCoreVCFrow(unittest.TestCase):
def setUp(self):
self.loc = Location(chrom='chr1', pos=42)
self.vcfrow = VCFrow(loc=self.loc, id='1234', ref='A', alt='G', qual='.',
filter='PASS', info='info', format='format',
samples=['sample1', 'sample2'])
def test_vcfrow(self):
self.assertEqual(self.vcfrow.loc, self.loc)
self.assertEqual(self.vcfrow.id, '1234')
self.assertEqual(self.vcfrow.ref, 'A')
self.assertEqual(self.vcfrow.alt, 'G')
self.assertEqual(self.vcfrow.qual, '.')
self.assertEqual(self.vcfrow.filter, 'PASS')
self.assertEqual(self.vcfrow.info, 'info')
self.assertEqual(self.vcfrow.format, 'format')
self.assertEqual(self.vcfrow.samples, ['sample1', 'sample2'])
def test_format_line(self):
actual = self.vcfrow._format_row()
row = [self.vcfrow.loc.chrom, self.vcfrow.loc.pos, self.vcfrow.id, self.vcfrow.ref,
self.vcfrow.alt, self.vcfrow.qual, self.vcfrow.filter, self.vcfrow.info, self.vcfrow.format]
expected = '\t'.join(map(str, row + self.vcfrow.samples))
self.assertEqual(actual, expected)
class TestCoreVCF(unittest.TestCase):
def setUp(self):
loc1 = Location(chrom='chr2', pos=21)
loc2 = Location(chrom='chr16', pos=50)
self.vcfrow1 = VCFrow(loc=loc1, id='id1', ref='G', alt='A', qual='.',
filter='PASS', info='info', format='format',
samples=['sample1', 'sample2'])
self.vcfrow2 = VCFrow(loc=loc2, id='id2', ref='T', alt='ATTGC', qual='.',
filter='PASS', info='info', format='format',
samples=['sample1', 'sample2'])
self.vcf = VCF(rows=[self.vcfrow1, self.vcfrow2])
def test_vcf(self):
self.assertIsInstance(self.vcf.rows, list)
self.assertEqual(len(self.vcf), len(self.vcf.rows))
self.assertEqual(self.vcf[0], self.vcf.rows[0])
self.assertEqual(self.vcf[0], self.vcfrow1)
def test_vcf_with_header(self):
header = '# This is a header'
vcf = VCF(rows=[self.vcfrow1, self.vcfrow2], header=header)
self.assertIsNotNone(vcf.header)
self.assertEqual(len(vcf), 2)
def test_different_vcfs(self):
vcfrow = self.vcfrow1
vcfrow.ref = 'T'
new_vcf = VCF(rows=[vcfrow])
self.assertNotEqual(self.vcf, new_vcf)
def test_get_from_chrom(self):
vcf = self.vcf + self.vcf
chrom = 'chr2'
vcf_chrom2 = vcf.get_from_chrom(chrom)
self.assertEqual(len(vcf_chrom2), 2)
self.assertEqual(vcf_chrom2[0].loc.chrom, chrom)
def test_get_near_location(self):
chrom = 'chr16'
pos = 55
vcf1 = self.vcf.get_near_location(chrom, pos, tol=10)
vcf2 = self.vcf.get_near_location(chrom, pos, tol=2)
vcf3 = self.vcf.get_near_location('chr2', pos, tol=10)
self.assertEqual(len(vcf1), 1)
self.assertEqual(len(vcf2), 0)
self.assertEqual(len(vcf3), 0)
| [
"daniel.andreasen@hotmail.com"
] | daniel.andreasen@hotmail.com |
20ea0483a27f1041660bd88552c58835f78d876e | b08d42933ac06045905d7c005ca9c114ed3aecc0 | /src/coefSubset/evaluate/ranks/tenPercent/rank_1bj1_H.py | 1bc2f459e6bf336ab3b52b79623a3fec22f210a6 | [] | no_license | TanemuraKiyoto/PPI-native-detection-via-LR | d148d53f5eb60a4dda5318b371a3048e3f662725 | 897e7188b0da94e87126a4acc0c9a6ff44a64574 | refs/heads/master | 2022-12-05T11:59:01.014309 | 2020-08-10T00:41:17 | 2020-08-10T00:41:17 | 225,272,083 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,386 | py | # 9 July 2019
# Kiyoto Aramis Tanemura
# Several metrics are used to assess the performance of the trained RF model, notably native ranking. This script returns a ranking of the native protein-protein complex among a decoy set. For convenience, I will define as a function and will call in a general performance assessment script.
# Modified 11 July 2019 by Kiyoto Aramis Tanemura. To parallelize the process, I will replace the for loop for the testFileList to a multiprocessing pool.
# Modified 9 September 2019 by Kiyoto Aramis Tanemura. I will use the function to perform the calculation on one CSV file only. Thus instead of a function to import in other scripts, they will be individual jobs parallelized as individual jobs in the queue.
import os
import pandas as pd
import numpy as np
import pickle
os.chdir('/mnt/scratch/tanemur1/')
# Read the model and trainFile
testFile = '1bj1.csv'
identifier = 'H'
coefFrac = 0.1
testFilePath = '/mnt/scratch/tanemur1/CASF-PPI/nonb_descriptors/complete/'
modelPath = '/mnt/home/tanemur1/6May2019/2019-11-11/results/coefSubset/tenPercent/'
outputPath = '/mnt/home/tanemur1/6May2019/2019-11-11/results/coefSubset/evaluate/tenPercent/ranks/'
pdbID = testFile[:4]
with open(modelPath + 'model' + identifier + '.pkl', 'rb') as f:
clf = pickle.load(f)
result = pd.DataFrame()
scoreList = []
df1 = pd.read_csv(testFilePath + testFile)
dropList = ['Unnamed: 0', 'Unnamed: 0.1', 'ref']
df1 = df1.drop(dropList, axis = 1)
df1 = df1.set_index('Pair_name')
df1 = pd.DataFrame(df1.values.T, columns = df1.index, index = df1.columns)
df1.fillna(0.0, inplace = True)
#df1 = df1.reindex(sorted(df1.columns), axis = 1)
# Keep coefficients within the given fraction when ordered by decreasing order of coefficient magnitude
coefs = pd.read_csv('/mnt/home/tanemur1/6May2019/2019-11-11/results/medianCoefs.csv', index_col = 0, header = None, names = ['coefficients'])
coefs['absVal'] = np.abs(coefs['coefficients'])
coefs.sort_values(by = 'absVal', ascending = False, inplace = True)
coefs = coefs[:int(14028 * coefFrac + 0.5)]
keepList = list(coefs.index)
del coefs
df1 = df1[keepList]
df1 = df1.reindex(sorted(df1.columns), axis = 1)
with open(modelPath + 'standardScaler' + identifier + '.pkl', 'rb') as g:
scaler = pickle.load(g)
for i in range(len(df1)):
# subtract from one row each row of the dataframe, then remove the trivial row[[i]] - row[[i]]. Also some input files have 'class' column. This is erroneous and is removed.
df2 = pd.DataFrame(df1.iloc[[i]].values - df1.values, index = df1.index, columns = df1.columns)
df2 = df2.drop(df1.iloc[[i]].index[0], axis = 0)
# Standardize inut DF using the standard scaler used for training data.
df2 = scaler.transform(df2)
# Predict class of each comparison descriptor and sum the classes to obtain score. Higher score corresponds to more native-like complex
predictions = clf.predict(df2)
score = sum(predictions)
scoreList.append(score)
# Make a new DataFrame to store the score and corresponding descriptorID. Add rank as column. Note: lower rank corresponds to more native-like complex
result = pd.DataFrame(data = {'score': scoreList}, index = df1.index.tolist()).sort_values(by = 'score', ascending = False)
result['rank'] = range(1, len(result) + 1)
with open(outputPath + pdbID + identifier + '.csv', 'w') as h:
result.to_csv(h)
| [
"tanemur1@msu.edu"
] | tanemur1@msu.edu |
17d1f35fec60aa42c530c87886cd13ff94e9e3cc | 55608f76e3a5999debf54690a3c7ce1f3dc08466 | /news/migrations/0008_auto_20160515_1534.py | 980eb413b6d3b6ceddebf07440f5fc97d187a26b | [] | no_license | eric-guanwy/my_test_web | da78f1dd479e11c91257a4e756d71b50a68fdea3 | 8fffee662315a92aaedfc5fb49aa593629242e03 | refs/heads/master | 2020-12-24T18:32:26.774166 | 2016-05-16T16:50:17 | 2016-05-16T16:50:17 | 58,667,817 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 459 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('news', '0007_auto_20160515_1530'),
]
operations = [
migrations.AlterField(
model_name='article',
name='photo',
field=models.ImageField(upload_to='photos', null=True, verbose_name='upload picture', blank=True),
),
]
| [
"guanwy01@gmail.com"
] | guanwy01@gmail.com |
befd4c281191afb1e2d3456a2b6e893d51ce67d3 | 3e8a086ddfc63d6d387115716532d57fbc6c22a1 | /mandelbrot.py | d589b50bd904f9ca10c8c2318e55354c591d9c45 | [] | no_license | GuangSenNi/daily_work | fcfe9a4ea8c31359893d07ffcd827312cc3e9696 | ce52cdf1a31a74c266aaac9952ab1a8702d46ee7 | refs/heads/master | 2020-08-30T03:25:54.816575 | 2019-11-12T07:38:26 | 2019-11-12T07:38:26 | 218,248,210 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,117 | py | import time
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
import multiprocessing
from multiprocessing import Manager
wide = 300
height = 200
count = 100
# 图形中心的位置
orig_x = wide * 2 / 3
orig_y = height / 2
def iteration(x, y):
limit = count + 1
a = (x - orig_x) / (wide / 3)
b = (orig_y - y) / (height / 2)
# complex生成复数
c = complex(a, b)
z = complex(0, 0)
for m in range(limit):
z = z * z + c
if z.real > 2 or z.imag > 2:
return 1
return 0
# 使用消息队列 按行分任务
def sub_p_calculate(x, mq):
for y in range(height):
limit = count + 1
a = (x - orig_x) / (wide / 3)
b = (orig_y - y) / (height / 2)
# complex生成复数
c = complex(a, b)
z = complex(0, 0)
t = 0
for m in range(limit):
z = z * z + c
if z.real > 2 or z.imag > 2:
t = 1
break
mq.put([y, x, t])
# 使用map分任务
def sub_use_map(v):
x = v[1]
y = v[0]
limit = count + 1
a = (x - orig_x) / (wide / 3)
b = (orig_y - y) / (height / 2)
# complex生成复数
c = complex(a, b)
z = complex(0, 0)
t = 0
for m in range(limit):
z = z * z + c
if z.real > 2 or z.imag > 2:
t = 1
break
return t
# 串行计算
def serial_cal():
img = Image.new("RGB", (wide, height))
img2 = np.array(img)
t0 = time.perf_counter()
for i in range(height):
for j in range(wide):
ite = iteration(j, i)
if ite:
img2[i, j, 0] = ite
img2[i, j, 1] = abs(j - orig_x) / wide * 255
img2[i, j, 2] = abs(i - orig_y) / height * 255
else:
img2[i, j, :] = 0
t1 = time.perf_counter()
print("单进程执行时间:", t1 - t0)
return img2
# 进程池pool.apply_async+消息队列 进程通信耗时较大 按行分配任务以减小通信开销
def pool_use_mq():
img1 = Image.new("RGB", (wide, height))
img3 = np.array(img1)
# 巨坑 进程池通信要用Manager下的queue
q = Manager().Queue()
pool = multiprocessing.Pool(processes=4)
t2 = time.perf_counter()
for j in range(wide):
pool.apply_async(sub_p_calculate, (j, q,))
pool.close()
pool.join()
while not q.empty():
result = q.get()
i = result[0]
j = result[1]
if result[2]:
img3[i, j, 0] = 1
img3[i, j, 1] = abs(j - orig_x) / wide * 255
img3[i, j, 2] = abs(i - orig_y) / height * 255
else:
img3[i, j, :] = 0
t3 = time.perf_counter()
print("并行使用消息队列执行时间:", t3 - t2)
return img3
# pool使用map优化,进一步减小通信开销
def pool_use_map():
pool = multiprocessing.Pool(processes=4)
t4 = time.perf_counter()
arr = []
for i in range(height):
for j in range(wide):
arr.append([i, j])
result = pool.map_async(sub_use_map, arr)
pool.close()
pool.join()
t5 = time.perf_counter()
img = Image.new("RGB", (wide, height))
img2 = np.array(img)
for k in range(len(arr)):
j = arr[k][1]
i = arr[k][0]
if result.get()[k]:
img2[i, j, 0] = 1
img2[i, j, 1] = abs(j - orig_x) / wide * 255
img2[i, j, 2] = abs(i - orig_y) / height * 255
else:
img2[i, j, :] = 0
print("并行使用map运行时间", t5 - t4)
return img2
# 进程切换及消息队列都会严重影响运行时间 迭代次数越多并行优势越明显
if __name__ == '__main__':
img2 = serial_cal()
plt.imshow(img2)
plt.show()
# fig = plt.figure()
# sub1 = fig.add_subplot(211)
# sub1.imshow(img2)
# sub2 = fig.add_subplot(212)
# sub2.imshow(img3)
# plt.tight_layout()
# plt.show()
| [
"1624191058@qq.com"
] | 1624191058@qq.com |
cc6d27dacb0e54927f94ba6bc4b048bc7998907d | 5c803a6240aaaca3c528c9baf7cbaada6e9b1471 | /adminprofile/migrations/0002_item.py | 156b246fed237cc22ac86aa7f70362d99bd52749 | [] | no_license | bansalshruti691/Inventory-Management | 441e9ddf9b6e22cbfc13010f1602f4e9cef8eefb | f44c1d26baee1143e96c9faaeb9c4f4ab47ee5bb | refs/heads/master | 2020-08-14T05:17:47.013552 | 2019-10-04T12:53:31 | 2019-10-04T12:53:31 | 215,104,634 | 0 | 0 | null | 2019-10-14T17:25:48 | 2019-10-14T17:25:48 | null | UTF-8 | Python | false | false | 648 | py | # Generated by Django 2.2.2 on 2019-07-02 08:13
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('adminprofile', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='item',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('item_no', models.CharField(max_length=500)),
('item_name', models.CharField(max_length=1000)),
],
options={
'db_table': 'item',
},
),
]
| [
"bansalshruti69@gmail.com"
] | bansalshruti69@gmail.com |
24ea72915ae2b784cf560e268c35fc7e0066040b | f9be50b9dc39bfdfd3fe76c193e4dd33cb995d7a | /Iris/urls.py | 501abf257a33679e6b48a26dcae417dac79e2572 | [] | no_license | Preeta192821/Iris_Machinelearning_with_SVM_ | f81d5eacd9aa621aa6372a63258d805aae6f129e | 7d2aa37d853068d588673adea0381b5839576abd | refs/heads/master | 2022-11-13T15:21:42.756175 | 2020-07-08T00:44:33 | 2020-07-08T00:44:33 | 277,951,943 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,001 | py | """Iris URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf.urls import url
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('predict.urls', namespace='predict'))
]+static(settings.STATIC_URL,document_root=settings.STATIC_ROOT)
| [
"preetiverma1028@gmail.com"
] | preetiverma1028@gmail.com |
c182f0367bc67bd0ad479f941bedcdf78a03f201 | 2644df7f018bac7bc4e42a327b76ad183a124381 | /Codeforces.com/Design Tutorial-Learn from Math.py | 4b84a281fdecd94eab09b0333e0889843b88a250 | [] | no_license | ShubhamMaht0/Competitive_Programming | b535aa9e366f3d1aacbe843e382561cca29a46a0 | 10913476fdffa3cab4643a6488772d19fdb6cf2a | refs/heads/main | 2023-08-03T19:49:26.108444 | 2021-09-10T14:08:58 | 2021-09-10T14:08:58 | 403,300,263 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 513 | py | def compo(c):
'''it will whether the number is composite or not.'''
for i in range(2,c):
if c%i==0:
return(True)
break
else:
return(False)
n=int(input())
f=n//2
s=n//2
if n%2!=0:
f=f+1
while True:
if compo(f):
if compo(s):
print(f,' ',s,sep='')
break
else:
f=f+1
s=s-1
else:
f=f+1
s=s-1
| [
"noreply@github.com"
] | ShubhamMaht0.noreply@github.com |
cc25ee6ddf01f2f06ff9b62b22ac1f611e0af289 | 21f15c9d39e8bb00771b22e5455b5efb2e817ef3 | /KeyLogger/system/send_email.py | 17f17d020001acd16b1fb77fabf2fa3187843c69 | [] | no_license | Hrafh/pylogger | 02c045e8e9b9b58788bf0a373cc8bbac6e55b093 | b83218d2381c67a7a3dea5777944f17eb66ff1ad | refs/heads/master | 2020-04-10T11:18:07.056008 | 2018-12-08T23:59:16 | 2018-12-08T23:59:16 | 160,989,236 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,672 | py | import time
import getpass
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
from email import encoders
#Initialisation
fromaddr = "keylog.python@gmail.com"
#Veuillez mettre votre adresse email :vous allez recevoir le fichier key_log.txt sur cet email
toaddr = "hamane.achraf@gmail.com"
msg = MIMEMultipart()
msg['From'] = fromaddr
msg['To'] = toaddr
msg['Subject'] = "hello key"
#message envoye avec le fichier key_log.txt
body = "fichier envoye avec succes"
msg.attach(MIMEText(body, 'plain'))
filename = "key_log.txt"
attachment = open(filename, "rb")
#cette fonction permet de recuperer le fichier key_log.txt
# En outre elle permet de rentrer dans le server de gmail via le port 587
# Et envoyer le fichier key_log.txt
def envoi():
part = MIMEBase('application', 'octet-stream')
msg.attach(part)
part.add_header('Content-Disposition', "attachment; filename= %s" % filename)
part.set_payload((attachment).read())
encoders.encode_base64(part)
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login(fromaddr, "Keyloggerm1")
text = msg.as_string()
server.sendmail(fromaddr, toaddr, text)
server.quit()
#Boucle while qui permet de renvoyer un email chaque 60 secondes
#Appel a la fonction envoi pour l'envoi du mail
# Vous pouvez toujours modifier la duree dans sleep()
# par exemple, si vous voulez mettre 90 secondes , il suffit juste de
#supprimer 60 et la remplacer par 90 : time.sleep(90)
while 1:
time.sleep(60)
envoi()
| [
"noreply@github.com"
] | Hrafh.noreply@github.com |
98d58a3a932a3cde1121c2ebd8ab9c7a51a2162e | 8e3b4de568897eea12bfeb6128fe93cdc2b1fc90 | /react_example_1/app/flask_example1/forms.py | ed81fb622016c83943f33fac3701b146c5628299 | [] | no_license | DevYadav-D/java-and-java-script | 1792a87d2029d2ffbedc756b3c79b8db905a459d | 05cf1888670b343aec18b7948e7fa1c3069f3acb | refs/heads/master | 2023-07-23T17:08:42.100241 | 2021-09-10T14:15:18 | 2021-09-10T14:15:18 | 403,563,643 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 244 | py | from flask_wtf import FlaskForm
from wtforms import TextAreaField, SubmitField
from wtforms.validators import DataRequired
class Todo(FlaskForm):
content = TextAreaField(validators=[DataRequired()])
submit = SubmitField('Submit')
| [
"devyadav5392@gmail.com"
] | devyadav5392@gmail.com |
aebad8db8c5ae23c1c4cd8b991fc56c634e14307 | b28438df838514b7dde4458f4e8fadaa58118751 | /notes4.py | 4eedd1a457159f1a4ce3f882652bb680cc8feb5d | [] | no_license | junaid238/class_files | dcd31bf4540d539f40653b7aa75d82302eae12b1 | 3ba2dae62e9da446e7086889ab732db033992896 | refs/heads/master | 2020-03-06T22:57:25.332420 | 2018-08-23T10:04:19 | 2018-08-23T10:04:19 | 127,119,102 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 35,765 | py | # loopimg structures
# --> iterative
# --> repetitions
# --> finite , infinite
# --> for , while
# --> XXX do while XXX
# components
# - initialisation
# - limit / condition
# - statements ( compulsory )
# 3 data types
# ------------
# - numbers
# - strings
# - collections
# numbers --> range()
# range(<end>) --> 0 to end-1 (inc = 1)
# range(<start> , <end>) --> <start> to <end>-1 (inc =1)
# range(<start> , <end> , <step>) --> <start> to <end>-1 (inc =<step>)
# syntax
# ------
# for <dummy> in range():
# <statements>
# initialisation = 0
# condition = i <10
# statements = print(i)
# for i in range(10):
# print(i)
# for i in range(12,30):
# print(i)
# for i in range(12,30,3):
# print(i)
# dec step = -1 (--)
# ------------------
# for i in range(12,3,-1):
# print(i)
# patterns
# --------
# star /char
# number
# *
# **
# ***
# ****
# *****
# ***** --> print("*" * 5)
# **** --> print("*" * 4)
# print("*" * 3)
# print("*" * 4)
# print("*" * 5)
# for var in range(1,6):
# print("*" * var)
# for num in range(1,6):
# print(str(num) * num)
# 1
# 22
# 333
# 4444
# 55555
# n= 5
# * # 4 space 1 star
# ** # 3 space 2 star
# *** # 2 space 3 star
# **** # 1 space 4 star
# ***** # 0 space 5 star
# i n-i
# n = 6
# for i in range(n,0, -1):
# print((" "*i) + ("*"*(n-i)))
# task1
# -----
# *
# ***
# *****
# *******
# task2
# -----
# num = 4
# 00 00 00
# 01 01 01
# 02 04 08
# 03 09 27
# 04 16 64
# num = 5
# 000 000 000
# 001 001 001
# 002 004 008
# 003 009 027
# 004 016 064
# 005 025 125
# nested loops
# ------------
# for :
# for :
# for :
# <statements>
# for
# if
# else
# even odd classifier
# -------------------
# st = 100
# en = 120
# if type(st) is str or type(en) is str :
# print("enter only numbers")
# else:
# for i in range(st , en+1):
# if(i%2 == 0 ):
# print("%d is %s" %(i , "even"))
# else:
# print("%d is %s" %(i , "odd"))
# 1
# 12
# 123
# 1234
# 12345
# 12345
# for i in range(1,6):
# print(i , end ="")
# print()
# # 1234
# for i in range(1,5):
# print(i , end ="")
# print()
# # 123
# for i in range(1,4):
# print(i , end ="")
# for num in range(1,6):
# for var in range(1, num+1):
# print(var , end = "")
# print()
# for num in range(6,1,-1):
# for var in range(1, num):
# print(var , end = "")
# print()
# composite - 12 - 1,2,3,4,6,12
# prime - 7 - 1,7
# num = 12
# composite
# num = 13
# prime
# 12 % 1 ==0
# 12 % 2 ==0
# 12 % 3 ==0
# .
# .
# .
# .
# 12 % 12 ==0
# 0's - 6 - composite'
# 7 % 1 == 0
# 7 % 2 != 0
# .
# .
# .
# 7 % 7 == 0
# 0's - 2 - prime'
# count = 0
# num = 12
# for i in range(1,num+1):
# if (num%i == 0 ):
# count += 1
# if(count > 2):
# print("%d is %s"%(num , "composite"))
# else:
# print("%d is %s"%(num , "prime"))
# s = 13
# e = 20
# 13 - prime
# 14 - composite
# 15 - composite
# num = 5
# 5 * 1 = 5
# 5 * 2 = 10
# .
# .
# .
# 5 * 10 = 50
# composite and prime in a range
# num = 20
# e = 30
# for j in range(num , e+1):
# count = 0
# for i in range(1,j+1):
# if (j%i == 0 ):
# count += 1
# if(count > 2):
# print("%d is %s no of factors --> %d"%(j , "composite" , count))
# else:
# print("%d is %s"%(j , "prime"))
# for i in range(3,0,-1):
# for j in range(1,7,2):
# print(" "*i,end="")
# print("*"*j)
# n = 32
# l = len(str(n**3))
# for num in range(0,n):
# print("%s %s %s"%(str(num).zfill(l) , str(num**2).zfill(l) , str(num**3).zfill(l)))
# strings + loops
# ----------------
# tech = "python and data sceinces and Data Analytics"
# print(tech[0])
# print(tech[1])
# print(tech[2])
# print(tech[3])
# all languages
# -------------
# for i in range(0,len(tech)):
# print(tech[i])
# syntax
# ------
# for <dummy> in strname:
# <statements>
# for dummy in tech:
# print(dummy)
# for i in tech:
# if(i.lower() == "a" or i.lower() == "e" or i.lower() == "i" or i.lower() == "o" or i.lower() == "u" ):
# print(i , end =" ")
# n = 5
# a
# bb
# ccc
# dddd
# eeeee
# ASCII --> a-z => 97-122 , 65-90
# ord() chr()
# n = 4
# en = 97 + n # 97 + 4 = 101 -> d
# st = ""
# for i in range(97,en):
# st = st+chr(i)
# print(st)
# for j in st:
# for i in range(1,5):
# print(j*i)
# for j in range(1,n+1):
# for i in range(97,en):
# print(chr(i) * (j))
# print(chr(97) * 1)
# print(chr(98) * 2)
# print(chr(99) * 3)
# print(chr(100) * 4)
# while loop
# ----------
# - initialisation
# - condition
# - statements
# - inc/dec
# - infinite loop
# syntax
# ------
# <initialisation>
# while (<condition/limit>):
# <statements>
# <inc/dec>
# infinite --> condition always true
# a = 1
# while (a<=10):
# print(a)
# a= a+1
# a = 10
# while (a>=1):
# print(a)
# a= a-1
# infinite loops
# a = 10
# while a ==10:
# print(a)
# while (True):
# print("hai")
# control statements
# -------------------
# pass , break , continue --> keywords
# pass --> overcome unimplementations / no statements
# - condition , loops , functions , classes
# break --> exit the loop at condition-> loops
# -> atm -- 3 wrong attempts
# continue --> exit the loop at condition and returns ( pause )
# -> ticketing , queues
# for i in range(1,40):
# if(i == 26):
# break
# print(i)
# d = 20
# while (d<40):
# print(d)
# d = d + 1
# if(d == 25):
# break
# d = 30
# while (d<50):
# d += 1
# if(d == 40):
# continue
# print(d)
# for --> continue --> ??
# patterns --> while --> ??
# Collections:
# ------------
# derived data types
# - lists
# - tuples
# - dictionaries
# - sets and frozen sets
# collection ==> module ( queue )
# --> grouped storage spaces
# LIFO -> stack
# FIFO -> queue
# --> stacks
# Lists
# -----
# - heterogeneous collection
# - mutable
# - infinite length
# - 1 dimensional
# - LIFO
# - nested
# - <class list>
# - iterable
# - indexed
# - sliced
# - concatenated
# - extended
# syntax
# ------
# names = [1 ,"1", "khan" , [1,2,3,4]]
# access(indexed/sliced)
# ------
# names[0] # 1
# names[1] # "1"
# names[3] # [1,2,3,4]
# names[3][3] # 4
l1 = [1,2,3,"a" , "khan" , [20,40,50,89]]
# print(l1)
# print(type(l1))
# indexing
# --------
# print(l1[0])
# print(l1[4])
# print(l1[5])
# print(l1[5][0])
# print(l1[5][3])
l2 = [[[[[[[[5]]]]]]]]
# print(l2[0][0][0][0][0][0][0][0])
# slicing
# -------
# <listname>[<start>:<end>] --> start to end-1
# <listname>[:<end>] --> first element to end-1
# <listname>[<start>:] --> start to last element
# print(l1[2:5])
# print(l1[:5])
# print(l1[3:])
# concatenated
# print(l1 + l2 )
# print(l1)
# print(l2)
# iterable
# print(l1[0])
# print(l1[1])
# print(l1[2])
# for i in range(0,5):
# print(l1[i])
# for i in l1 :
# print(i)
# functions
# ---------
# len(<listname>) --> length of list
# <listname>.index(<item>) --> index of item passed
# <listname>.count(<item>) --> frequency of items
names = ["khan" , "ravi" , "hari " ,"khan" ]
# print(names)
# print(len(names))
# print(names.index("ravi"))
# print(names.index("khan" , 1)) # index = 1
# print(names.count("khan"))
# mutable properties
# ------------------
# add , del , remove , extend
# add --> append() insert() # LIFO
# <listname>.append(<item>)
# names.append("rajesh")
# print(names)
# <listname>.insert( <index> , <item> )
# names.insert( 3, "kumar" )
# print(names)
# remove --> pop() , remove()
# pop() --> last element removed
# remove(<item>) --> element gets removed
# names.pop()
# print(names)
# names.pop()
# print(names)
# names.remove("khan")
# print(names)
# index --> indexing --> item(remove)
# delete single element
# del <listname>[<index>]
# del names[2]
# print(names)
# delete multiple element
# del <listname>[<start> : <end>]
# del names[2:4]
# print(names)
# delete entire list
# del <listname>
# del names
# print(names)
# modify element ( indexing )
# --------------
# names[2] = "rajesh"
# print(names)
# modify multiple elements ( slicing )
# --------------
# names[2:4] = "rajesh" , "rahul"
# print(names)
# extending of lists:
# -------------------
# concatenated --> different
# print(l1)
# l3 = names + l1
# print(l3)
# print(names)
# print(l1)
# <listname1>.extend(<listname2>)
# print(names)
# print(l1)
# print("after extending")
# names.extend(l1) # all l1 elements gets attached to names LIFO
# print(names)
# print(l1)
# print(names)
# nums = [10,20,30,40,50]
# # print(nums)
# for i in names:
# nums.append(i)
# # print(names)
# print(nums)
repList = [1,2,3,4,5,7,8,23,5,13,19,2,1,2,3,4,5,6,78,8] # question list
# unique = [1,2,3,4,5,6. . .. . .] # all unique elements
# reps = [6 , 8 , 1 ,2 ,3, 4 .. .] # all repeated elements
# print(6 in repList)
# print(60 in repList)
# make an empty list
unique = []
reps = []
for i in repList:
if(i not in unique):
unique.append(i)
else:
reps.append(i)
# print(unique)
# print(reps)
# squares of unique elements
# squ = []
# notPo = []
# for i in unique:
# if (type(i) is not str ):
# squ.append(i**2)
# else:
# notPo.append(i)
# print(squ)
# print(notPo)
# unpacking of lists
# listinlist = [1,2,3,[4,5,7,8],23,5,13,[19,"a",2,1],2,3]
# # elems = [1,2,3,4,5,7,8,23,5,13,19,"a",2,1,2,3,]
# elems = []
# for i in listinlist:
# if(type(i) is list):
# for j in i:
# elems.append(j)
# else:
# elems.append(i)
# print(elems)
# enter no of elements 4
# enter element 10
# enter element 100
# enter element 20
# enter element 30
# [10,100,20,30]
# emp = []
# length = int(input("enter no of elems you need"))
# while length > 0 :
# ele = input("enter the element")
# emp.append(ele)
# length -= 1
# for i in range(0,length):
# ele = input("enter the element")
# emp.append(ele)
# print(emp)
# namesNums = [. . .. . ]
# 1 add element
# 2 remove element
# enter your option
# 1
# enter the element
# 10
# [ .. . . , 10]
# enter your option
# 2
# enter the element
# 10
# [ .. . . , ]
# namesNums = [10 , 20 , 30]
# print(namesNums)
# print("1. add an element. ")
# print("2. remove an element. ")
# option = int(input("enter your option. "))
# if option == 1:
# element = int(input("enter the element. "))
# namesNums.append(element)
# print(namesNums)
# elif option ==2 :
# print(namesNums)
# element = int(input("enter the element. "))
# namesNums.remove(element)
# print(namesNums)
# else:
# print("wrong input")
# print(repList)
# sort() , append() , remove() --> no return
# <listname>.sort() # directly sort the list without return
# ascending order
# repList.sort()
# print("after sorting")
# print(repList)
# # descinding list
# repList.sort(reverse = True)
# print("sorting in reverse format")
# print(repList)
# names = ["a" , "c " , "r" , "w" , "b " , "e"]
# names.sort()
# print(names)
# Tuples
# ------
# - collection
# - immutable
# - <class tuple>
# - infinite length
# - heterogeneous collection
# - indexed , sliced , concatenated
# - iterable collection
# syntax
# ------
# <tupleName> = (<elements>)
# <tupleName> = <elements> ,
# nums = (1,2,3,4,5)
# print(nums)
# print(type(nums))
# names = "khan" , "hari" , "ravi" ,
# print(names)
# print(type(names))
# indexing
# print(nums[0])
# print(nums[4])
# slicing
# print(nums[2:4])
# print(nums[1:3])
# a = nums[2:4]
# print(type(a))
# nums[2] = "khan"
# len() , index() , count() --> do not modify content
# del nums # entire tuple
# del num[2] # cannot delete a single item
# print(nums.count(2))
# print(nums.index(2))
# print(len(nums))
# nums.append(4)
# tuple --> ??
# -> constants
# mathconst = (3.14, 2.71 ,6.3)
# fees = ("20k" , "15k" , "25k")
# type casting
# ------------
# list --> tuple ==> tuple()
# tuple --> list ==> list()
# print(fees)
# print(type(fees))
# fees = list(fees)
# print(fees)
# print(type(fees))
# fees[2] ="30k"
# fees = tuple(fees)
# print(fees)
# print(type(fees))
# List comprehensions
# -------------------
# -> making lists
# -> assigned
# -> assigned + condition
# -> assigned + operation + condition
#declaring an empty list
# nList = []
# # looping /iterating
# for i in range(0,10):
# # assignment
# nList.append(i)
# print(nList)
# <listname> = [ <assignmentVar> <loop structures> ]
# numList = [ x for x in range(0,10) ]
# print(numList)
# #declaring an empty list
# emList = []
# # # looping /iterating
# for i in range(1,100):
# # condition check
# if i % 5 == 0 :
# # assignment
# emList.append(i)
# print(emList)
# <listname> = [ <assignmentVar> <looping structures> < condition> ]
# emList = [ x for x in range(1,100) if x%5 == 0 ]
# print(emList)
# # #declaring an empty list
# listo = []
# # # # looping /iterating
# for i in range(1,21):
# # assignment + operation
# listo.append(i**2)
# print(listo)
# <listname> = [ <assignmentVar + operation> <looping structures> ]
# listo = [ x**2 for x in range(1,21) ]
# print(listo)
# <listname> = [ <assignmentVar + operation> <looping structures> <condition]
# fiveMuls = [i**2 for i in range(1,100) if i%5 == 0 ]
# print(fiveMuls)
# Tuple comprehensions --> ???
# --------------------
# XXX no tuple comprehensions XXX
# 0 1 2 3 4 5 6
# form -> fname , lname , age , email , mobile , peradd , preadd
# 1can -> ---- , ------ , -----,------ , ------ , ------ , ------
# 2can -> ---- , ------ , -----,------ , ------ , ------
# 1can[5] --> peradd
# 2can[5] --> preadd
# --> indices will be ours
# --> named tuple and Dictionary
# Dictionary
# ----------
# -> collection
# -> <class dict>
# -> mutable
# -> indexed
# -> concatenated
# -> pairs of data --> keys and values
# -> indices are customisable --> keys
# -> elements --> values
# -> keys + values --> items
# -> heterogeneous (conditions)
# syntax
# ------
# empDict = {}
# namesDict = { <key1>:<value1> , <key2>:<value2> , <key3>:<value3> }
# keys --> immutable objects -> numbers , strings , tuples , unique
# values --> anything
# print(empDict)
# print(type(empDict))
# formData = {"fname": "khan" , "age" :27 , "mobile": 9876543210 , "email": "khan@gmail.com"}
# keys -- fname , age , mobile , email
# values -- khan , 27 , 9876543210 , khan@gmail.com
# formData = {"fname": "khan" , (1,2,3,4) :27 , "mobile": 9876543210 , "email": "khan@gmail.com"}
# print(formData)
# keys -- fname , (1,2,3,4) , mobile , email
# formData = {"fname": "khan" , [1,2,3,4] :27 , "mobile": 9876543210 , "email": "khan@gmail.com"}
# print(formData)
# keys -- fname , [1,2,3,4] , mobile , email
# formData = {"fname": "khan" , (1,2,3,4) :[1,2,3,4,45] , "mobile": 9876543210 , "email": "khan@gmail.com"}
# print(formData)
# keys -- fname , (1,2,3,4) , mobile , email
# access of elements --> keying
# -----------------------------
# <dictName>[<key>]
# print(formData["age"])
# print(formData["mobile"])
# print(formData["post"]) # key error unexisting
# re assignment of values
# ------------------------
# <oldvalue> --> <newValue> # key is existing
# <dictName>[<key>] = <newValue>
# print(formData["age"])
# formData["age"] = 30
# print(formData["age"])
# print(formData)
# if unexisting it adds at the last
# formData["role"] = "py developer"
# print(formData)
# functions
# ---------
# len() --> length of keys
# len(<dictName>)
# print(len(formData))
# .keys() --> all the keys in Dictionary
# <dictName>.keys()
# print(formData.keys())
# .values() --> all the values in Dictionary
# <dictName>.values()
# print(formData.values())
# .items() --> all the items in Dictionary
# <dictName>.items()
# print(formData.items())
# for i in formData.keys():
# print(i)
# for i in formData.items():
# print(i)
# pop() , popitem()
# <dictName>.pop(<key>) --> item of that key
# <dictName>.popitem() --> last item of that dictName
# formData.popitem()
# print(formData)
# formData.pop("age")
# print(formData)
# formData2 = {"lname": "khan" , "myage" :27 , "mob": 9876543210 , "mail": "khan@gmail.com"}
# # <dictName1>.update(<dictName2>)
# formData.update(formData2)
# print(formData)
# print(formData2)
# formData.update({"add" : "hyd" , "role" : "PyDev"})
# print(formData)
# dict --> list of Keys and list of values
# 2 lists --> 1 dict
# l1 = [1,2,3,4,5]
# l2 = ["a" , "b" , "c" , "d" , "e"]
# chrDict = {}
# # o/p ---> {1:"a" , 2:"b" , 3:"c" , 4:"d" , 5:"e" }
# for i in range(0,len(l1)):
# chrDict[l1[i]] = l2[i]
# print(chrDict)
# zip() ---> lists => dict
# zip() --> XX dict XX ==> zip object --> type casting -->dict
# <zipObject> = zip(<listname1> , <listname2>)
# <dictName> = dict(<zipObject>)
# zobj = zip(l1,l2)
# print(zobj) # <zip object at 0x1103bc848>
# chrdict = dict(zobj)
# print(chrdict)
# d = dict(zip(l1,l2))
# print(d)
# [1,2,3,4,5,6] i/p
# {1:1 , 2:4 , 3:9 ...} o/p
# sq = {}
# l1 = [1,2,3,4,5,6]
# for i in l1:
# sq[i] = i**2
# print(sq)
# dictionary comprehensions
# -------------------------
# <dictName> = { <var1>:<var2> <loop with dependency> }
# sq = { x:x**2 for x in range(1,6)}
# print(sq)
# sq = { x:x**2 for x in range(1,30) if x%3 == 0 }
# print(sq)
# Sets and frozen sets
# --------------------
# -> Collection
# -> 2.5 + , 3.X
# -> storage elements
# -> unique elements
# -> math sets
# -> XXX indexed , sliced XXX
# -> iterated
# -> mutable
# -> arbitary
# -> <class set>
# syntax
# ------
# numsSet = {1,2,3,4,5,6, 1,2 ,6 }
# numsSet = set({1,2,3,4,5,60})
# print(numsSet)
# print(type(numsSet))
# empSet = set({})
# print(type(empSet))
# print(numsSet[0])
# for i in numsSet:
# print(i)
# numsSet.add(100)
# print(numsSet)
# numsSet.remove(100)
# print(numsSet)
# numsSet.remove(1000) # KeyError: 1000
# print(numsSet)
# numsSet.discard(100)
# print(numsSet)
# numsSet.discard(1000) # XX no error XX
# print(numsSet)
# nuumsSet = { 1 ,3 }
# print(numsSet + nuumsSet)
# .union
# .intersection
# .difference
# .issubset
# .issuperset
# print(numsSet)
# print(nuumsSet)
# print(numsSet.union(nuumsSet)) # all elements
# print(numsSet.intersection(nuumsSet)) # common elements
# print(numsSet.difference(nuumsSet)) # only numsSet
# print(nuumsSet.difference(numsSet)) # only nuumsSet
# print(numsSet.issubset(nuumsSet))
# print(numsSet.issuperset(nuumsSet))
# chrTuple = ("a" ,"man " , "is " ," learning")
# empstr = "-".join(chrTuple)
# print(empstr)
# print(type(empstr))
# Frozen sets --> ????
# sets --> immutable
# sets --> lists
# Frozensets --> tuples
# <class frozenset>
# syntax
# ------
# fsNum = frozenset((1,2,3,4,5))
# print(fsNum)
# print(type(fsNum))
# Functions
# ---------
# - block of a code
# - only one task
# - components
# - definition (mandatory)
# - implementation (mandatory)
# - call (optional)
# - first class objects
# - nested functions also possible
# - pre defined functions (python)
# - user definition functions (user)
# parameters return type
# 0 0 sort()
# 1 1 index(<item>)
# 1 0 remove()
# 0 1 pop()
# syntax
# ------
# def <functionName>(): ---> Function definition
# <implementation> ---> Function implementation
# <functionName>() ---> Function call 1
# <functionName>() ---> Function call 2
# <functionName>() ---> Function call 3
# <functionName>() ---> Function call 4
# def sayHello(): # definition
# print("say hello") # implementation
# sayHello() # call 1
# sayHello() # call 2
# not recommended
# def sayHello(): # definition
# print("say hai ") # implementation
# sayHello()
# Return type
# ------------
# - output of a Function
# - one output
# - one return statement
# - var , value , Function , Function call , object
# syntax
# ------
# return <component>
# - if no return --> None
# - if return --> component
# - <component> --> save --> function call
# return a value
# def sayHello(): # definition
# print("say hello")
# return " say hello "
# a = sayHello()
# print(a)
# return a variable
# num = 100
# def sayHai(): # definition
# print("say hai ")
# return num
# a = sayHai()
# print(a)
# return of function and function call
# def returner():
# print("returning a function name")
# return addNums
# print(type(returner)) # function
# print(type(returner())) # function
# a = returner()
# print(a(100,200))
# print(type(a)) # function
# print(type(a(100,200))) # int
# def returnerTwo(a , b):
# print("returning call ")
# return addNums(a , b)
# print(returnerTwo(20,30)) # 50 from addNums
# empty function --> pass
# --------------
# def empfunc():
# pass
# b = empfunc()
# print(b)
# multiple return type
# --------------------
# def saybye(): # definition
# print("say bye ")
# return "bye"
# return "hai"
# a = saybye()
# print(a)
# parameters
# ----------
# - inputs to function
# - arguements
# - any type
# - two kinds
# - formal parameters
# - actual parameters
# - formal parameters ==> definition , variables
# - actual parameters ==> function call , values
# - 4 types
# - positional
# - default
# - variable
# - keyworded
# def hello(name): # name --> formal parameter
# print("Hai " + name)
# return name
# a = hello("khan") # "khan" --> actual parameter
# print(a)
# a = 10 # global variable
# def incnum(num):
# global a # bring global variable into the function
# # a = 100 # incnum variable (function variable)
# print("your parameter " , num) # formal parameter
# print("increment by " , a)
# print("after increment ")
# print(num + a)
# return num+a # variable + formal parameter
# incnum(20)
# print(a)
# multiple parameters
# def addnum(a,b): # a=10 b = 20
# ''' it calculates sum of two nums'''
# ''' dummy'''
# ans = a+b # ans = 30
# print("sum is " , ans) # ans = 30
# return ans # 30 -->XX ans = 30 XX , addnum(10 , 20) = 30
# print(addnum.__doc__)
# a = addnum(10 , 20)
# print(a)
# print(addnum(30,40))
# sumnum(20,30) # 50
# diff(<50> , 10) # 40
# mul(<40> , 10) # 400
# div(<400> , 20) # 20
# enter your list length
# 4
# enter num 10
# enter num 20
# enter num 30
# enter num 40
# enter your choice
# 1. mean
# 2. median
# 3. both
# 1 --> mean = 25
# 2 --> median = 25
# 3 --> mean = 25 , median = 25
# length = 4
# numberlist = [10,20,30,40]
# def calc(listNums):
# print("1. Mean")
# print("2. Median")
# print("3. Both")
# choice = int(input("Enter your choice"))
# if(type(choice) is int):
# if(choice == 1):
# mean(listNums)
# elif(choice == 2):
# median(listNums)
# elif(choice == 3):
# mean(listNums)
# median(listNums)
# else:
# print("enter 1 or 2 or 3 ")
# else:
# print("Enter only numbers")
# def mean(nums):
# sumOfList = 0
# for i in nums:
# sumOfList += i
# ans = sumOfList // len(nums)
# print("Mean is " , ans)
# def median(nums):
# if(len(nums) % 2 != 0):
# ans = nums[len(nums)//2]
# print("Median " , ans)
# else:
# ans = (nums[len(nums)//2] + nums[(len(nums)//2)+1]) //2
# print("Median " ,ans)
# calc(numberlist)
# docstring
# ---------
# - decription of a function
# - string ''' '''
# - __doc__
# - just after definition
# print(range.__doc__)
# [_ , _ , _]
# [_ , _ , _]
# [_ , _ , _]
# enter player 1 X "khan"
# enter player 2 O "prabal"
# khan enter the position (1-9) 4
# [_ , _ , _]
# [X , _ , _]
# [_ , _ , _]
# prabal enter the position (1-9) 5
# [_ , _ , _]
# [X , O , _]
# [_ , _ , _]
# default parameters
# ------------------
# cake - flav , wei , shape
# using positional parameters
# ---------------------------
# def makeCake(flav , wei , shape): # fixed values to formal parameters
# print("you have oredered " +flav +" flavoured cake of "+wei +"kgs and "+shape +" shape")
# # 1 - chco , 2 , square
# # 2 - 3 , vannila , round
# # 3 - pine , 5
# # 4 - almond , rect
# # 5 - cake
# makeCake("chco" , "2" , "square")
# makeCake(wei = "3" , flav = "vannila" ,shape = "round") # formal parameters in function call
# makeCake("pine" , 5)
# makeCake("almond" , , "rect")
# makeCake()
# using default parameters
# ------------------------
# def makeCake(flav = "vannila" , wei ="1", shape = "round"): # fixed values to formal parameters
# print("you have oredered " +flav +" flavoured cake of "+wei +"kgs and "+shape +" shape")
# # # 1 - chco , 2 , square
# # # 2 - 3 , vannila , round
# # # 3 - pine , 5
# # # 4 - almond , rect
# # # 5 - cake
# makeCake("chco" , "2" , "square")
# makeCake(wei = "3" , flav = "vannila" ,shape = "round") # formal parameters in function call
# makeCake("pine" , "5")
# makeCake(flav = "almond" , shape = "rect")
# makeCake()
# makeList --> length , collection
# 4 , list --> [ ]
# 5 , dictionary --> { }
# 3 , tuple --> ( )
# 6 --> [ ]
# variable parameters
# -------------------
# def addNum(a,b):
# ans = a+b
# print(ans)
# addNum(10,20)
# --> 0 - n
# --> tuple --> *args , *vars , *abcd -> formal parameters
# --> single , multiple arguements ---> actual parameters
# def addNum(*args):
# print(args)
# print(type(args))
# count = 0
# for i in args:
# count += i
# print(count)
# # addNum()
# # addNum(10)
# # addNum(10,20)
# addNum(10,20,30)
# positional arguements
# avg --> a,b,*args
# avg() --> XX
# avg(10) --> XX
# avg(10,20) --> a = 10 , b = 20 , args =
# avg(10,20,30) --> a = 10 , b = 20 , args = (30)
# def avgNums(a,b,*nums):
# count = a + b
# print("a is ",a)
# print("b is ",b)
# print("nums is ",nums)
# for i in nums:
# count += i
# ans = count / (len(nums) + 2)
# print(ans)
# avgNums()
# avgNums(10)
# avgNums(10,20)
# avgNums(10,20,47,50,39,100)
# Keyworded parameters
# --------------------
# - Dictionary args
# - **kwargs , **kwvars , **abcds
# def makeCake(flav = "vannila" , wei ="1", **kwargs): # fixed values to formal parameters
# print(kwargs)
# print("you have ordered " +flav +" flavoured cake of "+wei +"kgs and ")
# makeCake(wei = "3" , flav = "vannila" ,shape ="round",toppings= "almonds")
# positional --> unlimited --> variable --> *args
# default --> unlimited --> keyworded --> **kwargs
# inputs
# ------
# - keyboard --> input()
# - console --> cmd
# - list of input values from cmd
# c:/>python myFile.py 4
# 10
# c:/>python myFile.py 5
# 15
# c:/>python myFile.py "khan"
# a
# list--> cmd args --> argv
# argv[0] --> fileName --> myFile.py
# argv[n] --> arguements followed -->"khan"
# import sys
# print(sys.argv)
# import sys
# print(sys.argv[1])
# Lambda functions
# ----------------
# - definition
# - implementation
# - call
# Lambda functions - single line implementation
# anonymous functions / Lambda operator
# auto returned
# syntax
# ------
# <functionName> = lambda <parameters> : <implementation>
# def addNums(a,b):
# ans = a+b
# return ans # return 10
# addNums(10,20)
# print(type(addNums))
# addNums = lambda a,b : a+b
# print(addNums(10,20))
# print(addNums(11,21))
# sq = lambda a : a**2
# print(sq(10))
# print(sq(110))
# map filter reduce
# -----------------
l1 = [2,4,5,8,2]
l2 = [4,9,7,54,1]
# l3 = [6 .......,3]
# l3 = []
# for i in range(0,len(l1)): # i --> 0 1 2 3 4 --> indices
# l3.append(l1[i] + l2[i]) # l1[i] l1[0 1 2 3 4 ] --> element
# print(l3)
l4 = [1,2,4,4,7,8,6,5,43,2,2,1]
# even = []
# for i in l4:
# if i % 2 == 0 :
# even.append(i)
# print(even)
# map
# ----
# - function
# - associative operation function
# - function and collections
# - return map object
# syntax
# ------
# <mapObject> = map(<function> , <collections>)
# mo = map(lambda a,b : a+b , l1 , l2 )
# print(mo)
# l4 = list(mo)
# print(l4)
# print(list(map(lambda a,b : a+b , l1 , l2 )))
# mo = map(lambda a : a**2 , l1 )
# print(mo)
# l4 = list(mo)
# print(l4)
# print(list.__doc__)
# filter
# ------
# - function
# - associative checking function
# - function and collections
# - return filter object
# syntax
# ------
# <filterObject> = filter(<function> , <collections>)
# fo = filter(lambda a:a%2 != 0 , l4)
# print(fo)
# ans = list(fo)
# print(ans)
# print(list(filter(lambda a:a%2 != 0 , l4)))
# print(tuple(filter(lambda a:a%2 != 0 , l4)))
# reduce
# ------
# - cumulative operation
# - return value
# - function collections
# - not pre defined --> import from functools
# syntax
# ------
# from functools import reduce
# <answer> = reduce(<function> , <collection>)
# from functools import reduce
# res = reduce(lambda a,b : a+b , l4 )
# count = 0 = b
# a+b --> count += a
# print(res)
# res = reduce(lambda a,b : a*b , l4 )
# print(res)
# Modules and packages
# --------------------
# module --> python file (.py , .ipynb, .....)
# pre built already imported --> print()
# pre built explicitly imported --> reduce() , sys.argv
# external and explicitly imported
# folder --> collection of Modules
# package --> folder + initialisation module
# initialisation module --> __init__.py
# folder --> + __init__.py --> package
# __init__.py --> empty file
# importing
# ---------
# import <moduleName>
# - import all the components from <moduleName>
# - use --> <moduleName>.<component>
# - first import creates _pycache_ folder
# import check
# print(check.l)
# print(check.numprint())
# from <moduleName> import <component>
# - import only the component from moduleName
# - use --> <component>
# from check import l
# print(l)
# print(check.a) # error
# from <moduleName> import *
# - import all components except some specials
# - use --> <component>
# from check import *
# print(l)
# print(a)
# alias --> nick name to module
# -----------------------------
# import classEighteenOnline as ei
# print(ei.a)
# import check as ch
# print(ch.l)
# from <packageName> import <moduleName>
# import <packageName>.<moduleName>
# from <packageName>.<moduleName> import <component>
# import <packageName>.<moduleName>.<component>
# External Modules/packages
# -------------------------
# pip --> python packaging index 10.x
# pip --> installed
# - win --> auto installed with python
# - if not installed --> get-pip.py --> run in cmd
# cmd --> c:/>pip
# commands in pip
# pip install <extPackage> # internet
# pip uninstall <extPackage> # internet
# pip list
# pip freeze
# location of download --> <python/install/path>/site-packages
# pip install numpy
# pip uninstall numpy
# Classes and Objects
# -------------------
# class --> collection of entities
# entities --> variables and methods
# variable --> where we do ?
# method --> what we do ?
# three components
# ----------------
# - definition (mandatory)
# - implementation (mandatory)
# - object creation
# syntax
# ------
# class <className>: --> definition of a class
# <implementation1>
# <implementation2> --> implementation of a class
# <implementation3>
# object --> instance of class
# class --> function
# object --> call
# Function --> 100 lines --> output
# Function --> 100 lines --> call --> output
# class --> 1000 lines --> output
# class --> 1000 lines --> object --> output
# single class --> multiple objects
# syntax
# ------
# <objectName> = <className>()
# --> entities
# - class reference (.) --> <className>.<entity>
# - object reference (.) --> <objectName>.<entity>
# two kinds
# ---------
# - class attributes (variables and methods)
# - instance attributes (variables and methods)
# self --> stores instance or objectName which calls the method
class Employee: # class definition
name = "Digital Lync" # class variables
address = "Hyderabad" # class variables
def named(self,name): # method with self # instance method
print("hello " + name)
print(self)
emp1 = Employee() # object creation
# emp2 = Employee() # object creation
# print(emp1.name) # object reference to name
# print(emp1.address) # object reference to address
# emp2.name = "Lync"
# print(emp2.name) # object reference to name
# # print(emp2.address) # object reference to address
# Employee.name = "DL"
# print(Employee.name) # class reference to name
# print(Employee.address) # class reference to address
# print(emp1.name) # object reference to name
# print(emp1.address) # object reference to address
# print(emp2.name) # object reference to name
# print(emp2.address) # object reference to address
# emp1.named("Khan") # object reference for a method
# emp2.named("Ravi") # object reference for a method
# Employee.named("Hari") # error for class reference
# three types of methods
# ----------------------
# - instance methods
# - self --> object name
# - object reference
# - XX class reference XX
# - changes -- instance , object
# - class methods
# - cls --> class name
# - class , object references
# - changes --> instance and class
# - decorator --> @classmethod
# - static methods
# - logical implementation
# - XX parameter
# - object reference
# - XXX changes --> class or object
# - decorator --> @staticmethod
# constructor (__init__())
# -----------
# - initialisation of a class
# - default constructor
# - parameterize constructor
# object create --> constructor --> class --> output
# <objectName> --> <__init__> --> <className> --> output
# syntax
# ------
# def __init__(self):
# # <implementation>
# class Organisation: # class definition
# org_name = "Digital Lync" # class variables
# address = "Hyderabad" # class variables
# def __init__(self,name,role):
# self.name = name
# self.role = role
# print("hello " + name + " has been assigned " + role +" role")
# def details(self): # method with self # instance method
# print("Your name : " + self.name + "\n your role: " + self.role +" role")
# @classmethod
# def changeOrgName(cls , newname):
# cls.newname = newname
# cls.org_name = cls.newname
# print("Organisation name has been changed to "+cls.newname)
# @staticmethod
# def calc(amount , perc):
# ans = ((amount * perc) //100) + amount
# print(ans)
# e1 = Organisation("khan" , "pd") # constructor is called name and role
# e2 = Organisation("ravi" , "Dr")
# print(e1.org_name)
# print(e2.org_name)
# e1.details()
# e2.details()
# Organisation.changeOrgName("lync")
# print(e1.org_name)
# print(e2.org_name)
# print(Organisation.org_name)
# e1.calc(1876567 , 22)
# Modules
# -------
# random , math , cmath , os
# random
# ------
# pseudo random numbers
# - numbers
# - collection
# Functions
# ---------
# Numbers
# - random()
# - randrange()
# - randint()
# Collection
# - choice()
# - sample()
# - shuffle()
import random
# random() --> float value 0<num<1
# print(random.random())
# randint(<start> , <end>) --> random integer b/w start and end inclusive
# print(random.randint(2,40))
# randrange(<start> , <end>) --> random integer b/w start and end inclusive
# print(random.randrange(2,40))
# l1 = [x for x in range(2,23)]
# print(l1)
# random.shuffle(<list>) --> all elements of a collection gets shuffled
# random.shuffle(l1)
# print(l1)
#random.choice(<list>) --> one random element from a collection
# print(random.choice(l1))
#random.sample(<list> , k=<population>) --> multiple elements from a collection
# print(random.sample(l1 , k = 2))
# c:/>python randChr.py 5
# jdskb
# c:/>python randChr.py 9 4
# ytgfvgkfi
# 8673
# import sys
# print(sys.argv)
# count = 0
# for i in range(0, int(sys.argv[1])+1):
# count += i
# print(count)
# numbers --> ascii --> chars
import sys , random
# char = ""
# for i in range(0 , int(sys.argv[1])):
# char = char + chr(random.randrange(97,122))
# print(char)
# char = ""
# num = ""
# for i in range(0 , int(sys.argv[1])):
# char = char + chr(random.randrange(97,122))
# print(char)
# for i in range(0 , int(sys.argv[2])):
# num = num + str(random.randrange(0,9))
# print(num)
# c:/>python p wdmake.py "khan"
# dfgg9821hgvj6536
# c:/>python pwdmake.py "Ravi"
# jhdf7236bfjb3390
# c:/>python pwdmake.py "khan"
# user already exists
# pwds.txt
# khan -- dfgg9821hgvj6536
# Ravi -- jhdf7236bfjb3390
# a = 10
# b = 20
# ans = a+b
# ans2 = a+ans
# with open("demo.txt" , "w") as myf:
# myf.write(str(ans)+"\n")
# myf.write(str(ans2))
# # myf.close()
# with open("demo.txt" , "r") as myfr:
# listOflines = myfr.readlines()
# print(listOflines)
# res = int(listOflines[0][:-1]) + int(listOflines[1])
# print(res) | [
"mkhan@digitallynctech.om"
] | mkhan@digitallynctech.om |
444d45bf3c5ac155b55dfd08b8250911a948e0c8 | a550aece79bda789826b463280b91abffbf2d372 | /books/python-3-oop-packt/Chapter7/7_28_callable_repeat.py | 09746de64e095feb18df107627ebdb96c1fe1546 | [
"MIT"
] | permissive | phiratio/learn_python | 20376470eaa292c157fd01f52b3077e3a983cd5a | a32240d4355fb331805d515f96e1d009914e5c47 | refs/heads/master | 2022-11-27T07:07:45.712373 | 2020-12-03T22:04:31 | 2020-12-03T22:04:31 | 189,397,679 | 1 | 0 | MIT | 2022-11-22T04:40:27 | 2019-05-30T10:56:10 | Python | UTF-8 | Python | false | false | 469 | py | from timer import Timer
import datetime
def format_time(message, *args):
now = datetime.datetime.now().strftime("%I:%M:%S")
print(message.format(*args, now=now))
class Repeater:
def __init__(self):
self.count = 0
def __call__(self, timer):
format_time("{now}: repeat {0}", self.count)
self.count += 1
timer.call_after(5, self)
timer = Timer()
timer.call_after(5, Repeater())
format_time("{now}: Starting")
timer.run()
| [
"phiratio161@gmail.com"
] | phiratio161@gmail.com |
77894ac6f1047875d92cf14c4eff714083db0443 | 9702347e5bf0529156ba36a7152ad79b51edfc00 | /supercalculadora/ut_expr_aritmetica.py | 541750c8958a207afb620d29e1a2b2b261b571f9 | [] | no_license | jmarrieta98/Entorno-de-Desarrollo | 4d925239797c0c2bb1cf098dad1b736761902fb5 | 8d4600946e757b540673150355d665ffdb98706f | refs/heads/master | 2022-12-27T11:02:25.539070 | 2020-06-08T17:55:09 | 2020-06-08T17:55:09 | 270,764,122 | 0 | 0 | null | 2020-10-13T22:39:19 | 2020-06-08T17:38:12 | HTML | UTF-8 | Python | false | false | 915 | py | import unittest
import expr_aritmetica
class TestsExprAritmetica(unittest.TestCase):
def setUp(self):
self.expresion = expr_aritmetica.ExprAritmetica()
def tearDown(self):
pass
def test_extraer_operandos_y_operadores_en_2_mas_2(self):
self.assertEqual({'Operandos': [2, 2],
'Operadores': ['+']},
self.expresion.parse("2 + 2"))
def test_extraer_operandos_y_operadores_en_10_entre_menos_5(self):
self.assertEqual({'Operandos': [10, -5],
'Operadores': ['/']},
self.expresion.parse("10 / -5"))
def test_extraer_operandos_y_operadores_en_expr_sin_ptsis(self):
self.assertEqual({'Operandos': [5, 4, 2, 2],
'Operadores': ['+', '*', '/']},
self.expresion.parse("5 + 4 * 2 / 2"))
| [
"noreply@github.com"
] | jmarrieta98.noreply@github.com |
f32738a9240d9dfd4604baa3a07a0d6baa15d499 | 95501652d6ea255f02675cd91a99b0f0c4982d88 | /iconcept/messages/device_unit.py | e829f9f1cd612ae909f7bdd4f2f5ed21191eb80e | [] | no_license | blinding-answers/iconcept | 1408747c1da6b435c24e459c80a600bb51b7b3fb | 87ad3426674f95e7cfcdbaf6a5c092a6cb264234 | refs/heads/master | 2021-07-25T06:16:18.890789 | 2021-06-09T12:17:54 | 2021-06-09T12:17:54 | 253,416,797 | 0 | 0 | null | 2021-06-08T03:59:36 | 2020-04-06T06:45:20 | Python | UTF-8 | Python | false | false | 1,020 | py | from iconcept.message_extractor import extract_datagram
from iconcept.messages.abstract_datagram import AbstractDatagram
class DeviceUnit(AbstractDatagram):
message: str = None
def ingest_data(self, data: str) -> None:
self.message = extract_datagram(data, self.get_header_pattern(), self.get_total_length())
def get_header_pattern(self) -> str:
return '550C01'
def get_header_length(self) -> int:
return 6
def get_message_length(self) -> int:
return 2
def is_valid(self) -> bool:
return self.message is not None
def get_unit(self) -> int:
if not self.is_valid():
return 0
unit_hex = self.message[6: 6 + 2]
return int(unit_hex, 16)
def get_unit_description(self) -> str:
units = [
'Metric',
'Imperial'
]
return units[self.get_unit()]
def __str__(self):
return f"{self.__class__.__name__}: {self.message}: unit={self.get_unit_description()}"
| [
"zodimo@gmail.com"
] | zodimo@gmail.com |
dc82c11920bf67130d710a41245f727668e33a0d | 44f10da06c0e72f5e02bd44531a48974491bf751 | /test_interact.py | d569effa813a73e43d5e18085dafb0b0bbf335f8 | [
"ISC"
] | permissive | mi-skam/bkapi | e7f150da3f749f22a34394586b47ac5a91ea7a96 | 8221e8972aa4b98520526269983861a3cebde33c | refs/heads/master | 2023-05-11T17:25:30.582361 | 2020-11-29T12:13:15 | 2020-11-29T12:13:15 | 273,483,161 | 0 | 0 | ISC | 2021-06-02T02:20:46 | 2020-06-19T12:00:21 | Nix | UTF-8 | Python | false | false | 659 | py | import sys
from interact import BKAPI
def compare(f, e, i=None):
if i == None:
o = f()
i = "-"
else:
o = f(i)
if not o == e:
raise ValueError(f"{f.__name__}({i}) -> {o} != {e}")
# tests creation of BKAPI object
sys.argv.append("vserver_info")
sys.argv.append("1234")
bk = BKAPI(config_file="./EXAMPLE.interact.yaml")
# tests _remove_brackets
compare(bk._remove_brackets, 'p', i='<p>')
# tests _load_config
expected = {'username': 'BKAPI-12345-abcdefgh123', 'password': 'mypass123'}
compare(bk._load_config, expected)
# tests _select
expected = ('vserver_info', "vid: '1234'\n")
compare(bk._select, expected)
| [
"plumps.codes@codeberg.org"
] | plumps.codes@codeberg.org |
d50604ea062e7b35e336fb49358b0b8b1bea3bfe | b8e1f63c431ac693ffbea8b6135c15b9bb5f975f | /training/test_dataset_fashion_mnist/generate_metadata.py | b02de2d7153208b76f76ec726574958da8a099f4 | [] | no_license | websterbei/SlakingML | d235544b9fb7fb46a13571186e3995602b2440f8 | 283e32d3d5bc9620645dcf450edb1cd2ae88d561 | refs/heads/master | 2022-09-02T14:09:04.271428 | 2020-05-24T03:13:06 | 2020-05-24T03:13:06 | 236,241,239 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 489 | py | # @Author: Webster Bei Yijie, Joey Junyu Liang
# @Date: 5/8/2020, 3:12:33 PM
# @Email: yijie.bei@duke.edu, junyu.liang@duke.edu
import json
metadata = {}
metadata["column_names"] = ["label"] + ["pixel{}".format(x) for x in range(1, 785)]
metadata["column_types"] = ["integer"] + ["float"] * 784
metadata["author_name"] = "Somebody"
metadata["dataset_name"] = "Fashion Mnist"
metadata["resource_files"] = ["fashion_mnist.csv"]
with open("metadata", "w") as f:
json.dump(metadata, f) | [
"yijie.bei@duke.edu"
] | yijie.bei@duke.edu |
088099fe03d5e3dee4df77f61ea5cb2aa08d45d5 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /M8hDPzNZdie8aBMcb_11.py | f8aa226b0f1d8e00377c89c45f1f158d226792c3 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 737 | py | """
Implement a function count_substring that counts the number of substrings that
begin with character "A" and ends with character "X".
For example, given the input string `"CAXAAYXZA"`, there are four substrings
that begin with "A" and ends with "X", namely: "AX", "AXAAYX", "AAYX", and
"AYX".
### Examples
count_substring("CAXAAYXZA") ➞ 4
count_substring("AAXOXXA") ➞ 6
count_substring("AXAXAXAXAX") ➞ 15
### Notes
* You should aim to avoid using nested loops to complete the task.
* You can assume that the input string is composed of English upper case letters only.
"""
def count_substring(txt: str) -> int:
return sum(txt[i:].count('X') for i,v in enumerate(txt[:-1]) if v == 'A')
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
3f4b9ff2d5b4fceb61cfaae5f55364c669c7d0f7 | 6c28e9315ef84e8e9f8685f493e759e5040b3ce0 | /MAC_Pro/MAC_Pro/wsgi.py | bb5a21444633dcd2e3862391b46012144ceec845 | [] | no_license | kumarshankar0398/MAC | 6db6252563c0dbc8b5480529910e9fc84c6047a4 | 22df053501627a058019f4e8d3a30766d4b24630 | refs/heads/master | 2023-04-04T19:54:54.254044 | 2021-03-23T08:31:10 | 2021-03-23T08:31:10 | 350,618,550 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | """
WSGI config for MAC_Pro project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'MAC_Pro.settings')
application = get_wsgi_application()
| [
"kumarshankar0398@gmail.com"
] | kumarshankar0398@gmail.com |
eb5d5a9c2b2764decf330b9cd8ad92779c370fd3 | c132b9ec81b934167b2390a67499104aaafbcc98 | /Code/Week2.py | 8490812ec085061144781aa5d363f9afdcc9323f | [
"MIT"
] | permissive | JSeam2/Schrodinger-Assignment | 747379164bbdfa6fd8312427a9db4be34b674040 | 8860dcc537b491d5bb3e3cb396f977087a0ab161 | refs/heads/master | 2021-01-18T18:38:07.842803 | 2017-03-22T08:18:44 | 2017-03-22T08:18:44 | 80,585,942 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,203 | py | from __future__ import print_function
import numpy as np
import unittest
import scipy.constants as c
"""
Testing Framework with unittest
"""
class codeTester(unittest.TestCase):
    """Unit tests for the conversion helpers defined below in this module."""
    def test_qn1(self):
        # Hydrogen energy levels (eV, 5 d.p.) for principal quantum numbers 1-3.
        self.assertEqual(energy_n(1), -13.60569)
        self.assertEqual(energy_n(2), -3.40142)
        self.assertEqual(energy_n(3), -1.51174)
    def test_qn2(self):
        # Degree <-> radian conversions, rounded to 5 decimal places.
        self.assertEqual(degToRad(90), 1.5708)
        self.assertEqual(degToRad(180), 3.14159)
        self.assertEqual(degToRad(270), 4.71239)
        self.assertEqual(radToDeg(3.14), 179.90875)
        self.assertEqual(radToDeg(3.14/2.0), 89.95437)
        self.assertEqual(radToDeg(3.14*3/4), 134.93156)
    def test_qn3(self):
        # Spherical <-> Cartesian conversions; note cartesianToSpherical
        # returns (r, polar, azimuth) in the order asserted here.
        self.assertEqual(sphericalToCartesian(3, 0, np.pi), (-0.0, 0.0, 3.0))
        self.assertEqual(sphericalToCartesian(3, np.pi/2.0, np.pi/2.0), (0.0, 3.0, 0.0))
        self.assertEqual(sphericalToCartesian(3, np.pi, 0), (0.0, 0.0, -3.0))
        self.assertEqual(cartesianToSpherical(3, 0, 0), (3.0, 1.5708, 0.0))
        self.assertEqual(cartesianToSpherical(0, 3, 0), (3.0, 1.5708, 1.5708))
        self.assertEqual(cartesianToSpherical(0, 0, 3), (3.0, 0.0, 0.0))
        self.assertEqual(cartesianToSpherical(0, -3, 0), (3.0, 1.5708, -1.5708))
# Question 1
def energy_n(n):
    """Return the hydrogen energy level (eV) for principal quantum number *n*.

    :param n: principal quantum number (must be an int; bools are rejected)
    :return: energy in eV as a float, rounded to 5 decimal places
    """
    assert type(n) == int
    ground_state_ev = -13.60569  # hydrogen ground-state energy in eV
    return round(ground_state_ev / (n * n), 5)
# Question 2
def degToRad(deg):
    """Convert an angle from degrees to radians, rounded to 5 decimal places.

    :param deg: angle in degrees (int or float)
    :return: angle in radians (float, 5 d.p.)
    """
    if type(deg) == int:
        deg = float(deg)
    assert type(deg) == float
    pi_literal = 3.14159265359  # same pi literal used throughout this module
    return round(deg * pi_literal / 180, 5)
def radToDeg(rad):
    """Convert an angle from radians to degrees, rounded to 5 decimal places.

    :param rad: angle in radians (int or float)
    :return: angle in degrees (float, 5 d.p.)
    """
    if type(rad) == int:
        rad = float(rad)
    assert type(rad) == float
    pi_literal = 3.14159265359  # same pi literal used throughout this module
    return round(rad * 180 / pi_literal, 5)
# Question 3
def sphericalToCartesian(r, theta, phi):
    """Convert spherical coordinates to Cartesian coordinates.

    :param r: radius
    :param theta: polar angle measured from the +z axis
    :param phi: azimuthal angle in the x-y plane
    :return: (x, y, z) tuple, each rounded to 5 decimal places
    """
    radius = float(r)
    sin_theta = np.sin(theta)
    x = radius * sin_theta * np.cos(phi)
    y = radius * sin_theta * np.sin(phi)
    z = radius * np.cos(theta)
    return (round(x, 5), round(y, 5), round(z, 5))
def cartesianToSpherical(x, y, z):
    """Convert Cartesian coordinates to spherical coordinates.

    :return: (r, polar angle from +z, azimuth) rounded to 5 decimal places.
        NOTE(review): the original's locals named theta/phi the other way
        round; the returned ORDER matches the unit tests above and is
        preserved exactly.
    """
    horizontal = np.sqrt(x ** 2 + y ** 2)
    r = round(np.sqrt(x ** 2 + y ** 2 + z ** 2), 5)
    polar = round(np.arctan2(horizontal, z), 5)
    azimuth = round(np.arctan2(y, x), 5)
    return (r, polar, azimuth)
# Run the testing framework to verify the code
if __name__ == "__main__":
    #unittest.main()
    # Build and run the suite explicitly; verbosity=2 prints one line per test.
    suite = unittest.TestLoader().loadTestsFromTestCase(codeTester)
    unittest.TextTestRunner(verbosity=2).run(suite)
| [
"joshiaseam@gmail.com"
] | joshiaseam@gmail.com |
9942778035a9277ce4999950a72d25089133331b | aab7bb79b8c40be59c240fb39c8cd9dbfd25c378 | /CreateConsumer/CreateBasic.py | ee8751d71d982c4866866ae171a7903f9c152fbb | [] | no_license | judahschvimer/senior-thesis | d19f967fce9088c4202b90a459019d2f51e5c6cc | 726ed91c36ecad302b44eb67348d95ca592f1bdb | refs/heads/master | 2020-03-29T11:10:44.880294 | 2015-05-05T16:51:35 | 2015-05-05T16:51:35 | 25,487,888 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,779 | py | import sys
def print_transition(f, action, start_state, end_state, probability):
    """Write one .POMDP transition line: "T : action : s : s' prob"."""
    line = 'T : {0} : {1} : {2} {3}\n'.format(action, start_state, end_state, probability)
    f.write(line)
def print_observation(f, action, end_state, observation, probability):
    """Write one .POMDP observation line: "O : action : s' : obs prob"."""
    line = 'O : {0} : {1} : {2} {3}\n'.format(action, end_state, observation, probability)
    f.write(line)
def print_reward(f, action, start_state, end_state, observation, probability):
    """Write one .POMDP reward line: "R : action : s : s' : obs value"."""
    line = 'R : {0} : {1} : {2} : {3} {4}\n'.format(action, start_state, end_state, observation, probability)
    f.write(line)
def wtp(p):
    """Return the state name for willingness-to-pay level *p*, e.g. 'wtp-3'."""
    return 'wtp-{0}'.format(p)
def price(p):
    """Return the action name for price level *p*, e.g. 'p-3'."""
    return 'p-{0}'.format(p)
def leave_function(price, wtp, leave_probability):
    """Probability that the consumer leaves after seeing *price*.

    Inputs: a willingness to pay and a price.
    Output: 0 while the price is affordable (price <= wtp), otherwise the
    configured leave probability.
    """
    return 0 if price <= wtp else leave_probability
def write_header(f, discount, values, prices, wtps):
    """Write the .POMDP preamble: discount, values, states, actions,
    observations and the initial belief ('start include')."""
    state_names = ' '.join(wtp(w) for w in wtps)
    action_names = ' '.join(price(p) for p in prices)
    f.write('discount: {0}\n'.format(discount))
    f.write('values: {0}\n'.format(values))
    f.write('states: {0} done\n'.format(state_names))
    f.write('actions: {0}\n'.format(action_names))
    f.write('observations: o-1 o-2\n')
    f.write('\n')
    f.write('start include: {0}\n'.format(state_names))
    f.write('\n')
def write_transitions(f, prices, wtps, leave_probability):
    """Write the T-lines: 'done' is absorbing; a sale (price <= wtp) always
    moves to 'done'; otherwise the consumer stays or leaves probabilistically."""
    print_transition(f, '*', 'done', 'done', 1.0)
    f.write('\n')
    for p in prices:
        for w in wtps:
            if p <= w:
                print_transition(f, price(p), wtp(w), 'done', 1.0)
            else:
                trans_prob = leave_function(p, w, leave_probability)
                print_transition(f, price(p), wtp(w), wtp(w), 1.0 - trans_prob)
                print_transition(f, price(p), wtp(w), 'done', trans_prob)
    f.write('\n')
def write_observations(f, prices, wtps):
    """Write the O-lines: every action/state emits 'o-1' deterministically."""
    print_observation(f, '*', '*', 'o-1', 1.0)
    f.write('\n')
def write_rewards(f, prices, wtps):
    """Write the R-lines: the reward equals the price whenever a sale
    happens (price <= willingness to pay)."""
    for p in prices:
        for w in wtps:
            if p <= w:
                print_reward(f, price(p), wtp(w), 'done', '*', float(p))
    f.write('\n')
def write_pomdp(out_file_name, discount, num_prices, values, leave_probability):
    """Assemble a complete .POMDP file: header, transitions, observations,
    rewards.  Price and willingness-to-pay levels are both 0..num_prices-1."""
    wtps = range(0, num_prices)
    prices = range(0, num_prices)
    with open(out_file_name, 'w') as f:
        write_header(f, discount, values, prices, wtps)
        write_transitions(f, prices, wtps, leave_probability)
        write_observations(f, prices, wtps)
        write_rewards(f, prices, wtps)
def main():
    """Generate SolveConsumer.POMDP with the fixed demo parameters below."""
    out_file_name = 'SolveConsumer.POMDP'
    discount = 0.95
    num_prices = 5
    values = 'reward'
    leave_probability = 0.2
    write_pomdp(out_file_name, discount, num_prices, values, leave_probability)
if __name__ == '__main__':
main() | [
"jaschvimer@gmail.com"
] | jaschvimer@gmail.com |
ade1072b0cc6ce68df43625680267acb40f5accb | 168e28aea5875f250e814569932693c285ac9f09 | /common/event.py | b13d36c1ba0c49babd37c6aae4932bfc968ccac5 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | RobotVU/das | cd3b48ad8c314d56699b8366e4d88c9c70a519ab | 4cdda44c10504b4c3b384ad295640c00d5d8d7f7 | refs/heads/master | 2020-04-06T23:01:38.659050 | 2017-12-14T16:11:46 | 2017-12-14T16:11:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py |
class Event:
    """Lightweight game event: what happened, to which player, and when."""

    def __init__(self, eventType, playerId, timeStamp):
        # Tag each instance with its (sub)class name so events carry a type.
        self.type = type(self).__name__
        self.eventType = eventType
        self.playerId = playerId
        self.timeStamp = timeStamp
| [
"Felipe.Santos.Batista.de.Souza@ing.nl"
] | Felipe.Santos.Batista.de.Souza@ing.nl |
d15e1ebc2023d39d5c922d33689ab367aa095de4 | e538c4963369fee76181df42d4309fd1d377e5d8 | /dend_json_to_df.py | b9c1c513f98232c5a635b1e1157daad301314646 | [
"MIT"
] | permissive | AllenInstitute/celltype_hierarchy | 7fd72a0758a972cfccfa2afdf1f07dc9089a00d5 | 43284d6d373759285d2a0312c5f995baf4ba5c9f | refs/heads/master | 2023-06-08T05:54:27.464040 | 2023-05-28T00:25:52 | 2023-05-28T00:25:52 | 205,469,824 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,811 | py | import json
import pandas as pd
import numpy as np
from flatten_json import flatten
# Load dendrogram that is saved as a .json file
json_file = './dend.json'
with open(json_file, 'r') as f:
s = f.read()
s = s.replace('\t', '')
s = s.replace('\n', '')
s = s.replace(',}', '}')
s = s.replace(',]', ']')
dend = json.loads(s)
flatten_dend = flatten(dend)
label, members, height, color, index, midpoint = [], [], [], [], [], []
org_label, parent, leaf, cex, xpos = [], [], [], [], []
dend_keys = list(flatten_dend.keys())
for i, _ in enumerate(dend_keys):
if i < 1:
index = i
if index < len(dend_keys):
entry = dend_keys[index]
if 'leaf_attribute' in entry:
ind_0 = [i for i, x in enumerate(entry) if x == '0']
tag = entry[:ind_0[-1]+2]
key = tag + '_row'
label.append(flatten_dend[key])
key = tag + 'members'
members.append(flatten_dend[key])
key = tag + 'height'
height.append(flatten_dend[key])
key = tag + 'nodePar.col'
color.append(flatten_dend[key])
midpoint.append('')
key = tag + 'nodePar.cex'
cex.append(flatten_dend[key])
leaf.append(True)
number_ind = label[-1].find('_')
xpos.append(np.float16(label[-1][:number_ind]))
ind_child = [i for i, _ in enumerate(entry[:-8])
if entry[i:i+8] == 'children']
key_parent = entry[:ind_child[-2]+10] + '_node_attributes_0__row'
if key_parent in flatten_dend:
parent.append(flatten_dend[key_parent])
else:
parent.append('')
index += 21
if 'node_attribute' in entry:
ind_0 = [i for i, x in enumerate(entry) if x == '0']
tag = entry[:ind_0[-1]+2]
key = tag + '_row'
label.append(flatten_dend[key])
key = tag + 'members'
members.append(flatten_dend[key])
key = tag + 'height'
height.append(flatten_dend[key])
color.append('')
key = tag + 'midpoint'
midpoint.append(flatten_dend[key])
cex.append('')
leaf.append(False)
xpos.append(0.)
ind_child = [i for i, _ in enumerate(entry[:-8])
if entry[i:i + 8] == 'children']
if len(ind_child) > 0:
if len(ind_child) > 1:
key_parent = entry[:ind_child[-2] + 10] + \
'_node_attributes_0__row'
else:
key_parent = 'node_attributes_0__row'
parent.append(flatten_dend[key_parent])
else:
parent.append('')
index += 15
# find x position for all non leaf nodes
x = np.array(xpos)
for i, l in enumerate(label):
if not leaf[i]:
parent_ind = np.where(np.array(parent) == l)[0]
x[i] = np.mean(x[parent_ind])
# build a dataframe from the flatten dendrogram
dend_df = pd.DataFrame({'x': list(x),
'y': height,
'cex': cex,
'col': color,
'members': members,
'midpoint': midpoint,
'height': height,
'leaf': leaf,
'label': label,
'parent': parent})
# reverse the order nodes in the dataframe
dend_df = dend_df.iloc[::-1].reset_index(drop=True)
# replace empty values with nan
dend_df = dend_df.replace(r'', np.NaN)
# start the dataframe index from 1
dend_df.index += 1
# save the flatten dendrogram as a table in a csv file
dend_df.to_csv('./dend.csv')
| [
"ymmarghi@gmail.com"
] | ymmarghi@gmail.com |
5550b3b0624fdd7fcd13461ed8e211f1d6dd6762 | 6381c6b81c1ed54bb0c3171773f2e74b297227f5 | /python/Codelab_3D_table.py | 27398bf25022de8a7eb7ef2e03cf0bfb8acb0deb | [] | no_license | GMetola/Herramientas | e807dab4e7a8cde67dc7124b9fea12bc0cb8767d | b58d37af345ac003aa79ca04d898c93977f1003c | refs/heads/master | 2020-04-26T05:14:53.747889 | 2019-06-20T21:04:48 | 2019-06-20T21:04:48 | 173,327,951 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,415 | py | import numpy as np
import matplotlib.pyplot as plt
import argparse
# parser = argparse.ArgumentParser()
# parser.add_argument('-rg', '--rutaG', required=True, help='ruta de groundtruth')
# args = vars(parser.parse_args())

mypath = "F:/Portatil/Vision Artificial/Herramientas/RecognitionStats/"

# genfromtxt is used because loadtxt could not handle this data
# (the files contain non-numeric "-" placeholders).
ground = np.genfromtxt(mypath + 'groundtruth.csv',
                       delimiter=",", skip_header=1,
                       dtype="unicode")
detection = np.genfromtxt(mypath + 'detection.csv',
                          delimiter=",", skip_header=1,
                          dtype="unicode")

# before dividing detection by ground we must exclude non-numerical terms
# this lookup finds "-" in column 1 and gives back the row indexes
find_errors = np.where(ground[:, 1] == "-")[0]
print(find_errors.shape)

# Fix: removed three garbled lines (keyboard mash, an invalid attribute
# assignment, and a print of an undefined name) that made this script
# abort with a SyntaxError at this point.

# Substituting the bad rows with zeros is NOT a solution because it skews
# the plot -- change this!  Maybe substitute NaN instead?
# detection[find_errors,:] = 0
# ground[find_errors,:] = 0
"""
ground = ground.astype(np.float32)
detection = detection.astype(np.float32)
relation = detection / ground * 100
koi=[]
for column in np.arange(1,relation.shape[1]):
    for d in range(6):
        k = 50 + 50*d
        koi[d] = np.where(np.asarray(relation[:,column]) < k)[0].size
"""
| [
"metolag@gmail.com"
] | metolag@gmail.com |
17bc25d1845288d25e974f97e7b157de4cb50246 | 505b852085c1f0fe779d88f8fcd54900cf0a5e7a | /lib/carre_connection_sample.py | 9b5c995c93129af7c6df44bda4caa7a8e7689434 | [] | no_license | KMax/fitbeople | dc9059ad2bbcc6fb6b48ae4cc9a3acf5c046efbc | d6af434792684725191c4e3091e37d5f2dd68d25 | refs/heads/master | 2020-04-04T00:19:16.155537 | 2014-09-05T16:54:02 | 2014-09-05T16:54:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 655 | py | # -*- coding: utf-8 -*-
import pyodbc
def _fetchTriples(sql, username, password):
    # Execute *sql* against the "CARRE Virtuoso" ODBC DSN and return the
    # result set as a pandas DataFrame; returns None on ANY failure.
    # NOTE(review): Python 2 syntax ("except Exception, e", "print data"
    # below) -- this module is not Python 3 compatible.
    import pandas.io.sql as psql
    try:
        c = pyodbc.connect("DSN=CARRE Virtuoso;UID=" + username + ";PWD=" + password)
        data = psql.read_sql(sql, con=c)
        c.commit()
        c.close()
    except Exception, e:
        # Swallows every error (connection, SQL, ...) and `e` is unused --
        # consider at least logging it.
        data = None
    return data
# NOTE(review): real credentials are hard-coded in source control here --
# they should be moved to a config file or environment variables.
username = "kolchinmax"
password = "159951ghj"
sql = "SPARQL SELECT ?subject ?predicate ?object FROM <http://carre.kmi.open.ac.uk/users/"+username+"> WHERE { ?subject ?predicate ?object }"
data = _fetchTriples(sql, username, password)
print data
| [
"kolchinmax@gmail.com"
] | kolchinmax@gmail.com |
e44b0c1db50c12e71483d86ca1a7a2d68ba3c320 | a4bd2b028f9327ba82351fefe7f68fd5c6c2ae17 | /python/seafes/indexes/repo_status.py | e91723e814b44291f491904c5df300a1d2c9db63 | [] | no_license | panSdu2/pro-set | 2b8e3d317600c4c1b5461e12ea98c6416d9b27e2 | 4cf350cad435c19cb63333a9641228247f7b606e | refs/heads/master | 2023-06-01T01:33:58.059737 | 2021-06-24T07:46:37 | 2021-06-24T07:46:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,139 | py | # coding: utf8
import logging
from elasticsearch.exceptions import NotFoundError
from elasticsearch.helpers import scan
from .base import SeafileIndexBase
logger = logging.getLogger('seafes')
class RepoStatus(object):
    """Indexing status of one repo: the last indexed commit and, when an
    update is in flight (or died mid-way), the commit being updated to."""

    def __init__(self, repo_id, from_commit, to_commit):
        self.repo_id = repo_id
        self.from_commit = from_commit
        self.to_commit = to_commit

    def need_recovery(self):
        # A non-None target commit means a previous update never finished.
        return self.to_commit is not None
class RepoStatusIndex(SeafileIndexBase):
'''The repo-head index is used to store the status for each repo.
For each repo:
(1) before update: commit = <previously indexed commit>, updatingto = None
(2) during updating: commit = <previously indexed commit>, updatingto = <current latest commit>
(3) after updating: commit = <newly indexed commit>, updatingto = None
When error occured during updating, the status is left in case (2). So the
next time we update that repo, we can recover the failed process again.
The elasticsearch document id for each repo in repo_head index is its repo
id.
'''
INDEX_NAME = 'repo_head'
MAPPING_TYPE = 'repo_commit'
MAPPING = {
'_source': {
'enabled': True
},
'properties': {
'repo': {
'type': 'text',
'index': False
},
'commit': {
'type': 'text',
'index': False
},
'updatingto': {
'type': 'text',
'index': False
}
},
}
def __init__(self, es):
super(RepoStatusIndex, self).__init__(es)
self.create_index_if_missing()
def get_repo_status(self, repo_id):
"""Query status of a repo form ``repo_head`` index, add this repo if
not found.
Arguments:
- `self`:
- `repo_id`:
Returns:
A ``RepoStatus`` instance and a flag indicates whether this repo
is corrupted.
"""
try:
# we use repo_id as the doucment id of repo_head index
doc = self.es.get(index=self.INDEX_NAME, doc_type=self.MAPPING_TYPE, id=repo_id)
doc = doc['_source']
except NotFoundError:
doc = None
commit_id = updatingto = None
if doc is not None:
commit_id = doc.get('commit', None)
updatingto = doc.get('updatingto', None)
return RepoStatus(repo_id, commit_id, updatingto)
# repo not found in the repo_head index
data = {
'commit': None,
'updatingto': None
}
try:
self.es.index(
index=self.INDEX_NAME,
doc_type=self.MAPPING_TYPE,
body=data,
id=repo_id
)
except:
logger.exception('Failed to add repo to index: %s', repo_id)
raise
self.refresh()
return RepoStatus(repo_id, commit_id, updatingto)
def begin_update_repo(self, repo_id, old_commit_id, new_commit_id):
doc = {
'commit': old_commit_id,
'updatingto': new_commit_id,
}
self.es.update(index=self.INDEX_NAME, doc_type=self.MAPPING_TYPE, id=repo_id, body=dict(doc=doc))
self.refresh()
def finish_update_repo(self, repo_id, commit_id):
doc = {
'commit': commit_id,
'updatingto': None,
}
self.es.update(index=self.INDEX_NAME, doc_type=self.MAPPING_TYPE, id=repo_id, body=dict(doc=doc))
self.refresh()
def delete_repo(self, repo_id):
if len(repo_id) != 36:
return
self.es.delete(index=self.INDEX_NAME, doc_type=self.MAPPING_TYPE, id=repo_id)
self.refresh()
logger.debug('delete_repo called on %s', repo_id)
def get_all_repos_from_index(self):
resp = scan(self.es,
query={"query": {"match_all": {}}},
index=self.INDEX_NAME,
doc_type=self.MAPPING_TYPE
)
return [{'id': entry['_id']} for entry in resp]
| [
"rentenglong@163.com"
] | rentenglong@163.com |
5458665911175eba625d5f5b7fc0cc7853562425 | 9d82e37d34ed4d836fcef98ed37ed7ac5c49b316 | /ibmsecurity/isam/web/embedded_ldap/admin.py | a068695630ebb77e72b212d90faed1a94943d4d2 | [
"Apache-2.0"
] | permissive | keiran-ibm/ibmsecurity | 075c156961e371c0e85a7c360fb2d82954315bb6 | b1a77f7a1e8c3cce67e2c3af85c20626d42c0bbd | refs/heads/master | 2022-02-14T14:24:15.687461 | 2019-01-18T05:21:19 | 2019-01-18T05:21:19 | 116,325,033 | 0 | 0 | Apache-2.0 | 2019-01-18T05:16:46 | 2018-01-05T01:23:35 | Python | UTF-8 | Python | false | false | 643 | py | import logging
logger = logging.getLogger(__name__)
def set_pw(isamAppliance, password, check_mode=False, force=False):
    """
    Change the administrator password of the embedded LDAP server.

    In check mode no request is made; a changed-result object is returned
    instead.  *force* is accepted for interface parity but unused here.
    """
    if check_mode is True:
        return isamAppliance.create_return_object(changed=True)
    description = "Changing the administrator password of the embedded LDAP server"
    uri = "/isam/embedded_ldap/change_pwd/v1"
    payload = {"password": password}
    return isamAppliance.invoke_post(description, uri, payload)
| [
"rsreera@us.ibm.com"
] | rsreera@us.ibm.com |
d8ecc28c6dafc469b8914f815e57867ae0ba16a1 | e4f66020bffff9bfdfb28dfc4493681a0a6d03d6 | /setup.py | 5b02a84a3e40f09bbf04d748ed41dd45ac278133 | [
"Unlicense",
"LicenseRef-scancode-public-domain"
] | permissive | djmattyg007/python-webpagecache | 2ee190cba8f08456ec85ed9619e6277a4b9cdfea | 0e398a4b42c61959c3329bf86af0553d76e1948e | refs/heads/master | 2021-01-13T00:16:06.983319 | 2017-01-07T02:49:15 | 2017-01-07T02:49:15 | 44,864,758 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 612 | py | #!/usr/bin/env python
from setuptools import setup, find_packages
# Single source of truth for the release version (also used in download_url).
_version = "1.0.1"
setup(
    name="webpagecache",
    version=_version,
    description="A utility class for downloading webpages and storing them in an SQLite3 database as a cache.",
    author="Matthew Gamble",
    author_email="git@matthewgamble.net",
    url="https://github.com/djmattyg007/python-webpagecache",
    download_url="https://github.com/djmattyg007/python-webpagecache/archive/{0}.zip".format(_version),
    packages=find_packages(),
    package_data={},
    data_files=[],
    license="Unlicense",
    install_requires=[]
)
| [
"git@matthewgamble.net"
] | git@matthewgamble.net |
b3b88bd1a16d62d0f62de83a7828d1de33bc36f1 | 360c109cd2ebaf852d35a3cfc15cbe6d42d2c02a | /code-projects/twitter-scraper/twitter-scraper.py | 6ab0a3307ea2e9c53304c6af7a03ddefb3ff050a | [] | no_license | alex9311/alex9311.github.io | d7ba9bcef010bae76911059c83419c9484055a7a | 3da111c90aad01eac9ccfe601fbb5944bf495b4c | refs/heads/master | 2023-07-11T18:11:44.680327 | 2023-06-27T18:20:04 | 2023-06-27T18:20:04 | 145,275,367 | 1 | 0 | null | 2022-12-08T21:07:50 | 2018-08-19T05:17:45 | Python | UTF-8 | Python | false | false | 892 | py | import time
import csv
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
# Scrape image tweets from a Twitter profile page and save (text, image URL)
# rows to mountain-tweets.csv.
browser = webdriver.Chrome()
base_url = u'https://twitter.com/search?q='
query = u'%40ismtrainierout'
url = u'https://twitter.com/IsMtRainierOut'  # base_url + query
browser.get(url)
time.sleep(1)
# Scroll repeatedly so Twitter lazy-loads more tweets into the page.
body = browser.find_element_by_tag_name('body')
for _ in range(1000):
    body.send_keys(Keys.PAGE_DOWN)
    time.sleep(0.2)
tweets = browser.find_elements_by_class_name('tweet')
# Fix: open the CSV through a context manager so the handle is flushed and
# closed (csv.writer(open(...)) leaked it and could lose buffered rows),
# and pass newline='' as the csv module requires for text files.
with open('mountain-tweets.csv', 'w', newline='') as csv_file:
    outputCsv = csv.writer(csv_file)
    for tweet in tweets:
        text = tweet.find_elements_by_class_name('js-tweet-text-container')
        picture = tweet.find_elements_by_class_name('js-adaptive-photo')
        text = text[0].text if text else False
        imageUrl = picture[0].get_attribute('data-image-url') if picture else False
        # Keep only tweets that have both a caption and an attached image.
        if text and imageUrl:
            outputCsv.writerow([text, imageUrl])
browser.close()
| [
"alex.simes29@gmail.com"
] | alex.simes29@gmail.com |
74328f66c4d98ae6c3675f5d7a1752151fad38d3 | 0c89e84ac76829cf2040614ac652e238c76da04a | /plugins/connection/buildah.py | bb4f71642b1c36c4cd3a727dda4096bd618b763e | [
"Apache-2.0"
] | permissive | TomasTomecek/ansible-podman-collections | 4022875a583281c660ca301dd8ba0d07dd987316 | cfd27b44be233b5d65b3282598cbb705791afefd | refs/heads/master | 2022-05-31T00:01:00.686055 | 2020-05-04T18:33:45 | 2020-05-04T18:33:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,695 | py | # Based on the docker connection plugin
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# Connection plugin for building container images using buildah tool
# https://github.com/projectatomic/buildah
#
# Written by: Tomas Tomecek (https://github.com/TomasTomecek)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
connection: buildah
short_description: Interact with an existing buildah container
description:
- Run commands or put/fetch files to an existing container using buildah tool.
author: Tomas Tomecek (ttomecek@redhat.com)
options:
remote_addr:
description:
- The ID of the container you want to access.
default: inventory_hostname
vars:
- name: ansible_host
# keyword:
# - name: hosts
remote_user:
description:
- User specified via name or ID which is used to execute commands inside the container.
ini:
- section: defaults
key: remote_user
env:
- name: ANSIBLE_REMOTE_USER
vars:
- name: ansible_user
# keyword:
# - name: remote_user
'''
import os
import shlex
import shutil
import subprocess
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes, to_native
from ansible.plugins.connection import ConnectionBase, ensure_connect
from ansible.utils.display import Display
display = Display()
# this _has to be_ named Connection
class Connection(ConnectionBase):
"""
This is a connection plugin for buildah: it uses buildah binary to interact with the containers
"""
# String used to identify this Connection class from other classes
transport = 'containers.podman.buildah'
has_pipelining = True
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
self._container_id = self._play_context.remote_addr
self._connected = False
# container filesystem will be mounted here on host
self._mount_point = None
# `buildah inspect` doesn't contain info about what the default user is -- if it's not
# set, it's empty
self.user = self._play_context.remote_user
display.vvvv("Using buildah connection from collection")
def _set_user(self):
self._buildah(b"config", [b"--user=" + to_bytes(self.user, errors='surrogate_or_strict')])
def _buildah(self, cmd, cmd_args=None, in_data=None):
"""
run buildah executable
:param cmd: buildah's command to execute (str)
:param cmd_args: list of arguments to pass to the command (list of str/bytes)
:param in_data: data passed to buildah's stdin
:return: return code, stdout, stderr
"""
local_cmd = ['buildah', cmd, '--', self._container_id]
if cmd_args:
local_cmd += cmd_args
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
display.vvv("RUN %s" % (local_cmd,), host=self._container_id)
p = subprocess.Popen(local_cmd, shell=False, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate(input=in_data)
display.vvvvv("STDOUT %s" % stdout)
display.vvvvv("STDERR %s" % stderr)
display.vvvvv("RC CODE %s" % p.returncode)
stdout = to_bytes(stdout, errors='surrogate_or_strict')
stderr = to_bytes(stderr, errors='surrogate_or_strict')
return p.returncode, stdout, stderr
def _connect(self):
"""
no persistent connection is being maintained, mount container's filesystem
so we can easily access it
"""
super(Connection, self)._connect()
rc, self._mount_point, stderr = self._buildah("mount")
if rc != 0:
display.v("Failed to mount container %s: %s" % (self._container_id, stderr.strip()))
raise AnsibleError(stderr.strip())
else:
self._mount_point = self._mount_point.strip() + to_bytes(os.path.sep, errors='surrogate_or_strict')
display.vvvvv("MOUNTPOINT %s RC %s STDERR %r" % (self._mount_point, rc, stderr))
self._connected = True
@ensure_connect
def exec_command(self, cmd, in_data=None, sudoable=False):
""" run specified command in a running OCI container using buildah """
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
# shlex.split has a bug with text strings on Python-2.6 and can only handle text strings on Python-3
cmd_args_list = shlex.split(to_native(cmd, errors='surrogate_or_strict'))
rc, stdout, stderr = self._buildah("run", cmd_args_list, in_data)
display.vvvvv("STDOUT %r STDERR %r" % (stderr, stderr))
return rc, stdout, stderr
def put_file(self, in_path, out_path):
""" Place a local file located in 'in_path' inside container at 'out_path' """
super(Connection, self).put_file(in_path, out_path)
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self._container_id)
real_out_path = self._mount_point + to_bytes(out_path, errors='surrogate_or_strict')
shutil.copyfile(
to_bytes(in_path, errors='surrogate_or_strict'),
to_bytes(real_out_path, errors='surrogate_or_strict')
)
# alternatively, this can be implemented using `buildah copy`:
# rc, stdout, stderr = self._buildah(
# "copy",
# [to_bytes(in_path, errors='surrogate_or_strict'),
# to_bytes(out_path, errors='surrogate_or_strict')]
# )
def fetch_file(self, in_path, out_path):
""" obtain file specified via 'in_path' from the container and place it at 'out_path' """
super(Connection, self).fetch_file(in_path, out_path)
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self._container_id)
real_in_path = self._mount_point + to_bytes(in_path, errors='surrogate_or_strict')
shutil.copyfile(
to_bytes(real_in_path, errors='surrogate_or_strict'),
to_bytes(out_path, errors='surrogate_or_strict')
)
def close(self):
""" unmount container's filesystem """
super(Connection, self).close()
rc, stdout, stderr = self._buildah("umount")
display.vvvvv("RC %s STDOUT %r STDERR %r" % (rc, stdout, stderr))
self._connected = False
| [
"sshnaidm@redhat.com"
] | sshnaidm@redhat.com |
def firstNonRepeatingCharacter(string):
    """Return the index of the first character occurring exactly once.

    Returns -1 when every character repeats (and for the empty string).
    Two O(n) passes: count occurrences, then find the first unique
    character.  Replaces the original's explicit ``__contains__`` dunder
    call and ``range(len(...))`` index loops with ``dict.get`` and
    ``enumerate``.
    """
    counts = {}
    for ch in string:
        counts[ch] = counts.get(ch, 0) + 1
    for index, ch in enumerate(string):
        if counts[ch] == 1:
            return index
    return -1
if __name__ == '__main__':
    # Manual check: 'b' (index 1) is the first character occurring once.
    print(firstNonRepeatingCharacter("abcdcaf"))
| [
"ajeshr2k1@gmail.com"
] | ajeshr2k1@gmail.com |
4c221e2986b4bc54abf175502f09ac3da11d55cd | 6453e00ddd7a341510a6e191b0b8d2bf9d1dda09 | /study11_正则表达式/ch06_常用匹配规则-匹配开头和结尾.py | c6b2e91e636b158a93cab9ee22461e018d88b127 | [] | no_license | zjkj-2020/studyPython | 1c85751c2468caa977510d30864ddadecc62cac3 | 9d527ed6058b041f8d2e441ea96a18733cc0040a | refs/heads/master | 2022-12-04T00:43:53.807157 | 2020-08-01T05:09:32 | 2020-08-01T05:09:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 659 | py | import re
print('--------------- ^ 的使用:匹配字符串的开头')
# '^' anchors the match at the beginning of the string.
result = re.match('^P.*', 'Python is langage')
result2 = re.match('^P\w{5}', 'Python is langage')
if result:
    print(result.group())  # Python is langage
    pass
if result2:
    print(result2.group())  # Python
    pass
print('--------------- $ 的使用:匹配邮箱的结尾')
# '$' anchors the match at the end of the string (e-mail suffix check).
# NOTE(review): the '.' before 'com' is unescaped, so it matches any char.
result1 = re.match('[\w]{5,15}@[\w]{2,5}.com$', 'myfunckmail@mail.com')
result2 = re.match('[\w]{5,15}@[\w]{2,5}.com$', 'myfunckmail@mail.comTest')
if result1:
    print(result1.group())
    pass
if result2:
    print(result2.group())
    pass
| [
"897961047@qq.com"
] | 897961047@qq.com |
7cb69293cb54c8c3c0f69b669b9c34c9efcd5d7f | 4cfd7ed73a61bf9deebafbcdca55b560498b7e53 | /HW2/hw02.py | 2a518ac340b837d918edf053ebb3f1cc4ff63daf | [] | no_license | kcenan/Machine-Learning-ENGR421 | 5bff7fb8cb3561c5d0b556745a87a37a17667847 | 57c7055d8f78be8d204406780a6a966b23f1bea0 | refs/heads/master | 2021-03-09T20:54:25.963736 | 2020-03-10T18:53:31 | 2020-03-10T18:53:31 | 246,379,731 | 7 | 3 | null | null | null | null | UTF-8 | Python | false | false | 572 | py | #import libs
import pandas as pd
import numpy as np
import math
#read features and labels
imagesdf = pd.read_csv('hw02_images.csv',header=None)
labeldf = pd.read_csv('hw02_labels.csv',header=None)
#read weights for regression initial state
i_W_df = pd.read_csv('initial_W.csv',header=None)
i_w0_df = pd.read_csv('initial_w0.csv',header=None)
#split data into train and test data
# First 500 rows form the training split, last 500 rows the test split.
train_x = imagesdf.iloc[0:500]
test_x = imagesdf.iloc[-500:]
train_y = labeldf.iloc[0:500]
test_y = labeldf.iloc[-500:]
def sigmoid(z):
    """Logistic sigmoid 1 / (1 + e^(-z)), elementwise for array input."""
    exp_neg = np.exp(-z)
    return 1.0 / (1 + exp_neg)
| [
"k.cenann@gmail.com"
] | k.cenann@gmail.com |
289b6c7fb79a23f29885816e3674c0b089259f91 | 1fd0a6435ef40da2247ec310c0c10f02d01d7cb7 | /binarytree.py | b2fac37ef80671ae3553562ea0bd0bd968fd4be3 | [] | no_license | zhixianggg/revision | 5e9a7471b59adb1feaca8dfc7cdd2091ef828b9e | c4bc3c6d60e5a57d90e358330bee26701345a872 | refs/heads/master | 2016-09-06T09:08:51.094420 | 2015-03-08T03:28:52 | 2015-03-08T03:28:52 | 31,836,987 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,070 | py | class Node():
    def __init__(self,rootdata):
        # LeftP/RightP are indices into the module-level ThisTree array;
        # 0 plays the role of the null pointer.  Data is stored as a string.
        self.LeftP = int(0)
        self.Data = str(rootdata)
        self.RightP = int(0)
class BinaryTree(Node):
    '''Node subclass exposing getter/setter accessors for the inherited
    LeftP / Data / RightP fields (pointer values are coerced to int).'''

    def getRootVal(self):
        return self.Data

    def setRootVal(self, data):
        self.Data = data

    def getLeftChild(self):
        return self.LeftP

    def setLeftChild(self, pointer):
        self.LeftP = int(pointer)

    def getRightChild(self):
        return self.RightP

    def setRightChild(self, pointer):
        self.RightP = int(pointer)
def AddItemToBinaryTree(NewFreeItem):
    """Insert NewFreeItem into the array-based BST rooted at Root.

    Nodes live in the module-level ThisTree array; free nodes form a
    linked list threaded through LeftP, headed by NextFreePosition
    (0 = no free node).

    Fixes vs. the original:
      * ``PreviousPosition == CurrentPosition:`` was a syntax error -- it
        must be an assignment remembering the parent during traversal.
      * ``if LastMove = 'R':`` used assignment instead of comparison.
      * The stored value referenced an undefined name (NewTreeItem) and
        the comparison read the non-existent attribute ``.data``
        (the Node attribute is ``.Data``).
      * The free-list head is now advanced BEFORE the node's LeftP is
        cleared (and also when the new node becomes the root); otherwise
        the rest of the free list was lost after the first insertion.
    """
    global Root, NextFreePosition, LastMove, PreviousPosition
    if NextFreePosition == 0:
        print("ERROR No free node available.")
    else:
        NewPosition = NextFreePosition
        # Unlink the node from the free list before its LeftP is reset.
        NextFreePosition = ThisTree[NewPosition].LeftP
        ThisTree[NewPosition].setRootVal(str(NewFreeItem))
        ThisTree[NewPosition].setLeftChild(int(0))
        ThisTree[NewPosition].setRightChild(int(0))
        if Root == 0:
            Root = NewPosition
        else:
            # traverse the tree to find the position for the new value
            CurrentPosition = Root
            LastMove = 'X'
            while not CurrentPosition == 0:
                PreviousPosition = CurrentPosition
                if NewFreeItem < ThisTree[CurrentPosition].Data:
                    # move left
                    LastMove = 'L'
                    CurrentPosition = ThisTree[CurrentPosition].LeftP
                else:
                    # move right
                    LastMove = 'R'
                    CurrentPosition = ThisTree[CurrentPosition].RightP
            if LastMove == 'R':
                ThisTree[PreviousPosition].RightP = NewPosition
            else:
                ThisTree[PreviousPosition].LeftP = NewPosition
#initialise variables
Root = 0 #set Root to be 0 for initial empty binary tree
NextFreePosition = 1 #first free node is initialise to 1
LastMove = 'X'
PreviousPosition = Root
#initialise array of linked list of 20 nodes
ThisTree = [BinaryTree('') for x in range(21)]
#declare array of 20 nodes
#index 0 is ignored, start from index 1
#node 1 has index 1 in ThisTree, node 2 has index 2, and so on
#initialise left pointer of all nodes to point to next node
#except for last node, the left pointer is 0
# (the LeftP chain doubles as the free list consumed by AddItemToBinaryTree)
for i in range(1,20):
    ThisTree[i].LeftP = int(i+1)
ThisTree[20].LeftP = int(0) #assign left pointer of node 20 to 0
#initialise right pointer of all nodes to be 0
for i in range(1,21):
    ThisTree[i].RightP = int(0)
def OutputData():
print("Value of Root is", Root)
print("Value of NextFreePosition is", NextFreePosition)
print("Contents of ThisTree in index order is")
print("-"*56) #print a line on screen
print("|{0:^6}|{1:^15}|{2:^15}|{3:^15}|".format("Node","Left","Data","Right"))
print("-"*56) #print a line on screen
for i in range(1,21):
if ThisTree[i].getRootVal() != '': #check for non-empty data
print(("|{0:^6}|{1:^15}|{2:^15}|{3:^15}|").\
format(i,ThisTree[i].getLeftChild(),ThisTree[i].getRootVal(),ThisTree[i].RightChild())
print("-"*56) #print a line on screen
def inorder(tree, index): #Task 3.6: display in alphabetical order
if index != 0: #if not child/leaf node
inorder(tree, tree[index].getLeftChild()) #process left subtree in inorder
print(tree[index].getRootVal()) # access root node
inorder(tree, tree[index].getRightChild()) #process right subtree in inorder
def main(): #Task 3.4
global ThisTree
newdata = input("Enter new data items to be added to binary tree. [XXX to end]\n>>>")
while newdata != "XXX":
AddItem
| [
"zhixianggo@Oons-Air.gateway.2wire.net"
] | zhixianggo@Oons-Air.gateway.2wire.net |
aab625fb7585de7ede79181fa0eb29adc02fdc5b | f3affe270244f252b306ee0acaee19417b721422 | /core/analytics.py | a817dd137af15b3422bdc928a450eaeaddba1846 | [] | no_license | Dom-of-damn/starnavi_social_network | de6d40307ad1e8e8191d37791c0c62b4acce0a3a | 97b5a78a89f27efdac0b1e57fa887cd5683e5764 | refs/heads/main | 2023-03-28T07:27:57.830694 | 2021-03-29T08:22:57 | 2021-03-29T08:22:57 | 346,704,327 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 569 | py | from django.db.models import Count
from django.db.models.functions import TruncDay
from core.models import PostsFeedBack
def get_like_analytics(date_from, date_to):
"""
Implements statistics of likes in day.
:return: list which contains dicts with statistics values.
"""
queryset = PostsFeedBack.objects.filter(created__range=[date_from, date_to])
likes = queryset.filter(like=True)
likes_in_day = likes.annotate(day=TruncDay('created')).values('day') \
.annotate(count=Count('id')).values('day', 'count')
return likes_in_day
| [
"tatunashvilif@mail.ru"
] | tatunashvilif@mail.ru |
81353c6d2cc155dfe67389e8d6a0edb7a67f208b | 918582eb6ddd4aa233ceb847358ea106f21bbbc1 | /shb-sanet/exp/12-05_19-05_SHHB_SANet_0.0001_[flip+cutout]/code/train.py | 99e7009e7223c38f1af83d711ae8f0bc5f21b8e7 | [
"MIT"
] | permissive | m-konopka/CCAugmentation-Experiments-Env | 4906886155818c1ac919f38b1f44f7ea7a4e6f52 | ea86a775a3088b851feb42e0fdd8bd6c4f07d4c7 | refs/heads/main | 2023-06-14T04:24:39.294853 | 2021-07-01T22:12:16 | 2021-07-01T22:12:16 | 315,702,459 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,282 | py | import os
import numpy as np
import torch
from config import cfg
#------------prepare enviroment------------
seed = cfg.SEED
if seed is not None:
np.random.seed(seed)
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
gpus = cfg.GPU_ID
if len(gpus)==1:
torch.cuda.set_device(gpus[0])
torch.backends.cudnn.benchmark = True
#------------prepare data loader------------
data_mode = cfg.DATASET
if data_mode is 'SHHA':
from datasets.SHHA.loading_data import loading_data
from datasets.SHHA.setting import cfg_data
elif data_mode is 'SHHB':
from datasets.SHHB.loading_data import loading_data
from datasets.SHHB.setting import cfg_data
elif data_mode is 'QNRF':
from datasets.QNRF.loading_data import loading_data
from datasets.QNRF.setting import cfg_data
elif data_mode is 'UCF50':
from datasets.UCF50.loading_data import loading_data
from datasets.UCF50.setting import cfg_data
elif data_mode is 'WE':
from datasets.WE.loading_data import loading_data
from datasets.WE.setting import cfg_data
elif data_mode is 'GCC':
from datasets.GCC.loading_data import loading_data
from datasets.GCC.setting import cfg_data
elif data_mode is 'Mall':
from datasets.Mall.loading_data import loading_data
from datasets.Mall.setting import cfg_data
elif data_mode is 'UCSD':
from datasets.UCSD.loading_data import loading_data
from datasets.UCSD.setting import cfg_data
# CCAugmentation :D
# TODO: copy SHH dataset to /datasets/ShanghaiTech in its original form to load it properly
from load_data import loading_data
#------------Prepare Trainer------------
net = cfg.NET
if net in ['MCNN', 'AlexNet', 'VGG', 'VGG_DECODER', 'Res50', 'Res101', 'CSRNet','Res101_SFCN']:
from trainer import Trainer
elif net in ['SANet']:
from trainer_for_M2TCC import Trainer # double losses but signle output
elif net in ['CMTL']:
from trainer_for_CMTL import Trainer # double losses and double outputs
elif net in ['PCCNet']:
from trainer_for_M3T3OCC import Trainer
#------------Start Training------------
pwd = os.path.split(os.path.realpath(__file__))[0]
cc_trainer = Trainer(loading_data,cfg_data,pwd)
cc_trainer.forward()
| [
"marcin.edw.konopka@gmail.com"
] | marcin.edw.konopka@gmail.com |
eb520b4a629428beba4bf54cb09f45522d9781a9 | 0f1203f5ba9986b689c9dd8a8a6bf9f5340d589c | /week1/pymongo_test.py | c66b0dcf535d3a1a0ef11e0797abc7e5acc79b87 | [] | no_license | mulhod/mongodb_course | d1e6d01477d3519a7674b75a0073a3e2ed26c08c | a7ead805a571946bc4d4a06d1ac15dcbea105f9c | refs/heads/master | 2021-01-22T23:48:24.073015 | 2015-05-05T03:58:54 | 2015-05-05T03:58:54 | 33,253,531 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 277 | py | #!/usr/env python2.7
import pymongo
from pymongo import MongoClient
# Connect to database
connection = MongoClient('localhost', 27017)
db = connection.test
# Handle to names collection
names = db.names
item = names.find_one()
print item['name']
print list(names.find())
| [
"mulhodm@gmail.com"
] | mulhodm@gmail.com |
b7c8b1f2d2812a986e94522fc5edb89f4b0cf461 | 5ee034dc04ce51567d2f29c433d919c510407b73 | /imdb_app/models.py | 3c91222a32fb1110dc41ef9cb3255f12e6194e68 | [] | no_license | vinay-kotian/imdb_project | 86df89e4b5ab5f219796499370a5c4b2347050e1 | 96da41e32d5d3c4db537fb1553fe740f16b257f2 | refs/heads/master | 2020-04-06T07:06:27.872524 | 2015-07-31T20:23:02 | 2015-07-31T20:23:02 | 40,083,897 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,314 | py | from django.db import models
# Create your models here.
USER_ACCESS = (('Normal', 'Normal'),
('Admin', 'Admin'),)
class GenreCategory(models.Model):
""" This class will have all the Genre type listed.
"""
genre_type = models.CharField(max_length=50, blank=False)
def __unicode__(self):
return "%s" %self.genre_type
class MovieDetails(models.Model):
""" This class hold the data of the Movies Details.
"""
name = models.CharField(max_length=100, blank=False)
imdb_score = models.IntegerField()
popularity = models.IntegerField()
director = models.CharField(max_length=100, blank=False)
genre = models.ManyToManyField(GenreCategory)
def __unicode__(self):
return "%s-%s" %(self.name, self.director)
# class Meta:
# ordering = ('-popularity',)
class UserProfile(models.Model):
""" This class hold the data the User Profile information.
"""
email_id = models.EmailField(blank=False)
password = models.CharField(blank=False, max_length=50)
name = models.CharField(blank=False, max_length=100)
last_name = models.CharField(max_length=100)
access_type = models.CharField(choices=USER_ACCESS, max_length=50)
def __unicode__(self):
return "%s-%s" %(self.email_id, self.access_type) | [
"vinay.kotian@thinklabs.in"
] | vinay.kotian@thinklabs.in |
d9cf49ce79c5631890c6369ebe1860603adae250 | 91fff895c5dc3498d56ec529ee555e508f62b98e | /custom_fastai/module_customized_method_from_list_dict.py | 925bd72c4890adde22831633c86e4ba58ccc6c78 | [] | no_license | zhangjiekui/myNotes | 9923e9c71282800e04f02ff149321d81a263f55f | 8a6c5d2b356f672bc1b52225bd6c16fa150d08e5 | refs/heads/master | 2021-11-22T09:47:42.501213 | 2021-11-19T15:21:55 | 2021-11-19T15:21:55 | 195,907,571 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,046 | py | # -*- coding: utf-8 -*-
# Author: HP/ZhangJieKui
# Date: 2019-7-30 16:57
# Project: 00codes
# IDE:PyCharm
# torch.set_printoptions(linewidth=300)
# device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# net=Net()
# if torch.cuda.device_count() > 1:
# print("Let's use", torch.cuda.device_count(), "GPUs!")
# # dim = 0 [30, xxx] -> [10, ...], [10, ...], [10, ...] on 3 GPUs
# net = nn.DataParallel(net)
# net.to(device)
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
import torch.nn as nn
class DummyModule():
def __init__(self, n_in, nh, n_out):
self._modules = {}
self.l1 = nn.Linear(n_in, nh)
self.l2 = nn.Linear(nh, n_out)
def __setattr__(self, k, v):
if not k.startswith("_"): self._modules[k] = v
super().__setattr__(k, v)
def __repr__(self):
return f'{self._modules}'
def parameters(self):
for l in self._modules.values():
for p in l.parameters(): yield p
# class DummyModule1():
# def __init__(self, n_in, nh, n_out):
# self._m = {}
# self.l1 = nn.Linear(n_in, nh)
# self.l2 = nn.Linear(nh, n_out)
# # print("ok",self._m)
#
# def __setattr__(self, key, value):
# # print(key)
# if not key.startswith('_'):
# # print(self._m)
# self._m[key] = value
# super().__setattr__(key, value)
#
# # print(f"key not startswith('_')={key},value={value}")
# # print(type(super()).__name__)
#
# def __repr__(self):
# return f"{self._m}"
#
# def parameters(self):
# for l in self._m.values():
# for p in l.parameters():
# yield p
class DummyModule1():
def __init__(self, n_in, nh, n_out):
self._m = {}
self.l1 = nn.Linear(n_in, nh)
self.l2 = nn.Linear(nh, n_out)
print("ok", self._m)
def __setattr__(self, key, value):
print(key)
if not key.startswith('_'):
print(f"key not startswith('_')={key},value={value}")
self._m[key] = value
super().__setattr__(key, value)
print(type(super()).__name__)
def __repr__(self):
return f"{self._m}"
def parameters(self):
for l in self._m.values():
for p in l.parameters():
yield p
# 登记模块 Registering modules
class ModuleReg(nn.Module):
def __init__(self,layers,l_names):
super().__init__()
self.layers=layers
self.l_names=l_names
for i,l in enumerate(layers):
self.add_module(self.l_names[i],l)
self.__setattr__(self.l_names[i],l)
def __call__(self, x):
for l in self.layers:
x=l(x)
if __name__ == '__main__':
mdl = DummyModule(10, 20, 1)
print(mdl)
# mdl1 = DummyModule1(784, 50, 10)
# ps=[o.shape for o in mdl1.parameters()]
# print(ps)
# print(mdl1)
m, nh, c= 784, 50, 10
layers = [nn.Linear(m, nh), nn.ReLU(), nn.Linear(nh, c)]
l_names=["l1_zhang1","l2_relu2","l3_zhao3"]
mr=ModuleReg(layers,l_names)
print("new:",mr)
print("named_children()",list(mr.named_children()))
# print(list(mdl.named_children()))
module_dict={}
layers = [nn.Linear(m, nh), nn.ReLU(), nn.Linear(nh, c)]
l_names=["l1_zhang1","l2_relu2","l3_zhao3"]
for i, x_layer in enumerate(layers):
module_dict[l_names[i]]=x_layer
print(module_dict)
print(module_dict.values())
print("model = nn.Sequential(nn.Linear(m, nh), nn.ReLU(), nn.Linear(nh, 10))-----------")
model = nn.Sequential(nn.Linear(m, nh), nn.ReLU(), nn.Linear(nh, 10))
print("model:",model)
layers_list = [nn.Linear(m, nh), nn.ReLU(), nn.Linear(nh, c),nn.LSTM(m, nh)]
model_from_list=nn.Sequential(*layers_list)
print(model_from_list)
pass
| [
"noreply@github.com"
] | zhangjiekui.noreply@github.com |
be65e8b6843e01ce485befc48c2d14bde2967dea | 7bc54bae28eec4b735c05ac7bc40b1a8711bb381 | /src/contradiction/medical_claims/alamri/tfrecord_gen.py | 5ddfa13b77c7f7eda2cea802707861ff4e6e6373 | [] | no_license | clover3/Chair | 755efd4abbd5f3f2fb59e9b1bc6e7bc070b8d05e | a2102ebf826a58efbc479181f1ebb5de21d1e49f | refs/heads/master | 2023-07-20T17:29:42.414170 | 2023-07-18T21:12:46 | 2023-07-18T21:12:46 | 157,024,916 | 0 | 0 | null | 2023-02-16T05:20:37 | 2018-11-10T21:55:29 | Python | UTF-8 | Python | false | false | 3,932 | py | import json
import os
from typing import Iterator
from contradiction.medical_claims.alamri.pairwise_gen import enum_true_instance, enum_neg_instance, enum_neg_instance2, \
enum_neg_instance_diff_review
from contradiction.medical_claims.biobert.voca_common import get_biobert_tokenizer
from cpath import at_output_dir, output_path
from data_generator.cls_sep_encoder import get_text_pair_encode_fn, PairedInstance
from data_generator.tokenizer_wo_tf import get_tokenizer
from misc_lib import DataIDManager, exist_or_mkdir
from tf_util.record_writer_wrap import write_records_w_encode_fn
Entailment = 0
Neutral = 1
Contradiction = 2
def generate_true_pairs(data_id_man):
yield from generate_inner(data_id_man, enum_true_instance)
def generate_neg_pairs(data_id_man):
enum_fn = enum_neg_instance
yield from generate_inner(data_id_man, enum_fn)
def generate_neg_pairs2(data_id_man) -> Iterator[PairedInstance]:
enum_fn = enum_neg_instance2
yield from generate_inner(data_id_man, enum_fn)
def generate_neg_pairs_diff_review(data_id_man):
enum_fn = enum_neg_instance_diff_review
yield from generate_inner(data_id_man, enum_fn)
def generate_inner(data_id_man, enum_fn) -> PairedInstance:
for c1, c2, pair_type in enum_fn():
info = {
'text1': c1.text,
'text2': c2.text,
'pair_type': pair_type
}
inst = PairedInstance(c1.text, c2.text, data_id_man.assign(info), Neutral)
yield inst
def generate_and_write(file_name, generate_fn, tokenizer):
data_id_man = DataIDManager()
inst_list = generate_fn(data_id_man)
max_seq_length = 300
save_path = at_output_dir("alamri_tfrecord", file_name)
encode_fn = get_text_pair_encode_fn(max_seq_length, tokenizer)
write_records_w_encode_fn(save_path, encode_fn, inst_list)
info_save_path = at_output_dir("alamri_tfrecord", file_name + ".info")
json.dump(data_id_man.id_to_info, open(info_save_path, "w"))
def bert_true_pairs():
tokenizer = get_tokenizer()
file_name = "bert_true_pairs"
generate_fn = generate_true_pairs
generate_and_write(file_name, generate_fn, tokenizer)
def bert_neg_pairs():
tokenizer = get_tokenizer()
file_name = "bert_neg_pairs"
generate_fn = generate_neg_pairs
generate_and_write(file_name, generate_fn, tokenizer)
def biobert_true_pairs():
tokenizer = get_biobert_tokenizer()
file_name = "biobert_true_pairs"
generate_fn = generate_true_pairs
generate_and_write(file_name, generate_fn, tokenizer)
def biobert_neg_pairs():
tokenizer = get_biobert_tokenizer()
file_name = "biobert_neg_pairs"
generate_fn = generate_neg_pairs
generate_and_write(file_name, generate_fn, tokenizer)
def bert_neg_pairs2():
tokenizer = get_tokenizer()
file_name = "bert_neg_pairs2"
generate_fn = generate_neg_pairs2
generate_and_write(file_name, generate_fn, tokenizer)
def biobert_neg_pairs2():
tokenizer = get_biobert_tokenizer()
file_name = "biobert_neg_pairs2"
generate_fn = generate_neg_pairs2
generate_and_write(file_name, generate_fn, tokenizer)
def bert_neg_pairs_diff_review():
tokenizer = get_tokenizer()
file_name = "bert_neg_pairs_diff_review"
generate_fn = generate_neg_pairs_diff_review
generate_and_write(file_name, generate_fn, tokenizer)
def biobert_neg_pairs_diff_review():
tokenizer = get_biobert_tokenizer()
file_name = "biobert_neg_pairs_diff_review"
generate_fn = generate_neg_pairs_diff_review
generate_and_write(file_name, generate_fn, tokenizer)
def main():
exist_or_mkdir(os.path.join(output_path, "alamri_tfrecord"))
bert_neg_pairs_diff_review()
biobert_neg_pairs_diff_review()
# bert_neg_pairs2()
# biobert_neg_pairs2()
# bert_true_pairs()
# bert_neg_pairs()
# biobert_true_pairs()
# biobert_neg_pairs()
if __name__ == "__main__":
main()
| [
"lesterny@gmail.com"
] | lesterny@gmail.com |
8916449a87d75db87b58f25ac656eed155f8e15c | bedc7a4479f8bf43d7ed21e0262878b7d5f18c03 | /car_sales/settings.py | e5638effa8be99b648fe1881505ddc5e94bad36d | [] | no_license | petrosernivka/car_sales | 3ca8fec4fcd3725eaf9b57cabb6c4b2d5aee1ee2 | 0a917cef4cd235a9f33b6ef383a5958d4b89f3a9 | refs/heads/master | 2020-04-11T03:46:42.937924 | 2018-12-17T00:42:07 | 2018-12-17T00:42:07 | 161,489,252 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,298 | py | """
Django settings for car_sales project.
Generated by 'django-admin startproject' using Django 2.1.4.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = ')e(5u+6*=g1(h(t8lpvw*%e6uvzf03@uepbdm-=l(u^8v)klmv'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1', 'localhost']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'advert',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'car_sales.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates')
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'car_sales.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
# STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = [os.path.join(BASE_DIR, "static")]
| [
"p.sernivka@gmail.com"
] | p.sernivka@gmail.com |
a5c5aa9d62e72bd6af18112fef8a862f75d3cf32 | 581c1da5a41467de122e41caeecaac88dc62b35d | /evaluation.py | 1a5b46e280c6a5c6490a24f647267b65d19df510 | [] | no_license | jiangnan3/MaliciousConnectionClassifier | 6cd25d83a69587b34a3cdf1e67cb3e2c6702569e | 203e1ebc675074043908cd544d881cadfd591b10 | refs/heads/master | 2021-07-14T11:50:04.585454 | 2020-05-31T16:54:19 | 2020-05-31T16:54:19 | 155,445,451 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,185 | py | import pandas
from keras.models import load_model
import numpy as np
from sklearn.externals import joblib
scaler = joblib.load('idsscaler.pkl')
ids = load_model("IDS.h5")
encoder = load_model("new_encoder.h5")
data = pandas.read_csv("testdatacopy", header=None)
feature = data.drop([41], 1)
label = data[41]
dosfeature = feature[data[41].isin([0])]
doslabel = label[data[41].isin([0])]
notdosfeature = feature[~data[41].isin([0])]
notdoslabel = np.repeat(doslabel[0:1].values, [notdosfeature.shape[0]], axis=0)
u2rfeature = feature[data[41].isin([1])]
u2rlabel = label[data[41].isin([1])]
notu2rfeature = feature[~data[41].isin([1])]
notu2rlabel = np.repeat(u2rlabel[0:1].values, [notu2rfeature.shape[0]], axis=0)
r21feature = feature[data[41].isin([2])]
r21label = label[data[41].isin([2])]
notr21feature = feature[~data[41].isin([2])]
notr21label = np.repeat(r21label[0:1].values, [notr21feature.shape[0]], axis=0)
probefeature = feature[data[41].isin([3])]
probelabel = label[data[41].isin([3])]
notprobefeature = feature[~data[41].isin([3])]
notprobelabel = np.repeat(probelabel[0:1].values, [notprobefeature.shape[0]], axis=0)
normalfeature = feature[data[41].isin([4])]
normallabel = label[data[41].isin([4])]
notnormalfeature = feature[~data[41].isin([4])]
notnormallabel = np.repeat(normallabel[0:1].values, [notnormalfeature.shape[0]], axis=0)
# ---------------dos------------------ #
dospredict = ids.predict(encoder.predict(scaler.transform(dosfeature)))
x = np.argmax(dospredict, axis=1)
y = ~(x == doslabel)
y = y.astype(int)
print sum(y), y.shape[0]
dosfn = (sum(y)) / float(y.shape[0])
print "dosfn:", dosfn
print "\n"
notdospredict = ids.predict(encoder.predict(scaler.transform(notdosfeature)))
x = np.argmax(notdospredict, axis=1)
y = (x == notdoslabel)
y = y.astype(int)
print (sum(y))/float(y.shape[0])
dosfp = (sum(y))/float(y.shape[0])
print "dosfp:", dosfp
print "------\n\n"
# ---------------u2r------------------ #
u2rpredict = ids.predict(encoder.predict(scaler.transform(u2rfeature)))
x = np.argmax(u2rpredict, axis=1)
y = ~(x == u2rlabel)
y = y.astype(int)
print sum(y), y.shape[0]
u2rfn = (sum(y)) / float(y.shape[0])
print "u2rfn:", u2rfn
notu2rpredict = ids.predict(encoder.predict(scaler.transform(notu2rfeature)))
x = np.argmax(notu2rpredict, axis=1)
y = (x == notu2rlabel)
y = y.astype(int)
print "\n"
print (sum(y))/float(y.shape[0])
u2rfp = (sum(y))/float(y.shape[0])
print "u2rfp:", u2rfp
print "------\n\n"
# ---------------r21------------------ #
r21predict = ids.predict(encoder.predict(scaler.transform(r21feature)))
x = np.argmax(r21predict, axis=1)
y = ~(x == r21label)
y = y.astype(int)
print sum(y), y.shape[0]
r21fn = (sum(y)) / float(y.shape[0])
print "r21fn:", r21fn
print "\n"
notr21predict = ids.predict(encoder.predict(scaler.transform(notr21feature)))
x = np.argmax(notr21predict, axis=1)
y = (x == notr21label)
y = y.astype(int)
print (sum(y))/float(y.shape[0])
r21fp = (sum(y))/float(y.shape[0])
print "r21fp:", r21fp
print "-------\n\n"
# ---------------probe------------------ #
probepredict = ids.predict(encoder.predict(scaler.transform(probefeature)))
x = np.argmax(probepredict, axis=1)
y = ~(x == probelabel)
y = y.astype(int)
print sum(y), y.shape[0]
probefn = (sum(y)) / float(y.shape[0])
print "probefn:", probefn
print "\n"
notprobepredict = ids.predict(encoder.predict(scaler.transform(notprobefeature)))
x = np.argmax(notprobepredict, axis=1)
y = (x == notprobelabel)
y = y.astype(int)
print (sum(y))/float(y.shape[0])
probefp = (sum(y))/float(y.shape[0])
print "probefp:", probefp
print "-----\n\n"
# ---------------normal------------------ #
normalpredict = ids.predict(encoder.predict(scaler.transform(normalfeature)))
x = np.argmax(normalpredict, axis=1)
y = ~(x == normallabel)
y = y.astype(int)
print sum(y), y.shape[0]
normalfn = (sum(y)) / float(y.shape[0])
print "normalfn:", normalfn
print "\n"
notnormalpredict = ids.predict(encoder.predict(scaler.transform(notnormalfeature)))
x = np.argmax(notnormalpredict, axis=1)
y = (x == notnormallabel)
y = y.astype(int)
print (sum(y))/float(y.shape[0])
normalfp = (sum(y))/float(y.shape[0])
print "normalfp:", normalfp
print "------\n\n" | [
"noreply@github.com"
] | jiangnan3.noreply@github.com |
3ba2b5e72fc35b21a8ec66eab6afa33ad57e4315 | db579fc523398f6617cfdcdb87ce53b30326d26f | /linux/main.py | 24ed159db79e4eb3ee00aa35f0b9cb5520ca6c57 | [] | no_license | iiiyu/old-oh-my-password | 4ec167f657ace2b6a5d80869de2aba63b319f73c | d8c8665b327f7156bf8d9d7ec04f47214faf3cc3 | refs/heads/master | 2016-09-06T18:08:46.682592 | 2012-03-01T00:56:47 | 2012-03-01T00:56:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,924 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from gui.application import Application
from gui.constant import *
from gui.menu import *
from gui.navigatebar import *
from gui.statusbar import *
from gui.categorybar import *
from gui.scrolledWindow import *
from gui.box import *
from gui.button import *
from gui.listview import *
from gui.tooltip import *
from gui.popupWindow import *
from gui.ompButton import *
if __name__ == "__main__":
# Init application.
application = Application()
# Set application default size.
application.set_default_size(DEFAULT_WINDOW_WIDTH, DEFAULT_WINDOW_HEIGHT)
# Set application icon.
application.set_icon("icon.ico")
# Draw application background.
application.set_background(BACKGROUND_IMAGE)
button = gtk.Button()
button.set_size_request(200,300)
# Init menu callback.
menu = Menu(
[("menu/menuItem1.png", "测试测试测试1", lambda :PopupWindow(application.window)),
("menu/menuItem2.png", "测试测试测试2", None),
("menu/menuItem3.png", "测试测试测试3", None),
None,
(None, "测试测试测试", None),
(None, "测试测试测试", None),
None,
("menu/menuItem6.png", "测试测试测试4", None),
("menu/menuItem7.png", "测试测试测试5", None),
("menu/menuItem8.png", "测试测试测试6", None),
])
application.set_menu_callback(lambda button: menu.show(get_widget_root_coordinate(button)))
# Add body box.
body_box = gtk.HBox()
application.main_box.pack_start(body_box, True, True)
category_box = gtk.HBox()
body_box.add(category_box)
vbox = gtk.VBox()
vvbox = gtk.VBox()
hbox = gtk.HBox()
mb = ompButton(None, '1aaaaaaaaaasdfasdfasdf','bbbbbbbbb')
mb2 = ompButton(None, '2aaaaaaaaa','bbbbbbbbb')
mb3 = ompButton(None, '3aaaaaaaaa','bbbbbbbbb')
sb = ompSmallButton("../data/add.png")
sb1 = ompSmallButton("../data/delete.png")
sb2 = ompSmallButton("../data/settings.png")
vbox.pack_start(mb, False, False)
vbox.pack_start(mb2, False, False)
vbox.pack_start(mb3, False, False)
hbox.pack_start(sb, False, False)
hbox.pack_start(sb1, False, False)
hbox.pack_start(sb2, False, False)
# Add scrolled window.
scrolled_window = ScrolledWindow()
category_box.pack_start(vvbox, False, False)
vvbox.pack_start(scrolled_window, False, False)
vvbox.pack_start(hbox, False, False)
scrolled_window.add_child(vbox)
scrolled_window.set_size_request(160, 540)
# Add statusbar.
statusbar = Statusbar(36)
application.main_box.pack_start(statusbar.status_event_box, False)
application.add_move_window_event(statusbar.status_event_box)
application.add_toggle_window_event(statusbar.status_event_box)
# Run.
application.run()
| [
"yunsn0303@gmail.com"
] | yunsn0303@gmail.com |
81fea0f2ccd99ea670c2cb87202fd23f0678642b | c4242163d8d0e694d334dba3b534ad6798554351 | /binary_search_tree.py | e3f30008c316f35d509c95abc95d3738dfa8ffd6 | [] | no_license | Yahiy/LeetCode | 8a99735274e0d46ecfc3002b24a7cd93c25a9fb3 | ddebed65e224ddbcd64f3795e8c0e03679a0b089 | refs/heads/master | 2023-06-24T01:31:00.187785 | 2021-07-17T05:39:09 | 2021-07-17T05:39:09 | 103,735,285 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,954 | py | """
230. Kth Smallest Element in a BST
Medium
Given a binary search tree, write a function kthSmallest to find the kth smallest element in it.
Note:
You may assume k is always valid, 1 ≤ k ≤ BST's total elements.
Example 1:
Input: root = [3,1,4,null,2], k = 1
3
/ \
1 4
\
2
Output: 1
Example 2:
Input: root = [5,3,6,2,4,null,null,1], k = 3
5
/ \
3 6
/ \
2 4
/
1
Output: 3
Follow up:
What if the BST is modified (insert/delete operations) often
and you need to find the kth smallest frequently? How would you optimize the kthSmallest routine?
"""
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def kthSmallest(self, root, k):
"""
:type root: TreeNode
:type k: int
:rtype: int
"""
ans = self.inorderTraversal(root,k)
return ans[-1]
def kthSmallest2(self, root, k):
"""
:type root: TreeNode
:type k: int
:rtype: int
"""
ans = [] #output
s = [] # stack
while root or len(s):
while root:
s.append(root)
root = root.left
root = s.pop()
ans.append(root.val)
if len(ans) == k:
return root.val
root = root.right
def inorderTraversal(self, root, k):
"""
:type root: TreeNode
:rtype: List[int]
stack Time O(n) Space O(h)
"""
ans = [] #output
s = [] # stack
while root or len(s):
while root:
s.append(root)
root = root.left
root = s.pop()
ans.append(root.val)
if len(ans) >= k:
break
root = root.right
return ans
| [
"noreply@github.com"
] | Yahiy.noreply@github.com |
b324821f4e1cb588672bdca6d07e05ff834b9547 | a939ec03a7eb31962817d6cffea7f125ea4d69db | /DataScience/pandas/Example02_series.py | 9983ac491066a66c45841303c88fcc293db3bfb3 | [] | no_license | dipayandutta/python3 | e21e50d7a21315bc63702a103af79f3d61d91ab1 | f3d01ea52d05a23103cf86afbf5dff64a5d36634 | refs/heads/master | 2022-12-10T09:13:04.967400 | 2021-07-25T15:20:40 | 2021-07-25T15:20:40 | 153,072,927 | 0 | 0 | null | 2022-11-22T02:24:01 | 2018-10-15T07:46:28 | Python | UTF-8 | Python | false | false | 144 | py | #Manually assign index values to a series
import pandas as pd
series = pd.Series(['Dipayan','Ruby'],index=['Husband','spouse'])
print(series) | [
"inbox.dipayan@gmail.com"
] | inbox.dipayan@gmail.com |
5c5210b5c913361470d57dd98760a28cd83755c0 | fd3440771706715a49e296cc80a4ab4f26c2f3b1 | /Projects/Exercise 2/Francis Taylor - 1820531 - Exercise 2.py | 449141d8f94df04e555950b3e46f04929f2b0bbb | [
"MIT"
] | permissive | franc-ist/physics-projects | 608a3b7057a16f734f2ca8a6456f376d94191c74 | d6f4385aaf9a4c45969b5407066d8397de2f1015 | refs/heads/master | 2022-04-23T20:22:39.458829 | 2020-04-23T12:32:41 | 2020-04-23T12:32:41 | 203,757,588 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,737 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Author: Francis Taylor
# Most comments are above the line they are commenting
import numpy as np
import matplotlib.pyplot as plt
# define constants
g = -9.80665  # standard gravity (m/s^2); negative because "down" is the negative y direction
h = 7640  # presumably the atmospheric scale height (m) for an exp(-y/h) density model — confirm against the varying-density routine
v_sound = 343.2  # speed of sound (m/s) in air at sea level
def speed_of_sound(height: float):
    '''
    Calculates the speed of sound at the given altitude, so that a
    horizontal line showing the local speed of sound can be added to the
    graphs.

    The air temperature is taken from the NASA Earth-atmosphere model and
    the speed of sound from v = sqrt(gamma*R*T/M) for an ideal gas.
    https://www.grc.nasa.gov/WWW/BGH/atmosmet.html
    http://hyperphysics.phy-astr.gsu.edu/hbase/Sound/souspe3.html

    Parameters
    ----------
    height : float
        The altitude (m) at which to calculate the speed of sound.

    Returns
    -------
    v_sound : float
        The speed of sound (m/s) at the given altitude.
    '''
    temp = 273  # 0 degrees C in kelvin; the layer offsets below are in Celsius
    # temperature in the atmosphere depends on the layer of the atmosphere
    if height > 25000:
        # upper stratosphere: temperature rises again with altitude
        temp += -131.21 + 0.00299*height
    elif 11000 < height <= 25000:
        # lower stratosphere: temperature is constant.
        # BUG FIX: the original compared the module-level constant h here
        # instead of the height argument, so this branch could never be
        # taken and the troposphere formula was used up to 25 km.
        temp += -56.46
    else:
        # troposphere: temperature falls linearly with altitude
        temp += 15.04 - 0.00649*height
    # speed of sound is dependent on the temperature of the medium it is
    # travelling through: v = sqrt(gamma*R*T/M) with gamma = 1.4 for air,
    # R = 8.314 J/(mol K) and M = 0.02895 kg/mol (molar mass of air)
    v_sound = np.sqrt((1.4*8.314*temp)/0.02895)
    return v_sound
def _custom_values(fn: str):
    '''
    Interactively prompts the user (via input()) for the parameters of the
    freefall problem. Pressing enter at any prompt accepts the default
    value listed below. Invalid (non-numeric) input restarts the whole
    sequence of prompts.

    Parameters
    ----------
    fn : str
        Selects which solver the values are for: 'euler' adds prompts for
        t0 and dt, 'analytical' adds prompts for t_min and t_max.
        NOTE(review): any other value falls through to the `else: break`
        below and the function implicitly returns None, discarding the
        values already entered — callers appear to pass only these two
        strings; confirm.

    Returns
    -------
    For fn == 'euler':
        (n, y0, v0, mass, x_section_area, drag_coefficient, air_density,
         t0, delta_t)
    For fn == 'analytical':
        (n, y0, v0, mass, x_section_area, drag_coefficient, air_density,
         t_max, t_min)  # note the order: t_max precedes t_min
    Defaults: n=1000, y0=1000.0 m, v0=0.0 m/s, mass=100.0 kg,
    x_section_area=0.95 m^2, drag_coefficient=1.0, air_density=1.2 kg/m^3,
    t0=0.0 s, delta_t=1.0 s, t_min=0.0 s, t_max=300.0 s.
    '''
    print('Please enter the values you wish to use. Leave blank for '
          'the default.')
    while True:
        try:
            # `input(...) or default` substitutes the default when the
            # user just presses enter (empty string is falsy); abs()
            # guards against a negative term count
            n = abs(int(input("Please enter a value for n: ") or 1000))
            y0 = float(input("Please enter a value for y0: ") or 1000.00)
            v0 = float(input("Please enter a value for v0: ") or 0.0)
            mass = float(input(
                "Please enter a value for the mass of the object: ") or 100.0)
            x_section_area = float(input(
                "Please enter a value for the cross-sectional area of the "
                "object: ") or 0.95)
            drag_coefficient = float(input(
                "Please enter a value for the drag coefficient: ") or 1.0)
            air_density = float(input(
                "Please enter a value for the density of the air: ") or 1.2)
            # euler specific values
            if fn == 'euler':
                t0 = float(input("Please enter a value for t0: ") or 0.0)
                delta_t = float(input("Please enter a value for dt: ") or 1.0)
                return n, y0, v0, mass, x_section_area, drag_coefficient, air_density, t0, delta_t
            # analytical specific values
            elif fn == 'analytical':
                t_min = float(
                    input("Please enter a value for the lower bound of t (t_min): ") or 0.0)
                t_max = float(
                    input("Please enter a value for the upper bound of t (t_max): ") or 300.0)
                return n, y0, v0, mass, x_section_area, drag_coefficient, air_density, t_max, t_min
        except ValueError:
            # non-numeric entry: report and restart all prompts
            print('Invalid input.')
            continue
        else:
            # reached only when fn matched neither branch above; exits the
            # loop and the function returns None implicitly
            break
def plot_graphs(t, dep_var, dep_type, comparison: bool = False, comparison_t=None, comparison_dep_var=None, fn: str = 'Analytical'):
    '''
    Utility that draws a velocity-time or height-time plot on the current
    matplotlib figure, optionally overlaying a second curve for comparison.

    Parameters
    ----------
    t : array
        Time values for the primary curve.
    dep_var : array
        Dependent values (velocity or height) for the primary curve.
    dep_type : str
        'v' for a velocity plot, 'y' for a height plot.
    comparison : bool[Optional]
        When True, also plot (comparison_t, comparison_dep_var) on the
        same axes and show a legend.
    comparison_t : array[Optional]
        Time values for the comparison curve.
    comparison_dep_var : array[Optional]
        Dependent values for the comparison curve.
    fn : str[Optional]
        Legend label for the comparison curve.
    '''
    if comparison:
        # overlay: primary (Euler) curve in red, comparison curve in blue
        plt.plot(t, dep_var, color='red', label='Euler')
        plt.plot(comparison_t, comparison_dep_var,
                 color='blue', label=fn)
        plt.legend()
    else:
        plt.plot(t, dep_var)
    plt.xlabel('$Time (s)$', size=12)
    # title / y-axis label chosen by plot type
    decorations = {
        'v': ('Velocity-Time', '$Velocity (m/s)$'),
        'y': ('Height-Time', '$Height (m)$'),
    }
    if dep_type in decorations:
        plot_title, y_label = decorations[dep_type]
        plt.title(plot_title, size=22)
        plt.ylabel(y_label, size=12)
    plt.grid(alpha=0.7, linewidth=1)
def trim_zeros(v, y, t):
    '''
    Strip trailing zeros from the velocity and height series and build a
    time axis matching each trimmed series.

    Parameters
    ----------
    v : NumPy array
        Vertical velocity series; trailing zeros are dropped.
    y : NumPy array
        Height series; trailing zeros are dropped.
    t : NumPy array
        Time values, resized to each trimmed series' length.

    Returns
    -------
    v_cut : NumPy array
        Velocity series with trailing zeros removed.
    y_cut : NumPy array
        Height series with trailing zeros removed.
    t_for_v : NumPy array
        Time axis resized to len(v_cut).
    t_for_y : NumPy array
        Time axis resized to len(y_cut).
    '''
    v_cut = np.trim_zeros(v, 'b')
    y_cut = np.trim_zeros(y, 'b')
    # one time axis per series, since v and y may trim to different lengths
    t_for_v = np.resize(t, len(v_cut))
    t_for_y = np.resize(t, len(y_cut))
    return v_cut, y_cut, t_for_v, t_for_y
def analytical_predictions(n: int = 200, y0: float = 1000.0, v0: float = 0.0, t_max: float = 300.0, t_min: float = 0.0, mass: float = 100.0, x_section_area: float = 0.95, drag_coefficient: float = 1.0, air_density: float = 1.2):
    '''
    Predicts the height and vertical speed of an object in freefall with
    constant gravitational acceleration and quadratic drag, using the
    closed-form (analytical) solution
    y(t) = y0 - (m/k) ln cosh(sqrt(k|g|/m) t).

    Parameters
    ----------
    n : int
        The number of time samples. Defaults to 200.
    y0 : float
        Initial position of the object. Defaults to 1000 m.
    v0 : float
        Initial velocity of the object. Defaults to 0 m/s.
        NOTE: the closed form assumes release from rest; v0 only seeds the
        first sample, which the loop immediately overwrites.
    t_max : float
        The final time value for the prediction. Defaults to 300.0 s.
    t_min : float
        The initial time value for the prediction range. Defaults to 0.0 s.
    mass : float
        The mass of the object in freefall. Defaults to 100 kg.
    x_section_area : float
        The cross sectional area of the object. Defaults to 0.95 m^2.
    drag_coefficient : float
        The drag coefficient of the object. Defaults to 1.0.
    air_density : float
        The density of the air. Defaults to 1.2 kg/m^3.

    Returns
    -------
    v_vals, y_vals, t_v, t_y : NumPy arrays
        Trimmed speed and height series plus their matching time axes
        (see trim_zeros).
    '''
    # drag constant k = Cd * rho * A / 2
    k = (drag_coefficient*air_density*x_section_area)/2
    # Loop-invariant factors, hoisted out of the loop (the original
    # recomputed both square roots on every iteration).
    # alpha: rate constant sqrt(k|g|/m); v_term: terminal speed sqrt(m|g|/k).
    # `g` is the module-level gravitational acceleration.
    alpha = np.sqrt((k*abs(g))/mass)
    v_term = np.sqrt((mass*abs(g))/k)
    # initialise arrays
    t_vals = np.linspace(t_min, t_max, n)
    y_vals = np.zeros(n)
    v_vals = np.zeros(n)
    # set t=0 values
    y_vals[0], v_vals[0] = y0, v0
    # evaluate the closed form for each time sample
    for i in range(0, n):
        y_vals[i] = y0 - ((mass/k) * np.log(np.cosh(alpha * t_vals[i])))
        v_vals[i] = -1 * v_term * np.tanh(alpha * t_vals[i])
        # stop if we hit the ground
        if y_vals[i] <= 0:
            break
    # trims trailing zeros and resizes time array
    return trim_zeros(v_vals, y_vals, t_vals)
def euler(n: int = 1000, y0: float = 1000.0, t0: float = 0.0, delta_t: float = 1.0, v0: float = 0.0, mass: float = 100.0, x_section_area: float = 0.95, drag_coefficient: float = 1.0, air_density: float = 1.2, var_air_dens: bool = False):
    '''
    Uses the Euler method to solve the second order ODE for an object in
    freefall, with either constant or altitude-dependent air density.

    Parameters
    ----------
    n : int
        The number of time steps to iterate over. Defaults to 1000.
    y0 : float
        Initial position of the object. Defaults to 1000 m.
    t0 : float
        Initial time. Defaults to 0.0 s.
    delta_t : float
        The time period step (dt). Defaults to 1.0 s.
    v0 : float
        Initial velocity of the object. Defaults to 0 m/s.
    mass : float
        The mass of the object in freefall. Defaults to 100 kg.
    x_section_area : float
        The cross sectional area of the object. Defaults to 0.95 m^2.
    drag_coefficient : float
        The drag coefficient of the object. Defaults to 1.0.
    air_density : float
        The (sea-level) density of the air. Defaults to 1.2 kg/m^3.
    var_air_dens : bool
        If True, air density decays exponentially with altitude using the
        module-level scale height `h`. Defaults to False.

    Returns
    -------
    v, y, t_v, t_y : NumPy arrays
        Trimmed speed and height series plus their matching time axes
        (see trim_zeros).
    '''
    # initialise arrays
    v = np.zeros(n)
    t = np.zeros(n)
    y = np.zeros(n)
    # set t0 values of v, y and t
    v[0], t[0], y[0] = v0, t0, y0
    if var_air_dens is True:
        # altitude-dependent density: rho and k must be recomputed each step
        ad = np.zeros(n)
        k = np.zeros(n)
        for i in range(0, n-1):
            # barometric profile; `h` is the module-level scale height
            ad[i] = air_density*np.exp(-1*y[i]/h)
            k[i] = (drag_coefficient*ad[i]*x_section_area)/2
            t[i+1] = t[i] + delta_t
            v[i+1] = v[i] - (delta_t * (-g + (k[i]/mass) * abs(v[i])*v[i]))
            y[i+1] = y[i] + (delta_t * v[i])
            # stop when we hit the ground
            if y[i+1] <= 0:
                break
    else:
        # constant density: k is loop-invariant, so compute it once
        # (the original recomputed it on every iteration)
        k = (drag_coefficient*air_density*x_section_area)/2
        for i in range(0, n-1):
            t[i+1] = t[i] + delta_t
            v[i+1] = v[i] - (delta_t * (-g + (k/mass) * abs(v[i])*v[i]))
            y[i+1] = y[i] + (delta_t * v[i])
            # stop when we hit the ground
            if y[i+1] <= 0:
                break
    # trims trailing zeros and resizes time array
    return trim_zeros(v, y, t)
# ---------------------------------------------------------------------------
# Interactive menu loop: repeatedly prompts the user to run one of the
# freefall solvers (a: analytical, b: Euler, c: Euler with varying air
# density), optionally with user-supplied parameters, then plots the
# results.  Relies on module-level helpers: _custom_values, speed_of_sound,
# analytical_predictions, euler, plot_graphs and matplotlib (plt).
# ---------------------------------------------------------------------------
user_input = '0'
while user_input != 'q':
    user_input = input(
        '\nChoose an option:'
        '\na: Plot graphs of vertical velocity and height as a function of '
        'time using an analytical method for a object in freefall,'
        '\nb: Plot graphs of vertical velocity and height as a function of '
        'time using Euler\'s method for an object in freefall,'
        '\nc: Same as option b, but with varying air density,'
        '\nOr type "q" to quit. ').lower()
    if user_input == 'a':
        print("Plotting the analytical predictions for height and vertical "
              "speed.")
        # loop until valid input
        while True:
            try:
                # allows the user to change the parameters used in the
                # calculation
                # NOTE(review): input() already returns str, so this
                # ValueError handler can never fire here.
                custom_values = str(input(
                    "Would you like to specify the values used in the problem? (y/n) ")).lower()
            except ValueError:
                # not a string
                print("Invalid input. Please try again.")
                continue
            if custom_values == 'y':
                fn = 'analytical'
                # calls _custom_values() for the analytical solution to
                # allow the parameters used to be changed
                n, y0, v0, mass, x_section_area, drag_coefficient, air_density, t_max, t_min = _custom_values(
                    fn)
                # runs the analytical method using the custom values
                v, y, t_v, t_y = analytical_predictions(
                    n, y0, v0, t_max, t_min, mass, x_section_area, drag_coefficient, air_density)
                break
            elif custom_values == 'n':
                print("Using default values.")
                v, y, t_v, t_y = analytical_predictions()
                break
            else:
                print("Invalid input. Please try again.")
                continue
        # checks if v >= v_sound at any point in the freefall (as it is
        # variable), and plots v_sound on the graph
        v_sound = np.zeros(len(y))
        v_sound_passed = 343
        for i in range(0, len(y)):
            v_sound[i] = speed_of_sound(y[i])
            if abs(v[i]) >= abs(v_sound[i]):
                v_sound_passed = -1*v_sound[i]
                plt.axhline(v_sound_passed, color='orange', linestyle='--',
                            label='Speed of sound = {:.2f}m/s\n(at {:.1f}m) '.format(v_sound_passed, y[i]))
                plt.legend()
                break
        # plots graphs of velocity and height against time
        plot_graphs(t_v, v, 'v')
        plt.show()
        plot_graphs(t_y, y, 'y')
        plt.show()
    elif user_input == 'b':
        print("Solving the freefall of a body using Euler's method.")
        # loop until valid input
        while True:
            try:
                # allows the user to change the parameters used in the
                # calculation
                custom_values = str(input(
                    "Would you like to specify the values used in the problem? (y/n) ")).lower()
            except ValueError:
                # not a string
                print("Invalid input. Please try again.")
                continue
            if custom_values == 'y':
                fn = 'euler'
                # calls _custom_values() for the euler solution to allow the
                # parameters used to be changed
                n, y0, v0, mass, x_section_area, drag_coefficient, air_density, t0, delta_t = _custom_values(
                    fn)
                # runs the euler method using the custom values
                v, y, t_v, t_y = euler(
                    n, y0, t0, delta_t, v0, mass, x_section_area, drag_coefficient, air_density)
                break
            elif custom_values == 'n':
                # uses default values
                print('Using default values.')
                v, y, t_v, t_y = euler()
                break
            else:
                print("Invalid input. Please try again.")
                continue
        while True:
            try:
                # allows the user to compare the euler result with the
                # analytical soln
                compare_graphs = str(input(
                    "Would you like to compare the results with the analytical solution? (y/n) ")).lower()
            except ValueError:
                # not a string
                print("Invalid input. Please try again.")
                continue
            if compare_graphs == 'y':
                # sets the maximum time to the last value of t_y
                t_max = int(t_y[-1])
                if custom_values == 'y':
                    # uses the custom values specified previously
                    v_a, y_a, t_v_a, t_y_a = analytical_predictions(
                        n, y0, v0, t_max, 0.0, mass, x_section_area, drag_coefficient, air_density)
                else:
                    v_a, y_a, t_v_a, t_y_a = analytical_predictions(
                        t_max=t_max)
                # checks if v >= v_sound at any point in the freefall (as it is
                # variable), and plots v_sound on the graph
                v_sound = np.zeros(len(y))
                v_sound_passed = 343
                for i in range(0, len(y)):
                    v_sound[i] = speed_of_sound(y[i])
                    if abs(v[i]) >= abs(v_sound[i]):
                        v_sound_passed = -1*v_sound[i]
                        plt.axhline(v_sound_passed, color='orange', linestyle='--',
                                    label='Speed of sound = {:.2f}m/s\n(at {:.1f}m) '.format(v_sound_passed, y[i]))
                        plt.legend()
                        break
                # plot comparison graphs
                plot_graphs(t_v, v, 'v', True, t_v_a, v_a)
                plt.show()
                plot_graphs(t_y, y, 'y', True, t_y_a, y_a)
                plt.show()
                break
            elif compare_graphs == 'n':
                # checks if v >= v_sound at any point in the freefall (as it is
                # variable), and plots v_sound on the graph
                v_sound = np.zeros(len(y))
                v_sound_passed = 343
                for i in range(0, len(y)):
                    v_sound[i] = speed_of_sound(y[i])
                    if abs(v[i]) >= abs(v_sound[i]):
                        v_sound_passed = -1*v_sound[i]
                        plt.axhline(v_sound_passed, color='orange', linestyle='--',
                                    label='Speed of sound = {:.2f}m/s\n(at {:.1f}m) '.format(v_sound_passed, y[i]))
                        plt.legend()
                        break
                # calls the plot_graphs function to handle pyplot settings
                plot_graphs(t_v, v, 'v')
                plt.show()
                plot_graphs(t_y, y, 'y')
                plt.show()
                break
            else:
                print("Invalid input. Please try again.")
                continue
    elif user_input == 'c':
        print("Solving the freefall of a body with varying air density.")
        # loop until valid input
        while True:
            try:
                custom_values = str(input(
                    "Would you like to specify the values used in the problem? (y/n) ")).lower()
            except ValueError:
                # not a string
                print("Invalid input. Please try again.")
                continue
            if custom_values == 'y':
                fn = 'euler'
                # calls _custom_values() for the euler solution to allow the
                # parameters used to be changed
                n, y0, v0, mass, x_section_area, drag_coefficient, air_density, t0, delta_t = _custom_values(
                    fn)
                # runs the euler method with varying air density
                v, y, t_v, t_y = euler(
                    n, y0, t0, delta_t, v0, mass, x_section_area, drag_coefficient, air_density, var_air_dens=True)
                break
            elif custom_values == 'n':
                print('Using default values.')
                v, y, t_v, t_y = euler(var_air_dens=True)
                break
            else:
                print("Invalid input. Please try again.")
                continue
        # plots graphs of velocity and height against time
        while True:
            try:
                compare_graphs = str(input(
                    "Would you like to compare the results with the Euler solution with fixed drag? (y/n) ")).lower()
            except ValueError:
                # not a string
                print("Invalid input. Please try again.")
                continue
            if compare_graphs == 'y':
                # NOTE(review): t_max is computed here but never used in
                # this branch (euler() does not take a t_max argument).
                t_max = int(t_y[-1])
                if custom_values == 'y':
                    v_a, y_a, t_v_a, t_y_a = euler(
                        n, y0, t0, delta_t, v0, mass, x_section_area, drag_coefficient, air_density)
                else:
                    v_a, y_a, t_v_a, t_y_a = euler()
                # checks if v >= v_sound at any point in the freefall (as it is
                # variable), and plots v_sound on the graph
                v_sound = np.zeros(len(y))
                v_sound_passed = 343
                for i in range(0, len(y)):
                    v_sound[i] = speed_of_sound(y[i])
                    if abs(v[i]) >= abs(v_sound[i]):
                        v_sound_passed = -1*v_sound[i]
                        plt.axhline(v_sound_passed, color='orange', linestyle='--',
                                    label='Speed of sound = {:.2f}m/s\n(at {:.1f}m) '.format(v_sound_passed, y[i]))
                        plt.legend()
                        break
                # plot comparison graphs
                plot_graphs(t_v_a, v_a, 'v', True, t_v, v, 'Modified Euler')
                plt.show()
                plot_graphs(t_y_a, y_a, 'y', True, t_y, y, 'Modified Euler')
                plt.show()
                break
            elif compare_graphs == 'n':
                # checks if v >= v_sound at any point in the freefall (as it is
                # variable), and plots v_sound on the graph
                v_sound = np.zeros(len(y))
                v_sound_passed = 343
                for i in range(0, len(y)):
                    v_sound[i] = speed_of_sound(y[i])
                    if abs(v[i]) >= abs(v_sound[i]):
                        v_sound_passed = -1*v_sound[i]
                        plt.axhline(v_sound_passed, color='orange', linestyle='--',
                                    label='Speed of sound = {:.2f}m/s\n(at {:.1f}m) '.format(v_sound_passed, y[i]))
                        plt.legend()
                        break
                # calls the plot_graphs function to handle pyplot settings
                plot_graphs(t_v, v, 'v')
                plt.show()
                plot_graphs(t_y, y, 'y')
                plt.show()
                break
            else:
                print("Invalid input. Please try again.")
                continue
    # handle any other input
    elif user_input != 'q':
        print('Invalid input.')
| [
"contact@franc.ist"
] | contact@franc.ist |
f257abdf7ddb168c41dfe0b2af3507fb6eed05c1 | 14e8d32ee1b5169ee765745ee6cbe7795a84331c | /forum/admin.py | 33437a3a6466fb27692a75f15007759314d754e2 | [] | no_license | kyizet/SLAP | 1a5464cd35465e81507c1892ca7ea7bc80bce835 | fba5bcfe2a299d9fdb4e219eb0737023c883c1d9 | refs/heads/master | 2022-12-11T04:33:45.805036 | 2020-09-15T12:27:10 | 2020-09-15T12:27:10 | 291,477,560 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 538 | py | from django.contrib import admin
# Register your models here.
from .models import ThreadType, Thread, Topic, Comment
class ThreadAdmin(admin.ModelAdmin):
    # Columns shown on the Thread changelist page in the Django admin.
    list_display = ('pk', 'thread_title', 'for_community')
class TopicAdmin(admin.ModelAdmin):
    # Columns shown on the Topic changelist; get_email is a computed column.
    list_display = ('pk', 'topic_title', 'thread_title', 'owner', 'get_email')

    def get_email(self, obj):
        # Surface the owning user's email address in the changelist.
        return obj.owner.email
# Expose the forum models in the Django admin site; Thread and Topic use
# the customised ModelAdmin classes defined above.
admin.site.register(ThreadType)
admin.site.register(Thread, ThreadAdmin)
admin.site.register(Topic, TopicAdmin)
admin.site.register(Comment)
| [
"zet11zet@icloud.com"
] | zet11zet@icloud.com |
ddfcf112b3eda06c22f852a04ad233b0fcd9aad8 | 2706bb3b3b0ad1b1b8db204ee51c4f73c8c77ff7 | /lstm/trainLSTM.py | 3e35d57cd28256e0f02ef5ae20d55f558fb99c7b | [] | no_license | fangzhao2019/multi-class_sentiment_analysis | c8f3ee675f01c4940be23956ddf3fe10ec24b69a | e427e572efa641325553de82ad1ad617adf9e34b | refs/heads/master | 2023-06-07T20:29:34.979404 | 2023-06-01T16:47:45 | 2023-06-01T16:47:45 | 322,667,976 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,478 | py | import numpy as np
import keras
from keras.layers import Flatten
from keras.models import load_model
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.layers import Dense, LSTM, Embedding, Dropout, Conv1D, MaxPooling1D, Bidirectional
from keras.models import Sequential
import matplotlib.pyplot as plt
# Training hyperparameters.
batch_size = 128  # samples per gradient update
epochs = 50  # full passes over the training set
def trans(testLabel, labelSet, num_classes):
    """
    One-hot encode a sequence of labels.

    Parameters
    ----------
    testLabel : sequence
        Labels to encode; every element must appear in labelSet.
    labelSet : list
        Ordered class list; a label's column is its position here.
    num_classes : int
        Number of output columns (normally len(labelSet)).

    Returns
    -------
    ndarray, shape (len(testLabel), num_classes), dtype float32
        One row per input label with a 1.0 in the label's column.
    """
    # dict lookup is O(1) per sample; the original used list.index,
    # which is O(len(labelSet)) for every label
    column_of = {label: i for i, label in enumerate(labelSet)}
    onehot = np.zeros((len(testLabel), num_classes), dtype='float32')
    for row, label in enumerate(testLabel):
        onehot[row][column_of[label]] = 1.0
    return onehot
def evaluate(testLabel, predictLabel, labelSet):
    """
    Score one-hot predictions against one-hot ground truth.

    Builds a confusion matrix (rows = true class, columns = predicted
    class) and derives per-label precision / recall / F1 plus overall
    accuracy.

    Parameters
    ----------
    testLabel : ndarray, shape (samples, num_classes)
        One-hot ground-truth labels.
    predictLabel : ndarray, shape (samples, num_classes)
        Predicted scores; argmax per row selects the predicted class.
    labelSet : list
        Class names, indexed consistently with the one-hot columns.

    Returns
    -------
    dict
        {label: {'p': ..., 'r': ..., 'f': ...}, ..., 'acc': accuracy}
    """
    n_labels = len(labelSet)
    results_count = np.zeros((n_labels, n_labels))
    for truth_row, pred_row in zip(testLabel, predictLabel):
        results_count[truth_row.argmax()][pred_row.argmax()] += 1
    # column/row sums are reused for every label -- compute them once
    predicted_totals = np.sum(results_count, axis=0)
    actual_totals = np.sum(results_count, axis=1)
    fmeasure = {}
    total_TP = 0
    for idx in range(n_labels):
        TP = results_count[idx, idx]
        total_TP += TP
        # +0.5 additive smoothing (kept from the original) avoids zero
        # denominators for precision and recall themselves
        precision = TP/float(predicted_totals[idx]+0.5)
        recall = TP/float(actual_totals[idx]+0.5)
        # bug fix: the original divided by (precision + recall) unguarded
        # and raised ZeroDivisionError whenever a class had no true
        # positives; report F1 = 0 for that case instead
        if precision + recall > 0:
            f_score = 2*precision*recall/float(recall+precision)
        else:
            f_score = 0.0
        fmeasure[labelSet[idx]] = {'p': precision, 'r': recall, 'f': f_score}
    fmeasure['acc'] = total_TP/np.sum(results_count)
    return fmeasure
def drawFigure(x,y1,y2,y3):
    """Plot train/dev/test accuracy curves against epoch, save the figure
    to disk and display it.

    x  -- epoch numbers
    y1 -- training accuracy per epoch
    y2 -- dev accuracy per epoch
    y3 -- test accuracy per epoch
    """
    # NOTE(review): the title and output filename say "CNN" although this
    # script trains an LSTM -- looks copied from the CNN variant; confirm.
    plt.figure()
    plt.plot(x,y1)
    plt.plot(x,y2)
    plt.plot(x,y3)
    plt.xlabel('epoch')
    plt.ylabel('accuracy')
    plt.title('the accuracy of CNN iteration')
    plt.savefig('/home/som/Documents/lee/multi-class_sentiment_analysis/lstm/result/accuracy_cnn.jpg')
    plt.show()
def saveToTxt(x, train, dev, test):
    """Persist per-epoch accuracies to a text file.

    One line per epoch: "<epoch> <train_acc> <dev_acc> <test_acc>",
    each formatted with four decimal places.

    x     -- epoch numbers
    train -- training accuracy per epoch
    dev   -- dev accuracy per epoch
    test  -- test accuracy per epoch
    """
    # NOTE(review): the filename says "cnn" although this is the LSTM
    # script -- confirm the intended output path.
    out_path = '/home/som/Documents/lee/multi-class_sentiment_analysis/lstm/result/accuracy_cnn.txt'
    # `with` guarantees the handle is closed even if a write fails
    # (the original open/close pair leaked it on error)
    with open(out_path, 'w', encoding='utf-8') as f:
        for epoch, tr, dv, te in zip(x, train, dev, test):
            f.write('%.4f %.4f %.4f %.4f\n' % (epoch, tr, dv, te))
# ---------------------------------------------------------------------------
# Training script: loads pre-computed feature matrices, trains a
# bidirectional-LSTM classifier, checkpoints on best dev accuracy, then
# reports per-label precision/recall/F1 on the test set.
# ---------------------------------------------------------------------------
trainMat=np.load('mat/trainMat.npy')
trainLabel=np.load('mat/trainLabel.npy')
# NOTE(review): dev and test are loaded from the same files, so the "dev"
# scores below are effectively test scores -- confirm this is intentional.
devMat=np.load('mat/testMat.npy')
devLabel=np.load('mat/testLabel.npy')
testMat=np.load('mat/testMat.npy')
testLabel=np.load('mat/testLabel.npy')
# Collapse the sequence axis by summation: (samples, seq, feat) ->
# (samples, 1, feat), i.e. each sample becomes a single summed timestep.
trainMat=np.sum(trainMat,axis=1).reshape(trainMat.shape[0],1,trainMat.shape[1])
devMat=np.sum(devMat,axis=1).reshape(devMat.shape[0],1,devMat.shape[1])
testMat=np.sum(testMat,axis=1).reshape(testMat.shape[0],1,testMat.shape[1])
trainMat = trainMat.astype('float32')
devMat = devMat.astype('float32')
testMat = testMat.astype('float32')
print('x_train shape:', trainMat.shape)
print(trainMat.shape[0], 'train samples')
print(devMat.shape[0], 'dev samples')
print(testMat.shape[0], 'test samples')
labelSet=list(set(trainLabel))
num_classes=len(labelSet)
print('共有%d个类别'%num_classes)
# convert class vectors to binary class matrices
trainLabel = trans(trainLabel,labelSet,num_classes)
devLabel = trans(devLabel,labelSet,num_classes)
testLabel = trans(testLabel,labelSet,num_classes)
# Model: BiLSTM -> dropout -> dense -> dropout -> softmax classifier.
model = Sequential()
model.add(Bidirectional(LSTM(32, recurrent_dropout=0.1)))
model.add(Dropout(0.25))
model.add(Dense(64))
model.add(Dropout(0.3))
model.add(Dense(num_classes, activation='softmax'))
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc'])
best_dev_accuracy=-1
x=[]
train=[]
dev=[]
test=[]
print('正在训练模型')
# Train one epoch at a time so the model can be checkpointed whenever the
# dev accuracy improves.
for i in range(epochs):
    print('第%d次迭代'%i)
    model.fit(trainMat, trainLabel,
              batch_size=batch_size,
              epochs=1,
              verbose=1,
              validation_data=(devMat, devLabel))
    score1=model.evaluate(trainMat,trainLabel)[1]
    score2=model.evaluate(devMat,devLabel)[1]
    score3=model.evaluate(testMat,testLabel)[1]
    if score2>best_dev_accuracy:
        best_dev_accuracy=score2
        model.save('lstm.model')
    print(best_dev_accuracy)
    print('\n')
    x.append(i+1)
    train.append(score1)
    dev.append(score2)
    test.append(score3)
# Reload the best checkpoint and evaluate it on the test set.
model=load_model('lstm.model')
result=model.predict(testMat,verbose=0)
fmeasure=evaluate(testLabel,result,labelSet)
accuracy=fmeasure['acc']
print('\n')
print("acc: %.4f" %(accuracy))
for k in fmeasure.keys():
    if k=='acc':continue
    print('label %s p: %.4f, r: %.4f, f: %.4f'%(k, fmeasure[k]['p'], fmeasure[k]['r'], fmeasure[k]['f']))
drawFigure(x,train,dev,test)
saveToTxt(x,train,dev,test)
| [
"1311778207@qq.com"
] | 1311778207@qq.com |
e8a7a6fa4b339d66ea690f00c1ee3c267ca4acf6 | 6580e5a90c15898af4a4d9317c6372c9e3e36a29 | /tokens.py | 02db1964798167d2bc287abbd4dd0395123d7531 | [
"MIT"
] | permissive | 22842219/SQL2Cypher | ee66a749d223fc116d3f359a100624fbaaaa8f87 | 4687b22792c947c2fecb3ae788c3ef7e6a074344 | refs/heads/master | 2023-09-02T13:25:05.945151 | 2021-10-20T14:38:12 | 2021-10-20T14:38:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,571 | py | import sqlparse
from sqlparse import sql
# Example SQL statement used when running this module directly.
query = "UPDATE Customers c SET c.ContactName = 'Alfred Schmidt', c.City= 'Frankfurt' WHERE c.CustomerID = 1;"
# Token stream of the first (and only) statement in the query.
query_tokens = sqlparse.parse(query)[0].tokens
def count_tables(tokens):
    """
    Count the table identifiers in a sqlparse token stream.

    :param tokens: tokens of a parsed SQL statement
    :return: number of table identifiers found
    :raises Exception: when a table has no distinct alias (its display
        name equals its real name)
    """
    total = 0
    for tok in tokens:
        is_table = tok.ttype is None and type(tok) is sql.Identifier
        if not is_table:
            continue
        # every table must be aliased so the Cypher node gets a variable
        if tok.get_name() == tok.get_real_name():
            raise Exception("Please use a different alias name")
        total += 1
    return total
def value_condition(tokens):
    """
    Translate an UPDATE statement's token stream into a Cypher
    MATCH / WHERE / SET string.

    :param tokens: sqlparse tokens of the SQL statement
    :return: the assembled Cypher query string
    """
    set_fields = []
    match = "MATCH "
    where = " "
    for tok in tokens:
        if tok.ttype is None and isinstance(tok, sql.IdentifierList):
            # assignment list from the SET clause
            for ident in tok.get_identifiers():
                set_fields.append(str(ident))
        elif tok.ttype is None and type(tok) is sql.Identifier:
            # table reference -> Cypher node pattern (alias:Table)
            match += "({}:{}) ".format(str(tok.get_name()), str(tok.get_real_name()))
        elif tok.ttype is None and type(tok) is sql.Where:
            # WHERE clause carried over verbatim, minus the terminator
            where += str(tok).replace(";", "")
    return match + where + " SET " + ", ".join(set_fields)
def parse(tokens):
    """Convert a single-table UPDATE token stream to Cypher and print it.

    :param tokens: sqlparse tokens of the SQL statement
    :raises Exception: when the statement references more than one table
        (relationships are not supported yet)
    """
    if count_tables(tokens) != 1:
        raise Exception("Can not parse relationships now")
    cypher = value_condition(tokens)
    print(cypher)
if __name__ == '__main__':
    # Demo entry point: convert the sample UPDATE statement defined above
    # and print the resulting Cypher query.
    # (Removed ~26 lines of dead, commented-out token-exploration code.)
    parse(query_tokens)
"shunyang.li@unsw.edu.au"
] | shunyang.li@unsw.edu.au |
5f956a3f925ac4a9a724e8128b079d5b8afa2c82 | 45734abde30b437c2a1ba80653d7323e5c1d8c7f | /python/0320_generalized_abbreviation.py | f56e22143bc7bf3043f75dbf895dd29533b46079 | [] | no_license | rdtr/leetcode_solutions | 6629e03dd5b5fee15aaabe7f53204778f237ed96 | 51800d33c57e36ef62b6067d6f91a82c0e55dc6d | refs/heads/main | 2022-05-21T12:17:23.201832 | 2022-03-12T09:20:46 | 2022-03-12T09:20:46 | 80,395,988 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 717 | py | from collections import deque
class Solution:
def generateAbbreviations(self, word: str) -> List[str]:
res = []
self.helper(word, 0, 0, '', res)
return res
def helper(self, word, pos, length, cur, res):
if pos >= len(word):
if length > 0:
cur += str(length)
res.append(cur)
return
if length == 0: # just consume one character
self.helper(word, pos + 1, 0, cur + word[pos], res)
else: # perform abbr
self.helper(word, pos + 1, 0, cur + str(length) + word[pos], res)
# skip this character and increment abbr length
self.helper(word, pos + 1, length + 1, cur, res)
| [
"redtree.dev1112@gmail.com"
] | redtree.dev1112@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.