id stringlengths 2 8 | text stringlengths 16 264k | dataset_id stringclasses 1 value |
|---|---|---|
28231 | <reponame>chfw/gease
from mock import MagicMock, patch
from nose.tools import eq_
from gease.contributors import EndPoint
from gease.exceptions import NoGeaseConfigFound
class TestPublish:
    """Behaviour of EndPoint.get_all_contributors with mocked github APIs."""

    @patch("gease.contributors.get_token")
    @patch("gease.contributors.Api.get_public_api")
    def test_all_contributors(self, fake_api, get_token):
        # Without a gease config the public API client is used.
        get_token.side_effect = [NoGeaseConfigFound]
        listing = [
            {"login": "howdy", "url": "https://api.github.com/users/howdy"}
        ]
        detail = {"name": "<NAME>", "html_url": ""}
        fake_api.return_value = MagicMock(
            get=MagicMock(side_effect=[listing, detail])
        )
        contributors = EndPoint("test", "repo").get_all_contributors()
        eq_(contributors, [{"name": "<NAME>", "html_url": ""}])

    @patch("gease.contributors.get_token")
    @patch("gease.contributors.Api.get_public_api")
    def test_private_api(self, fake_api, get_token):
        # A user record with no real name falls back to the login.
        get_token.side_effect = [NoGeaseConfigFound]
        listing = [
            {"login": "howdy", "url": "https://api.github.com/users/howdy"}
        ]
        detail = {"name": None, "html_url": ""}
        fake_api.return_value = MagicMock(
            get=MagicMock(side_effect=[listing, detail])
        )
        contributors = EndPoint("test", "repo").get_all_contributors()
        eq_(contributors, [{"name": "howdy", "html_url": ""}])

    @patch("gease.contributors.get_token")
    @patch("gease.contributors.Api.get_api")
    def test_no_names(self, fake_api, _):
        # Same login fallback when the authenticated Api client is used.
        listing = [
            {"login": "howdy", "url": "https://api.github.com/users/howdy"}
        ]
        detail = {"name": None, "html_url": ""}
        fake_api.return_value = MagicMock(
            get=MagicMock(side_effect=[listing, detail])
        )
        contributors = EndPoint("test", "repo").get_all_contributors()
        eq_(contributors, [{"name": "howdy", "html_url": ""}])
| StarcoderdataPython |
3463500 | <reponame>hpu12138/pkg
import numpy as np
import pandas as pd
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
from matplotlib import gridspec
from scsim import CortexDataset, Sampler
import torch
from sklearn.mixture import GaussianMixture as GM
# No epoch override (None -> library defaults). -- presumably consumed downstream
n_epochs_all = None
save_path='data/'
# Load the cortex expression dataset; assumes the `data/` directory exists.
dataset = CortexDataset(save_path=save_path, total_genes=30000)
sampler= Sampler(dataset, n_label=7)  # 7 labels -- assumed cell types, TODO confirm
sampler.train()
n_sample=100
# Draw 100 samples and their labels from the trained sampler.
xs, labels=sampler.sample(n_sample)
| StarcoderdataPython |
5033001 | <gh_stars>0
import sys
import multiprocessing
import numpy as np
import pandas as pd
from HLTIO import IO
from HLTIO import preprocess
from HLTvis import vis
from HLTvis import postprocess
import xgboost as xgb
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import importlib
import pickle
import os
def doXGB(version, seed, seedname, tag, doLoad, stdTransPar=None):
    """Train (or load) an XGBoost seed classifier and produce diagnostic plots.

    Args:
        version: version string used in model / plot / scale-file names.
        seed: tuple of (feature DataFrame, label array).
        seedname: name of the seed collection being classified.
        tag: extra label (e.g. 'Barrel'/'Endcap') used in output names.
        doLoad: if True, load a previously trained model instead of training.
        stdTransPar: [mean, std] for a fixed standardisation; required when
            doLoad is True so inputs are scaled as at training time.
    """
    plotdir = 'plot_'+version
    # exist_ok avoids a race between the old isdir() check and makedirs().
    os.makedirs(plotdir, exist_ok=True)

    colname = list(seed[0].columns)
    print(colname)
    print(seedname+"|"+tag + r' C0: %d, C1: %d' %( (seed[1]==0).sum(), (seed[1]==1).sum() ) )

    x_train, x_test, y_train, y_test = preprocess.split(seed[0], seed[1])

    # A loaded model is only meaningful with the scaling used at training time.
    if doLoad and stdTransPar==None:
        print("doLoad is True but stdTransPar==None --> return")
        return

    if stdTransPar==None:
        x_train, x_test, x_mean, x_std = preprocess.stdTransform(x_train, x_test)
        # Persist the scale constants so a saved model can be re-applied later.
        with open("scalefiles/%s_%s_%s_scale.py" % (version, tag, seedname), "w") as f_scale:
            f_scale.write( "%s_%s_%s_ScaleMean = %s\n" % (version, tag, seedname, str(x_mean.tolist())) )
            f_scale.write( "%s_%s_%s_ScaleStd = %s\n" % (version, tag, seedname, str(x_std.tolist())) )
    else:
        x_train, x_test = preprocess.stdTransformFixed(x_train, x_test, stdTransPar)

    # Per-event weights compensating for class imbalance.
    y_wgtsTrain, y_wgtsTest, wgts = preprocess.computeClassWgt(y_train, y_test)
    dtrain = xgb.DMatrix(x_train, weight=y_wgtsTrain, label=y_train, feature_names=colname)
    dtest = xgb.DMatrix(x_test, weight=y_wgtsTest, label=y_test, feature_names=colname)
    evallist = [(dtest, 'eval'), (dtrain, 'train')]
    param = {
        'max_depth':6, #10
        'eta':0.03,
        'gamma':10, #35
        'alpha':1,
        'lambda':100,
        'subsample':0.9,
        'colsample_bytree':0.9,
        'min_child_weight':1,
        'objective':'binary:logistic',
        'eval_metric':'logloss',
    }
    param['tree_method'] = 'exact'
    param['nthread'] = 4
    num_round = 400 #1200

    if doLoad:
        bst = xgb.Booster()
        bst.load_model('model/'+version+'_'+tag+'_'+seedname+'.model')
        IO.print_params("%s_%s_%s" % (version, tag, seedname), bst.save_config())
        # Keep plots from a loaded model separate from freshly trained ones.
        plotdir += '_loaded'
        os.makedirs(plotdir, exist_ok=True)
    else:
        bst = xgb.Booster(param)
        bst = xgb.train(param, dtrain, num_round, evallist, early_stopping_rounds=100, verbose_eval=100)
        bst.save_model('model/'+version+'_'+tag+'_'+seedname+'.model')
        IO.print_params("%s_%s_%s" % (version, tag, seedname), bst.save_config())

    dTrainPredict = bst.predict(dtrain)
    dTestPredict = bst.predict(dtest)
    # Raw margins (pre-sigmoid) for the score plots below.
    dTrainPredictRaw = bst.predict(dtrain, output_margin=True)
    dTestPredictRaw = bst.predict(dtest, output_margin=True)
    labelTrain = postprocess.binaryLabel(dTrainPredict)
    labelTest = postprocess.binaryLabel(dTestPredict)

    # -- ROC -- #
    for cat in range(1,2):
        # Fix: np.int was removed in NumPy 1.24; use the builtin int as dtype.
        y_train_cat = np.asarray(y_train==cat, dtype=int)
        y_test_cat = np.asarray(y_test==cat, dtype=int)
        if ( y_train_cat.sum() < 1 ) or ( y_test_cat.sum() < 1 ): continue
        fpr_Train, tpr_Train, thr_Train, AUC_Train, fpr_Test, tpr_Test, thr_Test, AUC_Test = postprocess.calROC(
            dTrainPredict,
            dTestPredict,
            y_train_cat,
            y_test_cat
        )
        vis.drawROC( fpr_Train, tpr_Train, AUC_Train, fpr_Test, tpr_Test, AUC_Test, version+'_'+tag+'_'+seedname+r'_logROC_cat%d' % cat, plotdir)
        vis.drawROC2(fpr_Train, tpr_Train, AUC_Train, fpr_Test, tpr_Test, AUC_Test, version+'_'+tag+'_'+seedname+r'_linROC_cat%d' % cat, plotdir)
        vis.drawThr( thr_Train, tpr_Train, thr_Test, tpr_Test, version+'_'+tag+'_'+seedname+r'_logThr_cat%d' % cat, plotdir)
        vis.drawThr2( thr_Train, tpr_Train, thr_Test, tpr_Test, version+'_'+tag+'_'+seedname+r'_linThr_cat%d' % cat, plotdir)

    # -- Confusion matrix -- #
    confMat, confMatAbs = postprocess.confMat(y_test,labelTest)
    vis.drawConfMat(confMat, version+'_'+tag+'_'+seedname+'_testConfMatNorm', plotdir)
    vis.drawConfMat(confMatAbs,version+'_'+tag+'_'+seedname+'_testConfMat', plotdir, doNorm = False)
    confMatTrain, confMatTrainAbs = postprocess.confMat(y_train,labelTrain)
    vis.drawConfMat(confMatTrain, version+'_'+tag+'_'+seedname+'_trainConfMatNorm', plotdir)
    vis.drawConfMat(confMatTrainAbs,version+'_'+tag+'_'+seedname+'_trainConfMat', plotdir, doNorm = False)

    # -- Score distributions: probability, raw margin, sigmoid(margin) -- #
    TrainScoreCat = dTrainPredict
    TestScoreCat = dTestPredict
    TrainScoreCatSig = np.array( [ score for i, score in enumerate(TrainScoreCat) if y_train[i]==1 ] )
    TrainScoreCatBkg = np.array( [ score for i, score in enumerate(TrainScoreCat) if y_train[i]!=1 ] )
    vis.drawScore(TrainScoreCatSig, TrainScoreCatBkg, version+'_'+tag+'_'+seedname+r'_trainScore', plotdir)
    TestScoreCatSig = np.array( [ score for i, score in enumerate(TestScoreCat) if y_test[i]==1 ] )
    TestScoreCatBkg = np.array( [ score for i, score in enumerate(TestScoreCat) if y_test[i]!=1 ] )
    vis.drawScore(TestScoreCatSig, TestScoreCatBkg, version+'_'+tag+'_'+seedname+r'_testScore', plotdir)
    TrainScoreCat = dTrainPredictRaw
    TestScoreCat = dTestPredictRaw
    TrainScoreCatSig = np.array( [ score for i, score in enumerate(TrainScoreCat) if y_train[i]==1 ] )
    TrainScoreCatBkg = np.array( [ score for i, score in enumerate(TrainScoreCat) if y_train[i]!=1 ] )
    vis.drawScoreRaw(TrainScoreCatSig, TrainScoreCatBkg, version+'_'+tag+'_'+seedname+r'_trainScoreRaw', plotdir)
    TestScoreCatSig = np.array( [ score for i, score in enumerate(TestScoreCat) if y_test[i]==1 ] )
    TestScoreCatBkg = np.array( [ score for i, score in enumerate(TestScoreCat) if y_test[i]!=1 ] )
    vis.drawScoreRaw(TestScoreCatSig, TestScoreCatBkg, version+'_'+tag+'_'+seedname+r'_testScoreRaw', plotdir)
    TrainScoreCat = postprocess.sigmoid( dTrainPredictRaw )
    TestScoreCat = postprocess.sigmoid( dTestPredictRaw )
    TrainScoreCatSig = np.array( [ score for i, score in enumerate(TrainScoreCat) if y_train[i]==1 ] )
    TrainScoreCatBkg = np.array( [ score for i, score in enumerate(TrainScoreCat) if y_train[i]!=1 ] )
    vis.drawScore(TrainScoreCatSig, TrainScoreCatBkg, version+'_'+tag+'_'+seedname+r'_trainScoreRawSigm', plotdir)
    TestScoreCatSig = np.array( [ score for i, score in enumerate(TestScoreCat) if y_test[i]==1 ] )
    TestScoreCatBkg = np.array( [ score for i, score in enumerate(TestScoreCat) if y_test[i]!=1 ] )
    vis.drawScore(TestScoreCatSig, TestScoreCatBkg, version+'_'+tag+'_'+seedname+r'_testScoreRawSigm', plotdir)

    # -- Importance (only meaningful for a freshly trained model) -- #
    if not doLoad:
        gain = bst.get_score( importance_type='gain')
        cover = bst.get_score(importance_type='cover')
        vis.drawImportance(gain,cover,colname,version+'_'+tag+'_'+seedname+'_importance', plotdir)

    return
def run_quick(seedname):
    """Quickly train/evaluate one seed collection from a single ntuple file."""
    doLoad = False
    # Fix: the original doLoad branch referenced an undefined `version`
    # (NameError); define it once and pass it through consistently.
    version = 'vTEST'
    ntuple_path = '/home/wjun/MuonHLTML/data/DY_M50_Summer21/ntuple_1.root'
    tag = 'TESTBarrel'
    print("\n\nStart: %s|%s" % (seedname, tag))
    stdTrans = None
    if doLoad:
        # Module name now includes the version, matching the scale files
        # written by doXGB ("scalefiles/<version>_<tag>_<seedname>_scale.py").
        scalefile = importlib.import_module("scalefiles."+version+"_"+tag+"_"+seedname+"_scale")
        scaleMean = getattr(scalefile, version+"_"+tag+"_"+seedname+"_ScaleMean")
        scaleStd = getattr(scalefile, version+"_"+tag+"_"+seedname+"_ScaleStd")
        stdTrans = [ scaleMean, scaleStd ]
    seed = IO.readMinSeeds(ntuple_path, 'seedNtupler/'+seedname, 0.,99999., True, False)
    doXGB(version,seed,seedname,tag,doLoad,stdTrans)
def run(version, seedname, tag):
    """Train and evaluate one (version, seedname, tag) combination."""
    doLoad = False  # set to True to reuse a previously trained model
    isB = 'Barrel' in tag
    # Pre-pickled inputs are much faster than reading ROOT ntuples directly.
    ntuple_path = '/home/wjun/MuonHLTML/data/DY_M50*.pkl'
    usePkl = '.pkl' in ntuple_path

    stdTrans = None
    if doLoad:
        # Recover the standardisation constants written at training time.
        prefix = version + "_" + tag + "_" + seedname
        scalefile = importlib.import_module("scalefiles." + prefix + "_scale")
        stdTrans = [
            getattr(scalefile, prefix + "_ScaleMean"),
            getattr(scalefile, prefix + "_ScaleStd"),
        ]

    print("\n\nStart: %s|%s" % (seedname, tag))
    seed = IO.readMinSeeds(ntuple_path, 'seedNtupler/' + seedname, 0., 99999., isB, usePkl)
    doXGB(version, seed, seedname, tag, doLoad, stdTrans)
VER = 'Test_PatatrackSeedv2'
# Full list of seed collections; the next assignment narrows the run to two.
seedlist = ['NThltIterL3OI','NThltIter0','NThltIter2','NThltIter3','NThltIter0FromL1','NThltIter2FromL1','NThltIter3FromL1']
seedlist = ['NThltIter2', 'NThltIter2FromL1']#, 'NThltIter3', 'NThltIter3FromL1']
taglist = ['Barrel', 'Endcap']
# One (version, seed, tag) job per seed/tag combination.
seed_run_list = [ (VER, seed, tag) for tag in taglist for seed in seedlist ]
if __name__ == '__main__':
    from warnings import simplefilter
    simplefilter(action='ignore', category=FutureWarning)
    #run_quick('NThltIter2FromL1')
    # Fan the jobs out over 5 worker processes.
    pool = multiprocessing.Pool(processes=5)
    pool.starmap(run,seed_run_list)
    pool.close()
    pool.join()
    print('Finished')
| StarcoderdataPython |
9639601 | <reponame>pjgrandinetti/mrsimulator<gh_stars>10-100
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Coupled spin-1/2 (Static dipolar spectrum)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
¹³C-¹H static dipolar coupling simulation.
"""
# %%
import matplotlib.pyplot as plt
from mrsimulator import Simulator, SpinSystem
from mrsimulator.methods import BlochDecaySpectrum
from mrsimulator import signal_processing as sp
# sphinx_gallery_thumbnail_number = 1
# %%
# **Spin Systems**
#
# Create a 13C-1H coupled spin system.
spin_system = SpinSystem(
    sites=[
        {"isotope": "13C", "isotropic_chemical_shift": 0.0},
        {"isotope": "1H", "isotropic_chemical_shift": 0.0},
    ],
    # Direct 13C-1H dipolar coupling constant D, in Hz.
    couplings=[{"site_index": [0, 1], "dipolar": {"D": -2e4}}],
)
# %%
# **Methods**
#
# Create a BlochDecaySpectrum method.
method = BlochDecaySpectrum(
    channels=["13C"],
    magnetic_flux_density=9.4, # in T
    spectral_dimensions=[{"count": 2048, "spectral_width": 8.0e4}],
)
# %%
# **Simulator**
#
# Create the Simulator object and add the method and the spin system object.
sim = Simulator()
sim.spin_systems += [spin_system] # add the spin system.
sim.methods += [method] # add the method.
sim.run()
# %%
# **Post-Simulation Processing**
#
# Add post-simulation signal processing: 500 Hz exponential apodization
# applied in the time domain (IFFT -> apodize -> FFT).
processor = sp.SignalProcessor(
    operations=[
        sp.IFFT(),
        sp.apodization.Exponential(FWHM="500 Hz"),
        sp.FFT(),
    ]
)
processed_data = processor.apply_operations(data=sim.methods[0].simulation)
# %%
# **Plot**
#
plt.figure(figsize=(4.25, 3.0))
ax = plt.subplot(projection="csdm")
ax.plot(processed_data.real, color="black", linewidth=1)
ax.invert_xaxis()
plt.tight_layout()
plt.show()
| StarcoderdataPython |
3387669 | <reponame>kunalk3/Machine_Learning_using_Python<gh_stars>0
#---------------------------------------------------------------------
# File Name : MultilinearRegression.py
# Author : <NAME>.
# Description : Implementing MLR
# Date: : 13 Nov. 2020
# Version : V1.0
# Ref No : DS_Code_P_K07
#---------------------------------------------------------------------
# Multilinear Regression
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# loading the data
cars = pd.read_csv("cars.csv")
# preview the data (head(40) shows the first 40 rows)
cars.head(40) # to get top n rows use cars.head(10)
# Correlation matrix
cars.corr()
# we see there exists High collinearity between input variables especially between
# [Hp & SP] , [VOL,WT] so there exists collinearity problem
# Scatter plot between the variables along with histograms
import seaborn as sns
sns.pairplot(cars)
# columns names
cars.columns
# pd.tools.plotting.scatter_matrix(cars); -> also used for plotting all in one graph
# preparing model considering all the variables
import statsmodels.formula.api as smf # for regression model
# Preparing model
ml1 = smf.ols('MPG~WT+VOL+SP+HP',data=cars).fit() # regression model
# Getting coefficients of variables
ml1.params
# Summary
ml1.summary()
# p-values for WT,VOL are more than 0.05 and also we know that [WT,VOL] has high correlation value
# preparing model based only on Volume
ml_v=smf.ols('MPG~VOL',data = cars).fit()
ml_v.summary() # 0.271
# p-value <0.05 .. It is significant
# Preparing model based only on WT
ml_w=smf.ols('MPG~WT',data = cars).fit()
ml_w.summary() # 0.268
# Preparing model based only on WT & VOL
ml_wv=smf.ols('MPG~WT+VOL',data = cars).fit()
ml_wv.summary() # 0.264
# Both coefficients p-value became insignificant...
# So there may be a chance of considering only one among VOL & WT
# Checking whether data has any influential values
# influence index plots
import statsmodels.api as sm
sm.graphics.influence_plot(ml1)
# index 76 AND 78 is showing high influence so we can exclude that entire row
# NOTE(review): the comment above says rows 76 and 78, but the code drops
# rows 76 and 70 -- confirm which indices were intended.
# Studentized Residuals = Residual/standard deviation of residuals
cars_new=cars.drop(cars.index[[76,70]],axis=0)
#cars.drop(["MPG"],axis=1)
# X => A B C D
# X.drop(["A","B"],axis=1) # Dropping columns
# X.drop(X.index[[5,9,19]],axis=0)
#X.drop(["X1","X2"],aixs=1)
#X.drop(X.index[[0,2,3]],axis=0)
# Preparing model on the cleaned data
ml_new = smf.ols('MPG~WT+VOL+HP+SP',data = cars_new).fit()
# Getting coefficients of variables
ml_new.params
# Summary
ml_new.summary() # 0.806
# Confidence values 99%
print(ml_new.conf_int(0.01)) # 99% confidence level
# Predicted values of MPG
mpg_pred = ml_new.predict(cars_new[['WT','VOL','HP','SP']])
mpg_pred
cars_new.head()
# calculating VIF's values of independent variables (VIF = 1/(1-R^2) of
# regressing each predictor on the others; large VIF => collinearity)
rsq_hp = smf.ols('HP~WT+VOL+SP',data=cars_new).fit().rsquared
vif_hp = 1/(1-rsq_hp) # 16.33
rsq_wt = smf.ols('WT~HP+VOL+SP',data=cars_new).fit().rsquared
vif_wt = 1/(1-rsq_wt) # 564.98
rsq_vol = smf.ols('VOL~WT+SP+HP',data=cars_new).fit().rsquared
vif_vol = 1/(1-rsq_vol) # 564.84
rsq_sp = smf.ols('SP~WT+VOL+HP',data=cars_new).fit().rsquared
vif_sp = 1/(1-rsq_sp) # 16.35
# Storing vif values in a data frame
d1 = {'Variables':['Hp','WT','VOL','SP'],'VIF':[vif_hp,vif_wt,vif_vol,vif_sp]}
Vif_frame = pd.DataFrame(d1)
Vif_frame
# As weight is having higher VIF value, we are not going to include this prediction model
# Added varible plot
sm.graphics.plot_partregress_grid(ml_new)
# added varible plot for weight is not showing any significance
# final model: drop WT (highest VIF), keep VOL, SP, HP
final_ml= smf.ols('MPG~VOL+SP+HP',data = cars_new).fit()
final_ml.params
final_ml.summary() # 0.809
# As we can see that r-squared value has increased from 0.810 to 0.812.
mpg_pred = final_ml.predict(cars_new)
import statsmodels.api as sm
# added variable plot for the final model
sm.graphics.plot_partregress_grid(final_ml)
###### Linearity #########
# Observed values VS Fitted values
plt.scatter(cars_new.MPG,mpg_pred,c="r");plt.xlabel("observed_values");plt.ylabel("fitted_values")
# Residuals VS Fitted Values
plt.scatter(mpg_pred,final_ml.resid_pearson,c="r"),plt.axhline(y=0,color='blue');plt.xlabel("fitted_values");plt.ylabel("residuals")
######## Normality plot for residuals ######
# histogram
plt.hist(final_ml.resid_pearson) # Checking the standardized residuals are normally distributed
# QQ plot for residuals
import pylab
import scipy.stats as st
# Checking Residuals are normally distributed
st.probplot(final_ml.resid_pearson, dist="norm", plot=pylab)
############ Homoscedasticity #######
# Residuals VS Fitted Values (should show no funnel pattern)
plt.scatter(mpg_pred,final_ml.resid_pearson,c="r"),plt.axhline(y=0,color='blue');plt.xlabel("fitted_values");plt.ylabel("residuals")
### Splitting the data into train and test data
from sklearn.model_selection import train_test_split
cars_train,cars_test = train_test_split(cars_new,test_size = 0.2) # 20% size
# preparing the model on train data
model_train = smf.ols("MPG~HP+SP+VOL",data=cars_train).fit()
# train_data prediction
train_pred = model_train.predict(cars_train)
# train residual values
train_resid = train_pred - cars_train.MPG
# RMSE value for train data
train_rmse = np.sqrt(np.mean(train_resid*train_resid))
# prediction on test data set
test_pred = model_train.predict(cars_test)
# test residual values
test_resid = test_pred - cars_test.MPG
# RMSE value for test data (compare to train_rmse to judge overfitting)
test_rmse = np.sqrt(np.mean(test_resid*test_resid))
| StarcoderdataPython |
358758 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
import sqlite3
import re
import time,threading
import urllib2,os
import hashlib
import MySQLdb
import datetime
from multiprocessing import Pool
_db_host = '127.0.0.1'
_db_user = 'root'
_db_pwd = '<PASSWORD>'
_db_name = 'spider'
_db_table = 'spider_data2'
_down_dir = '/roobo/webserver/download2'
def read_image_info(read_num):
    """Fetch up to `read_num` rows that have not been downloaded yet.

    Returns a tuple of dict rows whose down_status == 0.
    """
    db = MySQLdb.connect(_db_host,_db_user,_db_pwd,_db_name,charset='utf8')
    try:
        cursor = db.cursor(cursorclass = MySQLdb.cursors.DictCursor)
        # Table names cannot be bound as DB-API parameters; the LIMIT value is
        # parameterized instead of interpolated into the SQL string.
        sql = "select * from %s where down_status=0 limit %%s" % _db_table
        cursor.execute(sql, (int(read_num),))
        rows = cursor.fetchall()
        cursor.close()
    finally:
        # Close the connection even if the query fails.
        db.close()
    return rows
def down_image(image_info):
    # Download one image row and record the outcome back into the DB.
    # NOTE(review): Python 2 code (print statements, urllib2). SQL is built by
    # string interpolation -- acceptable only while keyword/id originate from
    # our own database; consider parameterized queries.
    db = MySQLdb.connect(_db_host,_db_user,_db_pwd,_db_name,charset='utf8')
    cursor = db.cursor(cursorclass = MySQLdb.cursors.DictCursor)
    # Look up the keyword row; its id names the per-keyword target directory.
    sql = "select * from %s where keyword='%s' " % ('keywords', image_info['keyword'])
    cursor.execute(sql)
    row = cursor.fetchone()
    image_url = image_info['img_link']
    save_path_dir = _down_dir + '/' + str(row['id']) + '/'
    # File name is md5(url) plus the URL's extension ('jpg' when absent).
    image_ext = image_url[image_url.rindex('.')+1:]
    if image_ext == '':
        image_ext = 'jpg'
    image_name = hashlib.md5(image_url).hexdigest() + '.' + image_ext
    save_path = save_path_dir + image_name
    down_status = 1
    if os.path.exists(save_path_dir) == False:
        os.makedirs(save_path_dir)
    try:
        # Skip files that were already downloaded.
        if os.path.exists(save_path) == False:
            request = urllib2.Request(image_url)
            request.add_header('User-Agent','Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36')
            response = urllib2.urlopen(request, timeout=10)
            data = response.read()
            f = open(save_path, 'wb')
            f.write(data)
            f.close()
            print image_url,' done!'
    except:
        # Any failure (network, bad URL, IO) marks the row as failed (-1).
        down_status = -1
        save_path = ''
        print image_url, ' failed!'
    now = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
    sql = "update %s set down_status=%d , download_at='%s', local_path='%s' where id=%d" % (_db_table, down_status, now, save_path, int(image_info['id']))
    if down_status == 1 :
        # Bump the keyword's successful-download counter.
        sql2 = "update %s set download_count=download_count+1 where id=%d limit 1" % ('keywords', int(row['id']))
        cursor.execute(sql2)
    cursor.execute(sql)
    db.commit()
    cursor.close()
    db.close()
    return
if __name__=='__main__':
    start_t = int(time.time())
    read_num = 100  # rows fetched per batch
    process_num = 5  # parallel download workers
    image_list = read_image_info(read_num)
    # Drain batches until no rows with down_status == 0 remain; failed rows
    # are marked -1 so they are not re-selected.
    while image_list :
        pool = Pool(process_num)
        for key,image in enumerate(image_list):
            pool.apply_async(down_image,args=(image,))
        pool.close()
        pool.join()
        image_list = read_image_info(read_num)
    print 'all done'
print int(time.time()) - start_t | StarcoderdataPython |
# NOTE(review): Python 2 snippet; assumes `app` (Flask), `magic`, `abort`
# and `MagicException` are defined/imported elsewhere.
@app.route('/magic/<number>/')
def do_magic(number):
    try:
        print "About to do some magic with {0}".format(number)
        response = magic(number)
    except Exception as e:
        # Any failure becomes a 500 for the client.
        print "Got an exception {0} :(".format(e)
        abort(500, "Oops :(")
    else:
        # `else` runs only when no exception was raised.
        print "Magic done: {0}".format(response)
        return response
"""
Output:
About to do some magic with 5
Magic done: 37
About to do some magic with 6
About to do some magic with 0
About to do some magic with Tomas
About to do some magic with -1
Got an exception MagicException
Magic done: 6
Got an exception MagicException
Got an exception MagicException
Magic done: 0
"""
| StarcoderdataPython |
8115838 | from django.db import models
from django.contrib.auth.models import User
from simple_history.models import HistoricalRecords
class ColorPalette(models.Model):
    # A user-owned, optionally public palette of colors.
    name = models.CharField(max_length=255)
    user = models.ForeignKey(User, on_delete=models.CASCADE)  # owner; cascade-deleted with the user
    is_public = models.BooleanField(default=True)
    history = HistoricalRecords()  # change tracking via django-simple-history
    def __str__(self):
        return self.name
class DominantColor(models.Model):
    # A dominant color entry belonging to one palette.
    name = models.CharField(max_length=255)
    color_palette = models.ForeignKey(ColorPalette, related_name='dominant_colors', on_delete=models.CASCADE)
    def __str__(self):
        return self.name
class AccentColor(models.Model):
    # An accent color entry belonging to one palette.
    name = models.CharField(max_length=255)
    color_palette = models.ForeignKey(ColorPalette, related_name='accent_colors', on_delete=models.CASCADE)
    def __str__(self):
        return self.name
class FavouritePalette(models.Model):
    # Marks one palette as a favourite of one user.
    # NOTE(review): despite the plural field name, this is a ForeignKey (one
    # palette per row), not a ManyToManyField -- confirm intended cardinality.
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    favorite_palettes = models.ForeignKey(ColorPalette, related_name='favourite_palattes', on_delete=models.CASCADE)
    def __str__(self):
        return f'Favourites-{self.user.username}'
| StarcoderdataPython |
4980877 | <reponame>verifid/idtext<gh_stars>10-100
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import argparse
import cv2
from mocr import TextRecognizer, face_detection
def display_image(image, results, file_name):
    """Draw a red box around every detected text region, show the result in a
    window, and save it under screenshots/processed_<file_name>."""
    annotated = image.copy()
    for (x0, y0, x1, y1), _text in results:
        cv2.rectangle(annotated, (x0, y0), (x1, y1), (0, 0, 255), 2)
    cv2.imshow("Text Detection", annotated)
    cv2.imwrite("screenshots/processed_" + file_name, annotated)
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Meaningful Optical Character Recognition from identity cards with Deep Learning."
    )
    parser.add_argument("--image", type=str, help="Path to input image on file system.")
    parser.add_argument(
        "--east", type=str, help="Path to input EAST text detector on file system."
    )
    parser.add_argument(
        "--image-face", type=str, help="Path to input image on file system."
    )
    parser.add_argument(
        "--video-face", type=str, help="Path to input video on file system."
    )
    args = parser.parse_args()
    # Optional bash tab completion support
    try:
        import argcomplete

        argcomplete.autocomplete(parser)
    except ImportError:
        pass
    # NOTE(review): the parsed `args` are never used below; dispatch relies on
    # positional sys.argv indices, so the CLI argument ORDER matters (e.g. the
    # text-recognition path assumes "--image X --east Y").
    if sys.argv[1] == "--image-face":
        if len(sys.argv) < 3:
            print("Specify an image path")
            sys.exit(1)
        image_path = sys.argv[2]
        file_name = os.path.basename(image_path)
        # Detect a face in a still image and save the crop.
        face = face_detection.detect_face(image_path)
        cv2.imshow("Found profile", face)
        cv2.imwrite("screenshots/profile_" + file_name, face)
    elif sys.argv[1] == "--video-face":
        if len(sys.argv) < 3:
            print("Specify an video path")
            sys.exit(1)
        video_path = sys.argv[2]
        base = os.path.basename(video_path)
        file_name = os.path.splitext(base)[0]
        # Detect a face from video frames and save the crop as PNG.
        face = face_detection.detect_face_from_video(video_path)
        cv2.imshow("Found profile", face)
        print(file_name)
        cv2.imwrite("screenshots/profile_" + file_name + ".png", face)
    else:
        if len(sys.argv) < 4:
            print("Specify an image path and east path")
            sys.exit(1)
        image_path = sys.argv[2]
        east_path = sys.argv[4]
        # Full text-recognition pipeline: resize, EAST scoring, box decoding.
        text_recognizer = TextRecognizer(image_path, east_path)
        file_name = os.path.basename(image_path)
        (image, _, _) = text_recognizer.load_image()
        (resized_image, ratio_height, ratio_width, _, _) = text_recognizer.resize_image(
            image, 320, 320
        )
        (scores, geometry) = text_recognizer.geometry_score(east_path, resized_image)
        boxes = text_recognizer.boxes(scores, geometry)
        results = text_recognizer.get_results(boxes, image, ratio_height, ratio_width)
        display_image(image, results, file_name)
| StarcoderdataPython |
4977141 | from scipy.special import comb
from Binomial import binomialTree
from scipy.stats import norm
from scipy.optimize import root
from scipy.optimize import brentq
import math
class Liquidity(binomialTree):
    """Binomial-tree based pricing of liquidity discounts.

    Based on "Valuing a Liquidity Discount" (see illquidPrice).
    """

    def __init__(self, k=1, rf=0.05, steps=50, vol=0.2, ttm=1):
        # The tree must divide into k+1 equally sized rebalancing periods.
        if (steps % (k + 1) != 0):
            raise Exception("Invalid rebalance frequency", k)
        self.k = k
        self.n1 = steps / (k + 1)
        super(Liquidity, self).__init__(rf, steps, vol, ttm)

    @staticmethod
    def reverseSharpRatio(targetProb, rf, vol, t, steps):
        """Back out the Sharpe ratio implied by a target up-move probability."""
        dt = t / steps
        u = math.exp(math.sqrt(dt) * vol)
        d = 1 / u
        emut = targetProb * (u - d) + d
        mu = math.log(emut) / dt
        return (mu - rf) / vol

    def calculateBeta(self, stk0, opt0, endOptPrice, n):
        """Compute the option's beta w.r.t. the stock over an n-step span.

        Implements Equation 22 in the paper: beta = Cov(opt return, stock
        return) / Var(stock return) under the binomial probabilities.
        """
        # Fix: the original unpacked a single 0 into five names
        # ("a, b, c, d, e = 0"), which raises TypeError at runtime.
        sumOptPrice = sumOptReturn = sumStkReturn = 0.0
        crssOptStk = sum2StkReturn = 0.0
        # NOTE(review): range(n) skips the j == n (all up-moves) terminal
        # node; confirm whether endOptPrice holds n or n+1 entries.
        for j in range(n):
            stkReturn = math.pow(self.up, j) * math.pow(self.dn, (n - j))
            optReturn = endOptPrice[j] / opt0
            Prob = comb(n, j) * math.pow(self.pr, j) * math.pow((1 - self.pr), (n - j))
            sumOptPrice = sumOptPrice + endOptPrice[j] * Prob
            sumOptReturn = sumOptReturn + optReturn * Prob
            sumStkReturn = sumStkReturn + stkReturn * Prob
            crssOptStk = crssOptStk + optReturn * stkReturn * Prob
            sum2StkReturn = sum2StkReturn + stkReturn * stkReturn * Prob
        cov = crssOptStk - sumStkReturn * sumOptReturn
        varStkReturn = sum2StkReturn - sumStkReturn * sumStkReturn
        # Equation 22 in paper
        beta = cov / varStkReturn
        return beta

    def liquidPrice(self, s0, k1, k2, t1, t2, div):
        """Expected exercise value of a compound (call-on-call) structure:
        terminal nodes at t1 hold BS prices of a (k2, t2-t1) call."""
        self.Tree(s0=s0, func=lambda x: Liquidity.BSPricer(x, k2, self.rf, div, self.vol, t2 - t1, 1))
        return self.expectedResult(func=binomialTree.optionExercise(k1))

    def getLiquidPrice(self, k1, k2, t1, t2, div, marketCap):
        """Solve for the initial asset value that matches the market cap."""
        return root(lambda x: self.liquidPrice(x, k1, k2, t1, t2, div) - marketCap, [20]).x[0]

    @staticmethod
    def illquidPrice(a0, k, ttm, rf, sharpR, sigma):
        """Closed form of the illiquid price from "Valuing a Liquidity Discount".

        Args:
            k: 1/2 long-term debt + short-term debt.
            a0: initial firm asset value.
            ttm: time to maturity.
            rf: risk free rate.
            sharpR: Sharpe ratio.
            sigma: volatility.
        """
        mu = rf + sharpR * sigma
        dp = (math.log(a0 / k) + (mu + 0.5 * sigma * sigma) * ttm) / (sigma * math.sqrt(ttm))
        dd = dp - sigma * math.sqrt(ttm)
        dpp = dp + sigma * math.sqrt(ttm)
        EXT = a0 * math.exp(mu * ttm) * norm.cdf(dp) - k * norm.cdf(dd)
        EVT = a0 * math.exp(mu * ttm)
        EXTVT = math.pow(a0, 2) * math.exp((2 * mu + sigma * sigma) * ttm) * norm.cdf(dpp) - k * a0 * math.exp(
            mu * ttm) * norm.cdf(dp)
        cov = EXTVT - EXT * EVT
        var = math.pow(a0, 2) * math.exp(2 * mu * ttm) * (math.exp(sigma * sigma * ttm) - 1)
        beta = cov / var
        result = math.exp(-rf * ttm) * (EXT - beta * (EVT - a0 * math.exp(rf * ttm)))
        return result

    @staticmethod
    def BSPricer(s, k, rf, div, vol, T, pcFlag):
        """Black-Scholes price; pcFlag is +1 for a call, -1 for a put."""
        d1 = (math.log(s / k) + (rf - div + vol * vol / 2) * T) / (vol * math.sqrt(T))
        d2 = d1 - vol * math.sqrt(T)
        n1 = norm.cdf(d1 * pcFlag, 0, 1)
        n2 = norm.cdf(d2 * pcFlag, 0, 1)
        result = s * math.exp(-div * T) * pcFlag * n1 - k * math.exp(-rf * T) * pcFlag * n2
        return result

    @staticmethod
    def calibrateWealth(adjustmentFactor, rf, div, vol, T, pcFlag, equityTarket):
        """Find the wealth w such that BS(w, w*adjustmentFactor, ...) matches
        the target equity value, via Brent root finding."""
        return brentq(lambda w: Liquidity.BSPricer(w, w * adjustmentFactor, rf, div, vol, T, pcFlag) - equityTarket,
                      0.01, 200 * equityTarket)
| StarcoderdataPython |
1792862 | import yaml
def read_config(config_path):
    """Load a YAML configuration file and return its parsed content.

    Uses yaml.safe_load: calling yaml.load without an explicit Loader is
    deprecated and can construct arbitrary Python objects from untrusted input.
    """
    with open(config_path, "r") as f_config:
        return yaml.safe_load(f_config)
| StarcoderdataPython |
4982935 | <reponame>relax-space/python-xxm
import sys,os,traceback,json
import dto
"""
练习文件的读写
python.exe .\second_step\s2.py
参考:
https://www.w3cschool.cn/pythonlearn/dfmt1pve.html
https://www.runoob.com/python/file-methods.html
"""
class File:
    """Small helper exercising plain-text and JSON file read/write."""

    def __init__(self):
        pass

    def read(self):
        """Read and return the whole demo text file as a string."""
        path = "second_step/temp_data/base_test.txt"
        # The old unreachable `return ""` after the with-block was removed:
        # open() failures propagate as exceptions anyway.
        with open(path, mode="rt", encoding="utf-8") as fp:
            return fp.read()

    def write(self, content):
        """Overwrite the demo text file with `content`."""
        path = "second_step/temp_data/base_test.txt"
        with open(path, mode="wt", encoding="utf-8") as fp:
            fp.write(content)

    def readList(self, path, mode="rt", encoding='utf-8', object_hook=None):
        """Load JSON from `path`.

        Returns (data, None) on success or (None, traceback_string) on
        failure. `object_hook` optionally converts decoded dicts into
        custom objects (see json.load).
        """
        try:
            with open(path, mode, encoding=encoding) as fp:
                if object_hook == None:
                    contents = json.load(fp)
                else:
                    contents = json.load(fp, object_hook=object_hook)
            return contents, None
        except Exception:
            return None, traceback.format_exc()

    def writeList(self, list, dirPath, fileName, mode="w", encoding='utf-8', default=None, ensure_ascii=False):
        """Dump a JSON-serialisable object to dirPath/fileName.

        Returns None on success or a traceback string on failure.
        `default` optionally serialises custom objects (see json.dump).
        NOTE: the first parameter name shadows the builtin `list`; kept
        for backward compatibility with existing keyword callers.
        """
        fileName = "%s/%s" % (dirPath, fileName)
        try:
            # exist_ok avoids a race between an exists() check and makedirs().
            os.makedirs(dirPath, exist_ok=True)
            with open(fileName, mode, encoding=encoding) as fp:
                if default == None:
                    json.dump(list, fp, ensure_ascii=ensure_ascii)
                else:
                    json.dump(list, fp, default=default, ensure_ascii=ensure_ascii)
            return None
        except Exception:
            return traceback.format_exc()

    def base(self):
        """Demo 1: plain text read/write round-trip."""
        print("===> 1.基本 的read 和 write")
        self.write('{"name":"hello world"}')
        content = self.read()
        print(f"type:{type(content)},content:{content}")

    def rwJson(self):
        """Demo 2: JSON dict round-trip."""
        print("===> 2.json 的read 和 write")
        dict1 = {"name":"你好世界"}
        err = self.writeList(dict1, "second_step/temp_data", "file_test.json")
        if err != None:
            print(err)
            return
        content, err = self.readList("second_step/temp_data/file_test.json")
        if err != None:
            print(err)
            return
        print(f"type:{type(content)},content:{content}")

    def rwDto(self):
        """Demo 3: DTO round-trip via custom (de)serialisation hooks."""
        print("===> 3.dto 的read 和 write")
        fruitDto = dto.FruitDto()
        fruitDto.name = "apple"
        fruitDto.price = 6.5
        err = self.writeList(fruitDto, "second_step/temp_data", "dto_test.json", default=dto.FruitDto.toDict)
        if err != None:
            print(err)
            return
        content, err = self.readList("second_step/temp_data/dto_test.json", object_hook=dto.FruitDto.fromDict)
        if err != None:
            print(err)
            return
        print(f"type:{type(content)},content:{content.__dict__}")
if __name__ == "__main__":
    # Run the read/write demos in order.
    f=File()
    f.base()
    f.rwJson()
f.rwDto() | StarcoderdataPython |
8172651 | <gh_stars>10-100
import bpy
from bpy.props import *
from ...nodes.BASE.node_base import RenderNodeBase
# from ...utility import source_attr
from mathutils import Color, Vector
def update_node(self, context):
    """Rebuild this node's input sockets to match the selected operation."""
    # MULTIPLY takes a string plus an integer repeat count.
    if self.operate_type == 'MULTIPLY':
        self.create_input('RenderNodeSocketInt', 'count', 'Count')
    else:
        self.remove_input('count')
        # NOTE(review): this create looks redundant -- 'value2' is created or
        # removed again by the membership check just below.
        self.create_input('RenderNodeSocketString', 'value2', 'Value')
    # Two-operand operations keep the second string input.
    if self.operate_type in {'SUB', 'JOIN', 'SPACE', 'DOT', 'UNDERSCORE'}:
        self.create_input('RenderNodeSocketString', 'value2', 'Value')
    else:
        self.remove_input('value2')
    # REPLACE needs the old/new substrings.
    if self.operate_type == 'REPLACE':
        self.create_input('RenderNodeSocketString', 'replace_old', 'Old')
        self.create_input('RenderNodeSocketString', 'replace_new', 'New')
    else:
        self.remove_input('replace_old')
        self.remove_input('replace_new')
    # SLICE needs the source string plus from/to indices.
    if self.operate_type == 'SLICE':
        self.create_input('RenderNodeSocketString', 'value1', 'Value')
        self.create_input('RenderNodeSocketInt', 'slice_from', 'From')
        self.create_input('RenderNodeSocketInt', 'slice_to', 'To')
    else:
        self.remove_input('slice_from')
        self.remove_input('slice_to')
    # ABS/REL path conversions need no extra sockets.
    if self.operate_type in {'ABS', 'REL'}:
        pass
    self.execute_tree()
class RenderNodeStringOperate(RenderNodeBase):
    """Render-node that combines or transforms strings.

    Supports concatenation with various separators, replace/multiply/slice
    functions, and blend-file path conversions (see `operate_type`).
    """
    bl_idname = 'RenderNodeStringOperate'
    bl_label = 'String Operate'

    # Enum items with an empty identifier ('') act as section separators
    # in Blender's enum dropdown.
    operate_type: EnumProperty(
        name='Type',
        items=[
            ('', 'Concat', ''),
            ('JOIN', 'Join', ''),
            ('SPACE', 'Space', ''),
            ('DOT', 'Dot', ''),
            ('UNDERSCORE', 'Underscore', ''),
            ('', 'Function', ''),
            ('REPLACE', 'Replace', ''),
            ('MULTIPLY', 'Multiply', ''),
            ('SLICE', 'Slice', ''),
            ('', 'Path', ''),
            ('SUB', 'Join Path', ''),
            ('ABS', 'Abs Path', ''),
            ('REL', 'Rel Path', ''),
        ],
        update=update_node,
        default='SUB'
    )

    def init(self, context):
        # Default socket layout for the default (two-operand) operation.
        self.create_input('RenderNodeSocketString', 'value1', 'Value')
        self.create_input('RenderNodeSocketString', 'value2', 'Value')
        self.create_output('RenderNodeSocketString', 'output', "Output")

    def draw_label(self):
        # Show the human-readable name of the current operation as the label.
        name = self.bl_rna.properties['operate_type'].enum_items[self.operate_type].name
        return name

    def draw_buttons(self, context, layout):
        layout.prop(self, 'operate_type', text='')

    def process(self, context, id, path):
        # NOTE: `id` shadows the builtin; part of the node API signature.
        # First operand; None only if the 'value1' socket is missing.
        s1 = self.inputs['value1'].get_value() if 'value1' in self.inputs else None

        if self.operate_type == 'JOIN':
            s2 = self.inputs['value2'].get_value()
            self.outputs[0].set_value(s1 + s2)
        elif self.operate_type == 'SUB':
            # Path join with a forward slash.
            s2 = self.inputs['value2'].get_value()
            self.outputs[0].set_value(s1 + '/' + s2)
        elif self.operate_type == 'REL':
            self.outputs[0].set_value(bpy.path.relpath(s1))
        elif self.operate_type == 'ABS':
            self.outputs[0].set_value(bpy.path.abspath(s1))
        elif self.operate_type == 'SPACE':
            s2 = self.inputs['value2'].get_value()
            self.outputs[0].set_value(s1 + ' ' + s2)
        elif self.operate_type == 'DOT':
            s2 = self.inputs['value2'].get_value()
            self.outputs[0].set_value(s1 + '.' + s2)
        elif self.operate_type == 'UNDERSCORE':
            s2 = self.inputs['value2'].get_value()
            self.outputs[0].set_value(s1 + '_' + s2)
        elif self.operate_type == 'MULTIPLY':
            # String repetition: s1 repeated `count` times.
            s2 = self.inputs['count'].get_value()
            self.outputs[0].set_value(s1 * s2)
        elif self.operate_type == 'REPLACE':
            old = self.inputs['replace_old'].get_value()
            new = self.inputs['replace_new'].get_value()
            res = s1.replace(old, new)
            self.outputs[0].set_value(res)
        elif self.operate_type == 'SLICE':
            from_id = self.inputs['slice_from'].get_value()
            to_id = self.inputs['slice_to'].get_value()
            # NOTE(review): slicing a str never raises IndexError in Python,
            # so this guard appears vestigial (out-of-range bounds clamp).
            try:
                res = s1[from_id:to_id]
            except IndexError:
                res = None
            self.outputs[0].set_value(res)
def register():
    # Standard Blender add-on hook: make the node type available.
    bpy.utils.register_class(RenderNodeStringOperate)
def unregister():
    # Standard Blender add-on hook: remove the node type on unload.
    bpy.utils.unregister_class(RenderNodeStringOperate)
| StarcoderdataPython |
158815 | <gh_stars>0
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import binascii
import json
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization
import mock
import webtest.app
from refstack.tests import api
from refstack import db
class TestProfileEndpoint(api.FunctionalTest):
    """Test case for the 'profile' API endpoint."""

    URL = '/v1/profile/'

    def setUp(self):
        super(TestProfileEndpoint, self).setUp()
        # Register a known user so the mocked get_user_id below resolves
        # to an existing profile.
        self.user_info = {
            'openid': 'test-open-id',
            'email': '<EMAIL>',
            'fullname': '<NAME>'
        }
        db.user_save(self.user_info)

    @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id')
    def test_get(self, mock_get_user):
        """GET /v1/profile/ returns the stored info plus an is_admin flag."""
        response = self.get_json(self.URL)
        self.user_info['is_admin'] = False
        self.assertEqual(self.user_info, response)

    @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id')
    def test_pubkeys(self, mock_get_user):
        """Test '/v1/profile/pubkeys' API endpoint."""
        url = self.URL + 'pubkeys'
        key = rsa.generate_private_key(
            public_exponent=65537,
            key_size=1024,
            backend=default_backend()
        )
        # Fix: the legacy key.signer()/update()/finalize() context API was
        # deprecated and later removed from `cryptography`; the one-shot
        # sign() call produces the same PKCS#1 v1.5 / SHA-256 signature.
        sign = key.sign('signature'.encode('utf-8'),
                        padding.PKCS1v15(), hashes.SHA256())
        pubkey = key.public_key().public_bytes(
            serialization.Encoding.OpenSSH,
            serialization.PublicFormat.OpenSSH
        ).decode('utf-8')
        body = {'raw_key': pubkey,
                'self_signature': binascii.b2a_hex(sign).decode('utf-8')}
        json_params = json.dumps(body)

        # POST endpoint
        pubkey_id = self.post_json(url, params=json_params)

        # GET endpoint
        user_pubkeys = self.get_json(url)
        self.assertEqual(1, len(user_pubkeys))
        # Stored key drops the 'ssh-rsa' prefix; it is exposed separately.
        self.assertEqual(pubkey.split()[1], user_pubkeys[0]['pubkey'])
        self.assertEqual('ssh-rsa', user_pubkeys[0]['format'])
        self.assertEqual(pubkey_id, user_pubkeys[0]['id'])

        delete_url = '{}/{}'.format(url, pubkey_id)

        # DELETE endpoint
        response = self.delete(delete_url)
        self.assertEqual(204, response.status_code)
        user_pubkeys = self.get_json(url)
        self.assertEqual(0, len(user_pubkeys))

        # DELETE endpoint - nonexistent pubkey
        self.assertRaises(webtest.app.AppError, self.delete, delete_url)
| StarcoderdataPython |
128023 | <gh_stars>0
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by <NAME> and <NAME>
# Email: <EMAIL>
# Details: SiamFC training script
# ------------------------------------------------------------------------------
import _init_paths
import argparse
import pprint
from torch.utils.data import DataLoader
from core.config import config, update_config
from dataset.siamfc import SiamFCDataset
from utils.utils import create_logger
eps = 1e-5
def parse_args():
    """Parse command-line arguments for SiamFC training.

    Two-phase parse: ``--cfg`` is read first via parse_known_args() so the
    YAML config can be loaded with update_config() before the remaining
    options are parsed.
    """
    parser = argparse.ArgumentParser(description='Train SiamFC')
    # general
    parser.add_argument('--cfg', required=True, type=str, default='/home/syh/siamdw/experiments/train/SiamFC.yaml', help='yaml configure file name')
    args, rest = parser.parse_known_args()
    # update config
    update_config(args.cfg)
    # Options parsed after the config is loaded.
    parser.add_argument('--gpus', type=str, help='gpus')
    parser.add_argument('--workers', type=int, help='num of dataloader workers')
    args = parser.parse_args()
    return args
def reset_config(config, args):
    """Override the GPU list and worker count in *config* from CLI args.

    Falsy values (None, empty string, 0) leave the existing setting intact.
    """
    overrides = (('GPUS', args.gpus), ('WORKERS', args.workers))
    for attr, value in overrides:
        if value:
            setattr(config, attr, value)
def main():
    """Build the SiamFC dataloader and pull a few batches (pipeline check)."""
    # [*] args, loggers and tensorboard
    args = parse_args()
    reset_config(config, args)
    logger, _, tb_log_dir = create_logger(config, 'SIAMFC', 'train')
    logger.info(pprint.pformat(args))
    logger.info(pprint.pformat(config))
    # parallel
    gpus = [int(i) for i in config.GPUS.split(',')]
    gpu_num = len(gpus)
    logger.info('GPU NUM: {:2d}'.format(len(gpus)))
    logger.info('model prepare done')
    # build dataloader, benefit to tracking
    train_set = SiamFCDataset(config)
    # Batch size scales with the number of GPUs listed in config.GPUS.
    train_loader = DataLoader(train_set, batch_size=config.SIAMFC.TRAIN.BATCH * gpu_num, num_workers=config.WORKERS,pin_memory=True, sampler=None)
    # Iterate only the first 5 batches, then stop.
    # NOTE(review): `iter` and `input` shadow Python builtins here.
    nCount=0
    for iter, input in enumerate(train_loader):
        # measure data loading time
        # input and output/loss
        # Batch layout per the indexing: [0]=template, [1]=search, [2]=cls label.
        label_cls = input[2]
        template = input[0]
        search = input[1]
        nCount=nCount+1
        if nCount==5:
            break
        # Separator printed between fetched batches.
        print('=====')
# Script entry: run the dataloader check when invoked directly.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
156033 | <reponame>Orion-Hunter/SaladoEmpreendedor
from pydantic import BaseModel
from typing import Optional, List
from datetime import datetime
####Schemas Servidor
class ServidorBase(BaseModel):
    """Base schema for a servant (servidor): id, name, password, department."""
    MATRICULA: int
    NOME: str
    SENHA: str
    SECRETARIA: str
class ServidorCreate(ServidorBase):
    """Creation payload for a servant.

    NOTE(review): SENHA and SECRETARIA are already declared on ServidorBase
    with the same types; these redeclarations are redundant — confirm they
    were not meant to differ (e.g. be optional).
    """
    SENHA: str
    SECRETARIA: str
class Servidor(ServidorBase):
    """Read schema for a servant; ORM mode allows loading from DB objects."""
    MATRICULA: int  # redeclared from the base with the same type
    class Config:
        orm_mode = True
###Schemas pessoa
class PessoaBase(BaseModel):
    """Base schema for a person/company: contact data, CPF/CNPJ, category."""
    NOME : str
    CIDADE : str
    ENDERECO : str
    BAIRRO : str
    CONTATO: str
    CPFCNPJ: str
    CATEGORIA: str
class PessoaCreate(PessoaBase):
    """Creation payload for a person; identical to the base fields."""
    pass
class Pessoa(PessoaBase):
    """Read schema for a person, including the DB-assigned register number."""
    NUMERO_CADASTRO : int
    class Config:
        orm_mode = True
#schema procedimento
class ProcedimentoBase(BaseModel):
    """Base schema for a procedure and the department that offers it."""
    NOME_PROCEDIMENTO: str
    SECRETARIA: str
class ProcedimentoCreate(ProcedimentoBase):
    """Creation payload for a procedure; identical to the base fields."""
    pass
class Procedimento(ProcedimentoBase):
    """Read schema for a procedure, including its DB id."""
    ID_PROCEDIMENTO: int
    class Config:
        orm_mode = True
##schema atividades
class AtividadeBase(BaseModel):
    """Base schema for an economic activity (name + CNAE code)."""
    NOME_ATIVIDADE: str
    CNAE: str
class AtividadeCreate(AtividadeBase):
    """Creation payload for an activity; identical to the base fields."""
    pass
class Atividade(AtividadeBase):
    """Read schema for an activity, including its DB id."""
    ID_ATIVIDADE: int
    class Config:
        orm_mode = True
#schema atendimento
class AtendimentoBase(BaseModel):
    """Base schema for a service record: date plus servant/person FKs."""
    DATA: datetime
    ID_SERVIDOR: int
    ID_PESSOA: int
class AtendimentoCreate(AtendimentoBase):
    """Creation payload: base fields plus related activities and procedures."""
    ATIVIDADES: List[Atividade]
    PROCEDIMENTOS: List[Procedimento]
class Atendimento(AtendimentoBase):
    """Read schema for a service record, with its related entities expanded.

    Fix: previously inherited from AtividadeBase, which carries activity
    fields (NOME_ATIVIDADE, CNAE) rather than the service-record fields
    (DATA, ID_SERVIDOR, ID_PESSOA). AtendimentoBase matches the
    Base/Create/read pattern used by every other entity in this module.
    """
    ID_ATENDIMENTO: int
    SERVIDOR: Servidor
    PESSOA: Pessoa

    class Config:
        orm_mode = True
11337560 | <reponame>hutomadotAI/Research-Experiments
import tensorflow as tf
from layers import dropout, _last_relevant
class GRU:
    """Stack of unidirectional cuDNN GRU layers (TF1 graph mode).

    Each layer owns a CudnnGRU cell, a trainable initial state tiled over
    the batch, and a dropout mask built once at construction; the mask
    shape [1, batch, features] broadcasts over time, so the same units are
    dropped at every step (variational-style dropout).
    """
    def __init__(self, num_layers, num_units, batch_size, input_size, keep_prob=1.0,
                 is_train=None, seed=3435, scope=None):
        # NOTE: `scope` is unused here; scoping happens in __call__.
        self.num_layers = num_layers
        self.grus = []
        self.inits = []
        self.dropout_mask = []
        for layer in range(num_layers):
            # First layer consumes the raw input; later layers consume
            # the previous layer's num_units-wide output.
            input_size_ = input_size if layer == 0 else num_units
            gru = tf.contrib.cudnn_rnn.CudnnGRU(1, num_units, seed=seed)
            # Learned initial hidden state, tiled across the batch.
            init = tf.tile(tf.Variable(
                tf.zeros([1, 1, num_units])), [1, batch_size, 1])
            mask = dropout(tf.ones([1, batch_size, input_size_], dtype=tf.float32),
                           keep_prob=keep_prob, is_train=is_train, mode=None)
            self.grus.append(gru)
            self.inits.append(init)
            self.dropout_mask.append(mask)

    def __call__(self, inputs, seq_len, concat_layers=True, use_last=False, scope='gru'):
        # cuDNN RNNs expect time-major tensors: [time, batch, features].
        outputs = [tf.transpose(inputs, [1, 0, 2])]
        for layer in range(self.num_layers):
            gru = self.grus[layer]
            init = self.inits[layer]
            mask = self.dropout_mask[layer]
            with tf.variable_scope("{}_{}".format(scope, layer)):
                out, _ = gru(outputs[-1] * mask, initial_state=(init, ))
                outputs.append(out)
        if concat_layers:
            # Concatenate every layer's output along the feature axis.
            res = tf.concat(outputs[1:], axis=2)
        else:
            res = outputs[-1]
        # Back to batch-major for the caller.
        res = tf.transpose(res, [1, 0, 2])
        if use_last:
            # Also return the output at each sequence's true last position.
            last = _last_relevant(res, seq_len)
            return last, res
        else:
            return res
class BiGRU:
    """Stack of bidirectional cuDNN GRU layers (TF1 graph mode).

    Per layer: separate forward/backward CudnnGRU cells, trainable initial
    states tiled over the batch, and per-direction dropout masks that
    broadcast over time. The backward pass is realized by reversing each
    sequence (up to its true length), running a forward GRU, and
    reversing back.
    """
    def __init__(self, num_layers, num_units, batch_size, input_size, keep_prob=1.0,
                 is_train=None, seed=3435, scope=None):
        self.num_layers = num_layers
        self.grus = []
        self.inits = []
        self.dropout_mask = []
        for layer in range(num_layers):
            # Later layers consume the 2*num_units-wide fw+bw concat.
            input_size_ = input_size if layer == 0 else 2 * num_units
            gru_fw = tf.contrib.cudnn_rnn.CudnnGRU(1, num_units, seed=seed)
            gru_bw = tf.contrib.cudnn_rnn.CudnnGRU(1, num_units, seed=seed)
            init_fw = tf.tile(tf.Variable(
                tf.zeros([1, 1, num_units])), [1, batch_size, 1])
            init_bw = tf.tile(tf.Variable(
                tf.zeros([1, 1, num_units])), [1, batch_size, 1])
            mask_fw = dropout(tf.ones([1, batch_size, input_size_], dtype=tf.float32),
                              keep_prob=keep_prob, is_train=is_train, mode=None)
            mask_bw = dropout(tf.ones([1, batch_size, input_size_], dtype=tf.float32),
                              keep_prob=keep_prob, is_train=is_train, mode=None)
            self.grus.append((gru_fw, gru_bw, ))
            self.inits.append((init_fw, init_bw, ))
            self.dropout_mask.append((mask_fw, mask_bw, ))

    def __call__(self, inputs, seq_len, concat_layers=True, use_last=False, scope='gru'):
        # Time-major layout for cuDNN: [time, batch, features].
        outputs = [tf.transpose(inputs, [1, 0, 2])]
        for layer in range(self.num_layers):
            gru_fw, gru_bw = self.grus[layer]
            init_fw, init_bw = self.inits[layer]
            mask_fw, mask_bw = self.dropout_mask[layer]
            with tf.variable_scope("fw_{}_{}".format(scope, layer)):
                out_fw, _ = gru_fw(
                    outputs[-1] * mask_fw, initial_state=(init_fw, ))
            with tf.variable_scope("bw_{}_{}".format(scope, layer)):
                # Reverse each sequence up to its true length, run the GRU,
                # then reverse the outputs back into original order.
                inputs_bw = tf.reverse_sequence(
                    outputs[-1] * mask_bw, seq_lengths=seq_len, seq_axis=0, batch_axis=1)
                out_bw, _ = gru_bw(inputs_bw, initial_state=(init_bw, ))
                out_bw = tf.reverse_sequence(
                    out_bw, seq_lengths=seq_len, seq_axis=0, batch_axis=1)
            outputs.append(tf.concat([out_fw, out_bw], axis=2))
        if concat_layers:
            res = tf.concat(outputs[1:], axis=2)
        else:
            res = outputs[-1]
        # Back to batch-major.
        res = tf.transpose(res, [1, 0, 2])
        if use_last:
            last = _last_relevant(res, seq_len)
            return last, res
        else:
            return res
class LSTM:
    """Stack of unidirectional cuDNN LSTM layers (TF1 graph mode).

    Mirrors GRU above, but keeps a second trainable per-layer state
    (`inits2`) because CudnnLSTM carries a two-component initial state —
    presumably (h, c); confirm against tf.contrib.cudnn_rnn docs.
    """
    def __init__(self, num_layers, num_units, batch_size, input_size, keep_prob=1.0,
                 is_train=None, seed=3435, scope=None):
        self.num_layers = num_layers
        self.lstms = []
        self.inits = []
        self.inits2 = []
        self.dropout_mask = []
        for layer in range(num_layers):
            input_size_ = input_size if layer == 0 else num_units
            lstm = tf.contrib.cudnn_rnn.CudnnLSTM(1, num_units, seed=seed)
            init = tf.tile(tf.Variable(
                tf.zeros([1, 1, num_units])), [1, batch_size, 1])
            init2 = tf.tile(tf.Variable(
                tf.zeros([1, 1, num_units])), [1, batch_size, 1])
            mask = dropout(tf.ones([1, batch_size, input_size_], dtype=tf.float32),
                           keep_prob=keep_prob, is_train=is_train, mode=None)
            self.lstms.append(lstm)
            self.inits.append(init)
            self.inits2.append(init2)
            self.dropout_mask.append(mask)

    def __call__(self, inputs, seq_len, concat_layers=True, use_last=False, scope='lstm'):
        # Time-major layout for cuDNN: [time, batch, features].
        outputs = [tf.transpose(inputs, [1, 0, 2])]
        for layer in range(self.num_layers):
            lstm = self.lstms[layer]
            init = self.inits[layer]
            init2 = self.inits2[layer]
            mask = self.dropout_mask[layer]
            with tf.variable_scope("{}_{}".format(scope, layer)):
                out, _ = lstm(outputs[-1] * mask, initial_state=(init, init2))
                outputs.append(out)
        if concat_layers:
            res = tf.concat(outputs[1:], axis=2)
        else:
            res = outputs[-1]
        # Back to batch-major.
        res = tf.transpose(res, [1, 0, 2])
        if use_last:
            last = _last_relevant(res, seq_len)
            return last, res
        else:
            return res
class BiLSTM:
    """Stack of bidirectional cuDNN LSTM layers (TF1 graph mode).

    Mirrors BiGRU above, with a second trainable state pair (`inits2`)
    per direction for CudnnLSTM's two-component initial state.
    """
    def __init__(self, num_layers, num_units, batch_size, input_size, keep_prob=1.0,
                 is_train=None, seed=3435, scope=None):
        self.num_layers = num_layers
        self.lstms = []
        self.inits = []
        self.inits2 = []
        self.dropout_mask = []
        for layer in range(num_layers):
            # Later layers consume the 2*num_units-wide fw+bw concat.
            input_size_ = input_size if layer == 0 else 2 * num_units
            lstm_fw = tf.contrib.cudnn_rnn.CudnnLSTM(1, num_units, seed=seed)
            lstm_bw = tf.contrib.cudnn_rnn.CudnnLSTM(1, num_units, seed=seed)
            init_fw = tf.tile(tf.Variable(
                tf.zeros([1, 1, num_units])), [1, batch_size, 1])
            init_fw2 = tf.tile(tf.Variable(
                tf.zeros([1, 1, num_units])), [1, batch_size, 1])
            init_bw = tf.tile(tf.Variable(
                tf.zeros([1, 1, num_units])), [1, batch_size, 1])
            init_bw2 = tf.tile(tf.Variable(
                tf.zeros([1, 1, num_units])), [1, batch_size, 1])
            mask_fw = dropout(tf.ones([1, batch_size, input_size_], dtype=tf.float32),
                              keep_prob=keep_prob, is_train=is_train, mode=None)
            mask_bw = dropout(tf.ones([1, batch_size, input_size_], dtype=tf.float32),
                              keep_prob=keep_prob, is_train=is_train, mode=None)
            self.lstms.append((lstm_fw, lstm_bw,))
            self.inits.append((init_fw, init_bw,))
            self.inits2.append((init_fw2, init_bw2))
            self.dropout_mask.append((mask_fw, mask_bw,))

    def __call__(self, inputs, seq_len, concat_layers=True, use_last=False, scope='lstm'):
        # Time-major layout for cuDNN: [time, batch, features].
        outputs = [tf.transpose(inputs, [1, 0, 2])]
        for layer in range(self.num_layers):
            lstm_fw, lstm_bw = self.lstms[layer]
            init_fw, init_bw = self.inits[layer]
            init_fw2, init_bw2 = self.inits2[layer]
            mask_fw, mask_bw = self.dropout_mask[layer]
            with tf.variable_scope("fw_{}_{}".format(scope, layer)):
                out_fw, _ = lstm_fw(
                    outputs[-1] * mask_fw, initial_state=(init_fw, init_fw2))
            with tf.variable_scope("bw_{}_{}".format(scope, layer)):
                # Reverse to sequence end, run forward LSTM, reverse back.
                inputs_bw = tf.reverse_sequence(
                    outputs[-1] * mask_bw, seq_lengths=seq_len, seq_axis=0, batch_axis=1)
                out_bw, _ = lstm_bw(inputs_bw, initial_state=(init_bw, init_bw2))
                out_bw = tf.reverse_sequence(
                    out_bw, seq_lengths=seq_len, seq_axis=0, batch_axis=1)
            outputs.append(tf.concat([out_fw, out_bw], axis=2))
        if concat_layers:
            res = tf.concat(outputs[1:], axis=2)
        else:
            res = outputs[-1]
        # Back to batch-major.
        res = tf.transpose(res, [1, 0, 2])
        if use_last:
            last = _last_relevant(res, seq_len)
            return last, res
        else:
            return res
return res
| StarcoderdataPython |
4816056 | <reponame>Euromance/pycopy
# It's not possible to delete a global var at runtime in strict mode.
# Delete-then-rebind of a module global: legal in normal mode; pycopy's
# strict mode rejects the `del` (see the comment above).
gvar = 1
del gvar
gvar = 2
def __main__():
    """Attempt to delete a global through globals() and report the outcome."""
    print("in __main__")
    global gvar
    # In the current implementation, TypeError is thrown. This is considered
    # an implementation detail and may change later to e.g. RuntimeError.
    try:
        del globals()["gvar"]
    except:
        # Bare except is deliberate: the raised type is unspecified (see above).
        print("Exception1")
    print(gvar)
| StarcoderdataPython |
6565197 | from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import LargeBinary
from sqlalchemy import String
from chainerui import database
class Bindata(database.BASE):
    """SQLAlchemy model for a binary asset payload attached to an asset row."""
    __tablename__ = 'bindata'

    id = Column(Integer, primary_key=True)
    asset_id = Column(Integer, ForeignKey('asset.id'))
    name = Column(String(512))   # file name; extension drives mimetype()
    tag = Column(String(512))
    note = Column(String(512))
    # NOTE(review): the length here is the float 1e7, not an int — confirm
    # SQLAlchemy/your dialect accepts a float LargeBinary length.
    content = Column(LargeBinary(1e7))

    def __init__(self, asset_id=None, name=None, tag=None, note=None,
                 content=None):
        self.asset_id = asset_id
        self.name = name
        self.tag = tag
        self.note = note
        self.content = content

    def __repr__(self):
        return '<Bindata id: %r />' % (self.id)

    def mimetype(self):
        """Map the file-name extension to a MIME type.

        Only PNG and JPEG are supported; anything else raises ValueError.
        """
        ext = self.name.split('.')[-1].lower()
        if ext == 'png':
            return 'image/png'
        elif ext in ['jpg', 'jpeg']:
            return 'image/jpeg'
        else:
            raise ValueError('"%s" is not support' % ext)

    @property
    def serialize(self):
        """Dict form of the row for API responses (content deliberately omitted)."""
        # omit content to reduce transport size
        return {
            'id': self.id,
            'asset_id': self.asset_id,
            'name': self.name,
            'tag': self.tag,
            'note': self.note
        }
| StarcoderdataPython |
9696760 | <reponame>krzjoa/sciquence
import string
def load_txt(path):
    """Read a text file and return the list of its lines (newlines kept)."""
    with open(path, mode='r') as handle:
        return list(handle)
def remove_punctuation(s):
    """Return *s* with every ASCII punctuation character removed."""
    # Fix: str.translate(None, chars) is the Python 2 API and raises
    # TypeError on Python 3; build a deletion table with str.maketrans.
    return s.translate(str.maketrans('', '', string.punctuation))
def word2idx(path):
    """Tokenize a text file into index sequences plus a vocabulary.

    Each non-blank line is lower-cased, stripped of punctuation, split on
    whitespace, and mapped to token ids. Ids 0 and 1 are reserved for
    'START' and 'END'.

    Returns:
        (sentences, vocab): list of id lists, and the token->id mapping.
    """
    # Renamed the local (was `word2idx`) so it no longer shadows this function.
    vocab = {'START': 0, 'END': 1}
    current_idx = 2
    sentences = []
    # Fix: open the file in a context manager so the handle is closed.
    with open(path) as f:
        for line in f:
            line = line.strip()
            if line:
                tokens = remove_punctuation(line.lower()).split()
                sentence = []
                for t in tokens:
                    if t not in vocab:
                        vocab[t] = current_idx
                        current_idx += 1
                    sentence.append(vocab[t])
                sentences.append(sentence)
    return sentences, vocab
6657190 | if __name__ == "__main__":
import sys
sys.path.insert(0, ".")
from zzgui.qt5.zzapp import ZzApp
from zzgui.qt5.zzform import ZzForm as ZzForm
from zzgui.qt5.zzform import zzMess
from zzgui.zzmodel import ZzCursorModel
from zzdb.schema import ZzDbSchema
from zzdb.db import ZzDb
from zzdb.cursor import ZzCursor
from random import randint
def mock_data_load(db: ZzDb):
    """Seed the demo "customers" table with rows for ids 1..99."""
    vip_mark = {0: "", 1: "*"}
    for x in range(1, 100):
        status = x % 3 + 1
        row = {
            "customer_id": x,
            "name": f"Customer {x}{str(randint(0,600)*6)}",
            "vip": vip_mark[x % 2],
            "combo_status": status,
            "list_status": status,
            "radio_status": status,
        }
        db.insert("customers", row)
class DemoApp(ZzApp):
    """Minimal zzgui database application: a customers catalog over SQLite."""

    def on_start(self):
        # Open the customers grid immediately on startup.
        self.customers()

    def create_database(self):
        # In-memory SQLite: data lives only for the process lifetime.
        self.db = ZzDb("sqlite3", database_name=":memory:")

    def on_init(self):
        """Build menus, derive the DB schema from the form, and seed demo data."""
        self.create_database()
        self.add_menu("File|About", lambda: zzMess("First application!"))
        self.add_menu("File|-")
        self.add_menu("File|Exit", self.close, toolbar=1)
        self.add_menu("Catalogs|Customers", self.customers, toolbar=1)
        # The table schema is derived from the form's control definitions.
        data_schema = ZzDbSchema()
        for x in self.form_customers().get_table_schema():
            data_schema.add(**x)
        self.db.set_schema(data_schema)
        mock_data_load(self.db)
        # self.customers()

    def form_customers(self):
        """Define the Customers form layout, controls, and data model."""
        form = ZzForm("Customers")
        form.add_control(name="customer_id", label="Customer Id", datatype="int", pk="*")
        form.add_control("name", "Name", datatype="char", datalen=100)
        # "/h", "/s", "/" controls are layout markers (rows/spacers/columns).
        form.add_control("/h", "3333")
        form.add_control("ddd1", "T1", datatype="char", datalen=5)
        form.add_control("ddd2", "T2", datatype="char", datalen=5)
        form.add_control("summa", "Summa", datatype="num", datalen=15, datadec=2, pic="F")
        form.add_control("/s")
        form.add_control("/")
        form.add_control("vip", "VIP", datatype="char", datalen=1, control="check", pic="VIP client")
        # Shared field specs for the three status widget families below.
        status_control_num = {"datatype": "int", "datalen": 1, "pic": "active;frozen;blocked"}
        status_control_char = {"datatype": "char", "datalen": 15, "pic": "active;frozen;blocked"}
        form.add_control("radio_status", "Num Radio Status", control="radio", **status_control_num)
        form.add_control("radio_status_char", "Char Radio Status", control="radio", **status_control_char)
        form.add_control("combo_status", "Num Combo Status", control="combo", **status_control_num)
        # NOTE(review): this second combo reuses the name "combo_status" —
        # the radio/list pairs use a *_char suffix; likely meant
        # "combo_status_char". Confirm intended behavior.
        form.add_control("combo_status", "Char Combo Status", control="combo", **status_control_char)
        form.add_control("/")
        form.add_control("/h", "Group box title")
        form.add_control("list_status", "Num List Status", control="list", **status_control_num)
        form.add_control("list_status_char", "Char List Status", control="list", **status_control_char)
        cursor: ZzCursor = self.db.table(table_name="customers")
        model = ZzCursorModel(cursor)
        form.set_model(model)
        # Enable the standard create/read/update/delete actions.
        form.actions.add_action("/crud")
        return form

    def customers(self):
        self.form_customers().show_mdi_modal_grid()
def demo():
    """Create the demo application and enter its event loop."""
    app = DemoApp("zzgui - the database app")
    app.run()
# Run the demo when executed as a script.
if __name__ == "__main__":
    demo()
| StarcoderdataPython |
1910301 | import logging
import requests
import json
from operator import itemgetter
import urllib.parse
logger = logging.getLogger(__name__)
def _get_cf_url(self):
return self.get_apibase() + "/custom-fields/objects"
def supported_cf_object_types(self):
    """Return the names of object types that accept custom fields.

    The set is static per release, so it is computed once and cached on
    the instance.
    """
    if not hasattr(self, "_cf_object_types"):
        logger.debug("retrieving object types")
        items = self.get_cf_objects().get('items', [])
        self._cf_object_types = [entry['name'] for entry in items]
    return self._cf_object_types
def get_cf_objects(self):
    """Return (and cache) the custom-field objects document.

    The document is static for a given release, so it is fetched at most
    once per client instance.
    """
    url = self._get_cf_url()
    if not hasattr(self, "_cf_objects"):
        logger.debug("retrieving objects")
        self._cf_objects = self.execute_get(url).json()
    return self._cf_objects
def _get_cf_object_url(self, object_name):
for cf_object in self.get_cf_objects().get('items', []):
if cf_object['name'].lower() == object_name.lower():
return cf_object['_meta']['href']
def get_cf_object(self, object_name):
    """Fetch the custom-field object descriptor for *object_name*."""
    assert object_name in self.supported_cf_object_types(), "Object name {} not one of the supported types ({})".format(object_name, self.supported_cf_object_types())

    object_url = self._get_cf_object_url(object_name)
    return self.execute_get(object_url).json()
def _get_cf_obj_rel_path(self, object_name):
return object_name.lower().replace(" ", "-")
def create_cf(self, object_name, field_type, description, label, position,
              active=True, initial_options=None):
    '''
    Create a custom field for the given object type (e.g. "Project", "Project Version") using the field_type and other parameters.
    Initial options are needed for field types like multi-select where the multiple values to choose from must also be provided.
        initial_options = [{"label":"val1", "position":0}, {"label":"val2", "position":1}]
    '''
    # Fix: default was a shared mutable list ([]); use None as the default.
    assert isinstance(position, int) and position >= 0, "position must be an integer that is greater than or equal to 0"
    assert field_type in ["BOOLEAN", "DATE", "DROPDOWN", "MULTISELECT", "RADIO", "TEXT", "TEXTAREA"]

    # Only option-based field types accept initialOptions.
    types_using_initial_options = ["DROPDOWN", "MULTISELECT", "RADIO"]

    post_url = self._get_cf_object_url(object_name) + "/fields"
    # (Removed an unused local that called _get_cf_obj_rel_path.)
    cf_request = {
        "active": active,
        "description": description,
        "label": label,
        "position": position,
        "type": field_type,
    }
    if field_type in types_using_initial_options and initial_options:
        cf_request.update({"initialOptions": initial_options})
    response = self.execute_post(post_url, data=cf_request)
    return response
def delete_cf(self, object_name, field_id):
    """Remove custom field *field_id* from the given object type
    (Project, Project Version, Component, ...).

    WARNING: deletion is irreversible — any data stored in the custom
    field may be lost, so use with caution.
    """
    assert object_name in self.supported_cf_object_types(), "You must supply a supported object name that is in {}".format(self.supported_cf_object_types())

    target = self._get_cf_object_url(object_name) + "/fields/{}".format(field_id)
    return self.execute_delete(target)
def get_custom_fields(self, object_name):
    """Return the custom-field definitions for one object type
    (Project, Project Version, Component, ...)."""
    assert object_name in self.supported_cf_object_types(), "You must supply a supported object name that is in {}".format(self.supported_cf_object_types())

    fields_url = self._get_cf_object_url(object_name) + "/fields"
    return self.execute_get(fields_url).json()
def get_cf_values(self, obj):
    """Fetch every custom-field value attached to *obj*.

    *obj* is the JSON document of a project, project-version, component,
    etc.; its "custom-fields" link is followed.
    """
    return self.execute_get(self.get_link(obj, "custom-fields")).json()
def get_cf_value(self, obj, field_id):
    """Fetch one custom-field value (by field id) from *obj*.

    *obj* is the JSON document of a project, project-version, component, etc.
    """
    value_url = self.get_link(obj, "custom-fields") + "/{}".format(field_id)
    return self.execute_get(value_url).json()
def put_cf_value(self, cf_url, new_cf_obj):
    """Write back a modified custom-field value object.

    Typical flow: call get_cf_value(), edit the returned object, then call
    this with the value's URL and the edited payload.
    """
    return self.execute_put(cf_url, new_cf_obj)
| StarcoderdataPython |
"""Pytorch Resnet_RS
This file contains pytorch implementation of Resnet_RS architecture from paper
"Revisiting ResNets: Improved Training and Scaling Strategies"
(https://arxiv.org/pdf/2103.07579.pdf)
"""
from functools import partial
import torch.nn as nn
import torch.nn.functional as F
from .base import StemBlock, BasicBlock, Bottleneck, Downsample
from .util import get_pretrained_weights
# Names of the ResNet-RS variants with published pretrained checkpoints.
PRETRAINED_MODELS = [
    'resnetrs50',
    'resnetrs101',
    'resnetrs152',
    'resnetrs200'
]

# Checkpoint download URLs (GitHub release assets). The filename suffix is
# presumably a content hash — cf. check_hash=True in create_pretrained.
PRETRAINED_URLS = {
    'resnetrs50': 'https://github.com/nachiket273/pytorch_resnet_rs/releases/download/v.0.0.1/resnetrs50_c578f2df.pth',
    'resnetrs101': 'https://github.com/nachiket273/pytorch_resnet_rs/releases/download/v.0.0.1/resnetrs101_7c6d6621.pth',
    'resnetrs152': 'https://github.com/nachiket273/pytorch_resnet_rs/releases/download/v.0.0.1/resnetrs152_3c858ed0.pth',
    'resnetrs200': 'https://github.com/nachiket273/pytorch_resnet_rs/releases/download/v.0.0.1/resnetrs200_fddd5b5f.pth',
}

# Default architecture/config shared by all pretrained variants; per-variant
# overrides are applied in ResnetRS._get_cfg.
DEFAULT_CFG = {
    'in_ch': 3,
    'num_classes': 1000,
    'stem_width': 32,
    'down_kernel_size': 1,
    'actn': partial(nn.ReLU, inplace=True),
    'norm_layer': nn.BatchNorm2d,
    'zero_init_last_bn': True,
    'seblock': True,
    'reduction_ratio': 0.25,
    'dropout_ratio': 0.25,
    'conv1': 'conv1.conv1.0',
    'stochastic_depth_rate': 0.0,
    'classifier': 'fc'
}
class Resnet(nn.Module):
    """ResNet-RS backbone ("Revisiting ResNets", arXiv:2103.07579).

    Stem + four residual stages + global average pooling + optional dropout
    + linear classifier.
    """
    def __init__(self, block, layers, num_classes=1000, in_ch=3, stem_width=64,
                 down_kernel_size=1, actn=nn.ReLU, norm_layer=nn.BatchNorm2d,
                 seblock=True, reduction_ratio=0.25, dropout_ratio=0.,
                 stochastic_depth_ratio=0., zero_init_last_bn=True):
        super().__init__()
        self.num_classes = num_classes
        self.norm_layer = norm_layer
        self.actn = actn
        self.dropout_ratio = float(dropout_ratio)
        self.stochastic_depth_ratio = stochastic_depth_ratio
        self.zero_init_last_bn = zero_init_last_bn
        self.conv1 = StemBlock(in_ch, stem_width, norm_layer, actn)
        # Per-stage base widths; actual widths are channel * block.expansion.
        channels = [64, 128, 256, 512]
        # stem_width*2 is passed as the first stage's input width — the stem
        # presumably doubles its width; confirm in .base.StemBlock.
        self.make_layers(block, layers, channels, stem_width*2,
                         down_kernel_size, seblock, reduction_ratio)
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.fc = nn.Linear(512 * block.expansion, num_classes, bias=True)

    def make_layers(self, block, nlayers, channels, inplanes, kernel_size=1,
                    seblock=True, reduction_ratio=0.25):
        """Build stages layer1..layer4, each an nn.Sequential of blocks."""
        tot_nlayers = sum(nlayers)
        # NOTE(review): this running counter is clobbered by the inner
        # `for layer_idx in range(nlayer)` below, so drop_ratio scales with
        # the *within-stage* index rather than a network-wide block index —
        # confirm against the paper's stochastic-depth schedule.
        layer_idx = 0
        for idx, (nlayer, channel) in enumerate(zip(nlayers, channels)):
            name = "layer" + str(idx+1)
            # Only the first stage keeps stride 1; later stages downsample.
            stride = 1 if idx == 0 else 2
            downsample = None
            if stride != 1 or inplanes != channel * block.expansion:
                downsample = Downsample(inplanes, channel * block.expansion,
                                        kernel_size=kernel_size, stride=stride,
                                        norm_layer=self.norm_layer)
            blocks = []
            for layer_idx in range(nlayer):
                # Downsampling/striding applies only to a stage's first block.
                downsample = downsample if layer_idx == 0 else None
                stride = stride if layer_idx == 0 else 1
                # Linear stochastic-depth schedule (see NOTE above).
                drop_ratio = (self.stochastic_depth_ratio*layer_idx
                              / (tot_nlayers-1))
                blocks.append(block(inplanes, channel, stride, self.norm_layer,
                                    self.actn, downsample, seblock,
                                    reduction_ratio, drop_ratio,
                                    self.zero_init_last_bn))
                inplanes = channel * block.expansion
            layer_idx += 1
            self.add_module(*(name, nn.Sequential(*blocks)))

    def init_weights(self):
        """He-init convolutions and reset BN to identity.

        Not called by the constructor; invoke explicitly when training
        from scratch.
        """
        for _, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                nn.init.kaiming_normal_(module.weight, mode='fan_out',
                                        nonlinearity='relu')
            if isinstance(module, nn.BatchNorm2d):
                nn.init.ones_(module.weight)
                nn.init.zeros_(module.bias)

    def forward(self, x):
        x = self.conv1(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.avg_pool(x)
        x = x.flatten(1, -1)
        # Dropout before the classifier, only when a ratio was configured.
        if self.dropout_ratio > 0.:
            x = F.dropout(x, p=self.dropout_ratio, training=self.training)
        x = self.fc(x)
        return x
class ResnetRS():
    """Factory for ResNet-RS models.

    `create_model` builds an arbitrary configuration; `create_pretrained`
    builds one of the published variants and loads its released weights.
    """
    def __init__(self):
        super().__init__()

    @classmethod
    def create_model(cls, block, layers, num_classes=1000, in_ch=3,
                     stem_width=64, down_kernel_size=1,
                     actn=partial(nn.ReLU, inplace=True),
                     norm_layer=nn.BatchNorm2d, seblock=True,
                     reduction_ratio=0.25, dropout_ratio=0.,
                     stochastic_depth_rate=0.0,
                     zero_init_last_bn=True):
        """Build a Resnet from an explicit block type and stage layout."""
        return Resnet(block, layers, num_classes=num_classes, in_ch=in_ch,
                      stem_width=stem_width, down_kernel_size=down_kernel_size,
                      actn=actn, norm_layer=norm_layer, seblock=seblock,
                      reduction_ratio=reduction_ratio,
                      dropout_ratio=dropout_ratio,
                      stochastic_depth_ratio=stochastic_depth_rate,
                      zero_init_last_bn=zero_init_last_bn)

    @classmethod
    def list_pretrained(cls):
        """Return the names of the published pretrained variants."""
        return PRETRAINED_MODELS

    @classmethod
    def _is_valid_model_name(cls, name):
        """True if *name* (whitespace/case-insensitively) names a pretrained variant."""
        name = name.strip()
        name = name.lower()
        return name in PRETRAINED_MODELS

    @classmethod
    def _get_url(cls, name):
        """Return the checkpoint URL for a (normalized) pretrained model name."""
        return PRETRAINED_URLS[name]

    @classmethod
    def _get_default_cfg(cls):
        # Fix: return a copy. _get_cfg mutates the returned dict (block,
        # layers, stochastic_depth_rate); handing out the module-level
        # DEFAULT_CFG itself let those mutations leak into every later call
        # (e.g. resnetrs200's drop rate 0.1 would stick for other variants).
        return dict(DEFAULT_CFG)

    @classmethod
    def _get_cfg(cls, name):
        """Return the full architecture config for a pretrained variant name."""
        cfg = ResnetRS._get_default_cfg()
        cfg['block'] = Bottleneck
        if name == 'resnetrs50':
            cfg['layers'] = [3, 4, 6, 3]
        elif name == 'resnetrs101':
            cfg['layers'] = [3, 4, 23, 3]
        elif name == 'resnetrs152':
            cfg['layers'] = [3, 8, 36, 3]
        elif name == 'resnetrs200':
            cfg['layers'] = [3, 24, 36, 3]
            cfg['stochastic_depth_rate'] = 0.1
        return cfg

    @classmethod
    def create_pretrained(cls, name, in_ch=0, num_classes=0, drop_rate=0.0):
        """Build a pretrained ResNet-RS and load its published weights.

        in_ch/num_classes of 0 mean "use the variant's defaults"; a positive
        drop_rate overrides the variant's stochastic-depth rate.
        """
        # Fix: normalize the name the same way _is_valid_model_name does,
        # so e.g. "ResnetRS50 " no longer passes validation and then
        # KeyErrors in the URL/config lookups.
        name = name.strip().lower()
        if not ResnetRS._is_valid_model_name(name):
            raise ValueError('Available pretrained models: ' +
                             ', '.join(PRETRAINED_MODELS))

        cfg = ResnetRS._get_cfg(name)
        in_ch = cfg['in_ch'] if in_ch == 0 else in_ch
        num_classes = cfg['num_classes'] if num_classes == 0 else num_classes
        cfg['strict'] = True
        if drop_rate > 0.0:
            cfg['stochastic_depth_rate'] = drop_rate

        url = ResnetRS._get_url(name)
        model = Resnet(cfg['block'], cfg['layers'], num_classes=num_classes,
                       in_ch=in_ch, stem_width=cfg['stem_width'],
                       down_kernel_size=cfg['down_kernel_size'],
                       actn=cfg['actn'], norm_layer=cfg['norm_layer'],
                       seblock=cfg['seblock'],
                       dropout_ratio=cfg['dropout_ratio'],
                       reduction_ratio=cfg['reduction_ratio'],
                       stochastic_depth_ratio=cfg['stochastic_depth_rate'],
                       zero_init_last_bn=cfg['zero_init_last_bn'])
        state_dict = get_pretrained_weights(url, cfg, num_classes, in_ch,
                                            check_hash=True)
        model.load_state_dict(state_dict, strict=cfg['strict'])
        return model
| StarcoderdataPython |
"""
A few global defintions
"""
from typing import TypeVar
from apischema import schema
#: A generic Type for use in type hints
T = TypeVar("T")
def desc(description: str):
    """Return an apischema ``schema`` annotation carrying *description*.

    Intended as annotation metadata for our Entity-derived types.
    """
    return schema(description=description)
| StarcoderdataPython |
9656481 | <filename>CondCore/PopCon/test/PopConEffExampleTargetDB.py
import FWCore.ParameterSet.Config as cms
# cmsRun configuration: populate the target conditions DB (pop_test.db) with
# four example efficiency payloads via the PopCon framework.
process = cms.Process("ProcessOne")
process.load("CondCore.DBCommon.CondDBCommon_cfi")
process.CondDBCommon.connect = 'sqlite_file:pop_test2.db'

# Route INFO-and-above messages to stdout.
process.MessageLogger = cms.Service("MessageLogger",
    cout = cms.untracked.PSet(
        threshold = cms.untracked.string('INFO')
    ),
    destinations = cms.untracked.vstring('cout')
)

# Single empty IOV "event" keyed on run number — enough to trigger each
# analyzer exactly once.
process.source = cms.Source("EmptyIOVSource",
    firstValue = cms.uint64(1),
    lastValue = cms.uint64(1),
    timetype = cms.string('runnumber'),
    interval = cms.uint64(1)
)

# Output service writing payloads under tag 'Example_tag1', with a separate
# sqlite log DB.
process.PoolDBOutputService = cms.Service("PoolDBOutputService",
    process.CondDBCommon,
    withWrapper = cms.untracked.bool(True),
    logconnect = cms.untracked.string('sqlite_file:log.db'),
    timetype = cms.untracked.string('runnumber'),
    toPut = cms.VPSet(cms.PSet(
        record = cms.string('ThisJob'),
        tag = cms.string('Example_tag1')
    )
    )
)

# Four example payloads: alternating Pt/Eta efficiency parameterizations with
# increasing 'since' run numbers, all targeting pop_test.db.
process.Test1 = cms.EDAnalyzer("ExPopConEfficiency",
    record = cms.string('ThisJob'),
    Source = cms.PSet(
        params = cms.untracked.vdouble(0.1, 0.95, 1.0, 5.5),
        since = cms.untracked.int64(701),
        type = cms.untracked.string('Pt')
    ),
    targetDBConnectionString = cms.untracked.string('sqlite_file:pop_test.db'),
    loggingOn = cms.untracked.bool(True),
    IsDestDbCheckedInQueryLog = cms.untracked.bool(True)
)
process.Test2 = cms.EDAnalyzer("ExPopConEfficiency",
    record = cms.string('ThisJob'),
    Source = cms.PSet(
        params = cms.untracked.vdouble(0.85, 0.0, 0.9, 2.3),
        since = cms.untracked.int64(930),
        type = cms.untracked.string('Eta')
    ),
    targetDBConnectionString = cms.untracked.string('sqlite_file:pop_test.db'),
    loggingOn = cms.untracked.bool(True),
    IsDestDbCheckedInQueryLog = cms.untracked.bool(True)
)
process.Test3 = cms.EDAnalyzer("ExPopConEfficiency",
    record = cms.string('ThisJob'),
    Source = cms.PSet(
        params = cms.untracked.vdouble(0.92, 0.0, 0.8, 2.5),
        since = cms.untracked.int64(1240),
        type = cms.untracked.string('Eta')
    ),
    targetDBConnectionString = cms.untracked.string('sqlite_file:pop_test.db'),
    loggingOn = cms.untracked.bool(True),
    IsDestDbCheckedInQueryLog = cms.untracked.bool(True)
)
process.Test4 = cms.EDAnalyzer("ExPopConEfficiency",
    record = cms.string('ThisJob'),
    Source = cms.PSet(
        params = cms.untracked.vdouble(0.1, 0.95, 1.0, 9.5),
        since = cms.untracked.int64(1511),
        type = cms.untracked.string('Pt')
    ),
    targetDBConnectionString = cms.untracked.string('sqlite_file:pop_test.db'),
    loggingOn = cms.untracked.bool(True),
    IsDestDbCheckedInQueryLog = cms.untracked.bool(True)
)

# Run the four analyzers in sequence.
process.p = cms.Path(process.Test1 +
                     process.Test2 +
                     process.Test3 +
                     process.Test4
                     )
# process.p = cms.Path(process.TestN)
| StarcoderdataPython |
6628144 | import pytest
import whwreader.whwreader as whwreader
from whwreader.whwreader import Reading
def test_transform():
    """transform_reading parses a raw sensor string into a Reading,
    converting the millisecond timestamp and applying the per-sensor
    time offset registered in whwreader."""
    # Fixture: known offsets for the two test sensors.
    whwreader.__sensor_time_offset['boris'] = 0
    whwreader.__sensor_time_offset['charles'] = 1546804623.6360931

    raw = "name=boris::time=2134457::temp=25.5::humid=56.8"
    want = Reading('boris', 2134.457, {'temp': 25.5, 'humid': 56.8}, sensor_unknown=False)
    assert whwreader.transform_reading(raw) == want

    # Offset sensor: time arrives in ms and is shifted by the stored offset.
    raw = "name=charles::time=600::temp=32.5::humid=22.6"
    want = Reading('charles', 1546804623.6360931 + 0.6, readings={'temp': 32.5, 'humid': 22.6})
    assert whwreader.transform_reading(raw) == want
| StarcoderdataPython |
1733634 | from pathlib import Path
from tqdm import tqdm
import tensorflow as tf
from modules.esrgan import rrdb_net
from modules.lr_scheduler import MultiStepLR
from modules.data import load_dataset
from modules.losses import get_pixel_loss
# --- Weights & Biases setup --------------------------------------------------
HAS_WANDB_ACCOUNT = True
PROJECT = 'esrgan-tf2'
import wandb
# Anonymous login lets the script run without a configured W&B account.
if not HAS_WANDB_ACCOUNT:
    wandb.login(anonymous='allow')
else:
    wandb.login()

# --- Generator optimizer hyper-parameters ------------------------------------
INITIAL_LR = 2e-4
LR_RATE = 0.5                       # decay factor applied at each LR_STEPS boundary
LR_STEPS = [200000, 400000, 600000, 800000]
ADAM_BETA1_G = 0.9
ADAM_BETA2_G = 0.99

# --- Loss configuration -------------------------------------------------------
W_PIXEL = 1.0                       # weight of the pixel-loss term
PIXEL_CRITERION = 'l1'              # pixel-loss flavour passed to get_pixel_loss

# --- Data / model geometry ----------------------------------------------------
HR_HEIGHT = 128
HR_WIDTH = 128
SCALE = 4                           # super-resolution upscaling factor
BATCH_SIZE = 16
BUFFER_SIZE = 10240                 # shuffle buffer size
INPUT_SHAPE = (None, None, 3)       # variable spatial size, RGB input

# --- Training schedule and artifact paths -------------------------------------
NUM_ITER = 1000000
SAVE_STEPS = 5000                   # checkpoint every N steps
CHECK_POINT_PATH = "./saved/checkpoints/psnr"
Path(CHECK_POINT_PATH).mkdir(parents=True, exist_ok=True)
SAVE_MODEL_PATH = "./saved/models/psnr.h5"
Path(SAVE_MODEL_PATH).parent.mkdir(parents=True, exist_ok=True)
def main():
    """Pre-train the ESRGAN generator (PSNR stage) with pure pixel loss.

    Builds the data pipeline, the RRDB generator and a multi-step-LR Adam
    optimizer, resumes from the latest checkpoint when one exists, then runs
    the remaining training steps, logging to W&B and checkpointing
    periodically. Saves the final Keras model to SAVE_MODEL_PATH.
    """
    # Infinite, shuffled, batched (LR, HR) training pipeline.
    dataset = load_dataset(HR_HEIGHT, HR_WIDTH, SCALE)
    dataset = dataset.repeat()
    dataset = dataset.shuffle(BUFFER_SIZE).batch(BATCH_SIZE).prefetch(tf.data.experimental.AUTOTUNE)
    model = rrdb_net(input_shape=INPUT_SHAPE, scale_factor=SCALE)
    learning_rate = MultiStepLR(INITIAL_LR, LR_STEPS, LR_RATE)
    optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate,
                                         beta_1=ADAM_BETA1_G,
                                         beta_2=ADAM_BETA2_G
                                         )
    pixel_loss = get_pixel_loss(PIXEL_CRITERION)
    # Checkpointing: the tracked step variable lets training resume exactly
    # where it stopped.
    checkpoint = tf.train.Checkpoint(step=tf.Variable(0, name='step'),
                                     optimizer=optimizer,
                                     model=model)
    manager = tf.train.CheckpointManager(checkpoint=checkpoint,
                                         directory=CHECK_POINT_PATH,
                                         max_to_keep=3)
    if manager.latest_checkpoint:
        checkpoint.restore(manager.latest_checkpoint)
        print('[*] load ckpt from {} at step {}.'.format(
            manager.latest_checkpoint, checkpoint.step.numpy()))
    else:
        print("[*] training from scratch.")

    @tf.function
    def train_step(lr, hr):
        # One optimization step: forward pass, weighted pixel loss, backprop.
        with tf.GradientTape() as tape:
            generated_hr = model(lr, training=True)
            loss = W_PIXEL * pixel_loss(hr, generated_hr)
        grads = tape.gradient(loss, model.trainable_variables)
        optimizer.apply_gradients(zip(grads, model.trainable_variables))
        return loss

    wandb_run_id = "psnr-training"  #@param {type:"string"}
    if HAS_WANDB_ACCOUNT:
        wandb.init(entity='ilab', project=PROJECT, id=wandb_run_id)
    else:
        wandb.init(id=wandb_run_id)
    # Only run the steps that remain after a possible checkpoint restore.
    remain_steps = max(NUM_ITER - checkpoint.step.numpy(), 0)
    pbar = tqdm(total=remain_steps, ncols=50)
    for lr, hr in dataset.take(remain_steps):
        checkpoint.step.assign_add(1)
        steps = checkpoint.step.numpy()
        loss = train_step(lr, hr)
        wandb.log({"steps": steps, "loss": loss, "learning_rate": optimizer.lr(steps).numpy()})
        pbar.set_description("loss={:.4f}, lr={:.1e}".format(loss, optimizer.lr(steps).numpy()))
        pbar.update(1)
        if steps % SAVE_STEPS == 0:
            manager.save()
            print("\n[*] save ckpt file at {}".format(manager.latest_checkpoint))
    model.save(SAVE_MODEL_PATH)
# Run training only when executed as a script, not on import.
if __name__ == '__main__':
    main()
1633133 | #!/usr/bin/env python
import DIRAC
from DIRAC import S_OK, S_ERROR
from DIRAC.Core.Base import Script
# Command-line setup: usage banner and switches must be registered before
# Script.parseCommandLine() is called.
Script.setUsageMessage( """
Insert random trigger file into the File Catalog
Usage:
%s [option]
""" % Script.scriptName )
fcType = 'FileCatalog'
Script.registerSwitch( "f:", "file-catalog=", "Catalog client type to use (default %s)" % fcType )
Script.registerSwitch( "j:", "jopts=", "jobOptions.txt" )
# NOTE(review): "Minimun" is a typo in the user-facing help text; left as-is
# because runtime strings are not altered here.
Script.registerSwitch( "r:", "runmin=", "Minimun run number" )
Script.registerSwitch( "R:", "runmax=", "Maximum run number" )
Script.registerSwitch( "e:", "se=", "SE name" )
Script.parseCommandLine( ignoreErrors = False )
options = Script.getUnprocessedSwitches()
from DIRAC.DataManagementSystem.Client.DataManager import DataManager
from DIRAC.Resources.Storage.StorageElement import StorageElement
from DIRAC.Resources.Catalog.FileCatalogFactory import FileCatalogFactory
from DIRAC.Core.Utilities.SiteSEMapping import getSEsForSite
import sys
import re
import socket
import time
import random
# DIRAC site name -> preferred user storage element.
SeSiteMap = {
    'BES.JINR.ru' : 'JINR-USER',
    'BES.IHEP-PBS.cn' : 'IHEPD-USER',
    'BES.GUCAS.cn' : 'IHEPD-USER',
    'BES.USTC.cn' : 'USTC-USER',
    'BES.WHU.cn' : 'WHU-USER',
}
# Concatenated DNS domain labels -> storage element (fallback when the site
# name gives no answer); see determineSeFromDomain for how keys are built.
SeDomainMap = {
    'jinrru' : 'JINR-USER',
    'ihepaccn' : 'IHEPD-USER',
    'ustceducn' : 'USTC-USER',
    'whueducn' : 'WHU-USER',
}
def determineSeFromSite():
    """Map the current DIRAC site name to a storage element.

    Checks the static SeSiteMap first; falls back to asking DIRAC for the
    site's SEs and taking the first one. Returns '' when nothing matches.
    """
    site = DIRAC.siteName()
    se_name = SeSiteMap.get(site, '')
    if se_name:
        return se_name
    lookup = getSEsForSite(site)
    if lookup['OK'] and lookup['Value']:
        return lookup['Value'][0]
    return ''
def determineSeFromDomain():
    """Guess the storage element from the local host's DNS domain.

    The key is the last two host-name labels concatenated without dots;
    Chinese academic domains ('.ac.cn', '.edu.cn') need three labels to
    disambiguate. Returns '' for unknown domains.
    """
    labels = socket.getfqdn().split('.')
    domain = ''.join(labels[-2:])
    if domain in ('accn', 'educn'):
        domain = ''.join(labels[-3:])
    return SeDomainMap.get(domain, '')
def determineSe():
    """Pick a storage element: site-based mapping first, DNS-domain fallback."""
    return determineSeFromSite() or determineSeFromDomain()
def getFile(lfn, se=''):
    """Download *lfn*, preferring storage element *se* when it holds a replica.

    Returns S_OK({lfn: {'DownloadOK': 1|2, 'Retry': n}}) on success
    (1 = fetched from the preferred SE, 2 = fetched from a random SE via the
    DataManager), or S_ERROR(message) on failure.
    """
    dm = DataManager()
    download_ok = 0
    get_active_replicas_ok = False
    lfn_on_se = False
    error_msg = ''
    if se:
        # Look up active replicas (up to 5 attempts, 3 s apart) to learn
        # whether the preferred SE actually holds this LFN.
        for i in range(0, 5):
            result = dm.getActiveReplicas(lfn)
            if result['OK'] and result['Value']['Successful']:
                get_active_replicas_ok = True
                lfnReplicas = result['Value']['Successful']
                if se in lfnReplicas[lfn]:
                    lfn_on_se = True
                break
            time.sleep(3)
            print '- Get replicas for %s failed, try again' % lfn
        if not get_active_replicas_ok:
            return S_ERROR('Get replicas error: %s' % lfn)
    if lfn_on_se:
        se = StorageElement(se)
        # try 5 times, with a long randomized back-off between attempts
        for j in range(0, 5):
            result = se.getFile(lfn)
            if result['OK'] and result['Value']['Successful'] and result['Value']['Successful'].has_key(lfn):
                break
            time.sleep(random.randint(180, 600))
            print '- %s getStorageFile(%s) failed, try again' % (lfn, se)
        if result['OK']:
            if result['Value']['Successful'] and result['Value']['Successful'].has_key(lfn):
                download_ok = 1
            else:
                error_msg = 'Downloading %s from SE %s error!' % (lfn, se)
        else:
            error_msg = result['Message']
    else:
        if se:
            print 'File %s not found on SE "%s" after %s tries, trying other SE' % (lfn, se, i+1)
        # try 5 times against whatever replica the DataManager picks
        for j in range(0, 5):
            result = dm.getFile(lfn)
            if result['OK'] and result['Value']['Successful'] and result['Value']['Successful'].has_key(lfn):
                break
            time.sleep(random.randint(180, 600))
            print '- getFile(%s) failed, try again' % lfn
        if result['OK']:
            if result['Value']['Successful'] and result['Value']['Successful'].has_key(lfn):
                download_ok = 2
            else:
                error_msg = 'Downloading %s from random SE error!' % lfn
        else:
            error_msg = result['Message']
    if download_ok:
        # 'Retry' reports how many attempts the successful branch needed.
        return S_OK({lfn: {'DownloadOK': download_ok, 'Retry': j+1}})
    return S_ERROR(error_msg)
def parseOpt(filename):
    """Extract the (runmin, runmax) range from a jobOptions file.

    Looks for the last ``RealizationSvc.RunIdList = {...};`` assignment.
    A single entry ``{-N}`` yields (N, N); a triple ``{-A, 0, -B}`` yields
    the sorted (min, max) of |A| and |B|. Anything else, or no match at
    all, yields (0, 0).

    Bug fix: the file handle was previously opened and never closed; it is
    now managed with a ``with`` block.
    """
    with open(filename, 'r') as f:
        fileContent = f.read()
    # Raw string for the regex; DOTALL lets the run list span lines.
    mat = re.findall(r'RealizationSvc\s*\.\s*RunIdList.*?;', fileContent, re.DOTALL)
    if not mat:
        return (0, 0)
    # Use the last assignment, strip all whitespace, normalise [] to {}.
    line = ''.join(mat[-1].split()).replace('[', '{').replace(']', '}')
    fields = line.split('{')[1].split('}')[0].split(',')
    if len(fields) == 1:
        runmin = runmax = abs(int(fields[0]))
    elif len(fields) == 3 and int(fields[1]) == 0:
        runmin = abs(int(fields[0]))
        runmax = abs(int(fields[2]))
        if runmax < runmin:
            runmin, runmax = runmax, runmin
    else:
        runmin = runmax = 0
    return (runmin, runmax)
def findFiles(runnb):
    """Query the File Catalog for random-trigger files in a run range.

    *runnb* is a list whose first element is a (runmin, runmax) tuple.
    Returns the DIRAC result dict from findFilesByMetadata (S_OK with the
    LFN list, or the failed result after exhausting retries).
    """
    # Creating the catalog client can fail transiently: retry up to 16
    # times with a randomized back-off.
    for i in range(0, 16):
        result = FileCatalogFactory().createCatalog(fcType)
        if result['OK']:
            break
        time.sleep(random.randint(30, 120))
        print '- Get FileCatalog failed, try again'
    if not result['OK']:
        print >>sys.stderr, 'Get FileCatalog error: %s. Retry %s' % (result['Message'], i+1)
        return result
    catalog = result['Value']
    (runmin,runmax) = runnb[0]
    # Metadata query with the same retry/back-off policy.
    for i in range(0, 16):
        result = catalog.findFilesByMetadata({'runL':{'>=':runmin},'runH':{'<=':runmax}}, '/bes/File/randomtrg')
        if result['OK']:
            break
        time.sleep(random.randint(30, 120))
        print '- Find files failed, try again'
    if not result['OK']:
        print >>sys.stderr, 'Find files error in run (%s - %s). Retry %s' % (runmin, runmax, i+1)
        print >>sys.stderr, result
    return result
def main():
    """Resolve the run range and SE from the CLI, then download every
    matching random-trigger file, exiting non-zero on any failure."""
    jfile = ''
    runmin = 0
    runmax = 0
    se = ''
    for option in options:
        (switch, val) = option
        if switch == 'j' or switch == 'jopts':
            jfile = val
        if switch == 'r' or switch == 'runmin':
            runmin = int(val)
        if switch == 'R' or switch == 'runmax':
            runmax = int(val)
        if switch == 'e' or switch == 'se':
            se = val
    # A jobOptions file, when given, overrides any explicit run range.
    if jfile != '':
        (runmin, runmax) = parseOpt(jfile)
    if (runmin, runmax) == (0, 0):
        print >>sys.stderr, 'No input run range. Check arguments or jobOptions.txt'
        sys.exit(68)
    if(runmax < runmin):
        temp = runmax
        runmax = runmin
        runmin = temp
    print "Run range:", runmin, runmax
    if not se:
        se = determineSe()
        print "Determine SE:", se
    result = findFiles([(runmin, runmax)])
    if not result['OK']:
        print >>sys.stderr, 'Finally find file error: (%s, %s)' % (runmin, runmax)
        print >>sys.stderr, result
        sys.exit(65)
    lfns = result['Value']
    print '%s files found in run %s - %s' % (len(lfns), runmin, runmax)
    for lfn in lfns:
        result = getFile(lfn, se)
        print result
        if not result['OK']:
            print >>sys.stderr, 'Finally download file %s from SE "%s" error:' % (lfn, se)
            print >>sys.stderr, result
            sys.exit(66)
| StarcoderdataPython |
5044451 | #!/usr/bin/env python
import optparse
import os
import sys
import tempfile
import shutil
import subprocess
import re
import logging
import urllib2
from urlparse import urlparse
assert sys.version_info[:2] >= (2, 6)
log = logging.getLogger(__name__)
CHUNK_SIZE = 2**20 #1mb
def stop_err(msg):
    """Write *msg* to stderr and terminate the process (exit status 0)."""
    sys.stderr.write("%s\n" % msg)
    sys.exit()
def download_from_url( url, output_dir, basename=None, ext=None ):
    """Stream *url* into *output_dir* in CHUNK_SIZE pieces and return the path.

    The target file name is ``<basename>.<ext>``; either part defaults to the
    corresponding piece of the URL's own file name (split on the first '.').

    Bug fix: the URL reader and output file were leaked if an exception
    occurred mid-transfer; both are now closed via context managers.
    """
    from contextlib import closing  # local import keeps the py2 module header untouched
    o = urlparse(url)
    src_parts = os.path.basename(o.path).split('.', 1)
    file_name = "%s.%s" % (basename if basename else src_parts[0],
                           ext if ext else src_parts[1])
    file_path = os.path.join(output_dir, file_name)
    # urllib2 responses are not context managers in py2, hence closing().
    with closing(urllib2.urlopen(url)) as reader:
        with open(file_path, 'wb') as writer:
            while True:
                data = reader.read(CHUNK_SIZE)
                if not data:
                    break
                writer.write(data)
    return file_path
def __main__():
    """Download the wiff / wiff.scan / wiff.mtd parts of a composite dataset
    and emit an HTML index listing them for Galaxy."""
    parser = optparse.OptionParser()
    parser.add_option( '-a', '--archive', dest='archive', default=None, help='URL to archive containing: <name>.wiff file <name>.wiff.scan <name>.wiff.mtd files' )
    parser.add_option( '-w', '--wiff', dest='wiff', default=None, help='URL to <name>.wiff file' )
    parser.add_option( '-s', '--scan', dest='scan', default=None, help='URL to <name>.wiff.scan file' )
    parser.add_option( '-m', '--mtd', dest='mtd', default=None, help='URL to <name>.wiff.mtd file' )
    parser.add_option( '-n', '--name', dest='name', default=None, help='base name for files' )
    parser.add_option( '-o', '--output_dir', dest='output_dir', default=None, help='dir to copy files into' )
    parser.add_option( '-f', '--output_file', dest='output_file', default=None, help='Galaxy dataset file' )
    (options, args) = parser.parse_args()
    if not (options.archive or options.wiff):
        stop_err("No wiff input file specified")
    # Default to the current directory; create a custom output dir if needed.
    output_dir = os.getcwd()
    if options.output_dir:
        output_dir = options.output_dir
        if not os.path.exists( output_dir ):
            os.makedirs(output_dir)
    basename = options.name
    # Accumulate the HTML index as a list of fragments.
    rval = ['<html><head><title>Wiff Composite Dataset %s</title></head><body><p/>' % (basename if basename else '')]
    rval.append('This composite dataset is composed of the following files:<p/><ul>')
    # For each provided part: download it, symlink it under a fixed name so
    # downstream tools can find it, and list it in the HTML index.
    if options.wiff:
        file_path = download_from_url (options.wiff, output_dir, basename=basename, ext='wiff')
        rel_path = os.path.basename(file_path)
        os.symlink( rel_path, os.path.join(output_dir,'wiff'))
        rval.append( '<li><a href="%s" type="application/octet-stream">%s</a></li>' % ( rel_path, rel_path ) )
        print >> sys.stdout, "wiff: %s" % options.wiff
    if options.scan:
        file_path = download_from_url (options.scan, output_dir, basename=basename, ext='wiff.scan')
        rel_path = os.path.basename(file_path)
        os.symlink( rel_path, os.path.join(output_dir,'wiff_scan'))
        rval.append( '<li><a href="%s" type="application/octet-stream">%s</a></li>' % ( rel_path, rel_path ) )
        print >> sys.stdout, "scan: %s" % options.scan
    if options.mtd:
        file_path = download_from_url (options.mtd, output_dir, basename=basename, ext='wiff.mtd')
        rel_path = os.path.basename(file_path)
        os.symlink( rel_path, os.path.join(output_dir,'wiff_mtd'))
        rval.append( '<li><a href="%s" type="application/octet-stream">%s</a></li>' % ( rel_path, rel_path ) )
        print >> sys.stdout, "mtd: %s" % options.mtd
    if options.output_file:
        rval.append( '</ul></div></body></html>' )
        f = open(options.output_file,'a')
        f.write("\n".join( rval ))
        f.close()
| StarcoderdataPython |
4812152 | import discord
from discord.ext import commands
from discord.ext.commands import cooldown
from discord.ext.commands.cooldowns import BucketType
import time
import asyncio
import asyncpg
from datetime import datetime, timedelta
from random import randint
sorts = ['total_deaths','foes_killed','uwus','current_xp','current_level']
class uwulonian:
    """Cog exposing uwulonian stats and leaderboard commands."""

    def __init__(self, bot):
        self.bot = bot

    @commands.command(description='Get an uwulonians or your stats', aliases=['bal', 'wallet'])
    async def stats(self, ctx, user: discord.Member = None):
        """Show the stats embed for *user* (defaults to the invoking author)."""
        async with self.bot.pool.acquire() as conn:
            user = user or ctx.author
            uwulonian_name = await conn.fetchrow("SELECT * FROM user_settings WHERE user_id = $1", user.id)
            uwulonian = await conn.fetchrow("SELECT * FROM user_stats WHERE user_id = $1", user.id)
            # Bug fix: also bail out when the settings row is missing —
            # previously ``uwulonian_name['user_name']`` crashed with a
            # TypeError if only user_stats existed.
            if uwulonian is None or uwulonian_name is None:
                return await ctx.send("You or the user doesn't have an uwulonian created.")
            roles = "Yes"
            is_patron = await conn.fetchrow("SELECT * FROM p_users WHERE user_id = $1", user.id)
            if is_patron is None:
                roles = "No"
            e = discord.Embed(colour=0x7289da)
            e.add_field(name=f"Stats for {uwulonian_name['user_name']}", value=f"""Foes killed - {uwulonian['foes_killed']}\nDeaths - {uwulonian['total_deaths']}\nuwus - {uwulonian['uwus']}""")
            e.add_field(name="Levels", value=f"XP - {uwulonian['current_xp']}\n Level - {uwulonian['current_level']}")
            e.add_field(name='Time created', value=f"""{uwulonian_name['time_created'].strftime("%x at %X")}""")
            e.add_field(name='Is Patron?', value=roles)
            await ctx.send(embed=e)

    @commands.command(aliases=['lb', 'wowcheaterhenumber1onlb'])
    async def leaderboard(self, ctx, sort=None):
        """Show the top five users ordered by *sort* (defaults to 'uwus')."""
        if sort is None:
            sort = 'uwus'
        if sort not in sorts:
            return await ctx.send(f"Invalid type. Valid [`total_deaths, foes_killed, uwus, and current_xp, current_level`]")
        # ``sort`` is interpolated into the SQL, but only after being validated
        # against the ``sorts`` whitelist above, so this is injection-safe.
        lb = await self.bot.pool.fetch(f"SELECT * FROM user_stats INNER JOIN user_settings ON user_stats.user_id = user_settings.user_id ORDER BY {sort} DESC LIMIT 5;")
        e = discord.Embed(colour=0x7289da)
        e.set_author(name=f"Leaderboard - {sort}")
        # enumerate replaces the original manual counter.
        for num, row in enumerate(lb):
            e.add_field(name=f"{row['user_name']}", value=f"{sort} - {row[sort]}", inline=False)
        await ctx.send(embed=e)
def setup(bot):
    """Entry point used by ``bot.load_extension`` to register the cog."""
    bot.add_cog(uwulonian(bot))
9699916 | <gh_stars>0
from livereload import Server
from microblog import app
# app.debug = True
# server = Server(app.wsgi_app)
# server.serve() | StarcoderdataPython |
188210 | <reponame>erwan-lemonnier/pymacaron-core<filename>pymacaron_core/swagger/server.py
import jsonschema
import logging
import uuid
import os
from functools import wraps
from werkzeug.exceptions import BadRequest
from flask import request, jsonify
from flask_cors import cross_origin
from pymacaron_core.exceptions import PyMacaronCoreException, ValidationError, add_error_handlers
from pymacaron_core.utils import get_function
from pymacaron_core.models import get_model
from pymacaron_core.swagger.request import FlaskRequestProxy
from bravado_core.request import unmarshal_request
log = logging.getLogger(__name__)
try:
from flask import _app_ctx_stack as stack
except ImportError:
from flask import _request_ctx_stack as stack
def spawn_server_api(api_name, app, api_spec, error_callback, decorator):
    """Take a a Flask app and a swagger file in YAML format describing a REST
    API, and populate the app with routes handling all the paths and methods
    declared in the swagger file.

    Also handle marshaling and unmarshaling between json and object instances
    representing the definitions from the swagger file.

    Args:
        api_name: name of this API, used to build the PymCallPath chain.
        app: the Flask application to register routes on.
        api_spec: parsed swagger spec (exposes call_on_each_endpoint).
        error_callback: converts exceptions into error model instances.
        decorator: optional decorator wrapped around every endpoint handler.
    """
    def mycallback(endpoint):
        # Resolve the dotted-path handler declared in the swagger spec.
        handler_func = get_function(endpoint.handler_server)
        # Generate api endpoint around that handler
        handler_wrapper = _generate_handler_wrapper(api_name, api_spec, endpoint, handler_func, error_callback, decorator)
        # Bind handler to the API path
        log.info("Binding %s %s ==> %s" % (endpoint.method, endpoint.path, endpoint.handler_server))
        # Flask endpoint names must be unique: derive one from method + path.
        endpoint_name = '_'.join([endpoint.method, endpoint.path]).replace('/', '_')
        app.add_url_rule(endpoint.path, endpoint_name, handler_wrapper, methods=[endpoint.method])

    api_spec.call_on_each_endpoint(mycallback)
    # Add custom error handlers to the app
    add_error_handlers(app)
def _responsify(api_spec, error, status):
    """Take a bravado-core model representing an error, and return a Flask
    Response with the given error code and error instance as body."""
    payload = api_spec.model_to_json(error)
    response = jsonify(payload)
    response.status_code = status
    return response
def log_endpoint(f, endpoint):
    """A decorator that adds start and stop logging around an endpoint."""

    @wraps(f)
    def wrapper(*args, **kwargs):
        # NOTE(review): local import — presumably to dodge a circular
        # dependency with pymacaron; confirm before hoisting.
        from pymacaron.log import pymlogger
        pymlog = pymlogger(__name__)

        def spacer():
            # Two near-blank lines make request boundaries easy to spot.
            pymlog.info(" ")
            pymlog.info(" ")

        spacer()
        pymlog.info("=> INCOMING REQUEST %s %s -> %s" % (endpoint.method, endpoint.path, f.__name__))
        spacer()
        res = f(*args, **kwargs)
        pymlog.info("<= DONE %s %s -> %s" % (endpoint.method, endpoint.path, f.__name__))
        spacer()
        return res

    return wrapper
def _generate_handler_wrapper(api_name, api_spec, endpoint, handler_func, error_callback, global_decorator):
    """Generate a handler method for the given url method+path and operation.

    The returned Flask view: propagates/creates the Pym call-id and call-path,
    unmarshals and validates body/query/formdata parameters via bravado-core,
    invokes *handler_func* with the appropriate arguments, and marshals the
    result (model instance, Flask Response, or error) back into a Response.
    Validation failures are converted through *error_callback* into 400s.
    """
    # Add logging around the handler function
    handler_func = log_endpoint(handler_func, endpoint)
    # Decorate the handler function, if Swagger spec tells us to
    if endpoint.decorate_server:
        endpoint_decorator = get_function(endpoint.decorate_server)
        handler_func = endpoint_decorator(handler_func)

    @wraps(handler_func)
    def handler_wrapper(**path_params):
        if os.environ.get('PYM_DEBUG', None) == '1':
            log.debug("PYM_DEBUG: Request headers are: %s" % dict(request.headers))
        # Get caller's pym-call-id or generate one
        call_id = request.headers.get('PymCallID', None)
        if not call_id:
            call_id = str(uuid.uuid4())
        stack.top.call_id = call_id
        # Append current server to call path, or start one
        call_path = request.headers.get('PymCallPath', None)
        if call_path:
            call_path = "%s.%s" % (call_path, api_name)
        else:
            call_path = api_name
        stack.top.call_path = call_path
        if endpoint.param_in_body or endpoint.param_in_query or endpoint.param_in_formdata:
            # Turn the flask request into something bravado-core can process...
            has_data = endpoint.param_in_body or endpoint.param_in_formdata
            try:
                req = FlaskRequestProxy(request, has_data)
            except BadRequest:
                ee = error_callback(ValidationError("Cannot parse json data: have you set 'Content-Type' to 'application/json'?"))
                return _responsify(api_spec, ee, 400)
            try:
                # Note: unmarshall validates parameters but does not fail
                # if extra unknown parameters are submitted
                parameters = unmarshal_request(req, endpoint.operation)
                # Example of parameters: {'body': RegisterCredentials()}
            except jsonschema.exceptions.ValidationError as e:
                ee = error_callback(ValidationError(str(e)))
                return _responsify(api_spec, ee, 400)
        # Call the endpoint, with proper parameters depending on whether
        # parameters are in body, query or url
        args = []
        kwargs = {}
        if endpoint.param_in_path:
            kwargs = path_params
        if endpoint.param_in_body:
            # Remove the parameters already defined in path_params
            for k in list(path_params.keys()):
                del parameters[k]
            lst = list(parameters.values())
            assert len(lst) == 1
            # Now convert the Bravado body object into a pymacaron model
            body = lst[0]
            cls = get_model(body.__class__.__name__)
            body = cls.from_bravado(body)
            args.append(body)
        if endpoint.param_in_query:
            kwargs.update(parameters)
        if endpoint.param_in_formdata:
            for k in list(path_params.keys()):
                del parameters[k]
            kwargs.update(parameters)
        if os.environ.get('PYM_DEBUG', None) == '1':
            log.debug("PYM_DEBUG: Request args are: [args: %s] [kwargs: %s]" % (args, kwargs))
        result = handler_func(*args, **kwargs)
        if not result:
            e = error_callback(PyMacaronCoreException("Have nothing to send in response"))
            return _responsify(api_spec, e, 500)
        # Did we get the expected response?
        if endpoint.produces_html:
            if type(result) is not tuple:
                e = error_callback(PyMacaronCoreException("Method %s should return %s but returned %s" %
                                                          (endpoint.handler_server, endpoint.produces, type(result))))
                return _responsify(api_spec, e, 500)
            # Return an html page
            return result
        elif endpoint.produces_json:
            if not hasattr(result, '__module__') or not hasattr(result, '__class__'):
                e = error_callback(PyMacaronCoreException("Method %s did not return a class instance but a %s" %
                                                          (endpoint.handler_server, type(result))))
                return _responsify(api_spec, e, 500)
            # If it's already a flask Response, just pass it through.
            # Errors in particular may be either passed back as flask Responses, or
            # raised as exceptions to be caught and formatted by the error_callback
            result_type = result.__module__ + "." + result.__class__.__name__
            if result_type == 'flask.wrappers.Response':
                return result
            # We may have got a pymacaron Error instance, in which case
            # it has a http_reply() method...
            if hasattr(result, 'http_reply'):
                # Let's transform this Error into a flask Response
                log.info("Looks like a pymacaron error instance - calling .http_reply()")
                return result.http_reply()
            # Otherwise, assume no error occured and make a flask Response out of
            # the result.
            # TODO: check that result is an instance of a model expected as response from this endpoint
            result_json = api_spec.model_to_json(result)
            # Send a Flask Response with code 200 and result_json
            r = jsonify(result_json)
            r.status_code = 200
            return r

    # Allow cross-origin calls with the standard headers.
    handler_wrapper = cross_origin(headers=['Content-Type', 'Authorization'])(handler_wrapper)
    # And encapsulate all in a global decorator, if given one
    if global_decorator:
        handler_wrapper = global_decorator(handler_wrapper)
    return handler_wrapper
| StarcoderdataPython |
class TweetCounter(object):
    """Measurement that counts every tweet it is fed."""

    def __init__(self, **kwargs):
        # Arbitrary kwargs are accepted (and ignored) so every measurement
        # class shares a uniform constructor signature.
        self.counter = 0

    def add_tweet(self, tweet):
        """Record one tweet, regardless of its content."""
        self.counter = self.counter + 1

    def get(self):
        """Return the result as a one-element list of (count, name)."""
        return [(self.counter, self.get_name())]

    def get_name(self):
        """Human-readable name of this measurement."""
        return 'TweetCounter'

    def combine(self, new):
        """Merge another TweetCounter's tally into this one."""
        self.counter = self.counter + new.counter
class ReTweetCounter(object):
    """Measurement that counts only retweets (tweets whose verb is 'share')."""

    def __init__(self, **kwargs):
        # kwargs ignored; uniform constructor across measurement classes.
        self.counter = 0

    def add_tweet(self, tweet):
        """Record *tweet* only when it is a share/retweet."""
        if tweet['verb'] != 'share':
            return
        self.counter = self.counter + 1

    def get(self):
        """Return the result as a one-element list of (count, name)."""
        return [(self.counter, self.get_name())]

    def get_name(self):
        """Human-readable name of this measurement."""
        return 'ReTweetCounter'

    def combine(self, new):
        """Merge another ReTweetCounter's tally into this one."""
        self.counter = self.counter + new.counter
# Registry of all measurement classes exposed by this module.
measurement_class_list = [TweetCounter, ReTweetCounter]
1946690 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Auto-generated migration: tighten field definitions on the rsvp models.

    NOTE(review): the hard-coded ``datetime(...)`` default on ``rsvp.created``
    is the frozen value captured by makemigrations; migrations are historical
    records and should not be "fixed" after the fact.
    """

    dependencies = [
        ('rsvp', '0005_event_caption'),
    ]

    operations = [
        migrations.AlterField(
            model_name='person',
            name='attending',
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name='rsvp',
            name='created',
            field=models.DateTimeField(default=datetime.datetime(2015, 12, 21, 23, 49, 24, 977114, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='rsvp',
            name='modified',
            field=models.DateTimeField(auto_now=True),
        ),
    ]
| StarcoderdataPython |
1644990 | import shutil
from functools import partial
try:
from contextlib import asynccontextmanager
except ImportError:
from async_generator import asynccontextmanager
import pytest
import tus
from aiohttp import hdrs, web
from aiohttp.test_utils import TestClient
from aiohttp_tus import setup_tus
from aiohttp_tus.annotations import Decorator, Handler
from aiohttp_tus.constants import APP_TUS_CONFIG_KEY
from aiohttp_tus.data import Config, ResourceCallback
from tests.common import (
get_upload_url,
TEST_CHUNK_SIZE,
TEST_FILE_NAME,
TEST_FILE_PATH,
TEST_SCREENSHOT_NAME,
TEST_SCREENSHOT_PATH,
TEST_UPLOAD_URL,
)
SECRET_TOKEN = "secret-token"
@pytest.fixture
def aiohttp_test_client(tmp_path, aiohttp_client):
    """Yield a factory producing aiohttp test clients with tus configured.

    The factory is an async context manager so each test gets a fresh app
    with its own temporary upload directory, removed again on exit.
    """
    @asynccontextmanager
    async def factory(
        *,
        upload_url: str,
        upload_resource_name: str = None,
        upload_suffix: str = None,
        allow_overwrite_files: bool = False,
        on_upload_done: ResourceCallback = None,
        decorator: Decorator = None,
    ) -> TestClient:
        upload_path = tmp_path / "aiohttp_tus"
        app = setup_tus(
            web.Application(),
            # upload_suffix lets tests exercise templated per-user paths.
            upload_path=upload_path / upload_suffix if upload_suffix else upload_path,
            upload_url=upload_url,
            upload_resource_name=upload_resource_name,
            allow_overwrite_files=allow_overwrite_files,
            on_upload_done=on_upload_done,
            decorator=decorator,
        )
        try:
            yield await aiohttp_client(app)
        finally:
            # Always clean up the upload directory, even on test failure.
            shutil.rmtree(upload_path, ignore_errors=True)

    return factory
def login_required(handler: Handler) -> Handler:
    """Wrap *handler*, rejecting requests lacking the expected auth token."""

    async def wrapper(request: web.Request) -> web.StreamResponse:
        token = request.headers.get(hdrs.AUTHORIZATION)
        # A missing header compares unequal too, so one check covers both.
        if token != f"Token {SECRET_TOKEN}":
            raise web.HTTPForbidden()
        return await handler(request)

    return wrapper
async def test_decorated_upload_200(aiohttp_test_client, loop):
    """Upload succeeds when the auth decorator's header check passes."""
    do_upload = partial(
        tus.upload,
        file_name=TEST_FILE_NAME,
        headers={"Authorization": "Token secret-token"},
    )
    async with aiohttp_test_client(
        upload_url=TEST_UPLOAD_URL, decorator=login_required
    ) as client:
        target = get_upload_url(client, TEST_UPLOAD_URL)
        with open(TEST_FILE_PATH, "rb") as src:
            # tus.upload is blocking, so run it off the event loop.
            await loop.run_in_executor(None, do_upload, src, target)
async def test_decorated_upload_403(aiohttp_test_client, loop):
    """Upload is rejected when the auth token does not match."""
    do_upload = partial(
        tus.upload,
        file_name=TEST_FILE_NAME,
        headers={"Authorization": "Token not-secret-token"},
    )
    async with aiohttp_test_client(
        upload_url=TEST_UPLOAD_URL, decorator=login_required
    ) as client:
        target = get_upload_url(client, TEST_UPLOAD_URL)
        with open(TEST_FILE_PATH, "rb") as src, pytest.raises(tus.TusError):
            await loop.run_in_executor(None, do_upload, src, target)
async def test_on_upload_callback(aiohttp_test_client, loop):
    """The on_upload_done hook fires with the uploaded resource's details."""
    seen = {}

    async def on_upload_done(request, resource, file_path):
        seen[resource.file_name] = file_path

    do_upload = partial(tus.upload, file_name=TEST_FILE_NAME)
    async with aiohttp_test_client(
        upload_url=TEST_UPLOAD_URL, on_upload_done=on_upload_done
    ) as client:
        with open(TEST_FILE_PATH, "rb") as src:
            await loop.run_in_executor(
                None, do_upload, src, get_upload_url(client, TEST_UPLOAD_URL)
            )
        assert TEST_FILE_NAME in seen
async def test_overwrite_file_allowed(aiohttp_test_client, loop):
    """Uploading the same file twice succeeds when overwriting is enabled."""
    do_upload = partial(tus.upload, file_name=TEST_FILE_NAME)
    async with aiohttp_test_client(
        upload_url=TEST_UPLOAD_URL, allow_overwrite_files=True
    ) as client:
        target = get_upload_url(client, TEST_UPLOAD_URL)
        # Second pass overwrites the first upload without error.
        for _ in range(2):
            with open(TEST_FILE_PATH, "rb") as src:
                await loop.run_in_executor(None, do_upload, src, target)
async def test_overwrite_file_disallowed(aiohttp_test_client, loop):
    """A second upload of the same file fails when overwriting is disabled."""
    do_upload = partial(tus.upload, file_name=TEST_FILE_NAME)
    async with aiohttp_test_client(
        upload_url=TEST_UPLOAD_URL, allow_overwrite_files=False
    ) as client:
        target = get_upload_url(client, TEST_UPLOAD_URL)
        with open(TEST_FILE_PATH, "rb") as src:
            await loop.run_in_executor(None, do_upload, src, target)
        with open(TEST_FILE_PATH, "rb") as src, pytest.raises(tus.TusError):
            await loop.run_in_executor(None, do_upload, src, target)
# Each tuple: (upload_url, canonical_upload_url, upload_suffix,
# tus_upload_url, match_info). Covers plain, trailing-slash, templated and
# regex-constrained routes, plus templated upload suffixes.
# NOTE(review): the "playpauseanddtop" spelling in some rows differs from
# "playpauseandstop"; those rows have an empty match_info so the value is
# inconsequential — kept byte-identical here.
@pytest.mark.parametrize(
    "upload_url, canonical_upload_url, upload_suffix, tus_upload_url, match_info",
    (
        (TEST_UPLOAD_URL, TEST_UPLOAD_URL, None, TEST_UPLOAD_URL, {}),
        (f"{TEST_UPLOAD_URL}/", f"{TEST_UPLOAD_URL}/", None, f"{TEST_UPLOAD_URL}/", {}),
        (
            r"/user/{username}/uploads",
            r"/user/{username}/uploads",
            None,
            "/user/playpauseanddtop/uploads",
            {},
        ),
        (
            r"/user/{username}/uploads/",
            r"/user/{username}/uploads/",
            None,
            "/user/playpauseanddtop/uploads/",
            {},
        ),
        (
            r"/user/{username:([a-zA-Z0-9_-])+}/uploads",
            r"/user/{username}/uploads",
            None,
            "/user/playpauseanddtop/uploads",
            {},
        ),
        (
            r"/user/{username:([a-zA-Z0-9_-])+}/uploads/",
            r"/user/{username}/uploads/",
            None,
            "/user/playpauseanddtop/uploads/",
            {},
        ),
        (
            r"/user/{username}/uploads",
            r"/user/{username}/uploads",
            r"{username}",
            "/user/playpauseandstop/uploads",
            {"username": "playpauseandstop"},
        ),
        (
            r"/user/{username}/uploads/",
            r"/user/{username}/uploads/",
            r"{username}",
            "/user/playpauseandstop/uploads/",
            {"username": "playpauseandstop"},
        ),
    ),
)
async def test_upload(
    aiohttp_test_client,
    loop,
    upload_url,
    canonical_upload_url,
    upload_suffix,
    tus_upload_url,
    match_info,
):
    """End-to-end upload against many route shapes; verifies the file lands
    at the path the tus Config resolves for the given match_info."""
    upload = partial(tus.upload, file_name=TEST_FILE_NAME)
    async with aiohttp_test_client(
        upload_url=upload_url, upload_suffix=upload_suffix
    ) as client:
        with open(TEST_FILE_PATH, "rb") as handler:
            await loop.run_in_executor(
                None, upload, handler, get_upload_url(client, tus_upload_url)
            )
        # The app stores one Config per canonical upload URL.
        config: Config = client.app[APP_TUS_CONFIG_KEY][canonical_upload_url]
        expected_upload_path = config.resolve_upload_path(match_info) / TEST_FILE_NAME
        assert expected_upload_path.exists()
        assert expected_upload_path.read_bytes() == TEST_FILE_PATH.read_bytes()
@pytest.mark.parametrize(
    "chunk_size", (TEST_CHUNK_SIZE, TEST_CHUNK_SIZE * 2, TEST_CHUNK_SIZE * 4)
)
async def test_upload_large_file(aiohttp_test_client, loop, chunk_size):
    """A large file arrives intact regardless of the tus chunk size used."""
    do_upload = partial(
        tus.upload, file_name=TEST_SCREENSHOT_NAME, chunk_size=chunk_size
    )
    async with aiohttp_test_client(upload_url=TEST_UPLOAD_URL) as client:
        with open(TEST_SCREENSHOT_PATH, "rb") as fh:
            await loop.run_in_executor(
                None, do_upload, fh, get_upload_url(client, TEST_UPLOAD_URL)
            )
        # Verify the stored copy matches the source byte for byte.
        config: Config = client.app[APP_TUS_CONFIG_KEY]["/uploads"]
        stored_path = config.resolve_upload_path({}) / TEST_SCREENSHOT_NAME
        assert stored_path.exists()
        assert stored_path.read_bytes() == TEST_SCREENSHOT_PATH.read_bytes()
async def test_upload_resource_name(aiohttp_test_client, loop):
    """The upload route can be resolved through its named aiohttp resource."""
    do_upload = partial(tus.upload, file_name=TEST_FILE_NAME)
    async with aiohttp_test_client(
        upload_url=TEST_UPLOAD_URL, upload_resource_name="upload"
    ) as client:
        # Resolve the URL from the router by name instead of hard-coding it.
        route_url = client.app.router["upload"].url_for()
        with open(TEST_FILE_PATH, "rb") as fh:
            await loop.run_in_executor(
                None, do_upload, fh, get_upload_url(client, route_url)
            )
| StarcoderdataPython |
6414042 | """
This is a class to store the global chain params
"""
class CHAIN(object):
    """Global chain parameters and difficulty/reward helpers.

    All members are class-level constants or classmethods; the class is never
    instantiated. Values here are consensus-critical: changing them forks the
    chain.
    """
    # Max possible target for a block
    MAX_TARGET = 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
    MAX_TARGET_HEX = 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
    MAX_NONCE_LEN = 64
    # The fork checkpoints, so we have a single reference for all the codebase and can use explicit names for forks.
    POW_FORK_V2 = 60000
    POW_FORK_V3 = 61110
    RETARGET_PERIOD = 2016  # blocks
    TWO_WEEKS = 1209600  # seconds
    HALF_WEEK = 302400  # seconds
    MAX_BLOCKS_PER_MESSAGE = 200  # Not really a chain param, but better if coherent across peers
    MAX_RETRACE_DEPTH = 20  # Max allowed retrace. Deeper retrace would need manual chain truncating
    TIME_TOLERANCE = 10  # MAX # of seconds in the future we allow a bloc or TX to be. NTP Sync required for nodes.
    CHECK_TIME_FROM = 59710  # Begin checks there
    MINING_AND_TXN_REFORM_FORK = 60000
    ONE_DAY_IN_SECONDS = 1440 * 60
    RETARGET_PERIOD_V2 = 144  # blocks = 1 day at 10 min per block
    RETARGET_PERIOD_V3 = 1  # blocks = 1 day at 10 min per block
    MAX_SECONDS_V2 = ONE_DAY_IN_SECONDS * 7  # seconds - avoid to drop to fast.
    MIN_SECONDS_V2 = 3600  # seconds = 1h - avoid too high a raise.
    MAX_SECONDS_V3 = ONE_DAY_IN_SECONDS * 7  # seconds - avoid to drop to fast.
    MIN_SECONDS_V3 = 3600  # seconds = 1h - avoid too high a raise.
    # target block time is now 600 sec
    # special_min triggers after 2 * block time
    # we want max target (= min diff) is reached after long enough it does not drop too fast.
    # Could be raised later on depending on the net hash rate. calibrating for very low hash
    MAX_TARGET_AFTER_V2 = 600 * 6 * 8  # after 8 hours, target will hit MAX_TARGET_V2. after twice that time, absolute max.
    MAX_TARGET_AFTER_V3 = 600 * 3  # after 8 hours, target will hit MAX_TARGET_V2. after twice that time, absolute max.
    # Max possible target for a block, v2 after MAX_TARGET_AFTER_V2: reasonable target for a single cpu miner.
    MAX_TARGET_V2 = 0x000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffff
    MAX_TARGET_HEX_V2 = '000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffff'
    MAX_TARGET_V3 = 0x000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
    MAX_TARGET_HEX_V3 = '000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
    @classmethod
    def target_block_time(cls, network:str):
        """What is the target block time for a specific network?"""
        # Returns seconds between blocks; raises ValueError for unknown networks.
        if network == 'mainnet':
            return 600
        elif network == 'testnet':
            return 10
        elif network == 'regnet':
            # Avoid possible divisions by 0
            return 1
        raise ValueError("Unknown network")
    @classmethod
    def special_min_trigger(cls, network: str, block_height: int) -> int:
        """When should special_min be activated?"""
        # Returns the number of seconds after which the "special minimum"
        # difficulty relaxation kicks in, depending on network and fork era.
        # For testnet and regnet, special min triggers at target_block_time + 1
        try:
            if network == 'testnet':
                return 10 + 1
            elif network == 'regnet':
                return 1 +1
            elif network == 'mainnet':
                if int(block_height) <= cls.POW_FORK_V2:
                    # return 120 # temp debug
                    return 600
                elif int(block_height) <= cls.POW_FORK_V3:
                    return 600
                else:
                    return 600 * 2
            # NOTE(review): this ValueError is raised inside the try block, so
            # it is swallowed by the blanket except below and the method then
            # implicitly returns None for unknown networks — confirm callers
            # can cope before changing this.
            raise ValueError("Unknown network")
        except Exception as e:
            print(e)
            import sys, os
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            print(exc_type, fname, exc_tb.tb_lineno)
    @classmethod
    def special_target(cls, block_height: int, target:int, delta_t:int, network: str='mainnet') -> int:
        """Given the regular target and time since last block, gives the current target
        This is supposed to be the only place where this is computed, to ease maintenance"""
        # delta_t is seconds elapsed since the previous block. The three
        # branches implement the pre-V2, V3+, and [V2, V3) difficulty eras.
        # Branch order matters: the V3 test runs before the V2-range test.
        if int(block_height) < cls.POW_FORK_V2:
            target_factor = delta_t / cls.target_block_time(network)
            special_target = int(target * (target_factor * 4))
        elif int(block_height) >= cls.POW_FORK_V3:
            # from 60k, POW_FORK_V3, we aim to reach MAX_TARGET_V3 after MAX_TARGET_AFTER_V3
            if delta_t >= 2 * cls.MAX_TARGET_AFTER_V3:
                # but after twice that time, if still stuck - hard block - allow anything (MAX_TARGET)
                special_target = cls.MAX_TARGET
            elif delta_t <= 60 * 2:
                # Within two minutes of the last block: no relaxation.
                special_target = target
            else:
                # Linear interpolation from `target` up to MAX_TARGET_V3.
                delta_target = abs(cls.MAX_TARGET_V3 - target)  # abs to make sure, should not happen
                special_target = int(target + delta_target * (delta_t - 2 * 60) // (cls.MAX_TARGET_AFTER_V3 - 2 * 60 ) )
        elif int(block_height) >= cls.POW_FORK_V2 and int(block_height) < cls.POW_FORK_V3:
            # from 60k, POW_FORK_V2, we aim to reach MAX_TARGET_V2 after MAX_TARGET_AFTER_V2
            if delta_t >= 2 * cls.MAX_TARGET_AFTER_V2:
                # but after twice that time, if still stuck - hard block - allow anything (MAX_TARGET)
                special_target = cls.MAX_TARGET
            elif delta_t <= 600 * 2:
                special_target = target
            else:
                delta_target = abs(cls.MAX_TARGET_V2 - target)  # abs to make sure, should not happen
                special_target = int(target + delta_target * (delta_t - 2 * 600) // (cls.MAX_TARGET_AFTER_V2 - 2 * 600 ) )
        # Final clamp: the target can never exceed the absolute maximum.
        if special_target > cls.MAX_TARGET:
            special_target = cls.MAX_TARGET
        return special_target
    @classmethod
    def get_version_for_height(cls, height: int):
        """Map a block height to the block format version for that era."""
        if int(height) <= 14484:
            return 1
        elif int(height) >= cls.POW_FORK_V3:
            return 4
        elif int(height) > cls.POW_FORK_V2 and int(height) < cls.POW_FORK_V3:
            return 3
        else:
            return 2
    @classmethod
    def get_block_reward_deprecated(cls, block_index=None):
        # Deprecated table-driven halving schedule; superseded by the closed
        # form in get_block_reward below.
        block_rewards = [
            {"block": "0", "reward": "50"},
            {"block": "210000", "reward": "25"},
            {"block": "420000", "reward": "12.5"},
            {"block": "630000", "reward": "6.25"},
            {"block": "840000", "reward": "3.125"},
            {"block": "1050000", "reward": "1.5625"},
            {"block": "1260000", "reward": "0.78125"},
            {"block": "1470000", "reward": "0.390625"},
            {"block": "1680000", "reward": "0.1953125"},
            {"block": "1890000", "reward": "0.09765625"},
            {"block": "2100000", "reward": "0.04882812"},
            {"block": "2310000", "reward": "0.02441406"},
            {"block": "2520000", "reward": "0.01220703"},
            {"block": "2730000", "reward": "0.00610351"},
            {"block": "2940000", "reward": "0.00305175"},
            {"block": "3150000", "reward": "0.00152587"},
            {"block": "3360000", "reward": "0.00076293"},
            {"block": "3570000", "reward": "0.00038146"},
            {"block": "3780000", "reward": "0.00019073"},
            {"block": "3990000", "reward": "0.00009536"},
            {"block": "4200000", "reward": "0.00004768"},
            {"block": "4410000", "reward": "0.00002384"},
            {"block": "4620000", "reward": "0.00001192"},
            {"block": "4830000", "reward": "0.00000596"},
            {"block": "5040000", "reward": "0.00000298"},
            {"block": "5250000", "reward": "0.00000149"},
            {"block": "5460000", "reward": "0.00000074"},
            {"block": "5670000", "reward": "0.00000037"},
            {"block": "5880000", "reward": "0.00000018"},
            {"block": "6090000", "reward": "0.00000009"},
            {"block": "6300000", "reward": "0.00000004"},
            {"block": "6510000", "reward": "0.00000002"},
            {"block": "6720000", "reward": "0.00000001"},
            {"block": "6930000", "reward": "0"}
        ]
        if block_index is None:
            from yadacoin.blockchainutil import BU
            block_index = BU().get_latest_block()['index'] + 1
        try:
            # Scan for the bracket containing block_index.
            # NOTE(review): the bare except also absorbs the IndexError hit at
            # the final table entry (t+1 out of range) and returns 0.0, which
            # happens to match the last bracket's "0" reward.
            for t, block_reward in enumerate(block_rewards):
                if int(block_reward['block']) <= block_index < int(block_rewards[t+1]['block']):
                    break
            return float(block_reward['reward'])
        except:
            return 0.0
    @classmethod
    def get_block_reward(cls, block_index=None):
        """Returns the reward matching a given block height, next block if None is provided"""
        if block_index is None:
            from yadacoin.blockchainutils import BU
            block_index = BU().get_latest_block()['index'] + 1
        # Halve the 50-coin base reward every 2,100,000 blocks, rounding to
        # 8 decimal places (satoshi-style integer arithmetic via 1e8).
        index = block_index // 2100000
        reward = int(50.0 * 1e8 / 2 ** index) / 1e8
        return reward
return reward | StarcoderdataPython |
6705671 | import pandas as pd
import numpy as np
import redis
import json
import geopandas, astral
import time
from astral.sun import sun
import requests, zipfile, io, os, re
from tabulate import tabulate
METEO_FOLDER = r"C:/Users/48604/Documents/semestr5/PAG/pag2/Meteo/"  # local working directory for all meteo inputs/outputs
ZAPIS_ZIP = METEO_FOLDER + r"Meteo_"  # prefix under which downloaded archives are unpacked
url = "https://dane.imgw.pl/datastore/getfiledown/Arch/Telemetria/Meteo/2015/Meteo_2015-07.zip"  # IMGW telemetry archive, July 2015
r = redis.Redis(host='localhost', port=6379, db=0)  # shared Redis connection used by the pass_*/get_* helpers below
def get_data(url, pth):
    """Download an IMGW ``Meteo_*.zip`` archive and unpack it.

    Args:
        url: Direct link to a ``Meteo_<period>.zip`` (or ``.ZIP``) archive.
        pth: Destination prefix; the period string from the URL is appended
            to form the output directory.

    Returns:
        Path (with trailing slash) of the directory the archive was
        extracted into.

    Raises:
        requests.HTTPError: If the download does not return a 2xx status.
    """
    response = requests.get(url)
    response.raise_for_status()  # fail fast instead of trying to unzip an error page
    # Name the variable 'archive' — 'zip' would shadow the builtin.
    archive = zipfile.ZipFile(io.BytesIO(response.content))
    url_end = url[-4:]
    # later checking if file ends with .zip or .ZIP
    pattern = "Meteo_(.*?)" + url_end
    substring = re.search(pattern, url).group(1)
    # pattern matching in order to name new dir properly
    path = pth + substring + "/"
    # path to dir with data from specified period
    os.makedirs(path, exist_ok=True)  # race-free replacement for isdir()+mkdir()
    archive.extractall(path)
    return path
path_data = get_data(url, ZAPIS_ZIP)  # download + unpack; returns folder holding the monthly telemetry CSVs
path_parametry = METEO_FOLDER + "kody_parametr.csv"  # dictionary of IMGW parameter codes
path_effacility = METEO_FOLDER + "effacility.geojson"  # measurement-station locations
path_powiaty = METEO_FOLDER + "powiaty/powiaty.shp"  # county boundaries (not used below)
path_wojewodztwa = METEO_FOLDER + "woj/woj.shp"  # voivodeship boundaries (not used below)
def read_parametry(path_parametr):
    """Load the IMGW parameter-code dictionary from a ';'-separated file."""
    return pd.read_csv(
        path_parametr,
        sep=';',            # file uses semicolons, not the default comma
        index_col=False,    # keep every column as data, none as index
        encoding='cp1250',  # Polish legacy encoding
    )
def read_data(path_data):
    """Read every telemetry CSV in *path_data* into a dict of DataFrames.

    Keys are file names; each frame has KodSH/ParametrSH/Date/Wartosc
    columns with Wartosc as float and Date localised to Europe/Warsaw.
    *path_data* must end with a path separator (it is concatenated with
    the file name).
    """
    columns = ["KodSH", "ParametrSH", "Date", "Wartosc"]
    frames = {}
    for filename in os.listdir(path_data):
        frame = pd.read_csv(
            path_data + filename,
            sep=';',
            header=None,                 # raw telemetry files have no header row
            names=columns,
            index_col=False,
            low_memory=False,            # avoid mixed-dtype chunking warnings
            dtype={'KodSH': int, 'Wartosc': str},
            parse_dates=['Date'],
        )
        # Values use a decimal comma; normalise then convert to float.
        frame["Wartosc"] = frame["Wartosc"].str.replace(',', '.').astype('float64')
        # Naive timestamps in the files are local Polish time.
        frame["Date"] = frame["Date"].dt.tz_localize("Europe/Warsaw")
        frames[filename] = frame
    return frames
def read_effacility(path_effacility):
    """Read station metadata from a GeoJSON file into a plain DataFrame.

    Returns columns KodSH (float), City, Lon, Lat with coordinates
    re-projected to EPSG:4258 (ETRS89).
    """
    # Use a context manager: the original left the file handle open forever.
    with open(path_effacility) as source:
        effacility = geopandas.read_file(source)
    effacility["geometry"] = effacility["geometry"].to_crs(epsg=4258)
    x = effacility["geometry"].x
    y = effacility["geometry"].y
    data = {"KodSH" : effacility["name"], "City" : effacility["name1"], "Lon" : x, "Lat" : y}
    effacility = pd.DataFrame(data)
    # Station codes arrive as strings; keep them numeric for later merges.
    effacility["KodSH"] = effacility["KodSH"].astype('float64')
    return effacility
def f_init_mean(data):
    """Build a per-station, per-day index skeleton for each telemetry table.

    The daily mean of ``Wartosc`` is computed only to obtain one row per
    (KodSH, calendar day) pair; the value column itself is then dropped,
    leaving an empty frame whose MultiIndex carries the grouping.
    """
    skeletons = {}
    for key, frame in data.items():
        daily = frame.groupby(["KodSH", frame["Date"].dt.date])["Wartosc"].mean()
        skeletons[key] = daily.to_frame().drop(columns=["Wartosc"])
    return skeletons
def f_sun_info(init_mean, effacility):
    """For every (station, day) pair attach coordinates plus dawn/dusk times.

    Mutates the frames in *init_mean* (reset_index) and returns a dict of
    frames with columns KodSH, Date, City, Lon, Lat, Dawn, Dusk.
    """
    sun_info = {}
    for key in init_mean:
        init_mean[key] = init_mean[key].reset_index("Date")
        #Date as a non index value
        #init_mean[key] = init_mean[key].drop(["24h"], axis=1)
        sun_info[key] = pd.merge(init_mean[key], effacility, on = "KodSH", how = "left")
    astral_info = {}
    for key in sun_info:
        shp = sun_info[key].shape[0]
        # Pre-size result lists; filled positionally via the frame's index.
        Dawn = list(range(shp))
        Dusk = list(range(shp))
        for k in sun_info[key].index:
            # astral computes sunrise/sunset from coordinates + timezone.
            City = astral.LocationInfo(sun_info[key]["City"][k],"Poland", "Europe/Warsaw", sun_info[key]["Lat"][k], sun_info[key]["Lon"][k])
            Dawn[k] = (sun(City.observer, date=sun_info[key]["Date"][k], tzinfo=City.timezone))["dawn"]
            Dusk[k] = (sun(City.observer, date=sun_info[key]["Date"][k], tzinfo=City.timezone))["dusk"]
        data = {"KodSH" : sun_info[key]["KodSH"], "Dawn" : Dawn ,"Dusk" : Dusk}
        astral_info[key] = pd.DataFrame(data)
        # Positional merge: astral_info rows line up with sun_info rows.
        sun_info[key] = pd.merge(sun_info[key], astral_info[key], left_index=True, right_index=True)
        # The merge duplicated KodSH; drop the copy and restore the name.
        sun_info[key].drop(["KodSH_y"], axis=1, inplace=True)
        sun_info[key].rename(columns = {"KodSH_x" : "KodSH", "Date" : "Date"}, inplace=True)
        sun_info[key]["Date"] = pd.to_datetime(sun_info[key]["Date"]).dt.tz_localize("Europe/Warsaw")
    return sun_info
start_time = time.time()  # wall-clock timing for the whole pipeline
parametry = read_parametry(path_parametry)  # parameter-code dictionary (currently unused below)
data = read_data(path_data)  # raw telemetry tables keyed by file name
effacility = read_effacility(path_effacility)  # station metadata with coordinates
init_mean = f_init_mean(data)  # (KodSH, day) skeleton per table
sun_info = f_sun_info(init_mean, effacility)  # adds dawn/dusk per station/day
def pass_effacility(eff):
    """Serialise each station-metadata column to JSON and store it in Redis."""
    # One Redis key per DataFrame column: "effacility_<column name>".
    for column, values in eff.items():
        r.set("effacility_" + column, json.dumps(values.tolist()))
def get_effacility():
    """Rebuild the station-metadata DataFrame from its per-column Redis keys."""
    columns = ["KodSH", "City", "Lon", "Lat"]
    restored = {
        column: json.loads(r.get("effacility_" + column)) for column in columns
    }
    return pd.DataFrame.from_dict(restored)
def pass_IMGW(imgw):
    """Store every telemetry table in Redis, one key per column.

    Keys look like ``IMGW_<param>_<MMDD><MMDD><column>``. Returns the list
    of key prefixes (one per table) for later retrieval with get_IMGW.
    """
    names = []
    for key in imgw:
        name = "IMGW_" + key[:7] + "_" + key[10:12] + key[13:15]
        # One prefix per table. The original appended the name once per
        # column, so get_IMGW re-fetched every table four times.
        names.append(name)
        # Dates must be stored as strings so the timezone survives JSON.
        imgw[key]["Date"] = imgw[key]["Date"].astype("str")
        for column, values in imgw[key].items():
            r.set(name + column, json.dumps(values.tolist()))
    return names
def get_IMGW(names):
    """Rebuild telemetry DataFrames from Redis.

    Result keys drop the leading ``IMGW_`` prefix; Date is parsed back
    into datetimes.
    """
    columns = ["KodSH", "ParametrSH", "Date", "Wartosc"]
    tables = {}
    for name in names:
        restored = {
            column: json.loads(r.get(name + column)) for column in columns
        }
        frame = pd.DataFrame.from_dict(restored)
        frame["Date"] = pd.to_datetime(frame["Date"])
        tables[name[5:]] = frame
    return tables
def pass_sun_info(sun_info):
    """Store every dawn/dusk table in Redis, one key per column.

    Returns one key prefix per table (``Sun_info_<param>_<MMDD><MMDD>``),
    mirroring pass_IMGW.
    """
    names = []
    for key in sun_info:
        name = "Sun_info_" + key[:7] + "_" + key[10:12] + key[13:15]
        # One prefix per table (originally appended once per column).
        names.append(name)
        # Timestamps go through JSON as strings to preserve the timezone.
        for ts_column in ("Date", "Dawn", "Dusk"):
            sun_info[key][ts_column] = sun_info[key][ts_column].astype("str")
        for column, values in sun_info[key].items():
            r.set(name + column, json.dumps(values.tolist()))
    return names
def get_sun_info(names):
    """Rebuild dawn/dusk DataFrames from Redis.

    Result keys drop the leading ``Sun_info_`` prefix; the three timestamp
    columns are parsed back into datetimes.
    """
    columns = ["KodSH", "Date", "City", "Lon", "Lat", "Dawn", "Dusk"]
    tables = {}
    for name in names:
        restored = {
            column: json.loads(r.get(name + column)) for column in columns
        }
        frame = pd.DataFrame.from_dict(restored)
        for ts_column in ("Date", "Dawn", "Dusk"):
            frame[ts_column] = pd.to_datetime(frame[ts_column])
        tables[name[9:]] = frame
    return tables
def day_night(imgw, sun_info):
    """Label each telemetry reading as taken during "day" or "night".

    Merges every reading with its station/day dawn-dusk row and adds a
    "day/night" column. Mutates the frames in *imgw* temporarily but
    restores them before returning.
    NOTE: the call site rebinds this function's name to its result, so it
    can only be called once per run.
    """
    day_night = {}
    for key in imgw:
        date_time = imgw[key]["Date"]
        #save old datetime
        imgw[key]["Date"] = imgw[key]["Date"].dt.date
        #trim Date of time, which is necessary to merge(unwanted conv from datetime64 to object)
        imgw[key]["Date"] = pd.to_datetime(imgw[key]["Date"]).dt.tz_localize("Europe/Warsaw")
        #conversion from object to datetime64
        day_night[key] = pd.merge(imgw[key], sun_info[key], on=["KodSH", "Date"], how="inner")
        #merging data with info about dusk and dawn
        imgw[key].drop(["Date"], axis=1, inplace=True)
        imgw[key].insert(2, "Date", date_time)
        day_night[key].drop(["Date"], axis=1, inplace=True)
        day_night[key].insert(2, "Date", date_time)
        #bringing back proper "Date" VALUE
        day_night[key]["day/night"] = np.where((day_night[key]["Date"] >= day_night[key]["Dawn"]) & (day_night[key]["Date"] < day_night[key]["Dusk"]), "day", "night")
        #add column which determins if its day or night
    return day_night
def f_analysis_basic(sun_info, day_night):
    """Compute per-station daily day/night means and medians.

    For every table, readings are grouped by (station, day, day/night) and
    aggregated; the four aggregates are then re-joined onto the full
    station/day skeleton from *sun_info* so days without readings keep a
    NaN row. Returns a dict of frames with columns KodSH, City, Lon, Lat,
    Mean_value_day, Mean_value_night, Median_value_day, Median_value_night.
    """
    analysis_basic = {}
    mean = {}
    mean_day = {}
    mean_night = {}
    median = {}
    median_day = {}
    median_night = {}
    for key in day_night:
        mean[key] = day_night[key].groupby(["KodSH", day_night[key]["Date"].dt.date, day_night[key]["day/night"]], dropna=False)["Wartosc"].mean()
        mean[key].to_frame
        mean[key] = mean[key].reset_index()
        #mean group by
        median[key] = day_night[key].groupby(["KodSH", day_night[key]["Date"].dt.date, day_night[key]["day/night"]], dropna=False)["Wartosc"].median()
        median[key].to_frame
        median[key] = median[key].reset_index()
        #median geoup by
        mean_day[key] = mean[key][mean[key]["day/night"] != "night"]
        mean_night[key] = mean[key][mean[key]["day/night"] != "day"]
        median_day[key] = median[key][median[key]["day/night"] != "night"]
        median_night[key] = median[key][median[key]["day/night"] != "day"]
        #selecting values for different time of day(loss of nan data)
        mean_day[key] = sun_info[key].merge(mean_day[key], how="left", right_on=["KodSH", "Date"], left_on=["KodSH", sun_info[key]["Date"].dt.date])
        mean_night[key] = sun_info[key].merge(mean_night[key], how="left", right_on=["KodSH", "Date"], left_on=["KodSH", sun_info[key]["Date"].dt.date])
        median_day[key] = sun_info[key].merge(median_day[key], how="left", right_on=["KodSH", "Date"], left_on=["KodSH", sun_info[key]["Date"].dt.date])
        median_night[key] = sun_info[key].merge(median_night[key], how="left", right_on=["KodSH", "Date"], left_on=["KodSH", sun_info[key]["Date"].dt.date])
        #bring nan data back
        mean_day[key].drop(["Date_x", "Dawn", "Dusk", "Date_y", "day/night"], axis=1, inplace=True)
        mean_night[key].drop(["Date_x", "Dawn", "Dusk", "Date_y", "day/night"], axis=1, inplace=True)
        median_day[key].drop(["Date_x", "Dawn", "Dusk", "Date_y", "day/night"], axis=1, inplace=True)
        median_night[key].drop(["Date_x", "Dawn", "Dusk", "Date_y", "day/night"], axis=1, inplace=True)
        mean_day[key].rename(columns = {"Wartosc" : "Mean_value_day"}, inplace=True)
        mean_night[key].rename(columns = {"Wartosc" : "Mean_value_night"}, inplace=True)
        median_day[key].rename(columns = {"Wartosc" : "Median_value_day"}, inplace=True)
        median_night[key].rename(columns = {"Wartosc" : "Median_value_night"}, inplace=True)
        #basic dataframe maintenance
        # Positional concat: all four frames came from the same sun_info rows.
        mean_day[key] = pd.concat([mean_day[key], mean_night[key]["Mean_value_night"], median_day[key]["Median_value_day"], median_night[key]["Median_value_night"]], axis=1)
        analysis_basic[key] = mean_day[key]
    return analysis_basic
def f_analysis_trim(sun_info, day_night):
    """Placeholder for a trimmed analysis; not implemented yet.

    Ignores both arguments and returns an empty dict so the main pipeline
    can already call it.
    """
    analysis_trim = {}
    return analysis_trim
def f_display_analysis(analysis_basic):
    """Write each analysis table to a text file as a pretty-printed grid."""
    hdrs = ["KodSH", "Date", "City", "Lon", "Lat", "Mean value day", "Mean value night", "Median value day", "Median value night"]
    for key in analysis_basic:
        table = tabulate(analysis_basic[key], headers = hdrs, tablefmt = 'psql')
        # Context manager guarantees the handle is closed even if write() fails.
        with open("analysis_basic_" + key[:15] + ".txt", "w") as result:
            result.write(table)
pass_effacility(effacility)  # push station metadata to Redis
names_imgw = pass_IMGW(data)  # push telemetry tables, keep the key prefixes
names_sun_info = pass_sun_info(sun_info)  # push dawn/dusk tables
effacility_redis = get_effacility()  # round-trip the data back out of Redis
imgw_redis=get_IMGW(names_imgw)
sun_info_redis=get_sun_info(names_sun_info)
day_night = day_night(imgw_redis, sun_info_redis)  # NOTE: rebinds the function name to its result
analysis_basic = f_analysis_basic(sun_info_redis, day_night)
analysis_trim = f_analysis_trim(sun_info_redis, day_night)  # placeholder, returns {}
#f_display_analysis(analysis_basic)
print("--- %s seconds ---" % (time.time() - start_time))
print(r.dbsize())
r.flushdb()  # wipe the Redis DB after the run
| StarcoderdataPython |
5071280 | from kernel.kernel import Kernel
from kernel.output import Output, OutputResult
from modules import AbstractModule
class Dictionary(AbstractModule):
    """Filter files by content: keep a file only if it contains at least one
    keyword from the configured dictionaries (keyword files).

    Every matched keyword is recorded per file in ``self.data`` so later
    stages can see which words were found where.
    """

    def check(self):
        """The module has no external prerequisites."""
        return True

    def check_arguments(self):
        """Validate arguments and load the keyword dictionaries.

        Expects a mandatory ``dictionary`` list of file paths and an optional
        ``encoding`` list (defaults to utf-8 + latin-1). Returns False on bad
        arguments; terminates the kernel if a dictionary file is unreadable.
        """
        self.dict_words = {}
        self.encoding_list = []
        if len(self.args) == 0:
            Output.err("Dictionary: Arguments should contain at least one option")
            return False
        try:
            dictionary_list = self.args['dictionary']
        except KeyError:
            Output.err("Mandatory argument \"dictionary\" is missing")
            return False
        try:
            arg_encoding_list = self.args['encoding']
        except KeyError:
            arg_encoding_list = ['utf-8', 'latin-1']
        self.encoding_list = arg_encoding_list
        for dict_file in dictionary_list:
            try:
                with open(dict_file, 'r') as file_words:
                    # One keyword per line. Skip blank lines entirely: the
                    # previous `len(line) > 0` test kept "\n" lines as empty
                    # strings, and an empty keyword substring-matches every
                    # file, defeating the filter.
                    self.dict_words[dict_file] = [
                        line.strip() for line in file_words if line.strip()
                    ]
            except Exception as e:
                Output.err("Could not read file \"%s\". %s" % (dict_file, e))
                Kernel.end()
        Output.do("Total amount of word dictionaries: %d" % len(self.dict_words))
        return True

    def description(self) -> str:
        return "A module which is responsible for filtering files by their contents, which can be found in dictionaries (aka files with keywords)"

    def is_filter_files(self) -> bool:
        return True

    def do_filter_files(self):
        """Keep only files containing at least one dictionary keyword.

        Each file is read and decoded once per configured encoding (the
        previous version re-decoded the whole file for every keyword), and
        every matched keyword is recorded in ``self.data[file]``.
        """
        self.files_criteria = []
        decoded_cache = {}  # file path -> list of lower-cased decoded texts
        for dictionary in self.dict_words:
            for f in self.files:
                if f not in decoded_cache:
                    try:
                        with open(f, 'rb') as handle:
                            file_content = handle.read()
                    except Exception as e:
                        Output.err("File \"%s\" could not be opened: %s" % (f, e))
                        Kernel.end()
                    if not file_content:
                        Output.log("A file \"%s\" is empty. Skipping it" % f)
                        decoded_cache[f] = []  # nothing can ever match
                    else:
                        # Decode once per encoding; 'ignore' drops bytes that
                        # do not fit the encoding, matching previous behavior.
                        decoded_cache[f] = [
                            file_content.decode(enc, 'ignore').lower()
                            for enc in self.encoding_list
                        ]
                texts = decoded_cache[f]
                if not texts:
                    continue
                for word in self.dict_words[dictionary]:
                    if f in self.data and word in self.data[f]:
                        continue  # already matched via an earlier dictionary
                    needle = word.lower()
                    if any(needle in text for text in texts):
                        if f not in self.files_criteria:
                            self.files_criteria.append(f)
                        # Record which keyword matched in which file.
                        self.data.setdefault(f, []).append(word)
        self.files = self.files_criteria
| StarcoderdataPython |
1686225 | """
Generates the regret plot for an experiment. Includes the regret curves for
the random policy, conventional algorithms like SW-UCB, and the default and
best neural bandits.
Usage:
$ python3 regret_analysis.py experiment_folder/
With no additional flag this takes the 'best' rnn and ffnn policies from the
given_best_rnn_policy and given_best_ffnn_policy variables in this script.
$ python3 regret_analysis.py experiment_folder/ --computebest
Chooses the (R)NN policy with the highest return in this folder as 'best'
for the plot.
$ python3 regret_analysis.py experiment_folder/ --nondefaultasbest
Assumes that the other non-default configuration in this folder is the
'best'.
"""
import matplotlib
matplotlib.use('Agg')
import os
import argparse
import json
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from rnlps.environments.non_contextual_bandits import non_contextual_bandits
from rnlps.environments.contextual_bandits import contextual_bandits
from rnlps.environments.linear_bandits import linear_bandits
# Global seaborn theme for all figures produced by this script.
sns.set(context='paper', style='darkgrid', font_scale=3, rc={'legend.frameon':False, 'lines.linewidth':6.0})
# Modify these if using different defaults
# Policy-identifier strings as they appear in the trial CSVs; used to pick the
# "Default NN"/"Default RNN" curves for each bandit family.
contextual_default_ffnn = "ThompsonSinFeedforwardNetwork(order=1,periods=[],periods_dim=2n_units=[32, 32, 32],learning_rate=0.01,regL2=0.001,epochs=64,train_every=32,std_targets=0.1,std_weights=1.0)"
contextual_default_rnn = "ThompsonRecurrentNetwork(n_units=[32,32,32],learning_rate=0.001,epochs=64,train_every=32,std_targets=0.3,std_weights=0.5,regL2=0.001)"
non_contextual_default_ffnn = "ThompsonFeedforwardNetwork(order=1,periods=[],periods_dim=1n_units=[32,32,32],learning_rate=0.1,regL2=0.001,epochs=16,train_every=32,std_targets=0.1,std_weights=1.0)"
non_contextual_default_rnn = "ThompsonRecurrentNetwork(n_units=[32,32,32],learning_rate=0.01,regL2=0.001,epochs=16,train_every=32,std_targets=0.1,std_weights=0.5)"
# Hand-picked "best" policies used when neither --computebest nor
# --nondefaultasbest is passed; fill these in before running in that mode.
given_best_ffnn_policy = ''
given_best_rnn_policy = ''
def get_empirical_regret(frame, bandit, trial_length):
    """Empirical regret per pull: expected cumulative reward minus realised return."""
    expected = bandit.expected_cumulative_rewards(trial_length)
    return expected - frame['Return']
def main():
    """Build the regret plot for one experiment directory.

    Loads every trial's config + trial.csv, checks that all runs used the
    same bandit, selects the policies to display (random, conventional
    baselines, default and best neural policies), and saves ``regret.pdf``.
    """
    # Policies to be considered in the plot, more will be appended later.
    list_policies = ['Random()']
    parser = argparse.ArgumentParser()
    parser.add_argument('directory', help='Experiments directory.')
    parser.add_argument('--computebest', help='Find the best (R)NN conifguration',
                        action='store_true')
    parser.add_argument('--nondefaultasbest', help='Assumes the config other than default is the best',
                        action='store_true')
    args = parser.parse_args()
    # One sub-directory per run; underscore-prefixed dirs are skipped.
    dirs = [os.path.join(args.directory, d)
            for d in os.listdir(args.directory) if not d.startswith('_')]
    dirs = [d for d in dirs if os.path.isdir(d)]
    configs = {}
    for d in dirs:
        f = open(os.path.join(d, 'config.json'), 'r')
        configs[d] = json.load(f)
        f.close()
    reference = configs[dirs[0]]
    # Consistency check
    bandit_settings = ['bandit', 'bandit_parameters','trial_length']
    for d, config in configs.items():
        for p in bandit_settings:
            if config[p] != reference[p]:
                if p == "trial_length":
                    print("\nThe trial length is different\n")
                else:
                    print("\nThe difference is in ", p)
                    print("Current config: \n")
                    print(config[p])
                    print("Reference config: \n")
                    print(reference[p])
    # Pick the default policy strings matching the bandit family.
    if reference['bandit'] in non_contextual_bandits.keys():
        default_ffnn_policy = non_contextual_default_ffnn
        default_rnn_policy = non_contextual_default_rnn
    elif reference['bandit'] in contextual_bandits.keys():
        default_ffnn_policy = contextual_default_ffnn
        default_rnn_policy = contextual_default_rnn
    else:
        default_ffnn_policy = contextual_default_ffnn
        default_rnn_policy = contextual_default_rnn
    default_rnn_policy = default_rnn_policy.replace(' ', '')
    default_ffnn_policy = default_ffnn_policy.replace(' ','')
    list_policies.append(default_ffnn_policy)
    list_policies.append(default_rnn_policy)
    # Collect all trial CSVs into one long frame, computing regret per row.
    df = pd.DataFrame()
    for d in dirs:
        if os.path.exists(os.path.join(d, 'trial.csv')):
            frame = pd.read_csv(os.path.join(d, 'trial.csv'))
            frame.Policy = frame.Policy.str.replace(' ', '')
            frame = frame.sort_values(by='Pull')
            # Currently, we report the empirical regret for the non-contextual and contextual bandit problems.
            # This will be made consistent in the next version of the paper.
            if reference['bandit'] in linear_bandits.keys():
                frame['Regret'] = frame['Regret']
            elif reference['bandit'] in non_contextual_bandits.keys():
                bandit = non_contextual_bandits[reference['bandit']](**reference['bandit_parameters'])
                frame['Regret'] = get_empirical_regret(frame, bandit, reference['trial_length'])
            elif reference['bandit'] in contextual_bandits.keys():
                bandit = contextual_bandits[reference['bandit']](**reference['bandit_parameters'])
                frame['Regret'] = get_empirical_regret(frame, bandit, reference['trial_length'])
            df = df.append(frame)
        else:
            msg = 'Warning: missing trial {0} for {1}({2}).'
            print(msg.format(d, configs[d]['policy'],
                             configs[d]['policy_parameters']))
    # Rank policies by their mean final return.
    last = df[df['Pull'] == reference['trial_length']]
    last = last.loc[:, ['Return', 'Policy']]
    p_group = last.groupby('Policy').mean().sort_values(by='Return',
                                                        ascending=False)
    with pd.option_context('display.max_colwidth', -1):
        print(p_group)
    # Add the best conventional bandit algorithms to the list of policies if they
    # are present.
    if reference['bandit'] in linear_bandits.keys():
        if p_group.index.str.contains('D_LinUCB').any():
            D_LinUCB_results = p_group[p_group.index.str.contains('D_LinUCB')]
            best_D_LinUCB_policy = D_LinUCB_results.Return.idxmax()
            list_policies.append(best_D_LinUCB_policy)
        if p_group.index.str.contains('SW_LinUCB').any():
            SW_LinUCB_results = p_group[p_group.index.str.contains('SW_LinUCB')]
            best_SW_LinUCB_policy = SW_LinUCB_results.Return.idxmax()
            list_policies.append(best_SW_LinUCB_policy)
    elif reference['bandit'] in non_contextual_bandits.keys():
        if p_group.index.str.contains('D_UCB').any():
            D_UCB_results = p_group[p_group.index.str.contains('D_UCB')]
            best_D_UCB_policy = D_UCB_results.Return.idxmax()
            list_policies.append(best_D_UCB_policy)
        if p_group.index.str.contains('SW_UCB').any():
            SW_UCB_results = p_group[p_group.index.str.contains('SW_UCB')]
            best_SW_UCB_policy = SW_UCB_results.Return.idxmax()
            list_policies.append(best_SW_UCB_policy)
    # Add the best neural policies
    ffnn_policies = p_group[p_group.index.str.contains('FeedforwardNetwork')]
    rnn_policies = p_group[p_group.index.str.contains('RecurrentNetwork')]
    if args.computebest:
        best_ffnn_policy = ffnn_policies.Return.idxmax()
        best_rnn_policy = rnn_policies.Return.idxmax()
    elif args.nondefaultasbest:
        # Works when we have run the experiment only with default and optionally
        # another policy that was the best during the hyperparameter search.
        # Eg. if there is only one RNN default policy, then we assume there is
        # no separate best RNN policy
        if ((len(ffnn_policies) > 2) or (len(rnn_policies) > 2)):
            raise Exception('More than 2 (R)NN policies. Ambigous which non-default policy is best.')
        best_ffnn_policy = ffnn_policies.index[ffnn_policies.index != default_ffnn_policy]
        best_rnn_policy = rnn_policies.index[rnn_policies.index != default_rnn_policy]
        if len(best_ffnn_policy) > 0:
            best_ffnn_policy = best_ffnn_policy.values[0]
        else:
            best_ffnn_policy = default_ffnn_policy
        if len(best_rnn_policy) > 0:
            best_rnn_policy = best_rnn_policy.values[0]
        else:
            best_rnn_policy = default_rnn_policy
    else:
        # Assumes best_ffnn_policy and best_rnn_policy and provided along with
        # the default configurations
        best_ffnn_policy = given_best_ffnn_policy
        best_rnn_policy = given_best_rnn_policy
    list_policies.append(best_ffnn_policy)
    list_policies.append(best_rnn_policy)
    # Set the colour palette - keep consistent colours
    c_list = sns.color_palette()
    c_palette = {'Random' : c_list[7],
                 'Best RNN': c_list[0],
                 'Best NN': c_list[1],
                 'Default RNN': c_list[2],
                 'Default NN': c_list[3],
                 'SW-UCB':c_list[4],
                 'D-UCB': c_list[5]}
    if reference['bandit'] in linear_bandits.keys():
        c_palette = {'Random' : c_list[7],
                     'Best RNN': c_list[0],
                     'Best NN': c_list[1],
                     'Default RNN': c_list[2],
                     'Default NN': c_list[3],
                     'SW-LinUCB':c_list[4],
                     'D-LinUCB': c_list[5]}
    plot_df = df[df['Policy'].isin(list_policies)]
    # Will store the name to be used in the legend
    plot_df['Policy_newnames'] = ''
    plot_df.loc[plot_df.Policy == best_rnn_policy, 'Policy_newnames'] = 'Best RNN'
    plot_df.loc[plot_df.Policy == best_ffnn_policy, 'Policy_newnames'] = 'Best NN'
    plot_df.loc[plot_df.Policy.str.contains('Random'), 'Policy_newnames'] = 'Random'
    plot_df.loc[plot_df.Policy.str.contains('D_UCB'), 'Policy_newnames'] = 'D-UCB'
    plot_df.loc[plot_df.Policy.str.contains('SW_UCB'), 'Policy_newnames'] = 'SW-UCB'
    plot_df.loc[plot_df.Policy.str.contains('D_LinUCB'), 'Policy_newnames'] = 'D-LinUCB'
    plot_df.loc[plot_df.Policy.str.contains('SW_LinUCB'), 'Policy_newnames'] = 'SW-LinUCB'
    plot_df.loc[plot_df.Policy == default_rnn_policy, 'Policy_newnames'] = 'Default RNN'
    plot_df.loc[plot_df.Policy == default_ffnn_policy, 'Policy_newnames'] = 'Default NN'
    plot_df = plot_df.sort_values(by='Policy_newnames')
    del plot_df['Policy']
    plot_df.rename(columns = {'Policy_newnames':'Policy'}, inplace=True)
    # Plot the regret
    plt.figure(figsize=(16,9))
    plot_df = plot_df[plot_df.Pull < int(reference['trial_length'])]
    # add ci = "sd" for faster standard deviation confidence bounds, imstead of
    # a bootstrapped estimate
    ax = sns.lineplot(x='Pull', y='Regret', hue='Policy', palette = c_palette,
                      data=plot_df, linewidth=3.0)
    plt.xlim(1, int(reference['trial_length']))
    plt.xticks(range(0, int(reference['trial_length']) + 2, 1024))
    plt.xlabel('time step')
    plt.ylabel('regret')
    # Display legend in this particular order
    handles, labels = plt.gca().get_legend_handles_labels()
    order = list(range(1, len(handles)))
    plt.legend([handles[idx] for idx in order],[labels[idx] for idx in order])
    plt.savefig('regret.pdf', bbox_inches='tight',pad_inches = 0)
if __name__ == "__main__":
    main()  # run the regret analysis when invoked as a script
| StarcoderdataPython |
# Teacher's (Leonardo's) reference solution to the rock-paper-scissors exercise.
from time import sleep
import random
# Menu (Portuguese): 0 = rock, 1 = paper, 2 = scissors.
print('''Escolha uma opção:
[ 0 ] - PEDRA
[ 1 ] - PAPEL
[ 2 ] - TESOURA ''')
print('-=-'*30)
jog = int(input('Qual é a sua jogada? '))  # player's choice
if jog<0 or jog>2:
    # Out-of-range option: warn and abort.
    print('COMANDO INVÁLIDO, TENTE NOVAMENTE')
    exit()
sleep(0.5)
# Countdown-style suspense before revealing the result.
print('PEDRA...')
sleep(1)
print('PAPEL...')
sleep(1)
print('TESOURA!')
print('-=-'*30)
sleep(0.5)
itens = ['PEDRA', 'PAPEL', 'TESOURA']
pc = random.randint(0,2)  # computer's choice
print(f'Jogador escolheu {itens[jog]}. \nComputador escolheu {itens[pc]}. ')
if jog==pc:
    print('EMPATE!')
elif (jog<2 and pc==jog+1) or (pc==0 and jog==2):
    # Computer wins: paper beats rock, scissors beats paper, rock beats scissors.
    print('DERROTA!')
else:
    print('VITÓRIA!')
print('-=-'*30)
| StarcoderdataPython |
11360479 | <filename>vmaig_blog/uwsgi-2.0.14/plugins/transformation_offload/uwsgiplugin.py
# Build metadata consumed by uWSGI's plugin build system (uwsgiconfig).
NAME='transformation_offload'
# No extra compiler flags, linker flags or libraries are required.
CFLAGS = []
LDFLAGS = []
LIBS = []
# Source file basenames (".c" implied) compiled into the plugin.
GCC_LIST = ['offload']
| StarcoderdataPython |
6697021 | <filename>framework/game.py<gh_stars>0
# -*- coding: utf-8 -*-
from .card import Rank, Suit, Card
from .hand import Hand
from .learning_state import LearningState
from .round_info import RoundInfo
from .utils import *
from copy import deepcopy
from random import shuffle
class Game:
    """One game of Hanabi driven by a fixed list of player agents.

    The constructor validates the player count, builds and shuffles the
    deck, and deals the opening hands.  Callers then repeatedly invoke
    :meth:`make_move` (one player action per call) until
    :meth:`is_game_over` returns True.  Per-turn snapshots are recorded in
    ``history`` and ``hands_history`` for later analysis.
    """
    def __init__(self, players, logger=None, log=False, learning_state=None, save=False):
        """Set up state and deal the initial hands.

        players        -- ordered list of agent objects (play order).
        logger         -- logger used when ``log`` is True.
        log            -- emit a human-readable game transcript.
        learning_state -- shared state handed to learning agents.
        save           -- record analyzer data for reinforcement players.
        """
        self.logger = logger
        self.log = log
        self.learning_state = learning_state
        self.save = save
        self.analyzer = None
        self.players = players
        self.number_of_players = len(players)
        assert(MIN_PLAYERS <= self.number_of_players <= MAX_PLAYERS)
        # Give every agent its seat number, logger and a display suffix.
        for player_number, player in enumerate(self.players):
            player.inject_info(
                player_number,
                self.logger,
                self.learning_state,
                ' #{0}'.format(str(player_number + 1))
            )
        # The first reinforcement-learning player (name prefix 'Reinforced')
        # doubles as the turn analyzer when ``save`` is enabled.
        for player in self.players:
            if player.name[:10] == 'Reinforced' and self.analyzer is None:
                self.analyzer = player
        self.hints = MAX_HINTS
        self.lives = LIVES
        self.score = 0
        self.deck = []
        self.all_cards = []
        # Highest rank successfully played per suit (0 = nothing played).
        self.board_state = {suit: 0 for suit in Suit}
        self.deck_size = 0
        self.played = []
        self.discarded = []
        self.history = []
        self.hands = []
        self.hands_history = []
        self.current_turn = 0
        self.player_turn = 0
        self.current_player_hand = None
        self.other_players_hands = None
        # Seat number of the player who drew the last card; everyone gets
        # exactly one more turn after that, then the game times out.
        self.game_over_timer = None
        self.game_over = False
        self.game_ended_by_timeout = False
        self.__prepare_game()
    def info(self, msg):
        """Log *msg* through the injected logger when logging is enabled."""
        if self.log:
            self.logger.info(msg)
    def __draw_card(self):
        """Pop and return the top deck card, or None when the deck is empty."""
        if self.deck_size != 0:
            self.deck_size -= 1
            if self.log and self.deck_size == 0:
                msg1 = 'Last card has been drawn,'
                msg2 = 'each player gets one more turn'
                self.info('{0} {1}'.format(msg1, msg2))
            return self.deck.pop()
        else:
            return None
    def __prepare_game(self):
        """Build and shuffle the deck, then deal the opening hands."""
        for rank in RANKS:
            for suit in Suit:
                self.deck.append(Card(rank, suit))
        self.all_cards = self.deck[:]
        self.deck_size = len(self.deck)
        shuffle(self.deck)
        # Standard Hanabi hand sizes: 5 cards for 2-3 players, 4 for 4-5.
        if self.number_of_players <= 3:
            hand_size = 5
        else:
            hand_size = 4
        for player_number in range(self.number_of_players):
            player_hand = Hand(player_number)
            for hand_position in range(hand_size):
                card = self.__draw_card()
                card.hand_position = hand_position
                card.drawn_on_turn = self.current_turn
                player_hand.add(card)
            self.hands.append(player_hand)
        self.hands_history.append(self.hands)
        self.info('Preparing game... Done.\n')
        self.info('Hands have been dealt as follows:')
        if self.log:
            for hand in self.hands:
                self.info('{0}: {1}'.format(
                    self.players[hand.player_number].name,
                    hand)
                )
        self.info('\nBeginning game...\n')
    def __is_inbounds(self, lst, index):
        """Return True when *index* is a valid position in *lst*."""
        return 0 <= index < len(lst)
    def __is_legal(self, move):
        """Validate the structure and legality of a player's move."""
        if type(move) is not ChoiceDetails:
            print('wrong move: ' + str(move))
        assert(type(move) is ChoiceDetails)
        if type(move.choice) is not Choice:
            print('wrong choice from move: ' + str(move))
        assert(type(move.choice) is Choice)
        choice = move.choice
        if choice is Choice.PLAY or choice is Choice.DISCARD:
            hand_position = move.details
            return self.__is_inbounds(self.current_player_hand, hand_position)
        elif choice is Choice.HINT:
            assert(type(move.details) is HintDetails)
            # Hinting is only legal while hint tokens remain.
            if self.hints > 0:
                player_number, hint = move.details
                assert(type(hint) is Rank or type(hint) is Suit)
                target_player_hand = get_player_hand_by_number(
                    self,
                    player_number
                )
                return target_player_hand is not None
        return False
    def __print_player_knowledge(self, number):
        """Log what player *number* currently knows about their own hand."""
        if self.log:
            player = self.players[number]
            hand = get_player_hand_by_number(self, number).current_knowledge()
            self.info('Current knowledge of {0}: {1}'.format(player, hand))
    def __print_current_hands(self):
        """Log each hand alongside its owner's knowledge of it."""
        if self.log:
            self.info('Current player hands and their knowledge:')
            for hand in self.hands:
                self.info('{0}: {1}'.format(
                    self.players[hand.player_number].name,
                    hand)
                )
                self.info('{0}: {1}'.format(
                    self.players[hand.player_number].name,
                    hand.current_knowledge())
                )
    def __print_current_board(self):
        """Log the highest played rank for every suit."""
        if self.log:
            self.info('Current board situation:')
            for suit in self.board_state:
                self.info('{0}: {1}'.format(
                    suit,
                    self.board_state[suit])
                )
    def make_move(self):
        """Ask the current player for a move, apply it and advance the turn.

        Returns a short human-readable summary of the action taken.
        """
        assert(self.lives != 0)
        assert(self.hints >= 0)
        if self.log and self.player_turn == 0:
            self.info('')
            self.__print_current_hands()
            self.__print_current_board()
            self.info('')
        # Work on copies so agents cannot mutate the real hands.
        player_hands = deepcopy(self.hands)
        self.current_player_hand = player_hands[self.player_turn]
        self.other_players_hands = [
            hand for hand in player_hands
            if hand.player_number != self.player_turn
        ]
        move = self.players[self.player_turn].play(RoundInfo(self))
        learning_player = self.players[self.player_turn].learning
        # __is_legal guarantees exactly one of the branches below runs,
        # so info_msg is always bound before the final return.
        assert(self.__is_legal(move))
        choice = move.choice
        if choice is Choice.PLAY:
            hand_position = move.details
            card = self.current_player_hand[hand_position]
            if card.is_playable(self):
                if self.save and self.analyzer is not None:
                    self.analyzer.analyze_turn('Correct Play', card, RoundInfo(self), learning_player)
                card.played_on_turn = self.current_turn
                card.hand_position = None
                self.history.append(PlayDetails(
                    choice, hand_position, card, self.deck_size))
                self.board_state[card.real_suit] += 1
                # Completing a five refunds a hint token.
                if card.real_rank is Rank.FIVE:
                    self.hints = min(self.hints + 1, MAX_HINTS)
                self.score += 1
                self.played.append(card)
                self.info(
                    '{0} correctly played {1}'.format(
                        self.players[self.player_turn],
                        card
                    )
                )
                info_msg = 'Played {0}'.format(card)
            else:
                if self.save and self.analyzer is not None:
                    self.analyzer.analyze_turn('Wrong Play', card, RoundInfo(self), learning_player)
                card.played_on_turn = self.current_turn
                card.hand_position = None
                card.misplayed = True
                self.history.append(PlayDetails(
                    choice, hand_position, card, self.deck_size))
                self.lives -= 1
                self.discarded.append(card)
                self.info(
                    '{0} misplayed {1}, {2} lives remaining'.format(
                        self.players[self.player_turn],
                        card,
                        self.lives
                    )
                )
                info_msg = 'Misplayed {0}'.format(card)
        if choice is Choice.DISCARD:
            hand_position = move.details
            card = self.current_player_hand[hand_position]
            if self.save and self.analyzer is not None:
                self.analyzer.analyze_turn('Discard', card, RoundInfo(self), learning_player)
            card.played_on_turn = self.current_turn
            card.discarded = True
            card.hand_position = None
            self.history.append(PlayDetails(
                choice, hand_position, card, self.deck_size))
            self.discarded.append(card)
            # Discarding refunds a hint token (capped at MAX_HINTS).
            self.hints = min(self.hints + 1, MAX_HINTS)
            self.info(
                '{0} discarded {1}, the number of hints is currently {2}'.format(
                    self.players[self.player_turn],
                    card,
                    self.hints
                )
            )
            info_msg = 'Discarded {0}'.format(card)
        if choice is Choice.HINT:
            player_number, hint = move.details
            if self.save and self.analyzer is not None:
                self.analyzer.analyze_turn('Hint', (player_number, hint), RoundInfo(self), learning_player)
            hand = get_player_hand_by_number(self, player_number)
            # Count how many cards this hint newly reveals information about.
            reveal_size = 0
            for card in hand:
                if card.real_rank is hint and card.revealed_rank is not hint:
                    reveal_size += 1
                if card.real_suit is hint and card.revealed_suit is not hint:
                    reveal_size += 1
            state = None
            if learning_player and self.save:
                state = self.learning_state.get_last_hint_state() + [self.current_turn, self.player_turn]
            for card in hand:
                card.reveal_info_from_hint(hint, reveal_size, state)
            self.history.append(PlayDetails(
                choice, move.details[0], move.details[1], self.deck_size))
            self.hints -= 1
            self.info(
                '{0} hinted {1} to {2}, {3} hints remaining'.format(
                    self.players[self.player_turn],
                    hint,
                    self.players[player_number],
                    self.hints
                )
            )
            info_msg = 'Hinted {0} to P{1}'.format(
                hint, player_number + 1)
        if self.lives == 0 or self.score == MAX_SCORE:
            self.game_over = True
        skip_timer = False
        # Play/discard consume a card, so draw a replacement.
        if choice is not Choice.HINT:
            new_card = self.__draw_card()
            if len(self.deck) == 0:
                if self.game_over_timer is None:
                    # Remember who drew the last card; the timeout check
                    # below is skipped on that same turn.
                    self.game_over_timer = self.player_turn
                    skip_timer = True
            if new_card is None:
                self.current_player_hand.discard(hand_position)
            else:
                new_card.drawn_on_turn = self.current_turn
                self.current_player_hand.replace(new_card, hand_position)
        new_hands = [self.current_player_hand] + self.other_players_hands
        new_hands.sort(key=lambda h: h.player_number)
        self.hands_history.append(new_hands)
        self.hands = new_hands
        if choice is Choice.HINT:
            self.__print_player_knowledge(player_number)
        # FIX: compare seat numbers with == (the original used ``is``,
        # which only works by accident for CPython's cached small ints).
        if self.game_over_timer == self.player_turn and not skip_timer:
            self.game_over = True
            self.game_ended_by_timeout = True
        if self.game_over:
            if self.save and self.analyzer is not None:
                self.analyzer.analyze_game(RoundInfo(self), self.score)
            if self.score == MAX_SCORE:  # FIX: was ``is MAX_SCORE``
                self.info('\nPerfect victory!')
            elif self.game_ended_by_timeout:
                self.info(
                    '\nNo cards left in the deck! Total points: {0}'.format(self.score))
            else:
                self.info(
                    '\nGame over! Total points: {0}'.format(self.score))
        self.player_turn = next_player_number(self, self.player_turn)
        if self.player_turn == 0:
            self.current_turn += 1
        return info_msg
    def is_game_over(self):
        """Return True once the game has finished (win, loss or timeout)."""
        return self.game_over
| StarcoderdataPython |
1838996 | <filename>model/Media.py
from shared import db
class Media(db.Model):
    """Uploaded media file; only a surrogate id and the file extension are stored."""
    # Auto-incrementing primary key.
    id = db.Column(db.Integer, primary_key=True)
    # File extension without the dot (max 3 chars, e.g. "png") — assumes no
    # 4-letter extensions like "jpeg" are needed; TODO confirm.
    ext = db.Column(db.String(3), nullable=False)
6634016 | <gh_stars>10-100
import signal
from concurrent import futures
import grpc
import service_pb2
import service_pb2_grpc
class ServerServicer(service_pb2_grpc.ServerServicer):
    """gRPC service implementation; Foo is a no-op that returns Empty."""
    def Foo(self, request, context):
        # Nothing to do — acknowledge with an empty message.
        return service_pb2.Empty()
def main():
    """Serve the TLS-secured gRPC endpoint on port 1337 until interrupted."""
    listen_address = '[::]:1337'
    # Load the server's TLS key pair from the working directory.
    with open('server.key', 'rb') as key_file:
        key_bytes = key_file.read()
    with open('server.crt', 'rb') as cert_file:
        cert_bytes = cert_file.read()
    credentials = grpc.ssl_server_credentials(((key_bytes, cert_bytes,),))
    grpc_server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
    service_pb2_grpc.add_ServerServicer_to_server(ServerServicer(), grpc_server)
    grpc_server.add_secure_port(listen_address, credentials)
    grpc_server.start()
    # Block until Ctrl-C, then shut down immediately.
    try:
        while True:
            signal.pause()
    except KeyboardInterrupt:
        pass
    grpc_server.stop(0)
# Start the server only when executed as a script (not on import).
if __name__ == '__main__':
    main()
| StarcoderdataPython |
5153369 | from .UnaryArithmeticOpNode import UnaryArithmeticOpNode
class SuffixOpNode(UnaryArithmeticOpNode):
    """AST node for a suffix unary arithmetic operator (presumably e.g. ``x++`` — confirm against the parser)."""
    def __init__(self,op,expr):
        # Delegates entirely to the unary-op base class.
        super().__init__(op,expr)
    def accept(self,visitor):
        # Visitor-pattern dispatch point.
        return visitor.visit(self)
9622118 | <filename>tests/test_lib.py
"""Unit tests for library integration."""
import pathlib
import toml
import pytest
import mkdocs_code_runner
from mkdocs_code_runner import lib
def test_findall(markdown: str) -> None:
    """Check that the JavaScript code is correctly found."""
    selector = "div.code-runner"
    wanted = "let x = 3;\nconsole.log(x + 1);"
    found = list(lib.findall(markdown, selector))[0]
    assert found == wanted
def test_findall_empty(markdown: str) -> None:
    """Check that no JavaScript script code is found."""
    # findall is lazy, so an empty result only surfaces as StopIteration
    # when the first item is pulled.
    query = "div.mock-class"
    with pytest.raises(StopIteration):
        next(lib.findall(markdown, query))
def test_runner(markdown: str) -> None:
    """Check that the JavaScript code is correctly appended to the markdown."""
    query = "div.code-runner"
    # Expected: the original markdown followed by the extracted code wrapped
    # in a window "load" listener inside a <script> tag.
    expected = markdown + (
        "\n<script>"
        '\nwindow.addEventListener("load", () => {'
        "\nlet x = 3;"
        "\nconsole.log(x + 1);"
        "\n});"
        "\n</script>"
    )
    actual = lib.runner(markdown, query)
    assert actual == expected
def test_version() -> None:
    """Check that all the version tags are in sync."""
    # pyproject.toml sits two directories above the installed package module.
    pyproject_path = (
        pathlib.Path(mkdocs_code_runner.__file__).parents[2] / "pyproject.toml"
    )
    expected = toml.load(pyproject_path)["tool"]["poetry"]["version"]
    actual = mkdocs_code_runner.__version__
    assert actual == expected
| StarcoderdataPython |
109077 | # Copyright 2022 iiPython
# Wrapper for the builtin Python socket module
# It includes basic encryption using Python cryptography
# Modules
import json
import socket
import base64
from typing import Any, List
from types import FunctionType
from copy import copy as copyobj
try:
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.kdf.hkdf import HKDF
except ImportError:
Fernet, hashes, serialization, ec, HKDF = None, None, None, None, None
# Initialization
def _wrap_obj(parent: object, obj: FunctionType, name: str, new: FunctionType) -> None:
setattr(parent, name, copyobj(obj))
setattr(parent, obj.__name__, new)
# Classes
class Socket(socket.socket):
    """Client socket that transparently encrypts traffic with Fernet.
    After connect() it performs an ECDH key exchange with the server and
    then monkey-patches its own send/recv; each message is encrypted and
    delimited by a trailing NUL byte.
    """
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        # Shadow connect() with a wrapper that also runs the handshake;
        # the raw connect stays reachable as self._sock_connect.
        _wrap_obj(self, self.connect, "_sock_connect", self._connect_wrap)
    def _connect_wrap(self, *args, **kwargs) -> None:
        # Establish the TCP connection, then negotiate encryption keys.
        self._sock_connect(*args, **kwargs)
        self.handshake()
    def _send_wrap(self, content: str) -> None:
        # Encrypt, then append the NUL message delimiter.
        payload = self._fernet.encrypt(content.encode("utf8"))
        self.sendall(payload + b"\x00")
    def _recv_wrap(self) -> List[str]:
        # Accumulate chunks until a NUL-terminated one arrives or the peer closes.
        data = b""
        while self:
            chnk = self._sock_recv(2048)
            data += chnk
            if not chnk or chnk[-1:] == b"\x00":
                break
        if not data:
            return []
        # One read may contain several NUL-delimited messages.
        return [self._fernet.decrypt(msg).decode("utf8") for msg in data.split(b"\x00") if msg]
    def sendjson(self, data: dict) -> None:
        # Serialize to JSON and send through the encrypted channel.
        self._send_wrap(json.dumps(data))
    def recvjson(self) -> List[Any]:
        try:
            return [json.loads(msg) for msg in self._recv_wrap()]
        except TypeError:
            raise OSError  # Connection error, drop connection
    def handshake(self) -> None:
        # ECDH on SECP384R1: receive the server's public key first, derive
        # the shared secret via HKDF-SHA256, then send our public key back.
        self._private_key = ec.generate_private_key(ec.SECP384R1())
        server_public = serialization.load_pem_public_key(self.recv(2048))
        self._shared_key = HKDF(
            algorithm = hashes.SHA256(),
            length = 32,
            salt = None,
            info = None
        ).derive(self._private_key.exchange(ec.ECDH(), server_public))
        self.sendall(
            self._private_key.public_key().public_bytes(
                encoding = serialization.Encoding.PEM,
                format = serialization.PublicFormat.SubjectPublicKeyInfo
            )
        )
        # Fernet expects a urlsafe-base64-encoded 32-byte key.
        self._fernet = Fernet(base64.urlsafe_b64encode(self._shared_key))
        # From here on, send()/recv() transparently encrypt/decrypt.
        _wrap_obj(self, self.send, "_sock_send", self._send_wrap)
        _wrap_obj(self, self.recv, "_sock_recv", self._recv_wrap)
class Connection(object):
    """Server-side wrapper around an accepted socket.
    Mirrors Socket: sends its public key first, derives the shared Fernet
    key, and exposes NUL-delimited encrypted send/recv helpers.
    """
    def __init__(self, sock: socket.socket) -> None:
        self.sock = sock
        self.handshake()
    def handshake(self) -> None:
        # Server side of the ECDH exchange: send our public key, then
        # receive the client's and derive the shared secret via HKDF-SHA256.
        self._private_key = ec.generate_private_key(ec.SECP384R1())
        self.sock.sendall(
            self._private_key.public_key().public_bytes(
                encoding = serialization.Encoding.PEM,
                format = serialization.PublicFormat.SubjectPublicKeyInfo
            )
        )
        client_public = serialization.load_pem_public_key(self.sock.recv(2048))
        self._shared_key = HKDF(
            algorithm = hashes.SHA256(),
            length = 32,
            salt = None,
            info = None
        ).derive(self._private_key.exchange(ec.ECDH(), client_public))
        # Fernet expects a urlsafe-base64-encoded 32-byte key.
        self._fernet = Fernet(base64.urlsafe_b64encode(self._shared_key))
    def send(self, content: str) -> None:
        # Encrypt and append the NUL message delimiter.
        payload = self._fernet.encrypt(content.encode("utf8"))
        self.sock.sendall(payload + b"\x00")
    def sendjson(self, data: dict) -> None:
        self.send(json.dumps(data))
    def recv(self) -> List[str]:
        # Accumulate chunks until a NUL-terminated one arrives or the peer closes.
        data = b""
        while self.sock:
            chnk = self.sock.recv(2048)
            data += chnk
            if not chnk or chnk[-1:] == b"\x00":
                break
        if not data:
            return []
        return [self._fernet.decrypt(msg).decode("utf8") for msg in data.split(b"\x00") if msg]
    def recvjson(self) -> List[Any]:
        try:
            return [json.loads(msg) for msg in self.recv()]
        except TypeError:
            raise OSError  # Connection error, drop connection
| StarcoderdataPython |
1646285 | <reponame>mv/ynab-py-scripts
#!/usr/bin/env python3
""" YNAB: creting CSV files to be imported into YNAB 4 Classic
Usage:
ynab <filename> [options]
ynab <filename> [<filename>...] [-o | -i ] [-v | --verbose]
ynab --version
ynab -h | --help
Options:
-o --ofx Type: ofx file (default).
-i --inter-credit-card-csv Type: Banco Inter - Credit Card CSV
-v --verbose Verbose
--version Show version.
"""
from docopt import docopt
import sys
import ofx
# Blank line before any CLI output.
print()
# print('=' * 60)
# print('argv: ', sys.argv)
# print('=' * 60)
if __name__ == '__main__':
    # Parse CLI arguments against the module docstring (docopt).
    args = docopt(__doc__, version='0.0.1')
    # NOTE(review): only the OFX path is wired up; the --inter-credit-card-csv
    # flag advertised in the usage string is not handled here — confirm.
    ofx.ofx(args['<filename>'])
    # print(args)
    # print('=' * 60)
    print()
| StarcoderdataPython |
5135430 | <gh_stars>0
# Copyright (c) 2016 The UUV Simulator Authors.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import scipy
import numpy as np
from copy import deepcopy
from ..waypoint import Waypoint
from ..waypoint_set import WaypointSet
from ..trajectory_point import TrajectoryPoint
from tf.transformations import quaternion_multiply, quaternion_inverse, quaternion_from_euler, quaternion_conjugate, quaternion_about_axis
class PathGenerator(object):
    """
    Abstract class to be inherited by custom path generators that
    interpolate a set of waypoints into a continuous trajectory.

    Subclasses set LABEL, implement init_interpolator()/get_samples() and
    the generate_* methods, and populate self._interp_fcns.
    """
    LABEL = ''
    def __init__(self, full_dof=False):
        # Waypoint set
        self._waypoints = None
        # True if the path is generated for all degrees of freedom, otherwise
        # the path will be generated for (x, y, z, yaw) only
        self._is_full_dof = full_dof
        # The parametric variable to use as input for the interpolator
        self._s = list()
        self._cur_s = 0
        self._s_step = 0.0001
        self._start_time = None
        self._duration = None
    @staticmethod
    def get_generator(name, *args, **kwargs):
        """Instantiate the subclass whose LABEL matches *name*."""
        for gen in PathGenerator.__subclasses__():
            if name == gen.LABEL:
                return gen(*args, **kwargs)
        # FIX: the original evaluated ``rospy.ROSException(...)`` without
        # importing rospy and without raising, which produced a NameError at
        # runtime; raise a proper exception instead.
        raise ValueError('Invalid path generator method')
    @staticmethod
    def get_all_generators():
        """Return one instance of every registered generator subclass."""
        generators = list()
        for gen in PathGenerator.__subclasses__():
            generators.append(gen())
        return generators
    @property
    def waypoints(self):
        """Current waypoint set (may be None before initialization)."""
        return self._waypoints
    @property
    def max_time(self):
        """Absolute end time of the path (start time plus duration)."""
        return self._duration + self._start_time
    @property
    def duration(self):
        return self._duration
    @duration.setter
    def duration(self, t):
        assert t > 0, 'Duration must be a positive value'
        self._duration = t
    @property
    def start_time(self):
        return self._start_time
    @start_time.setter
    def start_time(self, time):
        assert time >= 0, 'Invalid negative time'
        self._start_time = time
    @property
    def closest_waypoint(self):
        """Return the closest waypoint to the current position on the path."""
        return self._waypoints.get_waypoint(self.closest_waypoint_idx)
    @property
    def closest_waypoint_idx(self):
        """
        Return the index of the closest waypoint to the current position on the
        path.
        """
        if self._cur_s == 0:
            return 0
        if self._cur_s == 1:
            return len(self._s) - 1
        # FIX: use the absolute difference; argmin over the signed difference
        # returned the most-negative entry, not the closest one.
        v = np.abs(np.array(self._s) - self._cur_s)
        idx = np.argmin(v)
        return idx
    @property
    def s_step(self):
        return self._s_step
    @s_step.setter
    def s_step(self, step):
        assert 0 < step < 1
        self._s_step = step
    def reset(self):
        """Reset the parametric state so the generator can be reused."""
        self._s = list()
        self._cur_s = 0
        self._s_step = 0.0001
        self._start_time = None
        self._duration = None
    def is_full_dof(self):
        return self._is_full_dof
    def set_full_dof(self, flag):
        self._is_full_dof = flag
    def get_label(self):
        return self.LABEL
    def init_interpolator(self):
        raise NotImplementedError()
    def get_samples(self, max_time, step=0.005):
        raise NotImplementedError()
    def add_waypoint(self, waypoint, add_to_beginning=False):
        """Add waypoint to the existing waypoint set. If no waypoint set has
        been initialized, create new waypoint set structure and add the given
        waypoint."""
        if self._waypoints is None:
            self._waypoints = WaypointSet()
        self._waypoints.add_waypoint(waypoint, add_to_beginning)
        return self.init_interpolator()
    def init_waypoints(self, waypoints=None):
        """Copy *waypoints* (if given) and rebuild the interpolator.

        Returns False when no waypoint set is available, otherwise the
        result of init_interpolator().
        """
        if waypoints is not None:
            self._waypoints = deepcopy(waypoints)
        if self._waypoints is None:
            print('Waypoint list has not been initialized')
            return False
        # The interpolator needs at least 4 points. If only two points
        # were given, add intermediate points along the straight segment.
        if self._waypoints.num_waypoints == 2:
            wp_first = self._waypoints.get_waypoint(0)
            wp_last = self._waypoints.get_waypoint(1)
            x_ls = np.linspace(wp_first.x, wp_last.x, 5)
            y_ls = np.linspace(wp_first.y, wp_last.y, 5)
            z_ls = np.linspace(wp_first.z, wp_last.z, 5)
            # FIX: the original multiplied by an undefined name ``i``
            # (NameError); linspace already yields the interpolated points.
            # NOTE(review): the first/last linspace points duplicate the two
            # existing waypoints — confirm duplicates are acceptable for the
            # interpolator.
            for x, y, z in zip(x_ls, y_ls, z_ls):
                wp = Waypoint(x, y, z,
                              max_forward_speed=wp_last.max_forward_speed,
                              heading_offset=wp_last.heading_offset)
                self._waypoints.add_waypoint(wp)
        return self.init_interpolator()
    def interpolate(self, tag, s):
        # _interp_fcns is populated by the concrete subclass.
        return self._interp_fcns[tag](s)
    def generate_pnt(self, s):
        raise NotImplementedError()
    def generate_pos(self, s):
        raise NotImplementedError()
    def generate_quat(self, s):
        raise NotImplementedError()
    def _compute_rot_quat(self, dx, dy, dz):
        """Build the orientation quaternion from a path tangent vector.

        Yaw always follows the tangent; pitch is added only in full-DOF mode.
        """
        rotq = quaternion_about_axis(
            np.arctan2(dy, dx),
            [0, 0, 1])
        if self._is_full_dof:
            rote = quaternion_about_axis(
                -1 * np.arctan2(dz, np.sqrt(dx**2 + dy**2)),
                [0, 1, 0])
            rotq = quaternion_multiply(rotq, rote)
        return rotq
| StarcoderdataPython |
341157 | <reponame>gbv/mc2skos
#!/usr/bin/env python
# encoding=utf8
#
# Script to convert MARC 21 Classification records
# (serialized as MARCXML) to SKOS concepts. See
# README.md for more information.
import sys
import re
import time
import warnings
from datetime import datetime
from iso639 import languages
import argparse
from rdflib.namespace import OWL, RDF, SKOS, DCTERMS, XSD, Namespace
from rdflib import URIRef, Literal, Graph, BNode
from otsrdflib import OrderedTurtleSerializer
import json
import rdflib_jsonld.serializer as json_ld
import pkg_resources
import skosify
import logging
import logging.handlers
from . import __version__
from .constants import Constants
from .element import Element
from .record import InvalidRecordError, ClassificationRecord, AuthorityRecord
from .reader import MarcFileReader
from .vocabularies import Vocabularies
# Route warnings through the logging system and always show DeprecationWarning.
logging.captureWarnings(True)
warnings.simplefilter('always', DeprecationWarning)
# Root logger with a single console handler; verbosity is adjusted in main().
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('[%(asctime)s %(levelname)s] %(message)s')
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.DEBUG)
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
# Non-standard namespaces used in the output graph.
WD = Namespace('http://data.ub.uio.no/webdewey-terms#')
MADS = Namespace('http://www.loc.gov/mads/rdf/v1#')
def add_record_to_graph(graph, record, options):
    """Serialize one parsed MARC record into SKOS triples on *graph*.

    *record* is a ClassificationRecord/AuthorityRecord; *options* is the
    dict of output flags assembled in main().
    """
    # Add record to graph
    # logger.debug('Adding: %s', record.uri)
    # Strictly, we do not need to explicitly state here that <A> and <B> are instances
    # of skos:Concept, because such statements are entailed by the definition
    # of skos:semanticRelation.
    record_uri = URIRef(record.uri)
    graph.add((record_uri, RDF.type, SKOS.Concept))
    # Add skos:topConceptOf or skos:inScheme
    for scheme_uri in record.scheme_uris:
        if record.is_top_concept:
            graph.add((record_uri, SKOS.topConceptOf, URIRef(scheme_uri)))
        else:
            graph.add((record_uri, SKOS.inScheme, URIRef(scheme_uri)))
    if record.created is not None:
        graph.add((record_uri, DCTERMS.created, Literal(record.created.strftime('%F'), datatype=XSD.date)))
    if record.modified is not None:
        graph.add((record_uri, DCTERMS.modified, Literal(record.modified.strftime('%F'), datatype=XSD.date)))
    # Add classification number as skos:notation
    if record.notation:
        if record.record_type == Constants.TABLE_RECORD:  # NOTE: check add-table handling (translated from Norwegian)
            graph.add((record_uri, SKOS.notation, Literal('T' + record.notation)))
        else:
            graph.add((record_uri, SKOS.notation, Literal(record.notation)))
    # Add local control number as dcterms:identifier
    if record.control_number:
        graph.add((record_uri, DCTERMS.identifier, Literal(record.control_number)))
    # Add caption as skos:prefLabel
    if record.prefLabel:
        graph.add((record_uri, SKOS.prefLabel, Literal(record.prefLabel, lang=record.lang)))
    elif options.get('include_webdewey') and len(record.altLabel) != 0:
        # If the --webdewey flag is set, we will use the first index term as prefLabel
        caption = record.altLabel.pop(0)['term']
        if len(record.altLabel) != 0:
            caption = caption + ', …'
        graph.add((record_uri, SKOS.prefLabel, Literal(caption, lang=record.lang)))
    # Add index terms as skos:altLabel
    if options.get('include_altlabels'):
        for label in record.altLabel:
            graph.add((record_uri, SKOS.altLabel, Literal(label['term'], lang=record.lang)))
    # Add relations (SKOS:broader, SKOS:narrower, SKOS:xxxMatch, etc.)
    for relation in record.relations:
        if relation.get('uri') is not None:
            graph.add((record_uri, relation.get('relation'), URIRef(relation['uri'])))
    # Add notes
    if not options.get('exclude_notes'):
        for note in record.definition:
            graph.add((record_uri, SKOS.definition, Literal(note, lang=record.lang)))
        for note in record.note:
            graph.add((record_uri, SKOS.note, Literal(note, lang=record.lang)))
        for note in record.editorialNote:
            graph.add((record_uri, SKOS.editorialNote, Literal(note, lang=record.lang)))
        for note in record.scopeNote:
            graph.add((record_uri, SKOS.scopeNote, Literal(note, lang=record.lang)))
        for note in record.historyNote:
            graph.add((record_uri, SKOS.historyNote, Literal(note, lang=record.lang)))
        for note in record.changeNote:
            graph.add((record_uri, SKOS.changeNote, Literal(note, lang=record.lang)))
        for note in record.example:
            graph.add((record_uri, SKOS.example, Literal(note, lang=record.lang)))
    # Deprecated?
    if record.deprecated:
        graph.add((record_uri, OWL.deprecated, Literal(True)))
    # Add synthesized number components as an ordered mads:componentList
    # (an RDF collection built from blank nodes).
    if options.get('include_components') and len(record.components) != 0:
        component = record.components.pop(0)
        component_uri = URIRef(record.scheme.uri('concept', collection='class', object=component))
        b1 = BNode()
        graph.add((record_uri, MADS.componentList, b1))
        graph.add((b1, RDF.first, component_uri))
        for component in record.components:
            component_uri = URIRef(record.scheme.uri('concept', collection='class', object=component))
            b2 = BNode()
            graph.add((b1, RDF.rest, b2))
            graph.add((b2, RDF.first, component_uri))
            b1 = b2
        graph.add((b1, RDF.rest, RDF.nil))
    # Add webDewey extras
    if options.get('include_webdewey'):
        for key, values in record.webDeweyExtras.items():
            for value in values:
                graph.add((record_uri, WD[key], Literal(value, lang=record.lang)))
def process_record(graph, rec, **kwargs):
    """Convert a single MARC21 classification or authority record to RDF."""
    element = Element(rec)
    leader = element.text('mx:leader')
    if leader is None:
        raise InvalidRecordError('Record does not have a leader',
                                 control_number=element.text('mx:controlfield[@tag="001"]'))
    # Leader position 6 encodes the record type: 'w' = classification,
    # 'z' = authority.
    record_kind = leader[6]
    if record_kind == 'w':
        if kwargs.get('skip_classification'):
            return
        parsed = ClassificationRecord(element, kwargs)
    elif record_kind == 'z':
        if kwargs.get('skip_authority'):
            return
        parsed = AuthorityRecord(element, kwargs)
    else:
        raise InvalidRecordError('Record is not a Marc21 Classification or Authority record',
                                 control_number=element.text('mx:controlfield[@tag="001"]'))
    if parsed.is_public(add_table_numbers=kwargs.get('add_table_numbers')):
        add_record_to_graph(graph, parsed, kwargs)
def process_records(records, graph=None, **options):
    """Convert an iterable of MARC21 records into (and return) an RDF graph."""
    if graph is None:
        graph = Graph()
    # Invalid records are logged and skipped rather than aborting the run.
    for position, record in enumerate(records, start=1):
        try:
            process_record(graph, record, **options)
        except InvalidRecordError as err:
            record_id = err.control_number or '#%d' % position
            logger.warning('Ignoring record %s: %s', record_id, err)
    if options.get('expand'):
        logger.info('Expanding RDF via basic SKOS inference')
        skosify.infer.skos_related(graph)
        skosify.infer.skos_topConcept(graph)
        skosify.infer.skos_hierarchical(graph, narrower=True)
    if options.get('skosify'):
        logger.info('Running Skosify with config file %s', options['skosify'])
        config = skosify.config(options['skosify'])
        graph = skosify.skosify(graph, **config)
    return graph
def main():
    """CLI entry point: parse arguments, convert the input file, serialize."""
    parser = argparse.ArgumentParser(description='Convert MARC21 Classification to SKOS/RDF')
    parser.add_argument('infile', nargs='?', help='Input XML file')
    parser.add_argument('outfile', nargs='?', help='Output RDF file')
    parser.add_argument('--version', action='version', version='%(prog)s ' + __version__)
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', help='More verbose output')
    parser.add_argument('-o', '--outformat', dest='outformat', metavar='FORMAT', nargs='?',
                        help='Output format: turtle (default), jskos, or ndjson')
    parser.add_argument('--include', action='append', dest='include', default=[],
                        help='RDF file(s) to include in the output (e.g. to define a concept scheme). '
                             'Must be the same format as {outformat}.')
    parser.add_argument('--uri', dest='base_uri', help='Concept URI template. See vocabularies.yml for examples.')
    parser.add_argument('--scheme', dest='scheme',
                        help='Concept scheme, either an URI or a key from vocabularies.yml (For a list: mc2skos --list).')
    parser.add_argument('--whitespace', dest='whitespace', metavar='STRING',
                        help='Replace whitespaces in URI templates with this.')
    parser.add_argument('--altlabels', '--indexterms', dest='altlabels', action='store_true',
                        help='Include altlabels (from 7XX or 4XX).')
    parser.add_argument('--notes', dest='notes', action='store_true',
                        help='Include note fields (DEPRECATED as including notes is now the default).')
    parser.add_argument('--exclude_notes', dest='exclude_notes', action='store_true',
                        help='Exclude note fields.')
    parser.add_argument('--components', dest='components', action='store_true',
                        help='Include component information from 765.')
    parser.add_argument('--webdewey', dest='webdewey', action='store_true',
                        help='Include non-standard WebDewey notes from 680.')
    parser.add_argument('--add-table-numbers', dest='add_table_numbers', action='store_true',
                        help='Include add table numbers (notations with a colon)')
    parser.add_argument('--skip-classification', dest='skip_classification', action='store_true',
                        help='Skip classification records')
    parser.add_argument('--skip-authority', dest='skip_authority', action='store_true',
                        help='Skip authority records')
    parser.add_argument('--expand', dest='expand', action='store_true',
                        help='Use Skosify to infer skos:hasTopConcept, skos:narrower and skos:related')
    parser.add_argument('--skosify', dest='skosify',
                        help='Run Skosify with given configuration file')
    parser.add_argument('-l', '--list-schemes', dest='list_schemes', action='store_true',
                        help='List default concept schemes.')
    args = parser.parse_args()
    # --notes is kept for backwards compatibility only.
    if args.notes:
        warnings.warn('--notes is deprecated as including notes is now the default. '
                      'The inverse option --exclude_notes has been added to exclude notes.',
                      DeprecationWarning)
    # Load the bundled vocabulary definitions and apply CLI overrides.
    with pkg_resources.resource_stream(__name__, 'vocabularies.yml') as fp:
        vocabularies = Vocabularies()
        vocabularies.load_yaml(fp)
    vocabularies.set_default_scheme(
        generic=args.base_uri,
        scheme=args.scheme,
        whitespace=args.whitespace
    )
    if args.list_schemes:
        print('Schemes:')
        for voc in vocabularies:
            print('- %s' % voc)
        return
    # Infer the output format from the outfile extension when not given.
    supported_formats = ['turtle', 'jskos', 'ndjson']
    if not args.outformat and args.outfile:
        ext = args.outfile.rpartition('.')[-1]
        if ext in supported_formats:
            args.outformat = ext
    if not args.outformat:
        args.outformat = 'turtle'
    elif args.outformat not in supported_formats:
        raise ValueError("Format not supported, must be one of '%s'." % "', '".join(supported_formats))
    # Pre-load any --include files into the graph before conversion.
    graph = Graph()
    for filename in args.include:
        if args.outformat == 'turtle':
            graph.load(filename, format='turtle')
        else:
            graph.load(filename, format='json-ld')
    nm = graph.namespace_manager
    nm.bind('dcterms', DCTERMS)
    nm.bind('skos', SKOS)
    nm.bind('wd', WD)
    nm.bind('mads', MADS)
    nm.bind('owl', OWL)
    if args.verbose:
        console_handler.setLevel(logging.DEBUG)
    else:
        console_handler.setLevel(logging.INFO)
    if args.infile is None:
        raise ValueError('Filename not specified')
    options = {
        'include_altlabels': args.altlabels,
        'exclude_notes': args.exclude_notes,
        'include_components': args.components,
        'include_webdewey': args.webdewey,
        'add_table_numbers': args.add_table_numbers,
        'skip_classification': args.skip_classification,
        'skip_authority': args.skip_authority,
        'expand': args.expand,
        'skosify': args.skosify,
        'vocabularies': vocabularies
    }
    marc = MarcFileReader(args.infile)
    graph = process_records(marc.records(), graph, **options)
    if not graph:
        logger.warning('RDF result is empty!')
        return
    # Serialize to the chosen destination (file, or stdout when '-' / absent).
    if args.outfile and args.outfile != '-':
        out_file = open(args.outfile, 'wb')
    else:
        if (sys.version_info > (3, 0)):
            out_file = sys.stdout.buffer
        else:
            out_file = sys.stdout
    if args.outformat == 'turtle':
        # @TODO: Perhaps use OrderedTurtleSerializer if available, but fallback to default Turtle serializer if not?
        serializer = OrderedTurtleSerializer(graph)
        serializer.class_order = [
            SKOS.ConceptScheme,
            SKOS.Concept,
        ]
        serializer.sorters = [
            (r'/([0-9A-Z\-]+)--([0-9.\-;:]+)/e', lambda x: 'C{}--{}'.format(x[0], x[1])),  # table numbers
            (r'/([0-9.\-;:]+)/e', lambda x: 'B' + x[0]),  # standard schedule numbers
            (r'^(.+)$', lambda x: 'A' + x[0]),  # fallback
        ]
        serializer.serialize(out_file)
    elif args.outformat in ['jskos', 'ndjson']:
        s = pkg_resources.resource_string(__name__, 'jskos-context.json').decode('utf-8')
        context = json.loads(s)
        jskos = json_ld.from_rdf(graph, context)
        if args.outformat == 'jskos':
            # Single JSON document with one shared @context.
            jskos['@context'] = u'https://gbv.github.io/jskos/context.json'
            out_file.write(json.dumps(jskos, sort_keys=True, indent=2).encode('utf-8'))
        else:
            # Newline-delimited JSON: one record per line, each with @context.
            for record in jskos['@graph'] if '@graph' in jskos else [jskos]:
                record['@context'] = u'https://gbv.github.io/jskos/context.json'
                out_file.write(json.dumps(record, sort_keys=True).encode('utf-8') + b'\n')
    if args.outfile and args.outfile != '-':
        logger.info('Wrote %s: %s' % (args.outformat, args.outfile))
| StarcoderdataPython |
3350691 | <reponame>TriggeredMessaging/django-mongoengine
def resolve_callables(mapping):
    """
    Yield (key, value) pairs for *mapping*, calling any value that is
    callable and yielding its result in place of the callable itself.
    """
    # backport from django-3.2
    # after we drop support for django below 3.2, this can be removed
    for key, value in mapping.items():
        if callable(value):
            yield key, value()
        else:
            yield key, value
| StarcoderdataPython |
6696331 | # encoding: utf-8
import ckan.authz as authz
def package_patch(context, data_dict):
    """Patching a package requires the same permission as updating it."""
    required_action = 'package_update'
    return authz.is_authorized(required_action, context, data_dict)
def resource_patch(context, data_dict):
    """Patching a resource requires the same permission as updating it."""
    required_action = 'resource_update'
    return authz.is_authorized(required_action, context, data_dict)
def group_patch(context, data_dict):
    """Patching a group requires the same permission as updating it."""
    required_action = 'group_update'
    return authz.is_authorized(required_action, context, data_dict)
def organization_patch(context, data_dict):
    """Patching an organization requires the same permission as updating it."""
    required_action = 'organization_update'
    return authz.is_authorized(required_action, context, data_dict)
| StarcoderdataPython |
# Print the lesson instructions, one line per step.
for instruction in (
    "In this lesson take each keyword and first try to write out what it does from memory.",
    "Next, search online for it and see what it really does.",
):
    print(instruction)
| StarcoderdataPython |
5104776 | <gh_stars>0
from challenges.models.challenge import Challenge
from django.db import models
from django.utils import timezone
from challenges.models import Challenge
from accounts.models import Profile
class Submission(models.Model):
    """A single answer attempt by a user against a challenge.

    ``correct`` records whether the attempt solved the challenge.
    ``timestamp`` defaults to the save time via the ``timezone.now``
    callable (evaluated per row, not at import).
    """
    # Reverse accessors: challenge.solves / profile.submissions.
    challenge = models.ForeignKey(Challenge, on_delete=models.CASCADE, related_name='solves')
    user = models.ForeignKey(Profile, on_delete=models.CASCADE, related_name='submissions')
    timestamp = models.DateTimeField(default=timezone.now)
    correct = models.BooleanField(default=False)
    class Meta:
        verbose_name = "Submission"
        verbose_name_plural = "Submissions"
    def __str__(self):
        # NOTE(review): this says "solved" even when ``correct`` is False —
        # confirm the wording is intentional.
        return "{} solved {} at {}".format(self.user, self.challenge, self.timestamp)
8093465 | <filename>TableGeneration/tools.py<gh_stars>1-10
from PIL import Image
from io import BytesIO
import urllib.parse
def html_to_img(driver, html_content, id_count):
    '''Render *html_content* in *driver* and return the screenshot plus cell boxes.

    The HTML is loaded via a percent-encoded ``data:`` URL.  Elements with
    ids ``c0`` .. ``c<id_count - 1>`` are located and their text and pixel
    bounding box are collected.

    :param driver: selenium webdriver, already sized to the desired viewport
    :param html_content: raw HTML string (percent-encoded here before loading)
    :param id_count: number of cell elements to inspect
    :return: tuple ``(image, bboxes)`` where *image* is the screenshot cropped
        to the reported window size and *bboxes* is a list of
        ``[text_length, text, xmin, ymin, xmax, ymax]`` entries
    '''
    html_content = urllib.parse.quote(html_content)
    driver.get("data:text/html;charset=utf-8," + html_content)
    window_size = driver.get_window_size()
    max_height, max_width = window_size['height'], window_size['width']
    bboxes = []
    # Bug fix: the previous version looked up element 'c0' once before the
    # loop and discarded the result; the loop below already starts at id 0.
    for id in range(id_count):
        e = driver.find_element_by_id('c' + str(id))
        txt = e.text.strip()
        lentext = len(txt)
        loc = e.location
        size_ = e.size
        xmin = loc['x']
        ymin = loc['y']
        xmax = int(size_['width'] + xmin)
        ymax = int(size_['height'] + ymin)
        bboxes.append([lentext, txt, xmin, ymin, xmax, ymax])
    png = driver.get_screenshot_as_png()
    im = Image.open(BytesIO(png))
    # Crop to the nominal window size — presumably to discard any extra
    # pixels the screenshot may contain beyond the reported window
    # dimensions (TODO confirm on hi-DPI displays).
    im = im.crop((0, 0, max_width, max_height))
    return im, bboxes
4963677 | # -*- coding: utf-8 -*-
"""Flask integration that avoids the need to hard-code URLs for links.
This includes a Flask-specific schema with custom Meta options and a
relationship field for linking to related resources.
"""
from __future__ import absolute_import
import flask
from werkzeug.routing import BuildError
from .fields import Relationship as GenericRelationship
from .schema import Schema as DefaultSchema, SchemaOpts as DefaultOpts
from .utils import resolve_params
class SchemaOpts(DefaultOpts):
    """Options to use Flask view names instead of hard coding URLs."""

    def __init__(self, meta, *args, **kwargs):
        # Reject the URL-based Meta options: with the Flask extension the
        # self links must be given as view names plus kwargs instead.
        # (Bug fix: the first message was missing "when", unlike its siblings.)
        if getattr(meta, 'self_url', None):
            raise ValueError('Use `self_view` instead of `self_url` '
                             'when using the Flask extension.')
        if getattr(meta, 'self_url_kwargs', None):
            raise ValueError('Use `self_view_kwargs` instead of `self_url_kwargs` '
                             'when using the Flask extension.')
        if getattr(meta, 'self_url_many', None):
            raise ValueError('Use `self_view_many` instead of `self_url_many` '
                             'when using the Flask extension.')
        # self_view_kwargs is meaningless without a self_view to apply it to.
        if getattr(meta, 'self_view_kwargs', None) \
                and not getattr(meta, 'self_view', None):
            raise ValueError('Must specify `self_view` Meta option when '
                             '`self_view_kwargs` is specified.')

        # Transfer Flask options to URL options, to piggy-back on the base
        # class's handling of self links.
        setattr(meta, 'self_url', getattr(meta, 'self_view', None))
        setattr(meta, 'self_url_kwargs', getattr(meta, 'self_view_kwargs', None))
        setattr(meta, 'self_url_many', getattr(meta, 'self_view_many', None))
        super(SchemaOpts, self).__init__(meta, *args, **kwargs)
class Schema(DefaultSchema):
    """A Flask-specific schema that resolves self URLs from view names."""
    OPTIONS_CLASS = SchemaOpts

    class Meta:
        """Takes the same options as `marshmallow-jsonapi.Schema`, except the
        ``self_url``, ``self_url_kwargs`` and ``self_url_many`` options are
        replaced by Flask-view equivalents:

        * ``self_view`` - View name to resolve the self URL link from.
        * ``self_view_kwargs`` - Replacement fields for ``self_view``. String
          attributes enclosed in ``< >`` will be interpreted as attributes to
          pull from the schema data.
        * ``self_view_many`` - View name to resolve the self URL link when a
          collection of resources is returned.
        """
        pass

    def generate_url(self, view_name, **kwargs):
        """Generate a URL for *view_name* with *kwargs* interpolated."""
        if not view_name:
            return None
        return flask.url_for(view_name, **kwargs)
class Relationship(GenericRelationship):
    """Field which serializes to a "relationship object" with a
    "related resource link", resolved from Flask view names.

    See: http://jsonapi.org/format/#document-resource-object-relationships

    Examples: ::

        author = Relationship(
            related_view='author_detail',
            related_view_kwargs={'author_id': '<author.id>'},
        )

        comments = Relationship(
            related_view='posts_comments',
            related_view_kwargs={'post_id': '<id>'},
            many=True, include_resource_linkage=True,
            type_='comments'
        )

    This field is read-only by default.

    :param str related_view: View name for related resource link.
    :param dict related_view_kwargs: Path kwargs fields for `related_view`.
        String arguments enclosed in `< >` are attributes pulled from the
        target object.
    :param str self_view: View name for self relationship link.
    :param dict self_view_kwargs: Path kwargs for `self_view`. String
        arguments enclosed in `< >` are attributes pulled from the target
        object.
    :param \*\*kwargs: Same keyword arguments as
        `marshmallow_jsonapi.fields.Relationship`.
    """
    def __init__(
        self,
        related_view=None, related_view_kwargs=None,
        self_view=None, self_view_kwargs=None,
        **kwargs
    ):
        self.related_view = related_view
        self.related_view_kwargs = related_view_kwargs or {}
        self.self_view = self_view
        self.self_view_kwargs = self_view_kwargs or {}
        super(Relationship, self).__init__(**kwargs)

    def get_url(self, obj, view_name, view_kwargs):
        """Build the URL for *view_name*, resolving ``< >`` placeholders in
        *view_kwargs* against *obj*; return ``None`` when no view is set."""
        if not view_name:
            return None
        params = resolve_params(obj, view_kwargs, default=self.default)
        params['endpoint'] = view_name
        try:
            return flask.url_for(**params)
        except BuildError:
            # An unresolved (None) parameter most likely means the
            # relationship itself is empty; treat that as "no link".
            if None in params.values():
                return None
            raise

    def get_related_url(self, obj):
        return self.get_url(obj, self.related_view, self.related_view_kwargs)

    def get_self_url(self, obj):
        return self.get_url(obj, self.self_view, self.self_view_kwargs)
| StarcoderdataPython |
6421024 | from enum import Enum
import os
from robonaldo.context.game import GameContext
from robonaldo.context.updater import ContextUpdater
from robonaldo.context.robot import Robot
from robonaldo.controller import RobotController
from robonaldo.log import Logger, LogLevel
from robonaldo.utils import Singleton
from typing import List
import sys
class RobotStrategy:
    """Base class for robot strategies; subclasses override the hook
    methods below (the base implementations are no-ops).
    """
    def __init__(self, id: str):
        # Name-mangled so the identifier is only exposed via the property.
        self.__id = id
    @property
    def id(self) -> str:
        """Unique identifier of this strategy."""
        return self.__id
    def update(self, robot: Robot, ctx: GameContext, controller: RobotController) -> None:
        """Per-tick hook; default does nothing."""
        pass
    def activate_on(self, ctx: GameContext) -> List[Robot]:
        """Robots this strategy should apply to; ``None`` by default."""
        return None
    def override(self, robot: Robot, ctx: GameContext) -> List[str]:
        """Ids of strategies overridden for *robot*; none by default."""
        return []
class StrategyManager(metaclass = Singleton):
    """Singleton registry of RobotStrategy instances.

    ``strategies`` and ``enabled`` are class attributes, so they are shared
    state — consistent with the Singleton metaclass.
    """
    __reg = False  # guards against double registration on an updater
    __target = 'robonaldo/strategies'  # directory scanned for strategy modules
    __logger = Logger("StrategyManager", priority = LogLevel.TRACE)
    strategies = {}  # id -> RobotStrategy
    enabled = []  # strategies currently active
    def register_all(self) -> None:
        """Import every non-dunder .py module in the strategies directory
        (each module is expected to register itself on import)."""
        sys.path.append(self.__target)
        for file in os.listdir(self.__target):
            if '.py' in file and '.pyc' not in file and '__' not in file:
                name = file.replace('.py', '')
                __import__(name)
    def by_name(self, name: str) -> RobotStrategy:
        """Look up a registered strategy by id; ``None`` if unknown."""
        return self.strategies.get(name)
    def is_enabled(self, strategy: RobotStrategy) -> bool:
        return strategy in self.enabled
    def set_enabled(self, strategy: RobotStrategy, state: bool) -> None:
        """Enable or disable *strategy*, idempotently."""
        if state:
            if strategy not in self.enabled:
                self.enabled.append(strategy)
                self.__logger.info("Enabled strategy \'" + strategy.id + "\'.")
        else:
            if strategy in self.enabled:
                self.enabled.remove(strategy)
                self.__logger.info("Disabled strategy \'" + strategy.id + "\'.")
    def register(self, strategy: RobotStrategy, state: bool) -> None:
        """Add *strategy* to the registry and set its enabled state."""
        self.__logger.trace("Registering strategy \'" + strategy.id + "\'.")
        self.strategies[strategy.id] = strategy
        self.set_enabled(strategy, state)
    def register_on(self, updater: ContextUpdater) -> None:
        """Hook the manager's update callback into *updater*, once only.

        NOTE(review): ``self.__reg = True`` creates an instance attribute
        shadowing the class attribute — works for a singleton, but confirm
        that is the intent.
        """
        if self.__reg is not True:
            self.__reg = True
            updater.register(lambda ctx, dt: self.__update(ctx, dt))
        else:
            raise Exception("Tried registering StrategyManager twice.")
    def __update(self, context: GameContext, delta_time: float) -> None:
        # Per-tick callback; currently a stub.
        pass
| StarcoderdataPython |
1960495 | <filename>Tagged/save_data.py
#!/usr/bin/python
# -*- coding:utf-8 -*-
#author:iuyyoy
import os,sys
sys.path.append('..')
from Global.config import *
from Global.db_op import Db_op as DB
from Global.global_function import printout
from get_data import *
class Save_data(object):
	"""Base class holding the shared DB connection for the Save_* helpers."""
	# Class-level handle: one connection shared by all Save_* instances.
	db = DB(dbinfo = dbinfo)
	def __init__(self):
		super(Save_data, self).__init__()
		self.connect()
	def connect(self):
		"""Open the shared DB connection; failures are ignored (best effort)."""
		try:
			self.db.connect()
		except Exception:
			pass
	def __del__(self):
		# Bug fix: this finalizer was previously named ``__delete__``, which
		# is the descriptor-protocol hook and is never invoked on garbage
		# collection; ``__del__`` is the method actually called.
		try:
			self.db.close()
		except Exception:
			pass
class Save_word(Save_data):
	"""Insert/update helpers for the words table and word-entity relations."""
	def __init__(self):
		super(Save_word, self).__init__()
	# Add a new word row; returns the inserted row id.
	def add_word(self,word_name,pos,sign = 0):
		word_name = self.db.SQL_filter(word_name)
		pos = self.db.SQL_filter(pos)
		sign = int(sign)
		sql = "INSERT INTO `"+wiki_db+"`.`"+words_table+"` (`word_name`, `pos`, `sign`) VALUES (%s, %s, %s)"
		para = [word_name,pos,sign]
		id = self.db.insert(sql,para)
		return id
	# Update a word's sign flag; returns the update result.
	def change_word_sign(self,word_id,sign = 0):
		word_id = int(word_id)
		sign = int(sign)
		sql = "UPDATE `"+wiki_db+"`.`"+words_table+"` SET `sign` = %s WHERE `id` = %s"
		para = [sign,word_id]
		result = self.db.update(sql,para)
		return result
	# Link a word to an entity.
	# Returns -1 when no entity_id matches wikidata_id.
	# Returns False on insert failure, True on success (per original note).
	def add_word_entity_relation(self,word_id,wikidata_id,sign = 0):
		word_id = int(word_id)
		entity = Get_entity().get_id(wikidata_id)
		if (entity):
			# NOTE(review): the ``sign`` argument is shadowed here by the
			# entity's own sign from the lookup — confirm which is intended.
			(entity_id,sign) = entity
			sign = int(sign)
			sql = "INSERT INTO `"+wiki_db+"`.`"+word_entity_table+"`(`word_id`,`entity_id`,`sign`) VALUES(%s,%s,%s)"
			para = [word_id,entity_id,sign]
			ret = self.db.insert(sql,para)
		else:
			return -1
		return ret
class Save_website(Save_data):
	"""Insert helpers for website<->word and website<->tag relation rows."""
	def __init__(self):
		super(Save_website, self).__init__()
	def build_website_word_relation(self,website_id,word_id,count,sign,rank):
		"""Insert one website-word relation row; returns the insert result."""
		sql = ("INSERT INTO `" + wiki_db + "`.`" + websites_words_table
			+ "`(`website_id`,`word_id`,`count`,`sign`,`rank`) VALUES(%s,%s,%s,%s,%s)")
		return self.db.insert(sql, (website_id, word_id, count, sign, rank))
	def build_website_tag_relation(self,website_id,entity_id,count,sign,rank):
		"""Insert one website-tag relation row; returns the insert result."""
		sql = ("INSERT INTO `" + wiki_db + "`.`" + websites_tags_table
			+ "`(`website_id`,`entity_id`,`count`,`sign`,`rank`) VALUES(%s,%s,%s,%s,%s)")
		return self.db.insert(sql, (website_id, entity_id, count, sign, rank))
| StarcoderdataPython |
9748824 | # -*- coding: utf-8; -*-
'''
Generate a Python extension module with the constants defined in linux/input.h.
'''
from __future__ import print_function
import os, sys, re
#-----------------------------------------------------------------------------
# Header locations: any command-line arguments override the defaults.
headers = sys.argv[1:] or [
    '/usr/include/linux/input.h',
    '/usr/include/linux/input-event-codes.h',
]
#-----------------------------------------------------------------------------
# Matches "#define <PREFIX>_<NAME>" for the input-event code prefixes we export.
macro_regex = re.compile(
    r'#define +((?:KEY|ABS|REL|SW|MSC|LED|BTN|REP|SND|ID|EV|BUS|SYN|FF)_\w+)'
)
# "<sysname> <release> <version> <machine>" — the nodename (index 1) is dropped.
_uname_parts = list(os.uname())
del _uname_parts[1]
uname = ' '.join(_uname_parts)
#-----------------------------------------------------------------------------
template = r'''
#include <Python.h>
#ifdef __FreeBSD__
#include <dev/evdev/input.h>
#else
#include <linux/input.h>
#endif
/* Automatically generated by evdev.genecodes */
/* Generated on %s */
#define MODULE_NAME "_ecodes"
#define MODULE_HELP "linux/input.h macros"
static PyMethodDef MethodTable[] = {
{ NULL, NULL, 0, NULL}
};
#if PY_MAJOR_VERSION >= 3
static struct PyModuleDef moduledef = {
PyModuleDef_HEAD_INIT,
MODULE_NAME,
MODULE_HELP,
-1, /* m_size */
MethodTable, /* m_methods */
NULL, /* m_reload */
NULL, /* m_traverse */
NULL, /* m_clear */
NULL, /* m_free */
};
#endif
static PyObject *
moduleinit(void)
{
#if PY_MAJOR_VERSION >= 3
PyObject* m = PyModule_Create(&moduledef);
#else
PyObject* m = Py_InitModule3(MODULE_NAME, MethodTable, MODULE_HELP);
#endif
if (m == NULL) return NULL;
%s
return m;
}
#if PY_MAJOR_VERSION >= 3
PyMODINIT_FUNC
PyInit__ecodes(void)
{
return moduleinit();
}
#else
PyMODINIT_FUNC
init_ecodes(void)
{
moduleinit();
}
#endif
'''
def parse_header(header):
    """Yield one ``PyModule_AddIntMacro`` source line for each input-event
    macro found in the header file at path *header*."""
    # Bug fix: use a context manager so the file handle is closed
    # deterministically (the previous version leaked it).
    with open(header) as fh:
        for line in fh:
            macro = macro_regex.search(line)
            if macro:
                yield '    PyModule_AddIntMacro(m, %s);' % macro.group(1)
# Collect macro lines from every readable header; skip missing ones.
all_macros = []
for header in headers:
    try:
        fh = open(header)
    except (IOError, OSError):
        continue
    # Bug fix: the probe handle was previously leaked; close it right away
    # (parse_header reopens the file itself).
    fh.close()
    all_macros += parse_header(header)
if not all_macros:
    print('no input macros found in: %s' % ' '.join(headers), file=sys.stderr)
    sys.exit(1)
macros = os.linesep.join(all_macros)
# Emit the finished C source on stdout.
print(template % (uname, macros))
| StarcoderdataPython |
9605692 | <reponame>alisaifee/aredis
from __future__ import annotations
import os
import warnings
from abc import ABC, abstractmethod
from numbers import Number
from typing import (
TYPE_CHECKING,
AbstractSet,
Any,
AnyStr,
AsyncGenerator,
Awaitable,
Callable,
ClassVar,
Coroutine,
Dict,
Generic,
Iterable,
Iterator,
List,
Literal,
Mapping,
MutableMapping,
NamedTuple,
Optional,
Protocol,
Sequence,
Set,
Tuple,
Type,
TypeVar,
Union,
)
from typing_extensions import (
Deque,
OrderedDict,
ParamSpec,
TypeAlias,
TypedDict,
runtime_checkable,
)
RUNTIME_TYPECHECKS = False
# Opt-in runtime type checking: when the environment variable is truthy,
# shadow the static typing imports above with beartype's runtime-checked
# equivalents so decorated callables are validated when called.
if os.environ.get("COREDIS_RUNTIME_CHECKS", "").lower() in ["1", "true", "t"]:
    try:
        import beartype
        if not TYPE_CHECKING:
            from beartype.typing import (  # noqa: F811
                Deque,
                Dict,
                Iterable,
                Iterator,
                List,
                Mapping,
                OrderedDict,
                Sequence,
                Set,
                Tuple,
                TypedDict,
            )
        RUNTIME_TYPECHECKS = True
    except ImportError:  # noqa
        # Checks stay disabled if beartype isn't installed.
        warnings.warn("Runtime checks were requested but could not import beartype")
# Argument list accepted by the low-level command builders.
CommandArgList = List[Union[str, bytes, float, Number]]

P = ParamSpec("P")
R = TypeVar("R")

# Accepted key / value / string types for commands.
KeyT: TypeAlias = Union[str, bytes]
ValueT: TypeAlias = Union[str, bytes, int, float]
StringT: TypeAlias = KeyT
# TODO: mypy can't handle recursive types
ResponseType = Optional[
    Union[
        StringT,
        int,
        float,
        bool,
        AbstractSet,
        List,
        Tuple,
        Mapping,
        # AbstractSet["ResponseType"],
        # List["ResponseType"],
        # Mapping["ResponseType", "ResponseType"],
        Exception,
    ]
]
def add_runtime_checks(func: Callable[P, R]) -> Callable[P, R]:
    """Wrap *func* with beartype's runtime validation when
    ``RUNTIME_TYPECHECKS`` is enabled; otherwise return it unchanged."""
    if not RUNTIME_TYPECHECKS:
        return func
    return beartype.beartype(func)
@runtime_checkable
class SupportsWatch(Protocol):  # noqa
    """Structural type for transaction objects that support WATCH and act
    as async context managers."""
    async def __aenter__(self) -> "SupportsWatch":
        ...
    async def __aexit__(self, exc_type, exc_val, exc_tb):
        ...
    async def watch(self, *keys: KeyT) -> bool:
        ...
    async def execute(self, raise_on_error=True) -> Any:
        ...
@runtime_checkable
class SupportsScript(Protocol):  # noqa
    """Structural type for objects that can evaluate and load Lua scripts."""
    async def evalsha(
        self,
        sha1: StringT,
        keys: Optional[Iterable[KeyT]] = None,
        args: Optional[Iterable[ValueT]] = None,
    ) -> Any:
        ...
    async def evalsha_ro(
        self,
        sha1: StringT,
        keys: Optional[Iterable[KeyT]] = None,
        args: Optional[Iterable[ValueT]] = None,
    ) -> Any:
        ...
    async def script_load(self, script: StringT) -> AnyStr:
        ...
@runtime_checkable
class SupportsPipeline(Protocol):  # noqa
    """Structural type for clients that can create pipelines."""
    async def pipeline(
        self,
        transaction: Optional[bool] = True,
        watches: Optional[Iterable[StringT]] = None,
    ) -> SupportsWatch:
        ...
class AbstractExecutor(ABC, Generic[AnyStr]):
    """Minimal interface for anything that can execute a command."""
    @abstractmethod
    async def execute_command(self, command: bytes, *args: Any, **options: Any) -> Any:
        pass
# Public re-export surface of this module.
__all__ = [
    "AbstractExecutor",
    "AbstractSet",
    "Any",
    "AnyStr",
    "AsyncGenerator",
    "Awaitable",
    "Callable",
    "ClassVar",
    "CommandArgList",
    "Coroutine",
    "Deque",
    "Dict",
    "Generic",
    "KeyT",
    "Iterable",
    "Iterator",
    "List",
    "Literal",
    "Mapping",
    "MutableMapping",
    "NamedTuple",
    "OrderedDict",
    "Optional",
    "ParamSpec",
    "Protocol",
    "ResponseType",
    "Sequence",
    "Set",
    "SupportsWatch",
    "SupportsScript",
    "SupportsPipeline",
    "StringT",
    "Tuple",
    "Type",
    "TypedDict",
    "TypeVar",
    "Union",
    "ValueT",
    "TYPE_CHECKING",
    "RUNTIME_TYPECHECKS",
]
| StarcoderdataPython |
4868168 | """glitter positioning system"""
import time
import gc
import math
# import adafruit_lsm9ds1
import adafruit_gps
import adafruit_rfm9x
import board
import busio
import digitalio
# import neopixel
# import rtc
from glitterpos_util import timestamp
# glitterpos_cfg.py should be unique to each box, and formatted as follows:
#
# MY_ID = 0 # must be a unique integer
# MAG_MIN = (-0.25046, -0.23506, -0.322)
# MAG_MAX = (0.68278, 0.70882, 0.59654)
# DECLINATION_RAD = 235.27 / 1000.0 # Black Rock City in radians
# #
# From the CircuitPython REPL, use `import calibrate` to find values for
# MAG_MIN and MAG_MAX.
from glitterpos_cfg import MY_ID, MAG_MIN, MAG_MAX, DECLINATION_RAD
# You can add fixed points here (box id -> (lat, lon)):
DEFAULT_BOX_COORDS = {
    # BOULDER_ID: (40.018258, -105.278457)
}
# LoRa radio frequency in MHz.
RADIO_FREQ_MHZ = 915.0
# CS = digitalio.DigitalInOut(board.D10)
# RESET = digitalio.DigitalInOut(board.D11)
# Feather M0 RFM9x: chip-select and reset pins for the radio module.
CS = digitalio.DigitalInOut(board.RFM9X_CS)
RESET = digitalio.DigitalInOut(board.RFM9X_RST)
class GlitterPOS:
    """Glitter positioning system: broadcasts this box's GPS position over
    LoRa and tracks positions received from other boxes."""
    def __init__(self):
        """configure sensors, radio, blinkenlights"""
        # Our id and the dict for storing coords of other glitterpos_boxes:
        self.glitterpos_id = MY_ID
        self.glitterpos_boxes = DEFAULT_BOX_COORDS
        # Set the RTC to an obviously bogus time for debugging purposes:
        # time_struct takes: (tm_year, tm_mon, tm_mday, tm_hour, tm_min, tm_sec, tm_wday, tm_yday, tm_isdst)
        # rtc.RTC().datetime = time.struct_time((2000, 1, 1, 0, 0, 0, 0, 0, 0))
        # print("startup time: " + timestamp())
        self.time_set = False
        self.last_send = time.monotonic()
        # A tuple for our lat/long:
        self.coords = (0, 0)
        self.heading = 0.0
        # Status light on the board, we'll use to indicate GPS fix, etc.:
        # self.statuslight = neopixel.NeoPixel(board.NEOPIXEL, 1, brightness=0.005, auto_write=True)
        # self.statuslight.fill(RED)
        self.statuslight = digitalio.DigitalInOut(board.D13)
        self.statuslight.direction = digitalio.Direction.OUTPUT
        self.statuslight.value = False
        # # Neopixel ring:
        # self.pixels = neopixel.NeoPixel(board.A1, 16, brightness=0.01, auto_write=False)
        # self.startup_animation()
        time.sleep(2)
        self.init_radio()
        self.init_gps()
    def init_radio(self):
        """Set up RFM95."""
        spi = busio.SPI(board.SCK, MOSI=board.MOSI, MISO=board.MISO)
        self.rfm9x = adafruit_rfm9x.RFM9x(spi, CS, RESET, RADIO_FREQ_MHZ)
        self.rfm9x.tx_power = 18  # Default is 13 dB; the RFM95 goes up to 23 dB
        self.radio_tx('d', 'hello world')
        time.sleep(1)
    def init_gps(self):
        """Set up GPS module."""
        uart = busio.UART(board.TX, board.RX, baudrate=9600, timeout=100)
        gps = adafruit_gps.GPS(uart)
        time.sleep(1)
        # https://cdn-shop.adafruit.com/datasheets/PMTK_A11.pdf
        # Turn on the basic GGA and RMC info (what you typically want), then
        # set update to once a second:
        gps.send_command(b'PMTK314,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0')
        gps.send_command(b'PMTK220,1000')
        self.gps = gps
    # def init_compass(self):
    #     """Set up LSM9DS1."""
    #     i2c = busio.I2C(board.SCL, board.SDA)
    #     self.compass = adafruit_lsm9ds1.LSM9DS1_I2C(i2c)
    #     time.sleep(1)
    def advance_frame(self):
        """
        Check the radio for new packets, poll GPS and compass data, send a
        radio packet if coordinates have changed (or if it's been a while), and
        update NeoPixel display. Called in an infinite loop by code.py.
        To inspect the state of the system, initialize a new GlitterPOS object
        from the CircuitPython REPL, and call gp.advance_frame() manually. You
        can then access the instance variables defined in __init__() and
        init_()* methods.
        """
        current = time.monotonic()
        self.radio_rx(timeout=0.5)
        new_gps_data = self.gps.update()
        # self.update_heading()
        # self.display_pixels()
        if not self.gps.has_fix:
            # Try again if we don't have a fix yet.
            self.statuslight.value = False
            print("GPS no fix")
            return
        # We want to send coordinates out either on new GPS data or roughly
        # every 15 seconds:
        if (not new_gps_data) and (current - self.last_send < 15):
            return
        # Set the RTC to GPS time (UTC):
        if new_gps_data and not self.time_set:
            # rtc.RTC().datetime = self.gps.timestamp_utc
            self.time_set = True
        gps_coords = (self.gps.latitude, self.gps.longitude)
        if gps_coords == self.coords:
            return
        self.coords = (self.gps.latitude, self.gps.longitude)
        self.statuslight.value = True
        print(':: ' + str(current))  # Print a separator line.
        # print(timestamp())
        send_packet = '{}:{}:{}:{}'.format(
            self.gps.latitude,
            self.gps.longitude,
            self.gps.speed_knots,
            self.heading
        )
        print(' quality: {}'.format(self.gps.fix_quality))
        print(' ' + str(gc.mem_free()) + " bytes free")
        # Send a location packet:
        self.radio_tx('l', send_packet)
    def radio_tx(self, msg_type, msg):
        """send a packet over radio with id prefix"""
        packet = 'e:' + msg_type + ':' + str(self.glitterpos_id) + ':' + msg
        print(' sending: ' + packet)
        # Blocking, max of 252 bytes:
        # NOTE(review): packet is a str here — confirm adafruit_rfm9x.send
        # accepts str on this CircuitPython build.
        self.rfm9x.send(packet)
        self.last_send = time.monotonic()
    def radio_rx(self, timeout=0.5):
        """check radio for new packets, handle incoming data
        NOTE(review): the ``timeout`` parameter is currently unused — confirm
        whether it should be passed to ``rfm9x.receive``.
        """
        packet = self.rfm9x.receive()
        # If no packet was received during the timeout then None is returned:
        if packet is None:
            return
        packet = bytes(packet)
        # print(timestamp())
        print(' received signal strength: {0} dB'.format(self.rfm9x.rssi))
        print(' received (raw bytes): {0}'.format(packet))
        pieces = packet.split(b':')
        if pieces[0] != b'e' or len(pieces) < 5:
            print(' bogus packet, bailing out')
            return
        # NOTE(review): ``.format()`` on bytes does not exist in CPython 3 —
        # presumably relies on CircuitPython behavior; confirm on target.
        msg_type = pieces[1].format()
        sender_id = int(pieces[2].format())
        # A location message:
        if msg_type == 'l':
            sender_lat = float(pieces[3].format())
            sender_lon = float(pieces[4].format())
            self.glitterpos_boxes[sender_id] = (sender_lat, sender_lon)
        # packet_text = str(packet, 'ascii')
        # print('Received (ASCII): {0}'.format(packet_text))
| StarcoderdataPython |
2958 | #!/usr/bin/env python
#########################################################################################
#
# Apply transformations. This function is a wrapper for sct_WarpImageMultiTransform
#
# ---------------------------------------------------------------------------------------
# Copyright (c) 2014 Polytechnique Montreal <www.neuro.polymtl.ca>
# Authors: <NAME>, <NAME>
# Modified: 2014-07-20
#
# About the license: see the file LICENSE.TXT
#########################################################################################
# TODO: display message at the end
# TODO: interpolation methods
from __future__ import division, absolute_import
import sys, io, os, time, functools
from msct_parser import Parser
import sct_utils as sct
import sct_convert
import sct_image
import spinalcordtoolbox.image as msct_image
from sct_crop_image import ImageCropper
class Param:
    """Default runtime parameters (kept as strings to match CLI values)."""

    def __init__(self):
        # Defaults mirror the command-line parser defaults.
        self.verbose = '1'
        self.remove_temp_files = '1'
# PARSER
# ==========================================================================================
def get_parser():
    """Build and return the command-line parser for sct_apply_transfo."""
    # parser initialisation
    parser = Parser(__file__)
    parser.usage.set_description('Apply transformations. This function is a wrapper for antsApplyTransforms (ANTs).')
    parser.add_option(name="-i",
                      type_value="file",
                      description="input image",
                      mandatory=True,
                      example="t2.nii.gz")
    parser.add_option(name="-d",
                      type_value="file",
                      description="destination image",
                      mandatory=True,
                      example="out.nii.gz")
    # A leading '-' on a warp file name requests its inverse (see Transform).
    parser.add_option(name="-w",
                      type_value=[[','], "file"],
                      description="Transformation, which can be a warping field (nifti image) or an affine transformation matrix (text file).",
                      mandatory=True,
                      example="warp1.nii.gz,warp2.nii.gz")
    parser.add_option(name="-crop",
                      type_value="multiple_choice",
                      description="Crop Reference. 0 : no reference. 1 : sets background to 0. 2 : use normal background",
                      mandatory=False,
                      default_value='0',
                      example=['0', '1', '2'])
    # '-c' kept only for backward compatibility; superseded by '-crop'.
    parser.add_option(name="-c",
                      type_value=None,
                      description="Crop Reference. 0 : no reference. 1 : sets background to 0. 2 : use normal background",
                      mandatory=False,
                      deprecated_by='-crop')
    parser.add_option(name="-o",
                      type_value="file_output",
                      description="registered source.",
                      mandatory=False,
                      default_value='',
                      example="dest.nii.gz")
    parser.add_option(name="-x",
                      type_value="multiple_choice",
                      description="interpolation method",
                      mandatory=False,
                      default_value='spline',
                      example=['nn', 'linear', 'spline'])
    parser.add_option(name="-r",
                      type_value="multiple_choice",
                      description="""Remove temporary files.""",
                      mandatory=False,
                      default_value='1',
                      example=['0', '1'])
    parser.add_option(name="-v",
                      type_value="multiple_choice",
                      description="""Verbose.""",
                      mandatory=False,
                      default_value='1',
                      example=['0', '1', '2'])
    return parser
class Transform:
    """Apply a chain of warping fields / affine transforms to an image,
    wrapping isct_antsApplyTransforms; 4D inputs are split along T,
    transformed volume-by-volume and re-merged."""
    def __init__(self, input_filename, warp, fname_dest, output_filename='', verbose=0, crop=0, interp='spline', remove_temp_files=1, debug=0):
        self.input_filename = input_filename
        # Accept a single path or a list of paths for the warping fields.
        if isinstance(warp, str):
            self.warp_input = list([warp])
        else:
            self.warp_input = warp
        self.fname_dest = fname_dest
        self.output_filename = output_filename
        self.interp = interp
        self.crop = crop
        self.verbose = verbose
        self.remove_temp_files = remove_temp_files
        self.debug = debug
    def apply(self):
        """Run the transformation chain and write the registered image."""
        # Initialization
        fname_src = self.input_filename  # source image (moving)
        fname_warp_list = self.warp_input  # list of warping fields
        fname_out = self.output_filename  # output
        fname_dest = self.fname_dest  # destination image (fix)
        verbose = self.verbose
        remove_temp_files = self.remove_temp_files
        crop_reference = self.crop  # if = 1, put 0 everywhere around warping field, if = 2, real crop
        interp = sct.get_interpolation('isct_antsApplyTransforms', self.interp)
        # Parse list of warping fields
        sct.printv('\nParse list of warping fields...', verbose)
        use_inverse = []
        fname_warp_list_invert = []
        # fname_warp_list = fname_warp_list.replace(' ', '')  # remove spaces
        # fname_warp_list = fname_warp_list.split(",")  # parse with comma
        for idx_warp, path_warp in enumerate(fname_warp_list):
            # Check if inverse matrix is specified with '-' at the beginning of file name
            if path_warp.startswith("-"):
                use_inverse.append('-i')
                fname_warp_list[idx_warp] = path_warp[1:]  # remove '-'
                fname_warp_list_invert += [[use_inverse[idx_warp], fname_warp_list[idx_warp]]]
            else:
                use_inverse.append('')
                fname_warp_list_invert += [[path_warp]]
            path_warp = fname_warp_list[idx_warp]
            # Sanity check: displacement fields must carry the 'vector' NIfTI intent.
            if path_warp.endswith((".nii", ".nii.gz")) \
                    and msct_image.Image(fname_warp_list[idx_warp]).header.get_intent()[0] != 'vector':
                raise ValueError("Displacement field in {} is invalid: should be encoded" \
                                 " in a 5D file with vector intent code" \
                                 " (see https://nifti.nimh.nih.gov/pub/dist/src/niftilib/nifti1.h" \
                                 .format(path_warp))
        # need to check if last warping field is an affine transfo
        isLastAffine = False
        path_fname, file_fname, ext_fname = sct.extract_fname(fname_warp_list_invert[-1][-1])
        if ext_fname in ['.txt', '.mat']:
            isLastAffine = True
        # check if destination file is 3d
        if not sct.check_if_3d(fname_dest):
            sct.printv('ERROR: Destination data must be 3d')
        # N.B. Here we take the inverse of the warp list, because sct_WarpImageMultiTransform concatenates in the reverse order
        fname_warp_list_invert.reverse()
        fname_warp_list_invert = functools.reduce(lambda x,y: x+y, fname_warp_list_invert)
        # Extract path, file and extension
        path_src, file_src, ext_src = sct.extract_fname(fname_src)
        path_dest, file_dest, ext_dest = sct.extract_fname(fname_dest)
        # Get output folder and file name
        if fname_out == '':
            path_out = ''  # output in user's current directory
            file_out = file_src + '_reg'
            ext_out = ext_src
            fname_out = os.path.join(path_out, file_out + ext_out)
        # Get dimensions of data
        sct.printv('\nGet dimensions of data...', verbose)
        img_src = msct_image.Image(fname_src)
        nx, ny, nz, nt, px, py, pz, pt = img_src.dim
        # nx, ny, nz, nt, px, py, pz, pt = sct.get_dimension(fname_src)
        sct.printv('  ' + str(nx) + ' x ' + str(ny) + ' x ' + str(nz) + ' x ' + str(nt), verbose)
        # if 3d
        if nt == 1:
            # Apply transformation
            sct.printv('\nApply transformation...', verbose)
            if nz in [0, 1]:
                dim = '2'
            else:
                dim = '3'
            sct.run(['isct_antsApplyTransforms',
                     '-d', dim,
                     '-i', fname_src,
                     '-o', fname_out,
                     '-t',
                     ] + fname_warp_list_invert + [
                     '-r', fname_dest,
                     ] + interp, verbose=verbose, is_sct_binary=True)
        # if 4d, loop across the T dimension
        else:
            path_tmp = sct.tmp_create(basename="apply_transfo", verbose=verbose)
            # convert to nifti into temp folder
            sct.printv('\nCopying input data to tmp folder and convert to nii...', verbose)
            img_src.save(os.path.join(path_tmp, "data.nii"))
            sct.copy(fname_dest, os.path.join(path_tmp, file_dest + ext_dest))
            fname_warp_list_tmp = []
            for fname_warp in fname_warp_list:
                path_warp, file_warp, ext_warp = sct.extract_fname(fname_warp)
                sct.copy(fname_warp, os.path.join(path_tmp, file_warp + ext_warp))
                fname_warp_list_tmp.append(file_warp + ext_warp)
            fname_warp_list_invert_tmp = fname_warp_list_tmp[::-1]
            curdir = os.getcwd()
            os.chdir(path_tmp)
            # split along T dimension
            sct.printv('\nSplit along T dimension...', verbose)
            im_dat = msct_image.Image('data.nii')
            im_header = im_dat.hdr
            data_split_list = sct_image.split_data(im_dat, 3)
            for im in data_split_list:
                im.save()
            # apply transfo
            sct.printv('\nApply transformation to each 3D volume...', verbose)
            for it in range(nt):
                file_data_split = 'data_T' + str(it).zfill(4) + '.nii'
                file_data_split_reg = 'data_reg_T' + str(it).zfill(4) + '.nii'
                status, output = sct.run(['isct_antsApplyTransforms',
                                          '-d', '3',
                                          '-i', file_data_split,
                                          '-o', file_data_split_reg,
                                          '-t',
                                          ] + fname_warp_list_invert_tmp + [
                                          '-r', file_dest + ext_dest,
                                          ] + interp, verbose, is_sct_binary=True)
            # Merge files back
            sct.printv('\nMerge file back...', verbose)
            import glob
            path_out, name_out, ext_out = sct.extract_fname(fname_out)
            # im_list = [Image(file_name) for file_name in glob.glob('data_reg_T*.nii')]
            # concat_data use to take a list of image in input, now takes a list of file names to open the files one by one (see issue #715)
            fname_list = glob.glob('data_reg_T*.nii')
            fname_list.sort()
            im_out = sct_image.concat_data(fname_list, 3, im_header['pixdim'])
            im_out.save(name_out + ext_out)
            os.chdir(curdir)
            sct.generate_output_file(os.path.join(path_tmp, name_out + ext_out), fname_out)
            # Delete temporary folder if specified
            if int(remove_temp_files):
                sct.printv('\nRemove temporary files...', verbose)
                sct.rmtree(path_tmp, verbose=verbose)
        # 2. crop the resulting image using dimensions from the warping field
        warping_field = fname_warp_list_invert[-1]
        # if last warping field is an affine transfo, we need to compute the space of the concatenate warping field:
        if isLastAffine:
            sct.printv('WARNING: the resulting image could have wrong apparent results. You should use an affine transformation as last transformation...', verbose, 'warning')
        elif crop_reference == 1:
            ImageCropper(input_file=fname_out, output_file=fname_out, ref=warping_field, background=0).crop()
            # sct.run('sct_crop_image -i '+fname_out+' -o '+fname_out+' -ref '+warping_field+' -b 0')
        elif crop_reference == 2:
            ImageCropper(input_file=fname_out, output_file=fname_out, ref=warping_field).crop()
            # sct.run('sct_crop_image -i '+fname_out+' -o '+fname_out+' -ref '+warping_field)
        sct.display_viewer_syntax([fname_dest, fname_out], verbose=verbose)
# MAIN
# ==========================================================================================
def main(args=None):
    """Entry point: parse CLI arguments, configure a Transform, and run it.

    :param args: list of command-line tokens; defaults to sys.argv[1:].
    """
    if not args:
        args = sys.argv[1:]
    # Parse user-provided arguments with the script's parser.
    parser = get_parser()
    arguments = parser.parse(args)
    # Mandatory arguments go straight into the Transform constructor.
    transform = Transform(
        input_filename=arguments["-i"],
        fname_dest=arguments["-d"],
        warp=arguments["-w"],
    )
    # Optional arguments override the Transform defaults only when supplied.
    if "-crop" in arguments:
        transform.crop = arguments["-crop"]
    if "-o" in arguments:
        transform.output_filename = arguments["-o"]
    if "-x" in arguments:
        transform.interp = arguments["-x"]
    if "-r" in arguments:
        transform.remove_temp_files = int(arguments["-r"])
    transform.verbose = int(arguments.get('-v'))
    sct.init_sct(log_level=transform.verbose, update=True)  # Update log level
    transform.apply()
# START PROGRAM
# ==========================================================================================
if __name__ == "__main__":
sct.init_sct()
# # initialize parameters
param = Param()
# call main function
main()
| StarcoderdataPython |
6669554 | <filename>datasets/prepare_data/SISR/make_kernel_noise.py
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Power by <NAME> 2020-06-07 17:21:29
# Build the 8 blur kernels (4 isotropic + 4 anisotropic Gaussians) and two
# fixed AWGN maps used for SISR testing, and save each set as a .mat file.
from pathlib import Path
import sys
sys.path.append(str(Path('./')))
from utils import getGaussianKernel2D
from datasets.data_tools import anisotropic_Gaussian
import numpy as np
from scipy.io import savemat, loadmat

p = 15  # kernel support size (p x p)
kernels = np.zeros([p, p, 8])
# Kernels 0-3: isotropic Gaussians with increasing bandwidth.
kernels[:, :, 0] = getGaussianKernel2D(p, 0.7)
kernels[:, :, 1] = getGaussianKernel2D(p, 1.2)
kernels[:, :, 2] = getGaussianKernel2D(p, 1.6)
kernels[:, :, 3] = getGaussianKernel2D(p, 2.0)
# Kernels 4-7: anisotropic Gaussians at various angles/widths
# (see anisotropic_Gaussian for parameter semantics).
kernels[:, :, 4] = anisotropic_Gaussian(p, np.pi*0, 4, 1.5)
kernels[:, :, 5] = anisotropic_Gaussian(p, np.pi*0.75, 6, 1)
kernels[:, :, 6] = anisotropic_Gaussian(p, np.pi*0.25, 6, 1)
kernels[:, :, 7] = anisotropic_Gaussian(p, np.pi*0.1, 5, 3)
kernel_path = Path('./test_data') / 'kernels_SISR'
# mkdir(exist_ok=True) is idempotent and race-free (old exists()/mkdir() pair was not).
kernel_path.mkdir(parents=True, exist_ok=True)
savemat(str(kernel_path/'kernels_8.mat'), {'kernels': kernels})

# Fixed noise realizations (sigma = 2.55/255 and 7.65/255) so tests are reproducible.
np.random.seed(10000)
noise = np.zeros([1024, 1024, 3, 2])
noise[:, :, :, 0] = np.random.randn(1024, 1024, 3) * (2.55/255)
noise[:, :, :, 1] = np.random.randn(1024, 1024, 3) * (7.65/255)
noise_path = Path('./test_data') / 'noise_SISR'
noise_path.mkdir(parents=True, exist_ok=True)
savemat(str(noise_path/'noise.mat'), {'noise': noise})
| StarcoderdataPython |
1849949 | import re
import pytest
from pyquery import PyQuery
from scripts.helpers import serialize_xml, parse_xml
from scripts.generate_case_html import generate_html, tag_map
from scripts.merge_alto_style import generate_styled_case_xml
from scripts.compare_alto_case import validate
from capdb.models import CaseXML, CaseMetadata
def test_serialize_xml_should_not_modify_input_xml(unaltered_alto_xml):
    """Round-tripping through parse_xml/serialize_xml must be byte-lossless."""
    parsed = parse_xml(unaltered_alto_xml)
    # Mutate one attribute so we can prove serialization reflects edits.
    parsed('[ID="b17-15"]').attr('ID', 'replace_me')
    round_tripped = serialize_xml(parsed)
    assert b'replace_me' in round_tripped  # the edit must have taken effect
    # Revert the edit textually; the result must then match the input exactly.
    assert unaltered_alto_xml == round_tripped.replace(b'replace_me', b'b17-15')
@pytest.mark.django_db
def test_generate_html_tags(ingest_case_xml):
    """Every casebody XML element must map to its expected HTML tag.

    Elements with an ``id`` keep that id on the HTML side; the rest carry the
    original tag name as a CSS class. Footnotes must render as <aside> with a
    matching anchor link pointing at them.
    """
    for case in CaseMetadata.objects.in_scope():
        parsed_case_xml = case.case_xml.get_parsed_xml()
        casebody_tree = parsed_case_xml("casebody|casebody")[0]
        # Collapse newlines/tabs so the regex checks are not broken by wrapping.
        casebody_html = generate_html(case.case_xml.extract_casebody()).replace('\n', '').replace('\r', '').replace('\t', ' ')
        for element in casebody_tree.iter():
            # Strip the XML namespace ("{ns}tag" -> "tag").
            old_tag = element.tag.split("}")[1]
            new_tag = 'p' if old_tag == 'p' else tag_map[old_tag]
            if 'id' in element.attrib:
                id_search = r'<' + re.escape(new_tag) + r'[^>]*id="' + re.escape(element.attrib['id'])
                assert re.search(id_search, casebody_html, re.IGNORECASE) is not None
            else:
                class_search = r'<' + re.escape(new_tag) + r'[^>]*class="' + re.escape(old_tag)
                assert re.search(class_search, casebody_html, re.IGNORECASE) is not None
        parsed_html = PyQuery(casebody_html)
        for footnote in parsed_case_xml("casebody|footnote"):
            # Each footnote becomes <aside id="footnote_<label>"> with exactly one link to it.
            footnote_id = "footnote_" + footnote.attrib['label']
            assert parsed_html('aside[id="%s"]' % footnote_id).length == 1
            assert parsed_html('a[href="#%s"]' % footnote_id).length == 1
@pytest.mark.django_db
def test_html_pagebreak(ingest_case_xml):
    """Every <pagebreak/> in styled XML must surface as a pagebreak <br> in HTML."""
    xml_pattern = r'(.{3})<pagebreak/>(.{3})'
    html_pattern = r'(.{3})<br class="pagebreak" style="page-break-before: always"/>(.{3})'
    for case in CaseMetadata.objects.in_scope():
        styled_xml = generate_styled_case_xml(case.case_xml, strict = False)
        styled_html = generate_html(parse_xml(styled_xml)('casebody|casebody'))
        # Capture 3 characters of context around each break so position
        # (not just count) has to match between the two renderings.
        assert set(re.findall(xml_pattern, styled_xml)) == set(re.findall(html_pattern, styled_html))
@pytest.mark.django_db
def test_between_paragraph_pagebreak(ingest_case_xml):
    """Elements that start a new page must begin with a page-break marker.

    The styled XML is rewritten so page breaks at element starts become a
    sentinel token, then each single-page casebody element whose page differs
    from its predecessor's is checked for that sentinel.
    """
    for case in CaseMetadata.objects.in_scope():
        # this test logic is too simple to handle pagebreaks where multiple pages of
        # footnotes appear at the end of the opinion. The actual logic does work.
        if case.case_id.startswith("WnApp"):
            continue
        # generate the styled and page-broken XML
        styled_xml = generate_styled_case_xml(case.case_xml, strict = False)
        # Highlight the page breaks at the beginning of the casebody elements...
        stripped_xml = re.sub(r'"\>\<page-number[a-zA-Z0-9= #\-"]*\>\*\d+\<\/page-number\>', '">__PAGE_BREAK__', styled_xml)
        # get rid of all tags that will interfere with the xml parsing, and the inline pagebreak tags
        strip_tags = r'\<em\>|\<\/em\>|\<strong\>|\<\/strong\>|\<footnotemark\>|\<\/footnotemark\>|\<bracketnum\>|\<\/bracketnum\>|\<page-number[a-zA-Z0-9= #\-"]*\>\*\d+\<\/page-number\>'
        stripped_xml = re.sub(strip_tags, '', stripped_xml)
        stripped_xml = re.sub(r'\xad', ' ', stripped_xml)
        parsed_xml = parse_xml(stripped_xml)
        previous_page = None
        for casebody_element in parsed_xml("casebody|casebody").children():
            if 'pgmap' not in casebody_element.attrib:
                continue
            # pgmap's last entry names the page this element ends on.
            current_page = casebody_element.attrib['pgmap'].split(' ')[-1].split('(')[0]
            # Only single-page elements (no space in pgmap) that start a new page
            # are expected to begin with the sentinel.
            if previous_page != current_page and previous_page is not None and ' ' not in casebody_element.attrib['pgmap']:
                assert casebody_element.text.startswith("__PAGE_BREAK__")
            previous_page = current_page
@pytest.mark.django_db
def test_generate_inline_pagebreak(ingest_case_xml):
    """In-paragraph page breaks must land at the word offsets pgmap declares.

    pgmap entries look like "page(word_count)"; the cumulative word counts give
    the expected positions of the break sentinel inside the paragraph text.
    """
    page_break_element_search = re.compile(r'\d+\((\d+)\)')
    for case in CaseMetadata.objects.in_scope():
        # this test logic is too simple to handle pagebreaks where multiple pages of
        # footnotes appear at the end of the opinion. The actual logic does work.
        if case.case_id.startswith("WnApp"):
            continue
        # generate the styled and page-broken XML
        styled_xml = generate_styled_case_xml(case.case_xml, strict = False)
        # dump the page breaks that come at the beginning of the casebody elements...
        stripped_xml = re.sub(r'"\>\<page-number[a-zA-Z0-9= #\-"]*\>\*\d+\<\/page-number\>', '">', styled_xml)
        # get rid of all tags that will interfere with the xml parsing, and the beginning of the pagebreak tags
        strip_tags = r'\<em\>|\<\/em\>|\<strong\>|\<\/strong\>|\<footnotemark\>|\<\/footnotemark\>|\<bracketnum\>|\<\/bracketnum\>|\<page-number[a-zA-Z0-9= #\-"]*\>\*\d+'
        stripped_xml = re.sub(strip_tags, '', stripped_xml)
        # so we can keep track of the pagebreak tags without breaking xml rendering
        stripped_xml = re.sub(r'\<\/page-number\>', '__PAGE_BREAK__ ', stripped_xml)
        stripped_xml = re.sub(r'\xad', ' ', stripped_xml)
        parsed_xml = parse_xml(stripped_xml)
        for p in parsed_xml("casebody|p"):
            if ') ' in p.get('pgmap'):
                page_breaks = page_break_element_search.findall(p.get('pgmap'))
                # Accumulate per-page word counts into absolute break offsets
                # (the final entry marks the paragraph end, not a break).
                page_break_element_counts = []
                for i, value in enumerate(page_breaks):
                    if i >= len(page_breaks) - 1:
                        break
                    value = int(value) + int(page_breaks[i - 1]) if i > 0 else int(value)
                    page_break_element_counts.append(value)
                element_split = p.text.split(" ")
                # Pop each sentinel out of the word list, recording its index.
                actual_locations = []
                while '__PAGE_BREAK__' in element_split:
                    loc = element_split.index("__PAGE_BREAK__")
                    del element_split[loc]
                    actual_locations.append(loc)
                assert set(actual_locations) == set(page_break_element_counts)
@pytest.mark.django_db
def test_merge_alto_case(ingest_case_xml):
    """A fully compliant case merges in strict mode and picks up the expected styles."""
    case_xml = CaseXML.objects.get(metadata_id__case_id="32044057891608_0001")
    merged = parse_xml(generate_styled_case_xml(case_xml))
    # Emphasis/bold runs from the ALTO data should appear in the merged casebody.
    assert len(merged("casebody|em")) == 23
    assert len(merged("casebody|strong")) == 11
@pytest.mark.django_db
def test_merge_alto_extra_char_exception(ingest_case_xml):
    """Character mismatches abort a strict merge but are tolerated otherwise."""
    case_xml = CaseXML.objects.get(metadata_id__case_id="32044057892259_0001")
    # Introduce mismatching characters on both sides of the merge.
    case_xml.orig_xml = case_xml.orig_xml.replace("</p>", "y</p>")
    alto_xml = case_xml.pages.first()
    alto_xml.orig_xml = alto_xml.orig_xml.replace('CONTENT="', 'CONTENT="x')
    alto_xml.save()
    # Strict mode refuses to merge mismatched text...
    with pytest.raises(Exception, match=r'Case text and alto text do not match'):
        generate_styled_case_xml(case_xml)
    # ...while lenient mode still produces styled output.
    merged = parse_xml(generate_styled_case_xml(case_xml, False))
    assert len(merged("casebody|em")) == 8
    assert len(merged("casebody|strong")) == 11
@pytest.mark.django_db
def test_merge_dup_exception(ingest_case_xml):
    """Duplicative cases have no casebody, so merging must fail loudly."""
    duplicative_case = CaseXML.objects.get(metadata_id__case_id="32044061407086_0001")
    with pytest.raises(Exception, match=r'Duplicative case: no casebody data to merge'):
        generate_styled_case_xml(duplicative_case)
@pytest.mark.django_db
def test_validate_alto_casemets_dup(ingest_case_xml):
    """Validation flags a duplicative case as 'duplicative' with OK status."""
    report = validate(CaseXML.objects.get(metadata_id__case_id="32044061407086_0001"))
    assert report['status'] == 'ok'
    assert report['results'] == 'duplicative'
@pytest.mark.django_db
def test_validate_alto_casemets_clean(ingest_case_xml):
    """A pristine case validates as clean with OK status."""
    report = validate(CaseXML.objects.get(metadata_id__case_id="32044057891608_0001"))
    assert report['status'] == 'ok'
    assert report['results'] == 'clean'
@pytest.mark.django_db
def test_validate_alto_casemets_dirty(ingest_case_xml):
    """A case with known extra-character mismatches reports exactly two problems,
    each describing where the ALTO and case-METS text diverge."""
    results = validate(CaseXML.objects.get(metadata_id__case_id="32044057892259_0001"))
    assert results['status'] == 'warning'
    assert results['results'] == 'encountered 2 problems'
    # Expected problem: extra soft hyphen on the case-METS side.
    problem_1 = {'alto': {'current': {'ST_17.1.8.1': 'matter'},
          'current_character': {'ST_17.1.8.1': 'm'},
          'next': {'ST_17.1.8.3': 'in'},
          'prev': None},
 'casemets': {'current': '\xadmatte',
              'current_character': '\xad',
              'snippet': 'tion of the subject-\xadmatter in controver'},
 'description': 'extra char in case_mets? match found in current alto'}
    # Expected problem: extra character on the ALTO side.
    problem_2 = {'alto': {'current': {'ST_19.1.11.7': '113\xad'},
          'current_character': {'ST_19.1.11.7': '\xad'},
          'next': {'ST_19.1.11.9': ';'},
          'prev': {'ST_19.1.11.5': 'Ill.'}},
 'casemets': {'current': '; Ca',
              'current_character': ';',
              'snippet': 'Strobel, 24 Ill. 113; Carpenter v. Wells'},
 'description': 'extra char in alto? match found subsequent alto element'}
    assert problem_1 in results['problems']
    assert problem_2 in results['problems']
@pytest.mark.django_db
def test_validate_alto_casemets_error(ingest_case_xml):
    """Injecting unmatched words into the case XML must make validation give up
    after two consecutive bad words and report both mismatches."""
    case_xml = CaseXML.objects.get(metadata_id__case_id="32044057891608_0001")
    parsed_case_xml = parse_xml(case_xml.orig_xml)
    # Replace the parties text with words the ALTO data cannot match.
    case_parent_tag = parsed_case_xml('casebody|parties')
    case_parent_tag.text("<NAME>, Propellant, v. <NAME>, Applebees.")
    case_xml.orig_xml = serialize_xml(parsed_case_xml)
    case_xml.save(update_related=False)
    results = validate(case_xml)
    # First unmatched word ("Propellant" vs ALTO "Appellant,").
    problem_1 = {'alto': {'current': {'ST_17.2.1.5': 'Appellant,'},
          'current_character': {'ST_17.2.1.5': 'A'},
          'next': {'ST_17.2.1.7': 'v.'},
          'prev': {'ST_17.2.1.3': 'Taylor,'}},
 'casemets': {'current': 'Propellant',
              'current_character': 'P',
              'snippet': '<NAME>, Propellant, v. Macha'},
 'description': 'Unspecified Mismatch.'}
    # Second consecutive mismatch triggers the give-up condition.
    problem_2 = {'alto': {'current': {'ST_17.2.1.7': 'v.'},
          'current_character': {'ST_17.2.1.7': 'v'},
          'next': {'ST_17.2.1.9': 'Michael'},
          'prev': {'ST_17.2.1.5': 'Appellant,'}},
 'casemets': {'current': 'Pr',
              'current_character': 'P',
              'snippet': '<NAME>, Propellant, v. Macha'},
 'description': 'Unspecified Mismatch.'}
    assert results['status'] == 'error'
    assert results['results'] == 'gave up after 2 consecutive bad words'
    assert problem_1 in results['problems']
    assert problem_2 in results['problems']
| StarcoderdataPython |
6577653 | <gh_stars>0
# Machine types and recording categories present in the spectrogram dataset.
machines = ['fan', 'pump', 'slider', 'valve']
kinds = ['normal', 'abnormal']
rootpath = 'F:/Graduate_projrct/Pictures/Mel/'

def name_path(name):
    """Return one Mel-spectrogram directory path per machine for the given kind."""
    return [f'{rootpath}{machine}/{name}' for machine in machines]

# kind -> list of per-machine directories for that kind.
file_names = {kind: name_path(kind) for kind in kinds}
6601509 | import numpy as np
from cachetools.keys import hashkey
from cachetools import LRUCache, cached
from scipy import signal, fftpack
from datavis.common import strided_array
def speckey(sig, *args, **kwargs):
    """Cache key for ``spectrogram``: hashes every argument except the signal.

    ``sig`` is deliberately excluded (ndarrays are unhashable), so the cache
    relies on the remaining arguments — presumably ``filename`` — to tell
    signals apart. NOTE(review): two different signals passed with identical
    fs/win_len/hop/filename would collide in the cache; confirm callers always
    supply a distinguishing ``filename``.
    """
    key = hashkey(*args, **kwargs)
    return key
@cached(LRUCache(maxsize=10), key=speckey)
def spectrogram(sig, fs, win_len=512, hop=256, win_type='hanning', filename=''):
    """Magnitude spectrogram of ``sig`` (LRU-cached; key ignores the signal, see speckey).

    :param sig: 1-D signal samples.
    :param fs: sample rate in Hz.
    :param win_len: window length in samples (also used as the FFT size).
    :param hop: hop size in samples between consecutive frames.
    :param win_type: scipy window name passed to ``signal.get_window``.
    :param filename: opaque tag used only for cache keying.
    :return: tuple ``(Sxx, freq)`` — ``Sxx`` has frequency bins on axis 0 and
        frames on axis 1; ``freq`` gives the matching bin frequencies in Hz.
    """
    W = signal.get_window(win_type, win_len, fftbins=False)
    # Overlapping frames via a strided view (see strided_array), windowed in
    # one vectorized multiply.
    sig_strided = strided_array(sig, win_len, hop)
    sig_windowed = np.multiply(sig_strided, W)
    # rfft yields win_len//2 + 1 bins; the slice drops the last (Nyquist) bin.
    Sxx = np.abs(np.fft.rfft(sig_windowed, win_len))[:, :win_len // 2]
    Sxx = np.transpose(Sxx)
    freq = np.arange(0, fs / 2, fs / win_len)
    return Sxx, freq
def envelope(sig: np.ndarray) -> np.ndarray:
    """Amplitude envelope of ``sig`` via the analytic signal (Hilbert transform).

    The FFT inside ``hilbert`` is padded to the next fast length for speed; the
    result is sliced back so the envelope always matches ``len(sig)`` (the
    previous version leaked the zero-padding into the output length).

    :param sig: 1-D real-valued signal.
    :return: non-negative envelope array with the same length as ``sig``.
    """
    n_fast = fftpack.helper.next_fast_len(len(sig))
    return np.abs(signal.hilbert(sig, n_fast))[:len(sig)]
def segmented_spectogram(y: np.ndarray, fs: int, fs_step: float, fs_max: float, db_threshold: float) -> np.ndarray:
    """Per-band fraction of spectrogram cells louder than ``db_threshold``.

    The window length is chosen so one FFT bin spans ``fs_max / fs_step`` Hz;
    the dB spectrogram (relative to its global peak) is split into bands of
    ``fs_step`` Hz up to ``fs_max``, and for each band the share of cells
    above the threshold is returned.
    """
    bin_hz = fs_max / fs_step            # Hz covered by one FFT bin
    win_len = int(fs / bin_hz)
    spec, _ = spectrogram(y, fs, win_len=win_len, hop=win_len)
    spec_db = 20 * np.log10(spec / np.max(spec))   # dB relative to peak
    band_edges_bin = (np.arange(fs_step, fs_max, fs_step) / bin_hz).astype(int)
    fractions = [np.sum(band > db_threshold) / band.size
                 for band in np.split(spec_db, band_edges_bin)]
    return np.array(fractions)
| StarcoderdataPython |
3400409 | import pathlib
import typer
import pandas as pd
from transliterate import get_translit_function
from sklearn.model_selection import train_test_split
from ..common import nlu_path_to_dataframe, dataframe_to_nlu_file, entity_names
# Typer sub-application grouping the transliteration CLI commands below.
app = typer.Typer(
    name="augment",
    add_completion=False,
    help="""Commands to generate transliterations.""",
)
class Translitor:
    """Transliterates text while shielding entity names from the engine.

    Entity names are swapped for numeric placeholder strings before calling
    the transliteration function and swapped back afterwards, so annotation
    tokens survive untouched.

    NOTE(review): placeholders are bare digit strings ("0", "1", ...); texts
    that already contain digits, or mappers with 10+ entities (where "1" is a
    prefix of "11"), could be corrupted on restore — confirm on real data.
    """
    def __init__(self, lang, reversed, ents):
        # Transliteration callable from the `transliterate` package.
        self.translitor = get_translit_function(lang)
        # entity name -> numeric placeholder index
        self.mapper = {e: i for i, e in enumerate(ents)}
        self.reversed = reversed
    def hide_ents(self, s):
        # Replace each known entity name with its numeric placeholder.
        for k, v in self.mapper.items():
            s = s.replace(k, str(v))
        return s
    def show_ents(self, s):
        # Restore entity names from their numeric placeholders.
        for k, v in self.mapper.items():
            s = s.replace(str(v), k)
        return s
    def translit(self, s):
        return self.show_ents(
            self.translitor(self.hide_ents(s), reversed=self.reversed)
        )
def add_transliteration(dataf, lang, reversed, text_col="text"):
    """Return ``dataf`` with ``text_col`` replaced by transliterated text.

    Entity names found in the "text" column are protected during the
    transliteration (see Translitor).
    """
    texts = list(dataf["text"])
    translitor = Translitor(lang=lang, reversed=reversed, ents=entity_names(texts))
    transliterated = [translitor.translit(text) for text in texts]
    return dataf.assign(**{text_col: transliterated})
@app.command()
def augment(
    file: pathlib.Path = typer.Argument(..., help="The original nlu.yml file"),
    out: pathlib.Path = typer.Argument(..., help="Path to write misspelled file to"),
    target: str = typer.Option("latin", help="Alphabet to map to."),
    source: str = typer.Option("latin", help="Alphabet to map from."),
    lang: str = typer.Option("en", help="Language for keyboard layout"),
):
    """
    Applies transliteration to an NLU file and saves it to disk.

    Exactly one of --target/--source should name a non-latin alphabet; the
    other stays "latin" and determines the direction of the mapping.
    """
    # Same alphabet on both sides means there is nothing to transliterate.
    if target == source:
        typer.echo(
            "Error! Either --target or --source needs to be set. Cannot be the same."
        )
        raise typer.Exit(1)
    # NOTE(review): this overwrites the `lang` CLI option with the non-latin
    # alphabet — the keyboard-layout value is effectively ignored; confirm.
    lang = target if target != "latin" else source
    reversed = target == "latin"  # True when mapping back *to* latin
    dataf = nlu_path_to_dataframe(file)
    (
        dataf.pipe(add_transliteration, lang=lang, reversed=reversed).pipe(
            dataframe_to_nlu_file, write_path=out, label_col="intent"
        )
    )
@app.command()
def generate(
    file: pathlib.Path = typer.Argument(..., help="The original nlu.yml file"),
    seed_aug: int = typer.Option(42, help="The seed value to split the data"),
    test_size: int = typer.Option(33, help="Percentage of data to keep as test data"),
    prefix: str = typer.Option("translit", help="Prefix to add to all the files"),
    target: str = typer.Option("latin", help="Alphabet to map to."),
    source: str = typer.Option("latin", help="Alphabet to map from."),
    lang: str = typer.Option("en", help="Language for keyboard layout"),
):
    """
    Generate train/validation data with/without transliteration.
    Will also generate files for the `/test` directory.

    Writes four NLU files: plain train/valid splits plus transliterated
    copies of each, prefixed with ``--prefix``.
    """
    # Same alphabet on both sides means there is nothing to transliterate.
    if target == source:
        typer.echo(
            "Error! Either --target or --source needs to be set. Cannot be the same."
        )
        raise typer.Exit(1)
    # NOTE(review): as in `augment`, this overwrites the `lang` CLI option.
    lang = target if target != "latin" else source
    reversed = target == "latin"
    dataf = nlu_path_to_dataframe(file)
    # `test_size` is a percentage, hence the division by 100.
    X_train, X_test, y_train, y_test = train_test_split(
        dataf["text"], dataf["intent"], test_size=test_size / 100, random_state=seed_aug
    )
    df_valid = pd.DataFrame({"text": X_test, "intent": y_test}).sort_values(["intent"])
    df_train = pd.DataFrame({"text": X_train, "intent": y_train}).sort_values(
        ["intent"]
    )
    # Plain (non-transliterated) train/valid files.
    (
        df_train.pipe(
            dataframe_to_nlu_file, write_path="data/nlu-train.yml", label_col="intent"
        )
    )
    (
        df_valid.pipe(
            dataframe_to_nlu_file, write_path="test/nlu-valid.yml", label_col="intent"
        )
    )
    # Transliterated counterparts, written with the configured prefix.
    (
        df_train.pipe(add_transliteration, lang=lang, reversed=reversed).pipe(
            dataframe_to_nlu_file,
            write_path=f"data/{prefix}-nlu-train.yml",
            label_col="intent",
        )
    )
    (
        df_valid.pipe(add_transliteration, lang=lang, reversed=reversed).pipe(
            dataframe_to_nlu_file,
            write_path=f"test/{prefix}-nlu-valid.yml",
            label_col="intent",
        )
    )
| StarcoderdataPython |
5196537 | <gh_stars>10-100
import sys
import spotipy
import yaml
import spotipy.util as util
from pprint import pprint
import json
def load_config():
    """Load config.yaml into the module-level ``user_config`` dict."""
    global user_config
    # Context manager closes the file (the old code leaked the handle), and
    # safe_load avoids executing arbitrary YAML tags — yaml.load without an
    # explicit Loader is deprecated and unsafe on untrusted input.
    with open('config.yaml') as stream:
        user_config = yaml.safe_load(stream)
def add_monthly_playlist_tracks(sources, target_playlist_id):
    """Copy every track from each source playlist into the target playlist.

    :param sources: mapping of label -> source playlist ID/URI.
    :param target_playlist_id: playlist that receives all the tracks.
    """
    for key, playlist_id in sources.items():
        print(key)
        all_track_ids = []
        all_track_names = []
        # Bug fix: look the playlist up via the `sources` parameter instead of
        # the module-level `months` dict the old code silently reached for.
        track_list = sp.playlist_tracks(playlist_id)
        for track in track_list['items']:
            try:
                all_track_ids.append(track['track']['id'])
                all_track_names.append(track['track']['name'])
            except (KeyError, TypeError):
                # Skip entries without a readable track object (e.g. None
                # placeholders); narrowed from the old bare `except`.
                continue
        sp.user_playlist_add_tracks(user=user_config['username'], playlist_id=target_playlist_id, tracks=all_track_ids)
        print()
def playlist_uri_stripper(playlist_uri):
    """Extract the bare playlist ID from a 'spotify:playlist:<id>' URI."""
    parts = playlist_uri.split(':')
    return parts[2]
if __name__ == '__main__':
    # NOTE(review): `global` statements at module level are no-ops; kept as-is.
    global sp
    global user_config
    # Monthly source playlists: label -> Spotify playlist ID.
    months = {
        'jan20': '45hqvkXjiYcilhfj28Eydh',
        'feb20': '6FecIWptAeAPUbC8CxBjFu',
        'mar20': '44NeB9aqikV0Xm7vEjy7AX',
        'apr20': '6kogRNGaDDfIIV2c4AEwYB',
        'may20': '4mMRn8Kr8QhE2gZL0fcYoc',
        'jun20': '0NJ1vCjR453682Vl0UBW9i',
        'jul20': '16LJnglVuijORDhHZyc6hW',
        'aug20': '3a4MA8aVlqpcnrGtH1zPDX'
    }
    load_config()
    # OAuth token with write scope for both private and public playlists.
    token = util.prompt_for_user_token(user_config['username'], scope='playlist-modify-private,playlist-modify-public', client_id=user_config['client_id'], client_secret=user_config['client_secret'], redirect_uri=user_config['redirect_uri'])
    if token:
        sp = spotipy.Spotify(auth=token)
        target_uri = input("Target playlist URI: ")
        target_id = playlist_uri_stripper(target_uri)
        # NOTE(review): `target_id` is computed but `target_uri` is what gets
        # passed below — confirm which was intended (spotipy generally accepts
        # either). `tracks` is always None since the helper returns nothing.
        tracks = add_monthly_playlist_tracks(months, target_uri)
    else:
        print ("Can't get token for", user_config['username'])
1908793 | <reponame>BraveGroup/SSPL
"""
Predictive coding module (PCM) for audio and visual feature alignment.
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
# top-down prediction process
class pred_module(nn.Module):
    """Top-down prediction stage: 3x3 same-padding conv, optional 2x2 max-pool."""

    def __init__(self, inchan, outchan, downsample=False):
        super(pred_module, self).__init__()
        # Bias-free convolution from the upper-level to the lower-level channels.
        self.conv2d = nn.Conv2d(inchan, outchan, kernel_size=3, stride=1, padding=1, bias=False)
        self.downsample = downsample
        if self.downsample:
            self.Downsample = nn.MaxPool2d(kernel_size=2, stride=2)

    def forward(self, x):
        out = self.conv2d(x)
        return self.Downsample(out) if self.downsample else out
# bottom-up error propagation process
class error_module(nn.Module):
    """Bottom-up error stage: optional bilinear upsample, then a 3x3 transposed conv."""

    def __init__(self, inchan, outchan, upsample=False, scale_factor=2):
        super(error_module, self).__init__()
        # Bias-free transposed convolution back to the upper-level channels.
        self.convtrans2d = nn.ConvTranspose2d(inchan, outchan, kernel_size=3, stride=1, padding=1, bias=False)
        self.upsample = upsample
        if self.upsample:
            self.Upsample = nn.Upsample(scale_factor=scale_factor, mode='bilinear', align_corners=True)

    def forward(self, x):
        out = self.Upsample(x) if self.upsample else x
        return self.convtrans2d(out)
# input feature map: 14 x 14
class PCNet(nn.Module):
    """Predictive coding network aligning a visual feature map to an audio embedding.

    Representations are initialized top-down from the visual map, then refined
    for ``cycs_in`` cycles, each cycle alternating a feedback prediction pass
    with a feedforward error-propagation pass against the audio feature.

    :param cycs_in: number of recurrent refinement cycles in ``forward``.
    :param dim_audio: channel dimension of the audio embedding.
    :param n_fm_out: channel dimension of the returned visual feature map.
    """
    def __init__(self, cycs_in=4, dim_audio=128, n_fm_out=512):
        super(PCNet, self).__init__()
        self.cycs_in = cycs_in
        self.in_channels = [dim_audio, 512, 512, 512]
        self.out_channels = [512, 512, 512, n_fm_out]
        # in->out 3x3->3x3, 3x3->7x7, 7x7->14x14, 14x14->14x14
        sample_flag = [False, True, True, False]
        self.num_layers = len(self.in_channels)
        # -------------------------------------------------------
        # feedback prediction process
        # -------------------------------------------------------
        # Bottom layer maps 512 channels down to the audio dimension via 1x1 conv.
        pred_bottom_layer = [nn.Conv2d(self.out_channels[0], self.in_channels[0],
                                       kernel_size=1, stride=1, padding=0)]
        self.PredProcess = nn.ModuleList(pred_bottom_layer +
                                         [pred_module(self.out_channels[i], self.in_channels[i], downsample=sample_flag[i])
                                          for i in range(1, self.num_layers - 1)])
        # -------------------------------------------------------
        # feedforward error propagation process
        # -------------------------------------------------------
        error_bottom_layer1 = [nn.ConvTranspose2d(self.in_channels[0], self.out_channels[0],
                                                  kernel_size=1, stride=1, padding=0, bias=False)]
        # 3x3 stride-2 transposed conv expands the 3x3 map to 7x7.
        error_bottom_layer2 = [nn.ConvTranspose2d(self.in_channels[1], self.out_channels[1],
                                                  kernel_size=3, stride=2, padding=0, bias=False)]
        error_output_layer = [nn.ConvTranspose2d(self.in_channels[-1], self.out_channels[-1],
                                                 kernel_size=1, stride=1, padding=0)]
        self.ErrorProcess = nn.ModuleList(error_bottom_layer1 + error_bottom_layer2 +
                                          [error_module(self.in_channels[i], self.out_channels[i], upsample=sample_flag[i])
                                           for i in range(2, self.num_layers - 1)] + error_output_layer)
        # -------------------------------------------------------
        # two kinds of scalars: b0 blends prediction vs. state,
        # a0 scales the propagated error (both learned, per channel)
        # -------------------------------------------------------
        self.b0 = nn.ParameterList([nn.Parameter(torch.zeros(1, self.in_channels[i], 1, 1) + 0.5)
                                    for i in range(1, self.num_layers)])
        self.a0 = nn.ParameterList([nn.Parameter(torch.zeros(1, self.out_channels[i], 1, 1) + 1.0)
                                    for i in range(self.num_layers - 1)])
        # -------------------------------------------------------
        # batch norm
        # -------------------------------------------------------
        # for representation initialization
        self.BNPred = nn.ModuleList([nn.BatchNorm2d(self.in_channels[i]) for i in range(1, self.num_layers - 1)])
        self.BNError = nn.ModuleList([nn.BatchNorm2d(self.out_channels[i]) for i in range(self.num_layers - 1)])
        # for representation updates at each time step (separate BN per cycle)
        BNPred_step = []
        BNError_step = []
        for t in range(cycs_in):
            BNPred_step = BNPred_step + [nn.BatchNorm2d(self.in_channels[i]) for i in range(1, self.num_layers - 1)]
            BNError_step = BNError_step + [nn.BatchNorm2d(self.out_channels[i]) for i in range(self.num_layers - 1)]
        self.BNPred_step = nn.ModuleList(BNPred_step)
        self.BNError_step = nn.ModuleList(BNError_step)
    def forward(self, vis_fm, audio_feature_orig):
        """Refine the visual representation against the audio feature.

        :param vis_fm: visual feature map, expected B x 512 x 14 x 14 (see the
            "input feature map: 14 x 14" note above the class).
        :param audio_feature_orig: audio embedding, B x dim_audio.
        :return: transformed visual feature map, B x n_fm_out x 14 x 14.
        """
        # representation initialization (feedback process)
        r_pred = [vis_fm]
        for i in range(self.num_layers - 2, 0, -1):
            r_pred = [F.gelu(self.BNPred[i - 1](self.PredProcess[i](r_pred[0])))] + r_pred
        # predict audio feature
        audio_feature_pred = F.gelu(self.PredProcess[0](r_pred[0]))  # B x C_audio x 3 x 3
        audio_feature_pred = F.adaptive_avg_pool2d(audio_feature_pred, 1).view(audio_feature_pred.size(0), -1)
        # representation initialization (feedforward process)
        pred_error_audio = audio_feature_orig - audio_feature_pred  # B x C_audio
        pred_error_audio = pred_error_audio.unsqueeze(-1).unsqueeze(-1)
        pred_error_audio = pred_error_audio.expand(-1, -1, 3, 3)
        a0 = F.relu(self.a0[0]).expand_as(r_pred[0])
        r_update = [F.gelu(self.BNError[0](r_pred[0] + a0 * self.ErrorProcess[0](pred_error_audio)))]
        for i in range(1, self.num_layers - 1):
            pred_error = r_update[i - 1] - r_pred[i - 1]
            a0 = F.relu(self.a0[i]).expand_as(r_pred[i])
            r_update.append(F.gelu(self.BNError[i](r_pred[i] + a0 * self.ErrorProcess[i](pred_error))))
        for t in range(self.cycs_in):
            # representation updates (feedback process)
            b0 = F.relu(self.b0[-1]).expand_as(r_update[-1])
            r_update[-1] = F.gelu((1 - b0) * r_update[-1] + b0 * r_pred[-1])
            for i in range(self.num_layers - 2, 0, -1):
                r_pred[i - 1] = self.PredProcess[i](r_update[i])
                b0 = F.relu(self.b0[i - 1]).expand_as(r_update[i - 1])
                r_update[i - 1] = F.gelu(
                    self.BNPred_step[(self.num_layers-2)*t+i-1]((1 - b0) * r_update[i - 1] + b0 * r_pred[i - 1]))
            # predict audio feature
            audio_feature_pred = F.gelu(self.PredProcess[0](r_update[0]))
            audio_feature_pred = F.adaptive_avg_pool2d(audio_feature_pred, 1).view(audio_feature_pred.size(0), -1)
            # representation updates (feedforward process)
            pred_error_audio = audio_feature_orig - audio_feature_pred  # B x C_audio
            pred_error_audio = pred_error_audio.unsqueeze(-1).unsqueeze(-1)
            pred_error_audio = pred_error_audio.expand(-1, -1, 3, 3)
            a0 = F.relu(self.a0[0]).expand_as(r_update[0])
            r_update[0] = F.gelu(
                self.BNError_step[(self.num_layers-1)*t](r_update[0] + a0 * self.ErrorProcess[0](pred_error_audio)))
            for i in range(1, self.num_layers - 1):
                pred_error = r_update[i - 1] - r_pred[i - 1]
                a0 = F.relu(self.a0[i]).expand_as(r_update[i])
                r_update[i] = F.gelu(
                    self.BNError_step[(self.num_layers-1)*t+i](r_update[i] + a0 * self.ErrorProcess[i](pred_error)))
        # transformed feature
        feat_trans = self.ErrorProcess[-1](r_update[-1])  # B x C_vis x 14 x 14
        return feat_trans
| StarcoderdataPython |
290170 | # Generated by Django 3.0.5 on 2021-09-01 10:04
from django.db import migrations, models
class Migration(migrations.Migration):
    """Recreate the `id` primary keys of four Quiz models as explicit AutoFields."""
    dependencies = [
        ('Quiz', '0006_auto_20210901_0043'),
    ]
    operations = [
        migrations.AlterField(
            model_name='elegirrespuesta',
            name='id',
            field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.AlterField(
            model_name='pregunta',
            name='id',
            field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.AlterField(
            model_name='preguntasrespondidas',
            name='id',
            field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.AlterField(
            model_name='quizusuario',
            name='id',
            field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
    ]
| StarcoderdataPython |
# Module-level name; the assignment inside eggs() shadows it, never rebinds it.
spam = 42  # global variable

def eggs():
    """Demonstrate local-variable shadowing; prints two fixed lines."""
    spam = 42  # local variable shadowing the global of the same name
    for line in ('Some code here.', 'Some more code.'):
        print(line)
| StarcoderdataPython |
3496166 |
def xor(*args):
    """Return True iff exactly one argument is truthy.

    Note this is an "exactly one" check, not odd-parity XOR chaining:
    ``xor(1, 1, 1)`` is False. With no arguments the result is False.
    """
    return sum(map(bool, args)) == 1
6420497 | import numpy as np
def round_amount(amount, exchange, symbol, type):
    """Round ``amount`` to the exchange's precision for ``symbol``.

    :param amount: raw amount to round.
    :param exchange: ccxt-style exchange exposing ``amount_to_precision``.
    :param symbol: market symbol, e.g. ``'BTC/USDT'``.
    :param type: ``'down'`` truncates to precision; ``'up'`` bumps the
        truncated value by one smallest precision step.
    :return: rounded amount as float.
    :raises ValueError: on an unknown ``type`` (previously this fell through
        and raised ``UnboundLocalError``).
    """
    rounded_amount_str = exchange.amount_to_precision(symbol, amount)
    if type == 'down':
        return float(rounded_amount_str)
    if type == 'up':
        if '.' in rounded_amount_str:
            decimal = rounded_amount_str[::-1].find('.')
            min_amount = 10 ** (-decimal)
        else:
            # Integer precision: the smallest step is one whole unit.
            # (The old reversed-find returned -1 here, yielding a step of 10.)
            min_amount = 1
        return float(rounded_amount_str) + min_amount
    raise ValueError("type must be 'down' or 'up', got %r" % (type,))
def round_up_amount(amount, decimal):
    """Round ``amount`` up (toward +inf) at ``decimal`` fractional digits."""
    scale = 10 ** decimal
    return np.ceil(amount * scale) / scale
def cal_adjusted_price(order, fee):
    """Average fill price of ``order`` after folding ``fee`` into its cost."""
    total_cost = order['cost'] + fee
    return total_cost / order['filled']
def cal_unrealised(last_price, grid, open_orders_df):
    """Unrealised PnL implied by the open sell orders of a grid strategy.

    Each open sell at price p is assumed to hold inventory bought one grid
    step lower (p - grid); that inventory is marked to ``last_price``.

    :return: (unrealised, n_open_sell_oders, total_amount, avg_entry_price)
    """
    sells = open_orders_df[open_orders_df['side'] == 'sell']
    n_open_sell_oders = len(sells)
    entry_prices = [p - grid for p in sells['price']]
    amounts = sells['amount'].to_list()
    amount = sum(amounts)
    total_value = sum(p * a for p, a in zip(entry_prices, amounts))
    # Guard against an empty order book (avg price defaults to 0).
    avg_price = total_value / amount if amount else 0
    unrealised = (last_price - avg_price) * amount
    return unrealised, n_open_sell_oders, amount, avg_price
def cal_unrealised_future(last_price, position):
    """Unrealised PnL of a futures ``position`` marked at ``last_price``.

    :param position: dict with 'side' ('buy'/'sell'), 'entry_price' and
        'amount' (may be a numeric string; converted with float()).
    :raises ValueError: on an unknown side (previously UnboundLocalError).
    """
    side = position['side']
    if side == 'buy':
        margin = last_price - position['entry_price']
    elif side == 'sell':
        margin = position['entry_price'] - last_price
    else:
        raise ValueError("unknown position side: %r" % (side,))
    return margin * float(position['amount'])
def cal_drawdown_future(last_price, position):
    """Relative drawdown of a futures position, floored at zero.

    For longs the loss ratio is 1 - last/entry; for shorts it is
    last/entry - 1. Profitable positions return 0.

    :raises ValueError: on an unknown side (previously UnboundLocalError).
    """
    side = position['side']
    ratio = last_price / position['entry_price']
    if side == 'buy':
        drawdown = max(1 - ratio, 0)
    elif side == 'sell':
        drawdown = max(ratio - 1, 0)
    else:
        raise ValueError("unknown position side: %r" % (side,))
    return drawdown
def cal_available_budget(quote_currency_free, available_cash_flow, transfer):
    """Free quote-currency budget after reserving cash flow and withdrawals.

    Withdraw cash flow itself is excluded because it is moved instantly.
    """
    reserved_withdraw = transfer['withdraw'] + transfer['pending_withdraw']
    return quote_currency_free - available_cash_flow - reserved_withdraw
def cal_end_balance(base_currency_value, quote_currency_value, transfer):
    """End-of-day balance net of today's and pending withdrawals.

    Called before the daily update: today's withdrawal is still under
    'withdraw' (it is moved to 'pending_withdraw' afterwards and edited
    manually once the funds transfer), while deposits are already reflected
    in the balances.
    """
    total_value = base_currency_value + quote_currency_value
    return total_value - transfer['withdraw'] - transfer['pending_withdraw']
def cal_end_cash(cash, transfer):
    """End-of-day cash net of today's and pending withdrawals."""
    return cash - transfer['withdraw'] - transfer['pending_withdraw']
208280 | # (C) Copyright 2021 Hewlett Packard Enterprise Development LP.
# Apache License 2.0
import json
import logging
from pyaoscx.exceptions.generic_op_error import GenericOperationError
from pyaoscx.exceptions.parameter_error import ParameterError
from pyaoscx.exceptions.response_error import ResponseError
from pyaoscx.utils import util as utils
from pyaoscx.pyaoscx_module import PyaoscxModule
class OspfVlink(PyaoscxModule):
"""
Provide configuration management for OSPF VLink instance on AOS-CX devices.
"""
collection_uri = (
"system/vrfs/{name}/ospf{version}_routers/"
"{instance_tag}/areas/{area_id}/ospf_vlinks"
)
object_uri = collection_uri + "/{peer_router_id}"
resource_uri_name = "ospf_vlinks"
    def __init__(self, session, peer_router_id, parent_ospf_area, **kwargs):
        """Build an OSPF virtual-link object bound to its parent OSPF area.

        :param session: pyaoscx.Session representing the device connection.
        :param peer_router_id: ID of the peer router; identifies this vlink.
        :param parent_ospf_area: OSPF area object this vlink belongs to.
        :param kwargs: extra attributes set verbatim on the instance.
        """
        self.session = session
        self.__parent_ospf_area = parent_ospf_area
        self.__peer_router_id = peer_router_id
        # List used to determine attributes related to the OSPF configuration
        self.config_attrs = []
        self.materialized = False
        # Dictionary used to manage original data obtained from the GET
        self._original_attributes = {}
        # Set arguments needed for correct creation
        utils.set_creation_attrs(self, **kwargs)
        # Attribute used to know if object was changed recently
        self.__modified = False
        self.base_uri = self.__parent_ospf_area.path + "/ospf_vlinks"
        self.path = "{0}/{1}".format(self.base_uri, self.__peer_router_id)
    @property
    def peer_router_id(self):
        """
        Return the peer router ID that identifies this virtual link.
        """
        return self.__peer_router_id
    @property
    def modified(self):
        """
        Return True if this object was created or modified by the last apply().
        """
        return self.__modified
def _get_indices(self):
"""
Get indices to retrieve collection of this object's instances
:return: a dictionary with each key in the collection_uri, and its
respective value to perform a GET request, or empty dictionary
if the collection_uri has no indices.
"""
indices = {
"area_id": self.__parent_ospf_area.area_id
}
indices.update(self.__parent_ospf_area._get_indices())
return indices
    @PyaoscxModule.connected
    def get(self, depth=None, selector=None):
        """
        Perform a GET request to retrieve data for an OSPF VLink table entry
        and fill the object with the incoming attributes
        :param depth: Integer deciding how many levels into the API JSON that
            references will be returned.
        :param selector: Alphanumeric option to select specific information to
            return.
        :return: Returns True if there is not an exception raised
        """
        logging.info("Retrieving an OSPF VLink from switch")
        data = self._get_data(depth, selector)
        # Add dictionary as attributes for the object
        utils.create_attrs(self, data)
        # Set original attributes (kept for later change detection)
        self._original_attributes = data
        # Sets object as materialized (i.e. it now mirrors switch state)
        self.materialized = True
        return True
    @classmethod
    def get_all(cls, session, parent_ospf_area):
        """
        Perform a GET request to retrieve all system OSPF Virtual Links inside
        a OPSF Router, and create a dictionary containing them
        :param cls: Object's class
        :param session: pyaoscx.Session object used to represent a logical
            connection to the device
        :param parent_ospf_area: parent OPSF Area object where OPSF VLink
            is stored
        :return: Dictionary containing OSPF Virtual Link IDs as keys and a OSPF
            Virtual Link objects as values
        """
        logging.info("Retrieving the switch %s data", cls.__name__)
        uri_indices = {
            "area_id": parent_ospf_area.area_id
        }
        uri_indices.update(parent_ospf_area._get_indices())
        # NOTE(review): the indices dict is passed to str.format as a single
        # positional argument, so collection_uri presumably uses
        # {0[...]}-style placeholders -- confirm against the class attribute.
        uri = session.base_url + cls.collection_uri.format(uri_indices)
        try:
            response = session.s.get(uri, verify=False, proxies=session.proxy)
        except Exception as exc:
            raise ResponseError("GET", exc) from exc
        if not utils._response_ok(response, "GET"):
            raise GenericOperationError(response.text, response.status_code)
        data = json.loads(response.text)
        ospf_vlink_dict = {}
        uri_list = session.api.get_uri_from_data(data)
        # Materialize each vlink found in the collection response.
        for uri in uri_list:
            peer_router_id, vlink = cls.from_uri(
                session,
                uri,
                parent_ospf_area
            )
            vlink.get()
            ospf_vlink_dict[peer_router_id] = vlink
        return ospf_vlink_dict
    @PyaoscxModule.connected
    def apply(self):
        """
        Main method used to either create or update an existing
        Ospf Virtual Link table entry.
        Checks whether the OSPF Virtual Link exists in the switch
        Calls self.update() if OSPF Virtual Link is being updated
        Calls self.create() if a new OSPF Virtual Link is being created
        :return modified: Boolean, True if object was created or modified
            False otherwise
        """
        # The parent area must exist on the switch before any of its
        # virtual links can be created or updated.
        if not self.__parent_ospf_area.materialized:
            self.__parent_ospf_area.apply()
        if self.materialized:
            self.__modified = self.update()
        else:
            self.__modified = self.create()
        return self.__modified
    @PyaoscxModule.connected
    def update(self):
        """
        Perform a PUT request to apply changes to an existing OSPF VLink table
        entry
        :return modified: True if Object was modified and a PUT request was
            made. False otherwise
        """
        # Send only the configurable attributes currently set on the object.
        ospf_vlink_data = utils.get_attrs(self, self.config_attrs)
        self.__modified = self._put_data(ospf_vlink_data)
        return self.__modified
    @PyaoscxModule.connected
    def create(self):
        """
        Perform a POST request to create a new OSPF Virtual Link
        Only returns if an exception is not raised
        :return modified: Boolean, True if entry was created
        """
        ospf_vlink_data = utils.get_attrs(self, self.config_attrs)
        # The index attribute must be included explicitly in the POST body.
        ospf_vlink_data["peer_router_id"] = self.__peer_router_id
        self.__modified = self._post_data(ospf_vlink_data)
        return self.__modified
    @PyaoscxModule.connected
    def delete(self):
        """
        Perform DELETE call to delete OSPF Virtual Link table entry.
        """
        self._send_data(self.path, None, "DELETE", "Delete")
        # Also drop the configuration attributes from the local object.
        utils.delete_attrs(self, self.config_attrs)
    @classmethod
    def from_uri(cls, session, uri, parent_ospf_area=None):
        """
        Create an OspfVlink object given a URI
        :param session: pyaoscx.Session object used to represent a logical
            connection to the device
        :param parent_ospf_area: parent OspfArea object where OspfVlink
            object is stored. Required despite the None default.
        :param uri: an OSPF VLink URI with its index (a peer_router_id)
        :raises ParameterError: when parent_ospf_area is not provided.
        :return peer_router_id, ospf_vlink: tuple with the OspfVlink ID, and
            the object (not yet materialized; call get() to fetch its data)
        """
        if parent_ospf_area is None:
            raise ParameterError(
                "{0} requires parent_ospf_area instance".format(cls.__name__)
            )
        # Obtain ID from URI of the form ".../ospf_vlinks/{peer_router_id}"
        peer_router_id = uri.split("/")[-1]
        vlink = cls(session, peer_router_id, parent_ospf_area)
        return peer_router_id, vlink
    def __str__(self):
        # Human-readable identifier used in logs and printing.
        return "OSPF Virtual Link ID {}".format(self.__peer_router_id)
    def get_uri(self):
        """
        Method used to obtain the specific OSPF Virtual Link URI
        :return: Object's URI
        """
        # PyaoscxModule uses the uri with the name self.path
        # so it needs to have that name
        return self.path
    def get_info_format(self):
        """
        Method used to obtain correct object format for referencing inside
        other objects
        :return: Object format depending on the API Version
        """
        # Delegated to the session's API adapter so the format tracks
        # whatever REST API version the device speaks.
        return self.session.api.get_index(self)
    def was_modified(self):
        """
        Getter method for the __modified attribute
        :return: Boolean True if the object was recently modified,
            False otherwise.
        """
        # Equivalent to the ``modified`` property; kept as a method for
        # API compatibility.
        return self.__modified
| StarcoderdataPython |
11344485 | <filename>bika/lims/upgrade/to3019.py
from Acquisition import aq_inner
from Acquisition import aq_parent
from bika.lims.permissions import *
def upgrade(tool):
    """
    Run the 3019 upgrade step: re-import the workflow profile, refresh
    workflow role mappings and restore the missing permission on the
    bika_arpriorities folder.

    :param tool: tool object passed in by the upgrade machinery; its
        acquisition parent is the Plone portal.
    :return: True when the upgrade ran (or was skipped as pre-3.15).
    """
    # Hack prevent out-of-date upgrading
    # Related: PR #1484
    # https://github.com/bikalabs/Bika-LIMS/pull/1484
    from bika.lims.upgrade import skip_pre315
    if skip_pre315(aq_parent(aq_inner(tool))):
        return True
    portal = aq_parent(aq_inner(tool))
    wf = portal.portal_workflow
    setup = portal.portal_setup
    # Re-import workflow definitions from the product profile, then make
    # sure existing objects pick up the new role mappings.
    setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow')
    wf.updateRoleMappings()
    # missing bika_arpriorities folder permission
    folder = portal.bika_setup.bika_arpriorities
    folder.manage_permission(
        ManageARPriority,
        ['Manager', 'Site Administrator', 'LabManager', 'Owner'], 0)
    folder.reindexObject()
    return True
| StarcoderdataPython |
6691260 | import pymysql
import bcrypt
import pygame
import sqlite3
import os
pygame.mixer.init()
main_dir = os.path.split(os.path.abspath(__file__))[0]
data_dir = os.path.join(main_dir, 'data')
class Database(object):
    """Persistence layer split across a remote MySQL database (users and
    hi-score tables) and a local SQLite file (sound/music settings).

    NOTE(review): many methods close ``self.curs`` and later methods keep
    executing on the same cursor object without reopening it -- confirm
    pymysql tolerates this, otherwise reopen a cursor per call.
    """

    # Local SQLite file used only for the sound/music settings.
    path = os.path.join(data_dir, 'hiScores.db')
    # NOTE(review): database host/user/password are hard-coded defaults in
    # source control; these belong in configuration or environment variables.
    def __init__(self,host='database-1.c79ahye2go7m.ap-northeast-2.rds.amazonaws.com',user='admin',password='<PASSWORD>',db='hiScores',charset='utf8'):
        self.scoreDB=pymysql.connect(host=host,user=user,password=password,db=db,charset=charset)
        self.curs = self.scoreDB.cursor()
        # Maximum number of rows kept per leaderboard table.
        self.numScores=15
    def id_not_exists(self,input_id):
        # Return True when no user row exists for input_id.
        sql="SELECT * FROM users WHERE user_id=%s"
        self.curs.execute(sql,input_id)
        data=self.curs.fetchone()
        self.curs.close()
        if data:
            return False
        else:
            return True
    def compare_data(self, id_text, pw_text): # compare the given ID/password pair against the database
        input_password=pw_text.encode('utf-8')
        curs = self.scoreDB.cursor(pymysql.cursors.DictCursor)
        sql = "SELECT * FROM users WHERE user_id=%s"
        curs.execute(sql,id_text)
        data = curs.fetchone()
        curs.close()
        # NOTE(review): raises TypeError if the user does not exist
        # (data is None) -- callers must guarantee the id exists first.
        check_password=bcrypt.checkpw(input_password,data['user_password'].encode('utf-8'))
        return check_password
    def add_id_data(self,user_id): # insert a new user ID
        sql = "INSERT INTO users (user_id) VALUES (%s)"
        self.curs.execute(sql, user_id)
        self.scoreDB.commit() # push the insert to the server
        self.curs.close()
    def add_password_data(self,user_password,user_id): # set the user's (bcrypt-hashed) password
        new_salt=bcrypt.gensalt()
        new_password=user_password.encode('utf-8')
        hashed_password=bcrypt.hashpw(new_password,new_salt)
        decode_hash_pw=hashed_password.decode('utf-8')
        self.curs = self.scoreDB.cursor()
        sql = "UPDATE users SET user_password= %s WHERE user_id=%s"
        self.curs.execute(sql,(decode_hash_pw,user_id))
        self.scoreDB.commit() # push the update to the server
        self.curs.close()
    @staticmethod
    def getSound(music=False):
        # Read the persisted sound (or music) toggle from local SQLite;
        # defaults to False when the table is empty.
        conn = sqlite3.connect(Database.path)
        c = conn.cursor()
        if music:
            c.execute("CREATE TABLE if not exists music (setting integer)")
            c.execute("SELECT * FROM music")
        else:
            c.execute("CREATE TABLE if not exists sound (setting integer)")
            c.execute("SELECT * FROM sound")
        setting = c.fetchall()
        conn.close()
        return bool(setting[0][0]) if len(setting) > 0 else False
    @staticmethod
    def setSound(setting, music=False):
        # Persist the sound (or music) toggle; the table holds one row.
        conn = sqlite3.connect(Database.path)
        c = conn.cursor()
        if music:
            c.execute("DELETE FROM music")
            c.execute("INSERT INTO music VALUES (?)", (setting,))
        else:
            c.execute("DELETE FROM sound")
            c.execute("INSERT INTO sound VALUES (?)", (setting,))
        conn.commit()
        conn.close()
    def getScores(self):
        # Fetch the full leaderboard, highest score first.
        self.curs.execute('''CREATE TABLE if not exists scores
        (name text, score integer, accuracy real)''')
        self.curs.execute("SELECT * FROM scores ORDER BY score DESC")
        self.scoreDB.commit()
        hiScores = self.curs.fetchall()
        self.curs.close()
        return hiScores
    def setScore(self,hiScores,name, score, accuracy):
        # Insert a new score unless the name already exists; evict the
        # lowest entry when the board is full.
        sql="SELECT * FROM scores WHERE name=%s"
        self.curs.execute(sql,name)
        data=self.curs.fetchone()
        if data:
            self.curs.close()
            return
        else:
            if len(hiScores) >= self.numScores:
                lowScoreName = hiScores[-1][0]
                lowScore = hiScores[-1][1]
                sql="DELETE FROM scores WHERE (name = %s AND score = %s)"
                self.curs.execute(sql,(lowScoreName,lowScore))
            sql="INSERT INTO scores VALUES (%s,%s,%s)"
            self.curs.execute(sql,(name, score, accuracy))
            self.scoreDB.commit()
            self.curs.close()
    def getTimeScores(self): #For TimeMode
        # Same as getScores() but against the "time" leaderboard table.
        self.curs.execute('''CREATE TABLE if not exists time
        (name text, score integer, accuracy real)''')
        self.curs.execute("SELECT * FROM time ORDER BY score DESC")
        self.scoreDB.commit()
        hiScores = self.curs.fetchall()
        self.curs.close()
        return hiScores
    def setTimeScore(self,hiScores,name, score, accuracy): #For TimeMode
        # Same as setScore() but against the "time" leaderboard table.
        sql="SELECT * FROM time WHERE name=%s"
        self.curs.execute(sql,name)
        data=self.curs.fetchone()
        if data:
            self.curs.close()
            return
        else:
            if len(hiScores) >= self.numScores:
                lowScoreName = hiScores[-1][0]
                lowScore = hiScores[-1][1]
                sql="DELETE FROM time WHERE (name = %s AND score = %s)"
                self.curs.execute(sql,(lowScoreName,lowScore))
            sql="INSERT INTO time VALUES (%s,%s,%s)"
            self.curs.execute(sql,(name, score, accuracy))
            self.scoreDB.commit()
            self.curs.close()
    def name_not_exists(self,name,mode):
        # mode 0 -> classic "scores" board, mode 1 -> time-mode board.
        # NOTE(review): any other mode leaves ``sql`` unbound and raises
        # UnboundLocalError on the execute below.
        if mode==0:
            sql="SELECT * FROM scores WHERE name=%s"
        elif mode==1:
            sql="SELECT * FROM time WHERE name=%s"
        self.curs.execute(sql,name)
        data=self.curs.fetchone()
        if data:
            return False
        else:
            return True
| StarcoderdataPython |
11218511 | <gh_stars>10-100
#!/usr/bin/env python
# encoding: utf-8
"""
test_geomutils.py
Created by <NAME> on 2015-04-21.
"""
from __future__ import division, print_function, absolute_import, unicode_literals
import os
from pygaarst import geomutils as gu
def test_modapsclient_creation():
    """Placeholder smoke test; always succeeds."""
    everything_ok = True
    assert everything_ok
| StarcoderdataPython |
377527 | <filename>test/torch/nn/test_conv.py
from syft.frameworks.torch.nn.conv import Conv2d
import syft as sy
import torch as th
import torch.nn as nn
def test_conv2d(workers):
    """
    Test the Conv2d module to ensure that it produces the exact same
    output as the primary torch implementation, in the same order.
    """
    th.manual_seed(121) # Truncation might not always work so we set the random seed
    # Disable mkldnn to avoid rounding errors due to difference in implementation
    # mkldnn_enabled_init = th._C._get_mkldnn_enabled()
    # th._C._set_mkldnn_enabled(False)
    # NOTE(review): out_channels differ between the two models (16 vs 2).
    # model2's weight/bias are overwritten from model below, so the 16 is
    # effectively dead but misleading -- presumably both should be 2.
    model2 = Conv2d(1, 16, 3, bias=True)
    model = nn.Conv2d(1, 2, 3, bias=True)
    # Copy the torch layer's parameters into the syft layer in fixed
    # precision so both compute with identical weights.
    model2.weight = th.tensor(model.weight).fix_prec()
    model2.bias = th.tensor(model.bias).fix_prec()
    data = th.rand(10, 1, 28, 28) # eg. mnist data
    out = model(data)
    out2 = model2(data.fix_prec()).float_prec()
    # Reset mkldnn to the original state
    # th._C._set_mkldnn_enabled(mkldnn_enabled_init)
    # Note: absolute tolerance can be reduced by increasing precision_fractional of fix_prec()
    assert th.allclose(out, out2, atol=1e-2)
    # Test with Shared model and data
    bob, alice, james = (workers["bob"], workers["alice"], workers["james"])
    shared_data = data.fix_prec().share(bob, alice, crypto_provider=james)
    shared_model = model2.share(bob, alice, crypto_provider=james)
    out3 = shared_model(shared_data).get().float_prec()
    assert th.allclose(out, out3, atol=1e-2)
| StarcoderdataPython |
6478940 | <filename>exempel1.py
# Sum of truncated values: int(32.8) == 32, so hej = 32 + 333 + 99 = 464.
hej = int(32.8) + int( 333 ) + int( 99 )
print(hej)
# Rebind the same names to strings to demonstrate dynamic typing.
hej = "tja"
he = "kalle"
print( he )
# apa gets a copy of the reference, so it still prints "tja" ...
apa = hej
print( apa )
# ... until rebound to a new string.
apa = "orm"
print(apa)
# String concatenation: "tja" + "kalle" + "orm" -> "tjakalleorm".
hej = hej + he + apa
print( hej )
| StarcoderdataPython |
1831509 | <gh_stars>1-10
from fastapi import Depends
from fastapi_utils.cbv import cbv
from fastapi_utils.inferring_router import InferringRouter
from core.addons import AddonsManager
from core.config import settings
from core.pagination import Pagination
from core.response import api_return_handler, ResponseMessage
from fastapi.responses import FileResponse
from pathlib import Path
router = InferringRouter()
@cbv(router) # Step 2: Create and decorate a class to hold the endpoints
class AddonsCBV:
    """Class-based view exposing list/create/read/update endpoints for
    addons, plus install and icon-serving routes."""
    # Step 3: Add dependencies as class attributes
    addons = AddonsManager(settings.ADDONS_FOLDER)
    # NOTE(review): status_code=201 ("Created") is unusual for a GET
    # endpoint; 200 was presumably intended.
    @router.get("/", status_code=201, )
    def list(self, pager: Pagination = Depends(Pagination)):
        # Reload addons from disk on every request, then paginate their
        # dict representations.
        self.addons.load_addons()
        print(self.addons.all_addons)
        data = pager.paginate([i.dict() for i in self.addons.all_addons.values()])
        return data
    @router.post("/", status_code=201, response_model=ResponseMessage)
    def post(self, item):
        # Placeholder: does not persist ``item`` yet.
        return ResponseMessage(message='hello', data={})
    @router.get("/item/{pk}")
    def get(self, pk):
        # Placeholder: ``pk`` is not looked up yet.
        return_dict = ResponseMessage(message='hello', data={})
        return api_return_handler(return_dict)
    @router.put("/item/{pk}")
    def update(self, pk):
        # Placeholder: ``pk`` is not updated yet.
        return_dict = ResponseMessage(message='hello', data={})
        return api_return_handler(return_dict)
    @router.get("/install/{addons_name}")
    def install(self, addons_name):
        # Installation not implemented yet.
        pass
    @router.get("/icon/{addons_name}/{icon_name}", response_class=FileResponse)
    async def get_icon(self, addons_name, icon_name):
        # NOTE(review): addons_name/icon_name come straight from the URL and
        # are joined into a filesystem path with no traversal ("..") guard.
        icon_path = Path(settings.ADDONS_FOLDER).joinpath(addons_name).joinpath(icon_name)
        return FileResponse(str(icon_path))
| StarcoderdataPython |
3240960 | #coding:utf-8
#
# id: bugs.core_5550
# title: Computed decimal field in a view has wrong RDB$FIELD_PRECISION
# decription:
# 30SS, build 3.0.3.32738: OK, 0.828s.
# 40SS, build 4.0.0.680: OK, 1.062s.
#
# tracker_id: CORE-5550
# min_versions: ['3.0.3']
# versions: 3.0.3
# qmid: None
import pytest
from firebird.qa import db_factory, isql_act, Action
# version: 3.0.3
# resources: None
substitutions_1 = []
init_script_1 = """"""
db_1 = db_factory(sql_dialect=3, init=init_script_1)
test_script_1 = """
recreate view v_test(f) as
select cast(null as decimal(10,2))
from rdb$database;
commit;
set list on;
set count on;
select
cast(rf.rdb$field_name as varchar(80)) rf_field_name,
ff.rdb$field_precision ff_field_prec,
ff.rdb$field_scale ff_field_scale
from rdb$relation_fields rf
join rdb$fields ff on rf.rdb$field_source = ff.rdb$field_name
where rf.rdb$relation_name = upper('v_test');
"""
act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1)
expected_stdout_1 = """
RF_FIELD_NAME F
FF_FIELD_PREC 18
FF_FIELD_SCALE -2
Records affected: 1
"""
@pytest.mark.version('>=3.0.3')
def test_1(act_1: Action):
    """Run the ISQL script and compare its cleaned (whitespace-normalized)
    output against the expected stdout block defined above."""
    act_1.expected_stdout = expected_stdout_1
    act_1.execute()
    assert act_1.clean_expected_stdout == act_1.clean_stdout
| StarcoderdataPython |
3599753 | import datetime
import unittest
import googleanalytics
from googleanalytics.exception import GoogleAnalyticsClientError
from googleanalytics import config
class GoogleAnalyticsTest(unittest.TestCase):
    """Integration tests against the live Google Analytics API.

    NOTE(review): this file is Python 2 only (``except Exc, e`` syntax and
    ``02`` leading-zero literals below), and the connection/account tests
    need valid credentials and profiles from ``googleanalytics.config``.
    """
    def setUp(self):
        # A fresh authenticated connection and a 30-day window ending today.
        self.connection = googleanalytics.Connection()
        self.valid_profile_ids = config.get_valid_profiles()
        self.end_date = datetime.date.today()
        self.start_date = self.end_date - datetime.timedelta(30)
    def test_goodconnection(self):
        assert self.connection.auth_token is not None
    def test_badconnection(self):
        # NOTE(review): if no exception is raised this test silently
        # passes; an explicit fail() after the call would be stricter.
        Connection = googleanalytics.Connection
        try:
            connection = Connection('<EMAIL>', 'fakefake')
        except GoogleAnalyticsClientError, e:
            assert str(e.reason) == "HTTP Error 403: Forbidden"
    def test_accountlist(self):
        for c in range(1, len(self.valid_profile_ids)):
            accounts = self.connection.get_accounts(max_results=c)
            assert len(accounts) == c
    def test_bad_date_order(self):
        start_date = datetime.date(2009, 02, 21)
        end_date = datetime.date(2009, 02, 20)
        account = self.connection.get_account(self.valid_profile_ids[0])
        try:
            data = account.get_data(start_date=start_date, end_date=end_date, metrics=['pageviews'])
        except GoogleAnalyticsClientError, e:
            assert str(e.reason) == "Date orders are reversed"
    def test_dimensions_basic_get_data(self):
        for profile_id in self.valid_profile_ids:
            account = self.connection.get_account(profile_id)
            data = account.get_data(self.start_date, self.end_date, metrics=['pageviews'], dimensions=['browser'])
            assert len(data) > 0
            data = account.get_data(self.start_date, self.end_date, metrics=['pageviews'], dimensions=['pagePath'])
            assert len(data) > 0
    def test_dimensions_basic_get_data_output(self):
        for profile_id in self.valid_profile_ids:
            account = self.connection.get_account(profile_id)
            data = account.get_data(self.start_date, self.end_date, metrics=['pageviews'], dimensions=['browser'], sort=['-pageviews'])
            assert len(data) > 0
            assert isinstance(data.list, list)
            assert isinstance(data.list[0], list)
            assert isinstance(data.tuple, tuple)
            assert isinstance(data.tuple[0], tuple)
    def test_basic_filter(self):
        filters = [
            ['country', '==', 'United States'],
        ]
        account = googleanalytics.account.Account()
        filter_string = account.process_filters(filters)
        assert filter_string == 'ga:country==United States'
    def test_filter_escaping(self):
        # Commas, semicolons and backslashes in filter values must be
        # backslash-escaped by process_filters.
        filters = [
            ['country', '==', 'United,States'],
        ]
        account = googleanalytics.account.Account()
        filter_string = account.process_filters(filters)
        assert filter_string == 'ga:country==United\,States'
        filters = [
            ['country', '==', 'United\States'],
        ]
        filter_string = account.process_filters(filters)
        assert filter_string == 'ga:country==United\\\\States'
        filters = [
            ['country', '==', 'Uni,tedSt,ates'],
        ]
        filter_string = account.process_filters(filters)
        assert filter_string == 'ga:country==Uni\,tedSt\,ates'
        filters = [
            ['country', '==', 'Uni,tedSt;at,es'],
        ]
        filter_string = account.process_filters(filters)
        assert filter_string == 'ga:country==Uni\,tedSt\;at\,es'
    def test_bad_operator_rejection(self):
        filters = [
            ['country', '@@', 'United,States'],
        ]
        account = googleanalytics.account.Account()
        filter_string = account.process_filters(filters)
        assert filter_string == ''
    def test_multiple_filters(self):
        # AND joins with ';', OR joins with ','.
        filters = [
            ['country', '==', 'United States', 'AND'],
            ['country', '==', 'Canada']
        ]
        account = googleanalytics.account.Account()
        filter_string = account.process_filters(filters)
        assert filter_string == 'ga:country==United States;ga:country==Canada'
        filters = [
            ['city', '=~', '^L', 'AND'],
            ['browser', '=~', '^Fire']
        ]
        filter_string = account.process_filters(filters)
        assert filter_string == 'ga:city=~^L;ga:browser=~^Fire'
        filters = [
            ['browser', '=~', '^Fire', 'OR'],
            ['browser', '=~', '^Internet', 'OR'],
            ['browser', '=~', '^Saf'],
        ]
        filter_string = account.process_filters(filters)
        assert filter_string == 'ga:browser=~^Fire,ga:browser=~^Internet,ga:browser=~^Saf'
    def test_multiple_filters_mix_ops(self):
        filters = [
            ['browser', '=~', 'Firefox', 'AND'],
            ['browser', '=~', 'Internet (Explorer|Exploder)', 'OR'],
            ['city', '=@', 'York', 'OR'],
            ['state', '!=', 'California', 'AND'],
            ['timeOnPage', '<', '10'],
        ]
        account = googleanalytics.account.Account()
        filter_string = account.process_filters(filters)
        assert filter_string == 'ga:browser=~Firefox;ga:browser=~Internet (Explorer|Exploder),ga:city=@York,ga:state!=California;ga:timeOnPage<10'
    def test_paging(self):
        for profile_id in self.valid_profile_ids:
            account = self.connection.get_account(profile_id)
            data = account.get_data(self.start_date, self.end_date, metrics=['pageviews'], dimensions=['pageTitle', 'pagePath'], sort=['-pageviews'])
            max_results = len(data) / 2
            if not max_results:
                print("profileId: %s does not have enough results for `test_paging`" % profile_id)
            # Two consecutive pages of equal size must not share any rows.
            data1 = account.get_data(self.start_date, self.end_date, metrics=['pageviews'], dimensions=['pageTitle', 'pagePath'], sort=['-pageviews'], max_results=max_results)
            assert len(data1) == max_results
            data2 = account.get_data(self.start_date, self.end_date, metrics=['pageviews'], dimensions=['pageTitle', 'pagePath'], sort=['-pageviews'], max_results=max_results, start_index=max_results)
            assert len(data2) == max_results
            for value in data1.tuple:
                assert value not in data2
    def test_multiple_dimensions(self):
        for profile_id in self.valid_profile_ids:
            account = self.connection.get_account(profile_id)
            data = account.get_data(self.start_date, self.end_date, metrics=['pageviews', 'timeOnPage', 'entrances'], dimensions=['pageTitle', 'pagePath'], max_results=10)
            # Each tuple is (dimensions, metrics) with the requested widths.
            for t in data.tuple:
                assert len(t) == 2
                assert len(t[0]) == 2
                assert len(t[1]) == 3
    def test_data_attributes(self):
        for profile_id in self.valid_profile_ids:
            account = self.connection.get_account(profile_id)
            metrics = ['pageviews', 'timeOnPage', 'entrances']
            dimensions = ['pageTitle', 'pagePath']
            data = account.get_data(self.start_date, self.end_date, metrics=metrics, dimensions=dimensions, max_results=10)
            assert data.startDate == self.start_date
            assert data.endDate == self.end_date
            assert len(data.aggregates) == len(metrics)
            for dp in data:
                assert len(dp.metrics) == len(metrics)
                for metric in metrics:
                    assert hasattr(dp, metric)
                assert len(dp.dimensions) == len(dimensions)
                for dimension in dimensions:
                    assert hasattr(dp, dimension)
def test_suite():
    # NOTE(review): unittest.makeSuite is deprecated (removed in Python
    # 3.13); TestLoader().loadTestsFromTestCase is the modern equivalent,
    # though this file is Python 2 anyway.
    return unittest.makeSuite(GoogleAnalyticsTest)
| StarcoderdataPython |
100857 | <reponame>saurabhindoria/celery-docker-swarm
import random
from celery_tasks.tasks import AdditionCeleryTask, SubtractionCeleryTask, MultiplicationCeleryTask, DivisionCeleryTask
from celery_tasks.utils import create_worker_from
from flask import Flask
flask_app = Flask(__name__)
# create worker
# One Celery task handle per arithmetic operation.
# NOTE(review): the first element of create_worker_from's return tuple is
# discarded -- presumably the Celery app; confirm in celery_tasks.utils.
_, addition_worker = create_worker_from(AdditionCeleryTask)
_, subtraction_worker = create_worker_from(SubtractionCeleryTask)
_, multiplication_worker = create_worker_from(MultiplicationCeleryTask)
_, division_worker = create_worker_from(DivisionCeleryTask)
@flask_app.route('/create_tasks/<count>')
def create_tasks(count):
    """Enqueue ``count`` random jobs for each of the four arithmetic
    workers and return a confirmation string.

    :param count: number of payloads per operation (URL segment, a string).
    """
    count = int(count)
    for i in range(count):
        # Random operands in [1, 1000]; the same payload is sent to all
        # four workers.
        num_1 = random.randint(1, 1000)
        num_2 = random.randint(1, 1000)
        payload = {
            'num_1': num_1,
            'num_2': num_2
        }
        addition_worker.apply_async(args=[payload, ])
        subtraction_worker.apply_async(args=[payload, ])
        multiplication_worker.apply_async(args=[payload, ])
        division_worker.apply_async(args=[payload, ])
    return "Done " + str(count)
if __name__ == '__main__':
flask_app.run(host="0.0.0.0", port=5000)
| StarcoderdataPython |
226344 | <gh_stars>1-10
import numpy as np
from sklearn.metrics import (accuracy_score, f1_score, log_loss, mean_absolute_error, mean_squared_error, r2_score,
roc_auc_score)
from typing import List
from sklearn.preprocessing import LabelBinarizer
from fedot.core.data.data import InputData, OutputData
from fedot.core.repository.quality_metrics_repository import (ClassificationMetricsEnum, ClusteringMetricsEnum,
ComplexityMetricsEnum, MetricsRepository,
RegressionMetricsEnum)
from fedot.core.repository.tasks import Task, TaskTypesEnum
from fedot.core.utils import probs_to_labels
class ApiMetricsHelper():
    """Mapping helpers between user-facing metric names, sklearn metric
    callables (used by the tuner) and FEDOT composer metric enums."""
    def get_tuner_metrics_mapping(self,
                                  metric_name):
        # sklearn callable for the given metric name; dict.get returns
        # None for unknown names.
        tuner_dict = {
            'acc': accuracy_score,
            'roc_auc': roc_auc_score,
            'f1': f1_score,
            'logloss': log_loss,
            'mae': mean_absolute_error,
            'mse': mean_squared_error,
            'r2': r2_score,
            'rmse': mean_squared_error,
        }
        return tuner_dict.get(metric_name)
    def get_problem_metrics(self,
                            problem: str):
        # Default metric names per task type.
        # NOTE(review): values are inconsistently typed -- lists for most
        # tasks but bare strings for multiclassification/clustering;
        # confirm callers handle both shapes.
        task_dict = {
            'regression': ['rmse', 'mae'],
            'classification': ['roc_auc', 'f1'],
            'multiclassification': 'f1',
            'clustering': 'silhouette',
            'ts_forecasting': ['rmse', 'mae']
        }
        return task_dict[problem]
    def get_composer_metrics_mapping(self,
                                     metric_name: str):
        # Composer metric enum for the given name; raises KeyError for
        # unknown names (unlike the tuner mapping above).
        composer_metric_dict = {
            'acc': ClassificationMetricsEnum.accuracy,
            'roc_auc': ClassificationMetricsEnum.ROCAUC,
            'f1': ClassificationMetricsEnum.f1,
            'logloss': ClassificationMetricsEnum.logloss,
            'mae': RegressionMetricsEnum.MAE,
            'mse': RegressionMetricsEnum.MSE,
            'msle': RegressionMetricsEnum.MSLE,
            'mape': RegressionMetricsEnum.MAPE,
            'r2': RegressionMetricsEnum.R2,
            'rmse': RegressionMetricsEnum.RMSE,
            'rmse_pen': RegressionMetricsEnum.RMSE_penalty,
            'silhouette': ClusteringMetricsEnum.silhouette,
            'node_num': ComplexityMetricsEnum.node_num
        }
        return composer_metric_dict[metric_name]
    def get_metrics_for_task(self,
                             problem: str,
                             metric_name: str):
        # NOTE(review): metric_name is annotated str but indexed with [0]
        # below, so callers apparently pass a list/tuple of names -- only
        # the first one is resolved; confirm the intended annotation.
        task_metrics = self.get_problem_metrics(problem)
        composer_metric = self.get_composer_metrics_mapping(metric_name[0])
        tuner_metrics = self.get_tuner_metrics_mapping(metric_name[0])
        return task_metrics, composer_metric, tuner_metrics
    def check_prediction_shape(self,
                               task: Task,
                               metric_name: str,
                               real: InputData,
                               prediction: OutputData):
        # NOTE(review): task is annotated Task but compared directly with a
        # TaskTypesEnum member -- presumably task.task_type was meant.
        if task == TaskTypesEnum.ts_forecasting:
            # Drop positions where the forecast is NaN from both arrays.
            real.target = real.target[~np.isnan(prediction.predict)]
            prediction.predict = prediction.predict[~np.isnan(prediction.predict)]
        if metric_name == 'roc_auc' and len(prediction.predict.shape) == 1:
            if real.num_classes == 2:
                prediction.predict = probs_to_labels(prediction.predict)
            else:
                real.target, prediction.predict = self.multiclass_roc_auc_score(real.target,
                                                                                prediction.predict)
        elif metric_name == 'f1' and len(prediction.predict.shape) > len(real.target.shape):
            # Probabilities per class -> hard labels for f1.
            prediction.predict = probs_to_labels(prediction.predict)
        else:
            pass
        return real.target, prediction.predict
    def multiclass_roc_auc_score(self,
                                 truth: List,
                                 pred: List):
        # Binarize both label vectors with a LabelBinarizer fitted on the
        # ground truth, so roc_auc_score can run one-vs-rest.
        lb = LabelBinarizer()
        lb.fit(truth)
        truth = lb.transform(truth)
        pred = lb.transform(pred)
        return truth, pred
| StarcoderdataPython |
8016114 | from sys import stdin
def get_answer(criteria):
    """
    Read lines from stdin until the stripped answer satisfies *criteria*.

    :param criteria: either an int -- keep reading until the answer has at
        least that many characters -- or a list/tuple/dict of accepted
        answers (dict membership tests its keys).
    :return: the accepted answer, or "" when criteria is of any other type.
    """
    answer = ""
    # bool is a subclass of int; exclude it to match the original
    # exact-type check (type(True) is int -> False).
    if isinstance(criteria, int) and not isinstance(criteria, bool):
        while len(answer) < criteria:
            answer = stdin.readline().strip()
    # isinstance also accepts subclasses of these container types, which
    # the old chain of exact type(...) comparisons rejected.
    elif isinstance(criteria, (list, tuple, dict)):
        while answer not in criteria:
            answer = stdin.readline().strip()
    return answer
| StarcoderdataPython |
1953160 | from emnist import extract_training_samples, extract_test_samples
from matplotlib import pyplot
# Download/load the EMNIST "digits" training split (images + labels).
trainx, trainy = extract_training_samples('digits')
# Show the first 25 training images in a 5x5 grid, inverted grayscale.
for i in range(25):
    pyplot.subplot(5, 5, 1 + i)
    pyplot.axis('off')
    pyplot.imshow(trainx[i], cmap='gray_r')
pyplot.show() | StarcoderdataPython |
6532058 | <filename>config.py<gh_stars>0
import argparse
import torch
# Prefer GPU when available.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# Corpus location (a Chinese sci-fi anthology) and training constants.
folder = 'data/《刘慈欣作品全集》(v1.0)'
print_freq = 1000
data_path = 'data/data.pkl'
# Fixed vocabulary and embedding sizes for the word2vec model.
vocabulary_size = 60898
emb_size = 128
def parse_args():
    """Build the command-line interface for word2vec training and parse argv."""
    # Table of (flag, add_argument keyword options) pairs.
    spec = [
        ('--end-epoch', dict(type=int, default=50, help='training epoch size.')),
        ('--lr', dict(type=float, default=0.002, help='start learning rate')),
        ('--lr-step', dict(type=int, default=10, help='period of learning rate decay')),
        ('--optimizer', dict(default='sgd', help='optimizer')),
        ('--weight-decay', dict(type=float, default=0.0005, help='weight decay')),
        ('--mom', dict(type=float, default=0.9, help='momentum')),
        ('--batch-size', dict(type=int, default=512, help='batch size in each context')),
        ('--checkpoint', dict(type=str, default=None, help='checkpoint')),
        ('--use-se', dict(type=bool, default=False, help='use SEBlock')),
        ('--pretrained', dict(type=bool, default=False, help='pretrained model')),
    ]
    parser = argparse.ArgumentParser(description='train word2vec')
    for flag, options in spec:
        parser.add_argument(flag, **options)
    return parser.parse_args()
| StarcoderdataPython |
4881212 | <reponame>Rakshit2214/virtual-assistant-Python-<gh_stars>1-10
from __future__ import print_function
import numpy as np
from pydub import AudioSegment
import random
import sys
import os
from scipy.io import wavfile
print("by Logical Spot")
import tensorflow
import argparse
parser = argparse.ArgumentParser(description='Dir For Dataset e.g. neg an pos')
parser.add_argument('--input', action='store', type=str, required=True)
parser.add_argument('--epochs', action='store', type=int, required=True)
parser.add_argument('--tx', action='store', type=int, default=5511)
parser.add_argument('--nf', action='store', type=int, default=101)
parser.add_argument('--ty', action='store', type=int, default=1375)
args = parser.parse_args()
pathD = args.input
import matplotlib.pyplot as plt
from scipy.io import wavfile
import os
from pydub import AudioSegment
# Calculate and plot spectrogram for a wav audio file
def graph_spectrogram(wav_file):
    """Compute (and draw via matplotlib) the spectrogram of a wav file.

    For stereo input only the first channel is used.

    :param wav_file: path to the wav file.
    :return: pxx, the 2-D array of spectral periodogram values.
    """
    rate, data = get_wav_info(wav_file)
    nfft = 200 # Length of each window segment
    fs = 8000 # Sampling frequencies
    noverlap = 120 # Overlap between windows
    nchannels = data.ndim
    # NOTE(review): if data has more than 2 dimensions neither branch
    # runs and the final return raises UnboundLocalError on pxx.
    if nchannels == 1:
        pxx, freqs, bins, im = plt.specgram(data, nfft, fs, noverlap = noverlap)
    elif nchannels == 2:
        pxx, freqs, bins, im = plt.specgram(data[:,0], nfft, fs, noverlap = noverlap)
    return pxx
# Load a wav file
def get_wav_info(wav_file):
    """Read *wav_file* with scipy and return (sample_rate, raw samples)."""
    sample_rate, samples = wavfile.read(wav_file)
    return sample_rate, samples
# Used to standardize volume of audio clip
def match_target_amplitude(sound, target_dBFS):
    """Return *sound* with gain applied so its average loudness is target_dBFS."""
    gain_needed = target_dBFS - sound.dBFS
    return sound.apply_gain(gain_needed)
# Load raw audio files for speech synthesis
def load_raw_audio():
    """Load positive, negative and background wav clips as AudioSegments.

    Reads ``<pathD>/positives`` and ``<pathD>/negatives`` (pathD comes
    from the --input CLI argument) plus the fixed ``./backgrounds``
    directory, keeping only files whose names end in "wav".

    :return: (activates, negatives, backgrounds) lists of AudioSegments.
    """
    activates = []
    backgrounds = []
    negatives = []
    for filename in os.listdir(pathD + "/positives/"):
        if filename.endswith("wav"):
            activate = AudioSegment.from_wav(pathD + "/positives/"+filename)
            activates.append(activate)
    for filename in os.listdir("./backgrounds/"):
        if filename.endswith("wav"):
            background = AudioSegment.from_wav("./backgrounds/"+filename)
            backgrounds.append(background)
    for filename in os.listdir(pathD + "/negatives/"):
        if filename.endswith("wav"):
            negative = AudioSegment.from_wav(pathD + "/negatives/"+filename)
            negatives.append(negative)
    return activates, negatives, backgrounds
Tx = args.tx # The number of time steps input to the model from the spectrogram
n_freq = args.nf # Number of frequencies input to the model at each time step of the spectrogram
Ty = args.ty # The number of time steps in the output of our model
# Load all training clips up front (can be slow for large datasets).
activates, negatives, backgrounds = load_raw_audio()
def get_random_time_segment(segment_ms):
    """
    Pick a random sub-interval of duration segment_ms inside a 10,000 ms clip.

    Arguments:
    segment_ms -- the duration of the audio clip in ms ("ms" stands for "milliseconds")

    Returns:
    segment_time -- a tuple of (segment_start, segment_end) in ms, both
        inclusive, guaranteed to lie inside the 10 second background.
    """
    start = np.random.randint(low=0, high=10000 - segment_ms)
    return (start, start + segment_ms - 1)
def is_overlapping(segment_time, previous_segments):
    """
    Check whether a time segment overlaps any previously placed segment.

    Arguments:
    segment_time -- a tuple of (segment_start, segment_end) for the new segment
    previous_segments -- a list of (segment_start, segment_end) tuples for
        the existing segments

    Returns:
    True if the time segment overlaps with any of the existing segments,
    False otherwise
    """
    segment_start, segment_end = segment_time
    # Two closed intervals [a, b] and [c, d] overlap iff a <= d and b >= c.
    # any() short-circuits on the first overlap, unlike the original
    # flag-variable loop which always scanned the whole list.
    return any(
        segment_start <= previous_end and segment_end >= previous_start
        for previous_start, previous_end in previous_segments
    )
def insert_audio_clip(background, audio_clip, previous_segments):
    """
    Insert a new audio segment over the background noise at a random time step, ensuring that the
    audio segment does not overlap with existing segments.

    Arguments:
    background -- a 10 second background audio recording.
    audio_clip -- the audio clip to be inserted/overlaid.
    previous_segments -- times where audio segments have already been placed
        (mutated in place: the chosen segment is appended)

    Returns:
    new_background -- the updated background audio
    segment_time -- the (start, end) tuple that was chosen
    """
    # Get the duration of the audio clip in ms
    segment_ms = len(audio_clip)
    # Step 1: pick a random time segment onto which to insert the clip.
    segment_time = get_random_time_segment(segment_ms)
    # Step 2: redraw until the segment overlaps nothing placed before.
    # NOTE(review): unbounded retry -- can loop for a long time when the
    # background is nearly full.
    while is_overlapping(segment_time, previous_segments):
        segment_time = get_random_time_segment(segment_ms)
    # Step 3: record the segment so later insertions avoid it.
    previous_segments.append(segment_time)
    # Step 4: Superpose audio segment and background
    new_background = background.overlay(audio_clip, position = segment_time[0])
    return new_background, segment_time
def insert_ones(y, segment_end_ms):
    """
    Update the label vector y: the 50 output steps strictly after the end of
    the segment are set to 1 (the step at segment_end_y itself stays 0).
    Arguments:
    y -- numpy array of shape (1, Ty), the labels of the training example
    segment_end_ms -- the end time of the segment in ms
    Returns:
    y -- updated labels (modified in place and returned)
    """
    # Improvement: derive Ty from y itself instead of relying on the
    # module-level global of the same name; y is documented (and constructed
    # above) as np.zeros((1, Ty)), so the value is identical.
    # Assumes a 10000 ms background recording (the 10000.0 divisor).
    Ty = y.shape[1]
    # Map the end time in ms to a spectrogram time step.
    segment_end_y = int(segment_end_ms * Ty / 10000.0)
    # Vectorized replacement for the original element-wise loop; numpy slice
    # assignment clips at the array bound, matching the original `i < Ty` guard.
    y[0, segment_end_y + 1 : segment_end_y + 51] = 1
    return y
def create_training_example(background, activates, negatives):
    """
    Creates a training example with a given background, activates, and negatives.
    Arguments:
    background -- a 10 second background audio recording
    activates -- a list of audio segments of the word "activate"
    negatives -- a list of audio segments of random words that are not "activate"
    Returns:
    x -- the spectrogram of the training example
    y -- the label at each time step of the spectrogram
    """
    # NOTE(review): relies on module globals Ty and np, and on the sibling
    # helpers insert_audio_clip, insert_ones, match_target_amplitude and
    # graph_spectrogram. Also writes "train.wav" into the working directory.
    # Set the random seed
    # (leftover comment from the original notebook; no seed is actually set)
    # Make background quieter
    background = background - 20
    # Step 1: Initialize y (label vector) of zeros (≈ 1 line)
    y = np.zeros((1, Ty))
    # Step 2: Initialize segment times as empty list (≈ 1 line)
    previous_segments = []
    # Select 0-4 random "activate" audio clips from the entire list of "activates" recordings
    number_of_activates = np.random.randint(0, 5)
    random_indices = np.random.randint(len(activates), size=number_of_activates)
    random_activates = [activates[i] for i in random_indices]
    # Step 3: Loop over randomly selected "activate" clips and insert in background
    for random_activate in random_activates:
        # Insert the audio clip on the background
        background, segment_time = insert_audio_clip(background, random_activate, previous_segments)
        # Retrieve segment_start and segment_end from segment_time
        segment_start, segment_end = segment_time
        # Insert labels in "y"
        y = insert_ones(y, segment_end_ms=segment_end)
    # Select 0-2 random negatives audio recordings from the entire list of "negatives" recordings
    number_of_negatives = np.random.randint(0, 3)
    random_indices = np.random.randint(len(negatives), size=number_of_negatives)
    random_negatives = [negatives[i] for i in random_indices]
    # Step 4: Loop over randomly selected negative clips and insert in background
    for random_negative in random_negatives:
        # Insert the audio clip on the background
        # (negatives get no 1-labels in y, only the overlay)
        background, _ = insert_audio_clip(background, random_negative, previous_segments)
    # Standardize the volume of the audio clip
    background = match_target_amplitude(background, -20.0)
    # Export new training example
    file_handle = background.export("train" + ".wav", format="wav")
    print("File (train.wav) was saved in your directory.")
    # Get and plot spectrogram of the new recording (background with superposition of positive and negatives)
    x = graph_spectrogram("train.wav")
    print(y)
    return x, y
# Build the training set: 13 synthetic examples over each of the two
# background recordings (26 examples total).
X=[]
Y=[]
for i in range(0,13):
    x, y = create_training_example(backgrounds[0], activates, negatives)
    X.append(x)
    Y.append(y)
for i in range(13,26):
    x, y = create_training_example(backgrounds[1], activates, negatives)
    X.append(x)
    Y.append(y)
# NOTE(review): np.array([X]) adds a leading axis that X[0] immediately
# removes - the pair is equivalent to np.array(X).
X=np.array([X])
X=X[0]
Y=np.array([Y])
Y=Y[0]
print(X.shape)
print(Y.shape)
# Swap the last two axes so each example is (time steps, frequencies),
# the layout the Conv1D model below expects.
X=np.transpose(X,(0,2,1))
Y=np.transpose(Y,(0,2,1))
print(X.shape)
print(Y.shape)
print(X)
print(Y)
for i in Y[0]:
    print(i)
from sklearn.model_selection import train_test_split
# Hold out 20% of the examples. NOTE(review): X_train/X_test are created but
# the model below is fit on the full X, Y - confirm this is intentional.
X_train,X_test,Y_train,Y_test = train_test_split(X,Y,test_size=0.2,random_state=37)
#X_train.shape
from keras.models import Model, load_model, Sequential
from keras.layers import Dense, Activation, Dropout, Input, Masking, TimeDistributed, LSTM, Conv1D
from keras.layers import GRU, Bidirectional, BatchNormalization, Reshape
from keras.optimizers import Adam
def model(input_shape):
    """
    Build the trigger-word detection network in Keras.
    Argument:
    input_shape -- shape of the model's input data (using Keras conventions)
    Returns:
    model -- Keras model instance
    with the help of <NAME> and <NAME>
    """
    inputs = Input(shape=input_shape)

    # Convolutional front-end: 196 filters, width 15, stride 4,
    # followed by batch norm, ReLU and heavy dropout.
    net = Conv1D(196, kernel_size=15, strides=4)(inputs)
    net = BatchNormalization()(net)
    net = Activation('relu')(net)
    net = Dropout(0.8)(net)

    # First recurrent layer (128 GRU units, full sequence output).
    net = GRU(units=128, return_sequences=True)(net)
    net = Dropout(0.8)(net)
    net = BatchNormalization()(net)

    # Second recurrent layer; note the two dropout layers around the batch
    # norm, reproduced exactly from the original architecture.
    net = GRU(units=128, return_sequences=True)(net)
    net = Dropout(0.8)(net)
    net = BatchNormalization()(net)
    net = Dropout(0.8)(net)

    # Per-time-step sigmoid: one detection probability per output step.
    net = TimeDistributed(Dense(1, activation="sigmoid"))(net)

    return Model(inputs=inputs, outputs=net)
# Instantiate and train the model. NOTE: rebinding `model` here shadows the
# model() factory function defined above.
model = model(input_shape = (Tx, n_freq))
model.summary()
# Adam with a small learning rate; `lr=` is the legacy keyword spelling
# (newer Keras uses `learning_rate=`).
opt = Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, decay=0.01)
model.compile(loss='binary_crossentropy', optimizer=opt, metrics=["accuracy"])
# Number of epochs comes from the command line (argparse namespace `args`).
epoch = args.epochs
model.fit(X,Y,batch_size=10,epochs=epoch)
from keras.models import model_from_json
# Persist the trained weights+architecture to disk.
model.save("mode.h5")
print("Model Saved !")
| StarcoderdataPython |
5174228 | <gh_stars>1-10
#!/usr/bin/python -Wall
# ================================================================
# Please see LICENSE.txt in the same directory as this file.
# <NAME>
# <EMAIL>
# 2007-05-31
# ================================================================
# ----------------------------------------------------------------
def gcd(a, b):
    """
    Return the greatest common divisor of a and b, always non-negative.
    Bug fix: the original early returns (`return b` when a == 0, `return a`
    when b == 0) skipped the sign normalisation performed on the loop path,
    so e.g. gcd(0, -5) returned -5 while gcd(15, -5) returned 5.
    """
    # Normalise signs up front; Euclid's algorithm then needs no fix-up.
    a, b = abs(a), abs(b)
    while b:
        a, b = b, a % b
    return a
# ----------------------------------------------------------------
# Blankinship's algorithm
def extgcd(a, b):
    """
    Extended Euclidean algorithm.
    Returns [g, m, n] such that m*a + n*b == g, where g = gcd(a, b).
    Bug fix: the original used Python 2 division (`q = c / d`); under
    Python 3 (which this file targets - it uses print() calls) that yields
    floats and corrupts the Bezout coefficients. This version also handles
    b == 0 instead of raising ZeroDivisionError.
    """
    # Invariant: old_m*a + old_n*b == old_r  and  m*a + n*b == r.
    old_r, r = a, b
    old_m, m = 1, 0
    old_n, n = 0, 1
    while r != 0:
        # Integer quotient and remainder in one step.
        q, rem = divmod(old_r, r)
        old_r, r = r, rem
        old_m, m = m, old_m - q * m
        old_n, n = n, old_n - q * n
    return [old_r, old_m, old_n]
# ----------------------------------------------------------------
# This function should be invoked with only one argument.
# The optional argument is a way to have a local static in Python.
# See Lutz & Ascher, 2nd. ed., p 241.
def eulerphi(n, cached_n_and_phi=[2,1]):
    """
    Euler's totient function: the count of i in [1, n) coprime to n.
    The mutable default argument is a deliberate one-entry cache of the most
    recently computed (n, phi) pair (the local-static idiom from
    Lutz & Ascher, 2nd ed., p. 241).
    NOTE: returns 0 for n <= 1 (mathematically phi(1) == 1) - behaviour
    preserved from the original implementation.
    """
    if (n == cached_n_and_phi[0]):
        # Cache hit
        return cached_n_and_phi[1]
    # Use the stdlib gcd (C-implemented) rather than the pure-Python one above.
    import math
    phi = sum(1 for i in range(1, n) if math.gcd(n, i) == 1)
    # Bug fix: the original had `return phi` *before* these two lines, so the
    # cache-update code was unreachable and the cache never changed.
    cached_n_and_phi[0] = n
    cached_n_and_phi[1] = phi
    return phi
# ----------------------------------------------------------------
# Binary exponentiation
def intexp(x, e):
    """
    Return x**e for a non-negative integer exponent e, computed by
    binary (square-and-multiply) exponentiation.
    Raises RuntimeError for negative exponents.
    """
    if (e < 0):
        print(("intexp: negative exponent", e, "disallowed."))
        raise RuntimeError
    result = 1
    square = x
    while e:
        # Multiply in the current power of two whenever its bit is set.
        if e & 1:
            result *= square
        square *= square
        e >>= 1
    return result
# ----------------------------------------------------------------
# Binary exponentiation
def intmodexp(x, e, m):
    """
    Return x**e mod m via binary (square-and-multiply) exponentiation.
    Negative exponents are handled through the modular inverse of x mod m
    (via intmodrecip).
    Bug fix: the original copied x into its running square (`xp = x`) *before*
    the negative-exponent branch replaced x with its modular inverse, so for
    e < 0 it silently computed x**|e| mod m and discarded the inverse.
    """
    if (e < 0):
        e = -e
        x = intmodrecip(x, m)
    # Initialise the running square only after x has its final value.
    xp = x
    rv = 1
    while (e != 0):
        if (e & 1):
            rv = (rv * xp) % m
        e = e >> 1
        xp = (xp * xp) % m
    return rv
# ----------------------------------------------------------------
def intmodrecip(x, m):
    """
    Return the multiplicative inverse of x modulo m, using Euler's theorem:
    x**(phi(m) - 1) == x**-1 (mod m) whenever gcd(x, m) == 1.
    Raises RuntimeError when the inverse does not exist.
    """
    if gcd(x, m) != 1:
        print(("intmodrecip: impossible inverse", x, "mod", m))
        raise RuntimeError
    # phi(m) - 1 gives the inverse exponent by Euler's theorem.
    return intmodexp(x, eulerphi(m) - 1, m)
# ----------------------------------------------------------------
def factorial(n):
    """
    Return n! for n >= 0.
    Raises RuntimeError on negative input - kept for backward compatibility
    with the original (the stdlib raises ValueError instead).
    """
    if (n < 0):
        print("factorial: negative input disallowed.")
        raise RuntimeError
    # Delegate to the C-implemented stdlib routine instead of a manual loop.
    import math
    return math.factorial(n)
# ----------------------------------------------------------------
# How to compute P(n) = number of partitions of n. Examples for n = 1 to 5:
#
# 1 2 3 4 5
# 1 1 2 1 3 1 4 1
# 1 1 1 2 2 3 2
# 2 1 1 3 1 1
# 1 1 1 1 2 2 1
# 2 1 1 1
# 1 1 1 1 1
#
# This is a first-rest algorithm. Loop over possible choices k for the first
# number. The rest must sum to n-k. Furthermore, the rest must be descending
# and so each must be less than or equal to k. Thus we naturally have an
# auxiliary function P(n, m) counting partitions of n with each element less
# than or equal to m.
def num_ptnsm(n, m, _cache={}):
    """
    Count the partitions of n whose parts are all <= m (first-rest recursion:
    sum over the first part k of the partitions of n-k with parts <= k).
    _cache is a shared memo table using the same deliberate mutable-default
    idiom as eulerphi above; it turns the exponential recursion into
    O(n*m) distinct subproblems. Passing it explicitly is never needed.
    """
    if (n < 0):
        return 0
    if (n <= 1):
        return 1
    if (m == 1):
        return 1
    key = (n, m)
    if key not in _cache:
        # `total` instead of the original `sum`, which shadowed the builtin.
        total = 0
        for k in range(1, m + 1):
            if (n - k >= 0):
                total += num_ptnsm(n - k, k)
        _cache[key] = total
    return _cache[key]
# ----------------------------------------------------------------
def num_ptns(n):
    """Return p(n), the total number of integer partitions of n."""
    return num_ptnsm(n, n)
# ----------------------------------------------------------------
def ptnsm(n, m):
    """
    Return the list of all partitions of n with parts <= m, each partition a
    list of parts in descending order (first-rest recursion, mirroring
    num_ptnsm above).
    Bug fix: the original returned the *int* 0 for n < 0, inconsistent with
    the list-of-lists return type everywhere else; an empty list is the
    correct "no partitions" value. Also removed the unused `sum = 0`, which
    shadowed the builtin.
    """
    if (n < 0):
        return []
    if (n == 0):
        return [[]]
    if (n == 1):
        return [[1]]
    if (m == 1):
        return [[1] * n]
    rv = []
    for k in range(1, m + 1):
        if (n - k >= 0):
            # Prepend the first part k to every partition of the remainder.
            for tail in ptnsm(n - k, k):
                rv.append([k] + tail)
    return rv
# ----------------------------------------------------------------
def ptns(n):
    """Return the list of all integer partitions of n (parts unrestricted)."""
    return ptnsm(n, n)
#for n in range(1, 21):
# a = onum_ptns(n)
# b = num_ptns(n)
# print "%2d %2d %2d" % (n, a, b)
#for n in range(1, 5+1):
# for m in range(1, n+1):
# p = num_ptnsm(n, m)
# print n, m, p
# print
#for n in range(1, 7+1):
# for m in range(1, n+1):
# X = ptnsm(n, m)
# print n, m, len(X), X
# print
| StarcoderdataPython |
6666341 | <reponame>lestrato/badgepack<filename>apps/community/admin.py
from django.contrib import admin
from community.models import Community, Membership, Invitation, Application
class CommunityAdmin(admin.ModelAdmin):
    """Admin configuration for Community: list columns, filters and search."""
    readonly_fields = ()
    list_display = ('name', 'description', 'tag', 'created_on', 'is_private',)
    list_filter = ('is_private',)
    search_fields = ('name', 'tag',)
    fieldsets = (
        ('Metadata', {'fields': ('created_on',)}),
        ('Properties', {'fields': ('name', 'description', 'tag',)}),
        ('Access', {'fields': ('is_private',)}),
    )
    # Cleanup: removed the stray trailing `pass` (dead statement) and the
    # stale commented-out fragments.


admin.site.register(Community, CommunityAdmin)
class MembershipAdmin(admin.ModelAdmin):
    """Admin configuration for Membership (user <-> community link)."""
    readonly_fields = ()
    list_display = ('user', 'community', 'joined_on', 'user_status')
    list_filter = ('user_status',)
    # NOTE(review): if 'user'/'community' are ForeignKeys, searching them
    # requires related lookups (e.g. 'user__username') - confirm against
    # the models; a plain FK here raises FieldError at search time.
    search_fields = ('user', 'community',)
    fieldsets = (
        ('Metadata', {'fields': ('joined_on',)}),
        ('Properties', {'fields': ('user', 'community')}),
        ('Permissions', {'fields': ('user_status',)}),
    )
    # Cleanup: removed the stray trailing `pass` and stale comments.


admin.site.register(Membership, MembershipAdmin)
class InvitationAdmin(admin.ModelAdmin):
    """Admin configuration for Invitation (sender invites recipient)."""
    readonly_fields = ()
    list_display = ('recipient', 'community', 'created_on', 'sender', 'to_be_moderator')
    list_filter = ('to_be_moderator',)
    # NOTE(review): related lookups may be needed here too if these are
    # ForeignKeys - confirm against the models.
    search_fields = ('sender', 'community', 'recipient')
    fieldsets = (
        ('Metadata', {'fields': ('created_on',)}),
        ('Properties', {'fields': ('sender', 'community', 'recipient',)}),
        ('Permissions', {'fields': ('to_be_moderator',)}),
    )
    # Cleanup: removed the stray trailing `pass` and stale comments.


admin.site.register(Invitation, InvitationAdmin)
class ApplicationAdmin(admin.ModelAdmin):
    """Admin configuration for Application (user applies to a community)."""
    readonly_fields = ()
    list_display = ('applicant', 'community', 'created_on', 'accepted_by')
    list_filter = ()
    # NOTE(review): related lookups may be needed here too if these are
    # ForeignKeys - confirm against the models.
    search_fields = ('applicant', 'community', 'accepted_by')
    fieldsets = (
        ('Metadata', {'fields': ('created_on',)}),
        ('Properties', {'fields': ('applicant', 'community', 'accepted_by')}),
    )
    # Cleanup: removed the stray trailing `pass` and stale comments.


admin.site.register(Application, ApplicationAdmin)
| StarcoderdataPython |
9746666 | # -*- coding: utf-8 -*-
"""
Created on Sat Jun 29 07:41:54 2019
@author: (c) 2020 The Patent2Net Developers
"""
#import codecs
import os
import sys
#import shutil
#import pickle
import matplotlib.cm
from Patent2Net.P2N_Lib import LoadBiblioFile, AnnonceProgres, AnnonceLog
from Patent2Net.P2N_Config import LoadConfig
#from Patent2Net.P2N_Lib_Acad import IPCCategorizer, IPCExtractPredictionBrevet,PubMedCheckNameAndGetAffiliation, OPSChercheAbstractBrevet
from Patent2Net.P2N_Lib_Acad import NoPunct #, CheckListInclu, CheckListMix, CheckListExclu, UnCheck, Check
#from fuzzywuzzy import fuzz
from P2N_Lib import UrlPatent,UrlApplicantBuild,UrlInventorBuild,UrlIPCRBuild, cmap_discretize, RenderTemplate
#from dateutil.relativedelta import relativedelta
import networkx as nx
#import matplotlib.pyplot as plt
#from networkx.readwrite import json_graph
import pandas as pd
#import string
#import re
#import unidecode
from networkx_functs import calculate_degree, calculate_betweenness, calculate_degree_centrality
from networkx.drawing.nx_agraph import graphviz_layout
# Rendering surface dimensions used by the layout/templates.
screenX = 1000
screenY = 1000
# Signal 0% progress to the P2N front-end before any work starts.
AnnonceProgres (Appli = 'p2n_network', valMax = 100, valActu = 0)
configFile = LoadConfig()
# The fields needed for each patent.
NeededInfo = ['label', 'date', 'inventor', 'title', 'abstract']
# Shortcuts to the configured query, project name and result directories.
requete = configFile.requete
projectName = configFile.ndf
ndf = projectName
BiblioPath = configFile.ResultBiblioPath
ResultBiblioPath = configFile.ResultBiblioPath
temporPath = configFile.temporPath
ResultGephiPath = configFile.ResultGephiPath
ResultPathContent= configFile.ResultContentsPath
ResultAbstractPath = configFile.ResultAbstractPath
# Mid-file import kept as-is; start_time marks the processing start.
import time
start_time = time.time()
def cycle(liste):
    """
    Return the list of consecutive pairs of `liste`:
    [a, b, c] -> [(a, b), (b, c)].
    Bug fix: the original returned None for an empty list but [] for a
    single-element list; an empty pair list is now returned in both cases,
    so callers can always iterate the result.
    (Note: the name shadows itertools.cycle, kept for compatibility.)
    """
    return [(liste[i], liste[i + 1]) for i in range(len(liste) - 1)]
# Global accumulators for every inventor / applicant name seen in the corpus.
Inventeurs= set()
Applicants = set()
AnnonceLog (Appli = 'p2n_network', texte='Net processing is starting ')
# Process both the plain corpus and the families corpus when family
# gathering is enabled in the configuration.
if configFile.GatherFamilly:
    PU = [ndf, 'Families'+ndf]
else:
    PU = [ndf]
# First pass over the raw bibliographic files: load each corpus, normalise
# the 'label' / 'inventor' / 'applicant' fields in place (inventors are
# Title-Cased, applicants UPPER-CASED, 'empty' placeholders dropped),
# accumulate the global Inventeurs / Applicants sets, and build one
# DataFrame per corpus (df for the plain corpus, df_Fam for families).
for fic in PU:
    print("\n> Hi! This is Net processor used on:", fic)
    if 'Description' + fic in os.listdir(ResultBiblioPath):
        with open(ResultBiblioPath + '//' + fic, 'r') as data:
            dico = LoadBiblioFile(ResultBiblioPath, fic)
    else: # Retrocompatibility
        print("please use Comptatibilizer")
        sys.exit()
    LstBrevet = dico['brevets']
    for bre in LstBrevet:
        # A patent may carry several labels; keep only the first one.
        if isinstance(bre['label'], list):
            # if len(bre['label']) >1:
            if len(bre['label']) != len( set(bre['label'])):
                AnnonceLog (Appli = 'p2n_network', texte='Good, two labels for same patent fixing to first one ' + str(bre ["label"]) )
                #print ("two labels for same patent fixing to first one" , bre ["label"] )
                bre ["label"] = bre ["label"] [0]
            else:
                bre ["label"] = bre ["label"] [0]
        # Inventors: drop 'empty' placeholders, Title-Case the names and
        # add them to the global Inventeurs set.
        if isinstance(bre['inventor'], list):
            if ''.join(bre['inventor']).strip().lower() == 'empty':
                bre['inventor'] = []
                bre['inventor-nice'] = []
            else:
                tempoinv = []
                for inv in bre['inventor']:
                    if inv.lower() != 'empty':
                        tempoinv.append(inv.title())
                        Inventeurs.add(inv.title())
                bre['inventor'] = tempoinv
        elif bre['inventor'].strip().lower() == 'empty' or len(bre['inventor'].strip().lower()) ==0:
            bre['inventor'] = []
            bre['inventor-nice'] = []
        else:
            bre['inventor'] = bre['inventor'].title()
            Inventeurs.add(bre['inventor'])
        # Applicants: same cleaning but upper-cased, collected into the
        # global Applicants set.
        if isinstance(bre['applicant'], list):
            if ''.join(bre['applicant']).strip().lower() == 'empty':
                bre['applicant'] = []
                bre['applicant-nice'] = []
            else:
                tempoappl = []
                for inv in bre['applicant']:
                    # Single-character names are suspicious; echo them for review.
                    if len(inv) == 1 :
                        print (inv)
                    if inv.lower() != 'empty' and len(inv.strip()) >0:
                        tempoappl.append(inv.upper())
                        Applicants.add(inv.upper())
                bre['applicant'] = tempoappl
        else:
            if len(bre['applicant'].strip()) >0:
                bre['applicant'] = bre['applicant'] .upper ()
                Applicants.add(bre['applicant'])
    # One DataFrame per corpus: families vs plain patents.
    if 'Families' in fic:
        df_Fam = pd.DataFrame(LstBrevet)
    else:
        df = pd.DataFrame(LstBrevet)
total = len(df) + len(df_Fam)
AnnonceLog (Appli = 'p2n_network', texte='Good, processing '+ str(total) +' patents ')
AnnonceProgres (Appli = 'p2n_network', valMax = 100, valActu = 20)
# Second pass: normalise the DataFrame columns in place so every list-valued
# field (applicant, inventor, IPCR*, equivalents, citation lists) really is a
# de-duplicated list with placeholder values removed.
# NOTE(review): the 'family lenght' (sic) column name is kept as-is; renaming
# it would break any later code that reads the misspelled name.
for ndf in PU:
    if 'Families' not in ndf:
        dataf = df
        dataf ['family lenght'] = [1] * len(dataf.index)
    else:
        dataf = df_Fam
    for bre in dataf.itertuples():
        # cleaning
        # Scalar-ise label / Citations when they arrive as one-element lists.
        if not isinstance(bre.label, str):
            dataf.at [bre.Index, 'label' ] = bre.label [0]
        else:
            pass
        if not isinstance(bre.Citations, str) and not isinstance(bre.Citations, int):
            dataf.at [bre.Index, 'Citations' ] = bre.Citations [0]
        else:
            pass
        # Applicant: single string -> one-element list; a list of single
        # characters is re-joined into one name; otherwise de-duplicate.
        if not isinstance(bre.applicant, list):
            if len(bre .applicant.strip())>0 :
                dataf.at [bre.Index, 'applicant' ] = [bre .applicant]
        elif isinstance(bre.applicant, list):
            if sum([len( truc) for truc in bre .applicant]) == len(bre.applicant):
                tempoappl = [''.join (bre .applicant)]
            else:
                tempoappl = list(set([inf for inf in bre.applicant if inf.lower() not in ['empty', '', 'none', ' ']]))
            dataf.at [bre.Index, 'applicant' ] = tempoappl
        if not isinstance(bre.inventor, list):
            dataf.at [bre.Index, 'inventor' ] = [bre.inventor]
        else:
            dataf.at [bre.Index, 'inventor' ] = list(set([inf for inf in bre.inventor if inf.lower() not in ['empty', '', 'none', ' ']]))
        # IPC codes at each truncation level (IPCR1/3/4/7/11): wrap scalars
        # into lists and drop placeholder entries.
        if not isinstance(bre.IPCR1, list):
            if bre.IPCR1 not in ['empty', '', 'none', ' ']:
                dataf.at [bre.Index, 'IPCR1' ] = [bre.IPCR1]
            else:
                dataf.at [bre.Index, 'IPCR1' ] = []
        else:
            dataf.at [bre.Index, 'IPCR1' ] = list(set([ipc for ipc in bre.IPCR1 if ipc.lower() not in ['empty', '', 'none', ' ']]))
        if not isinstance(bre.IPCR3, list):
            if bre.IPCR3 not in ['empty', '', 'none', ' ']:
                dataf.at [bre.Index, 'IPCR3' ] = [bre.IPCR3]
            else:
                dataf.at [bre.Index, 'IPCR3' ] = []
        else:
            dataf.at [bre.Index, 'IPCR3' ] = list (set([ipc for ipc in bre.IPCR3 if ipc.lower() not in ['empty', '', 'none', ' ']]))
        if not isinstance(bre.IPCR4, list):
            if bre.IPCR4 not in ['empty', '', 'none', ' ']:
                dataf.at [bre.Index, 'IPCR4' ] = [bre.IPCR4]
            else:
                dataf.at [bre.Index, 'IPCR4' ] = []
        else:
            dataf.at [bre.Index, 'IPCR4' ] = list (set([ipc for ipc in bre.IPCR4 if ipc.lower() not in ['empty', '', 'none', ' ']]))
        if not isinstance(bre.IPCR7, list):
            if bre.IPCR7 not in ['empty', '', 'none', ' ']:
                dataf.at [bre.Index, 'IPCR7' ] = [bre.IPCR7]
            else:
                dataf.at [bre.Index, 'IPCR7' ] = []
        else:
            dataf.at [bre.Index, 'IPCR7' ] = list (set([ipc for ipc in bre.IPCR7 if ipc.lower() not in ['empty', '', 'none', ' ']]))
        if not isinstance(bre.IPCR11, list):
            if bre.IPCR11 not in ['empty', '', 'none', ' ']:
                dataf.at [bre.Index, 'IPCR11' ] = [bre.IPCR11]
            else:
                dataf.at [bre.Index, 'IPCR11' ] = []
        else:
            dataf.at [bre.Index, 'IPCR11' ] = list (set([ipc for ipc in bre.IPCR11 if ipc.lower() not in ['empty', '', 'none', ' ']]))
        # Cross-reference lists: equivalents, citing patents (CitedBy) and
        # cited patents / other references (CitP / CitO).
        if not isinstance(bre.equivalents, list):
            dataf.at [bre.Index, 'equivalents' ] = [bre.equivalents]
        else:
            dataf.at [bre.Index, 'equivalents' ] = list (set([ipc for ipc in bre.equivalents if ipc.lower() not in ['empty', '', 'none']]))
        if not isinstance(bre.CitedBy, list):
            dataf.at [bre.Index, 'CitedBy' ] = [bre.CitedBy]
        else:
            dataf.at [bre.Index, 'CitedBy' ] = list (set([ipc for ipc in bre.CitedBy if ipc.lower() not in ['empty', '', 'none']]))
        if not isinstance(bre.CitP, list):
            dataf.at [bre.Index, 'CitP' ] = [bre.CitP]
        else:
            dataf.at [bre.Index, 'CitP' ] = list (set([ipc for ipc in bre.CitP if ipc.lower() not in ['empty', '', 'none']]))
        if not isinstance(bre.CitO, list):
            dataf.at [bre.Index, 'CitO' ] = [bre.CitO]
        else:
            dataf.at [bre.Index, 'CitO' ] = list (set([ipc for ipc in bre.CitO if ipc.lower() not in ['empty', '', 'none']]))
AnnonceProgres (Appli = 'p2n_network', valMax = 100, valActu = 40)
dicoAttrsAut = dict() # attributes for author nodes
# Per-inventor node statistics, aggregated over the dataf left bound by the
# previous loop. NOTE(review): that is the *last* corpus of PU (the families
# frame when GatherFamilly is on) - confirm this is the intended corpus.
for aut in Inventeurs:
    # node attributes for an author
    if len(aut) >0:
        IPC11_range = []
        IPC7_range = []
        IPC4_range = []
        # Side-effect-only comprehensions: gather every IPC code of every
        # patent this inventor appears on.
        [IPC4_range.extend(bre.IPCR4) for bre in dataf.itertuples() if aut in bre.inventor]
        [IPC7_range.extend(bre.IPCR7) for bre in dataf.itertuples() if aut in bre.inventor]
        [IPC11_range.extend(bre.IPCR11) for bre in dataf.itertuples() if aut in bre.inventor]
        dicoAttrsAut [aut] = {#'AutFr': dicoAttrsAut [aut]['AutFr'],
            'Citations' : sum ( [bre.Citations for bre in dataf.itertuples() if aut in bre.inventor]),
            'Famille' : sum( [aut in truc for truc in df_Fam ['inventor']]), #.loc[dataf .index[dataf ['label'] ==bre.label ]].values[0],
            'category' : 'Inventor',
            'NbBrevets' : sum( [aut in truc for truc in dataf ['inventor']]),
            'IPC11-range' : len(set(IPC11_range)), # variety of IPC level 11
            'IPC7-range' : len(set(IPC7_range)),
            'IPC4-range' : len(set(IPC4_range)),
            'IPCDiversity': len(set(IPC11_range)), # number of variety
            'IPCForce' : len(IPC11_range) # same as previous but with occurences
            }
dicoAttrsAppli = dict() # attributes for applicant nodes
# Same statistics for applicants (category 'Applicant').
for aut in Applicants:
    # node attributes for an applicant
    if len(aut) >0:
        IPC11_range = []
        IPC7_range = []
        IPC4_range = []
        [IPC4_range.extend(bre.IPCR4) for bre in dataf.itertuples() if aut in bre.applicant]
        [IPC7_range.extend(bre.IPCR7) for bre in dataf.itertuples() if aut in bre.applicant]
        [IPC11_range.extend(bre.IPCR11) for bre in dataf.itertuples() if aut in bre.applicant]
        dicoAttrsAppli [aut] = {#'AutFr': dicoAttrsAut [aut]['AutFr'],
            'Citations' : sum ( [bre.Citations for bre in dataf.itertuples() if aut in bre.applicant]), # sum of citations of each patent
            'Famille' : sum( [aut in truc for truc in df_Fam ['applicant']]), # sum of family lenght for each patent of this applicant
            'category' : 'Applicant',
            'NbBrevets' : sum( [aut in truc for truc in dataf ['applicant']]), # number of patents
            'IPC11-range' : len(set(IPC11_range)), # variety of IPC level 11
            'IPC7-range' : len(set(IPC7_range)),
            'IPC4-range' : len(set(IPC4_range)),
            'IPCDiversity': len(set(IPC11_range)), # number of variety
            'IPCForce' : len(IPC11_range) # same as previous but with occurences
            }
# Attribute dictionaries for the remaining node families of the graphs below.
dicoAttrs = dict() # attributes for patent nodes
dicoAttrsCitP = dict()
dicoAttrsCitedBy = dict()
dicoAttrsEquiv = dict()
dicoAttrsOut = dict()
dicoAttrsTechno = dict()
AnnonceProgres (Appli = 'p2n_network', valMax = 100, valActu = 50)
# Third pass: per-patent attribute dictionaries plus first-sighting of each
# IPC code (tagged with its truncation-level category), reporting progress
# from 50% to 100% as patents are processed.
compt = 0
for ndf in PU:
    if 'Families' not in ndf:
        dataf = df
        dataf ['family lenght'] = [1] * len(dataf.index)
    else:
        dataf = df_Fam
    for bre in dataf.itertuples():
        compt+=1
        AnnonceProgres (Appli = 'p2n_network', valMax = 100, valActu = 50+(50*compt/total))
        # Patent node attributes; 'Famille' counts family frames whose
        # equivalents mention this label.
        dicoAttrs [bre.label] = {'Famille': sum( [bre.label in truc for truc in df_Fam ['equivalents']]),
            'IPC11-range': len(set(bre.IPCR11)),
            'IPC7-range': len(set(bre.IPCR7)),
            'IPC4-range': len(set(bre.IPCR4)),
            'Citations' : bre.Citations,
            "category" : 'label'
            #'NbBrevets' : 1
            }
        # Category tags for cited / citing / equivalent reference nodes.
        for lab in bre.CitP:
            dicoAttrsCitP [lab] = {"category" : 'CitP'
                }
        for lab in bre.CitedBy:
            dicoAttrsCitedBy [lab] = {"category" : 'CitedBy'
                }
        for lab in bre.equivalents:
            dicoAttrsEquiv [lab] = {"category" : 'equivalent'
                }
        for lab in bre.CitO: # we may get in trouble here
            if len(lab)>0:
                dicoAttrsOut [lab] = {"category" : 'CitO'
                    }
        # IPC codes are registered once, tagged by truncation level.
        for ipc in bre.IPCR1:
            if len(ipc)>0 and ipc not in dicoAttrsTechno.keys():
                dicoAttrsTechno [ipc] = {'category' : "IPCR1"}
        for ipc in bre.IPCR4:
            if len(ipc)>0 and ipc not in dicoAttrsTechno.keys():
                dicoAttrsTechno [ipc] = {'category' : "IPCR4"}
        for ipc in bre.IPCR7:
            if len(ipc)>0 and ipc not in dicoAttrsTechno.keys():
                dicoAttrsTechno [ipc] = {'category' : "IPCR7"}
        # IPCR11 codes are normalised with '-' in place of '/' (matching the
        # joliTecno normalisation used when the graphs are built below).
        for ipc in bre.IPCR11:
            ipc = ipc.replace('/', '-')
            if len(ipc)>0 and ipc not in dicoAttrsTechno.keys():
                dicoAttrsTechno [ipc] = {'category' : "IPCR11"}
# Registry of the networks to build: name -> [enabled flag from the config,
# list of node categories involved in that network].
Networks = dict()
#next lines are here to avoid the changing scheme lecture of requete.cql
Networks["_CountryCrossTech"] = [configFile.CountryCrossTechNetwork, [ 'IPCR7', "country"]] # not finished yet
Networks["_CrossTech"] = [configFile.CrossTechNetwork, ['label','IPCR7','IPCR1', 'IPCR4', 'IPCR11']] # GraphTechnos
Networks["_Inventors_CrossTech"] = [configFile.InventorCrossTechNetwork, ['IPCR11','IPCR7','IPCR4','IPCR1',"Inventor"]] # GraphTechnosAuthor
Networks["_Applicants_CrossTech"] = [configFile.ApplicantCrossTechNetwork, ['IPCR11','IPCR7','IPCR4','IPCR1', "Applicant"]] # GraphTechnosAppli
Networks["_ApplicantInventor"] = [configFile.ApplicantInventorNetwork, ["Applicant", "Inventor"]] # GraphAuteursAppli
Networks["_Applicants"] = [configFile.ApplicantNetwork, ["Applicant"]] # GraphApplicant
Networks["_Inventors"] = [configFile.InventorNetwork, ["Inventor"]] # GraphAuteurs
Networks["_References"] = [configFile.References, [ 'label', 'CitP', "CitO"]] # GraphBrevetsReferences
Networks["_Citations"] = [configFile.Citations, [ 'label', "CitedBy"]] # GraphBrevetsCitations
Networks["_Equivalents"] = [configFile.Equivalents, [ 'label', "equivalent"]] # GraphBrevetsEquivalents
# NOTE(review): "_LabelApplicants" reuses configFile.Equivalents as its
# enable flag - looks like a copy-paste; confirm a dedicated flag exists.
Networks["_LabelApplicants"] = [configFile.Equivalents, [ 'label', "Applicant"]] # GraphBrevetsEquivalents#GraphBrevets
for ndf in PU:
if 'Families' not in ndf:
dataf = df
dataf ['family lenght'] = [1] * len(dataf.index)
else:
dataf = df_Fam
Inventeurs = set()
Applis = []
Techno = dict()
# graph init
GraphAuteurs = nx.DiGraph()
GraphApplicant = nx.DiGraph()
GraphBrevets = nx.DiGraph()
GraphAuteursAppli = nx.DiGraph()
GraphBrevetsCitations = nx.DiGraph() # graph of citing patents of patents from PU
GraphBrevetsReferences = nx.DiGraph() # graph of references from patents from PU includes patents and other references
GraphBrevetsEquivalents = nx.DiGraph() # graph of equivalents
GraphTechnos = nx.DiGraph() # IPC graphs
GraphTechnosAppli = nx.DiGraph() # IPC graphs
GraphTechnosAuthor = nx.DiGraph() # IPC graphs
# TypeBre = dict()
for bre in dataf.itertuples():
GraphBrevets.add_node(bre .label)
GraphBrevetsCitations.add_node(bre .label)
GraphBrevetsReferences.add_node(bre .label)
GraphBrevetsEquivalents.add_node(bre .label)
GraphTechnos .add_node(bre .label)
for lab in bre.CitedBy:
if len(lab)>0 and bool(lab.strip()):
GraphBrevetsCitations.add_node(lab)
GraphBrevetsCitations.add_edge(lab, bre.label)
for lab in bre.CitP:
if len(lab)>0 and bool(lab.strip()):
GraphBrevetsReferences.add_node(lab)
GraphBrevetsReferences.add_edge(bre.label, lab)
for lab in bre.CitO:
if len(lab)>0 and bool(lab.strip()):
GraphBrevetsReferences.add_node(lab)
GraphBrevetsReferences.add_edge(bre.label, lab)
for lab in bre.equivalents :
if len(lab)>0 and bool(lab.strip()):
GraphBrevetsEquivalents.add_node(lab)
GraphBrevetsEquivalents.add_edge(bre.label, lab)
joliTecno = list(set([ipc.replace('/', '-') for ipc in bre.IPCR11]))
for ipc in bre.IPCR1 + bre.IPCR4 + bre.IPCR7 + joliTecno:
if bool(ipc.strip()):
if ipc in dicoAttrsTechno:
if 'size' in dicoAttrsTechno [ipc].keys():
dicoAttrsTechno [ipc] ['size'] +=1
else:
dicoAttrsTechno [ipc] ['size'] = 1
GraphTechnos .add_node(ipc)
GraphTechnosAppli.add_node(ipc)
GraphTechnosAuthor.add_node(ipc)
for ipc in joliTecno:
if bool(ipc.strip()):
for ipcUp in bre.IPCR7:
if ipc.startswith (ipcUp) and bool(ipcUp.strip()):
GraphTechnos .add_edge(ipcUp, ipc)
for ipc in bre.IPCR7:
if bool(ipc.strip()):
for ipcUp in bre.IPCR4:
if ipc.startswith (ipcUp) and bool(ipcUp.strip()):
GraphTechnos .add_edge(ipcUp, ipc)
for ipc in bre.IPCR4:
if bool(ipc.strip()):
for ipcUp in bre.IPCR1:
if ipc.startswith (ipcUp) and bool(ipcUp.strip()):
GraphTechnos .add_edge(ipcUp, ipc)
# for ipcUp in bre.IPCR1:
# GraphTechnos .add_edge(bre .label,ipcUp)
# chainning technlogy from most precise if existing
if len(joliTecno)>0:
for ipc in joliTecno:
if bool(ipc.strip()):
GraphTechnos .add_edge(bre .label,ipc)
elif len(bre.IPCR7) >0:
for ipc in bre.IPCR7:
if bool(ipc.strip()):
GraphTechnos .add_edge(bre .label,ipc)
elif len(bre.IPCR4) >0:
for ipc in bre.IPCR4:
if bool(ipc.strip()):
GraphTechnos .add_edge(bre .label,ipc)
else:
for ipc in bre.IPCR1:
if bool(ipc.strip()):
GraphTechnos .add_edge(bre .label,ipc)
if not isinstance( bre .applicant, str) and len( bre.applicant)>1:
if sum([len( truc) for truc in bre .applicant]) == len(bre.applicant):
dataf.at [bre.Index, 'applicant' ] = [''.join (bre .applicant)]
for appl in bre .applicant:
appl=appl.strip()
if len(appl) >0:
if len(joliTecno)>0:
for ipc in joliTecno:
GraphTechnosAppli .add_edge(appl,ipc)
elif len(bre.IPCR7) >0:
for ipc in bre.IPCR7:
GraphTechnosAppli .add_edge(appl,ipc)
elif len(bre.IPCR4) >0:
for ipc in bre.IPCR4:
GraphTechnosAppli .add_edge(appl,ipc)
else:
for ipc in bre.IPCR1:
GraphTechnosAppli .add_edge(appl,ipc)
if len(bre.applicant) >1:
for appl, coAut in cycle(bre.applicant):
appl=appl.strip()
coAut=coAut.strip()
if len(appl)>0 and appl.title() not in Inventeurs and NoPunct(appl).title() not in Inventeurs and appl.lower() != 'empty':
GraphBrevets.add_node(appl)
GraphBrevets.add_edge(appl, bre .label)
GraphApplicant.add_node(appl)
GraphTechnosAppli.add_node(appl)
GraphAuteursAppli.add_node(appl)
if len(coAut)>0 and coAut.title() not in Inventeurs and NoPunct(coAut).title() not in Inventeurs and coAut.lower() != 'empty':
GraphBrevets.add_node(coAut)
GraphBrevets.add_edge(coAut, bre .label)
GraphBrevets.add_edge( appl, coAut)
GraphApplicant.add_node(coAut)
GraphTechnosAppli.add_node(coAut)
GraphAuteursAppli.add_node(coAut)
elif len( bre.applicant)>0 and isinstance( bre .applicant, list):
appl= bre.applicant [0].strip()
if len(appl)>0 and appl.title() not in Inventeurs and NoPunct(appl).title() not in Inventeurs and appl.lower() != 'empty':
GraphBrevets.add_node(appl)
GraphBrevets.add_edge(appl, bre .label)
# GraphApplicant.add_node(appl)
GraphTechnosAppli.add_node(appl)
GraphAuteursAppli.add_node(appl)
if len(joliTecno)>0:
for ipc in joliTecno:
GraphTechnosAppli .add_edge(appl,ipc)
elif len(bre.IPCR7) >0:
for ipc in bre.IPCR7:
GraphTechnosAppli .add_edge(appl,ipc)
elif len(bre.IPCR4) >0:
for ipc in bre.IPCR4:
GraphTechnosAppli .add_edge(appl,ipc)
else:
for ipc in bre.IPCR1:
GraphTechnosAppli .add_edge(appl,ipc)
else:
pass
for aut in bre .inventor:
aut= aut.title()
GraphAuteurs.add_node(aut)
GraphTechnosAuthor.add_node(aut)
for ipc in joliTecno:
GraphTechnosAuthor .add_edge(aut,ipc)
if len(joliTecno)>0:
for ipc in joliTecno:
GraphTechnosAuthor .add_edge(aut,ipc)
elif len(bre.IPCR7) >0:
for ipc in bre.IPCR7:
GraphTechnosAuthor .add_edge(aut,ipc)
elif len(bre.IPCR4) >0:
for ipc in bre.IPCR4:
GraphTechnosAuthor .add_edge(aut,ipc)
else:
for ipc in bre.IPCR1:
GraphTechnosAuthor .add_edge(aut,ipc)
# chaining collaborations
if isinstance(bre .inventor, list) and len( bre .inventor)>1:
for aut, coAut in cycle( bre .inventor):
aut= aut.title()
coAut= coAut.title()
GraphAuteurs.add_edge(aut, coAut, label = 'AuthorCollaboration')
GraphAuteursAppli.add_edge(aut, coAut, label = 'AuthorCollaboration')
if isinstance(bre.applicant, list) and len( bre.applicant)>1: # many applicants
#cycling collaborations
for aut, coAut in cycle(bre .applicant):
aut = aut.strip()
coAut = coAut.strip()
if len(aut) ==1:
print (aut)
if len(coAut) == 1:
print(coAut)
regles = [len(aut)>0,
len(coAut) >0,
aut.title() not in Inventeurs,
NoPunct(aut).title() not in Inventeurs,
coAut.title() not in Inventeurs,
NoPunct(coAut).title() not in Inventeurs
]
if all(regles):
aut= aut.upper()
coAut= coAut.upper()
GraphAuteursAppli.add_edge(aut, coAut, label = 'ApplicantCollaboration')
GraphApplicant.add_edge(aut, coAut)
#chaining authors
for appl in bre.applicant:
if bool(appl.strip()):
appl= appl.upper()
regles= [len(appl)>0,
appl.title() not in Inventeurs,
NoPunct(appl).title() not in Inventeurs,
]
if all(regles):
for aut in bre .inventor:
aut= aut.title()
GraphAuteursAppli.add_edge( aut, appl, label = 'workfor')
elif len( bre.applicant)>0 and bool(bre.applicant[0].strip()): # only one applicant
appl= bre.applicant[0].upper()
regles= [len(appl)>0,
appl.title() not in Inventeurs,
NoPunct(appl).title() not in Inventeurs,
]
if all(regles):
for aut in bre .inventor:
aut= aut.title()
GraphAuteursAppli.add_edge( aut, appl, label = 'workfor')
else:
pass
NoeudAut = list(iter(GraphAuteurs))
NoeudApplicant = list(iter(GraphApplicant))
EdgesAut = list(iter(GraphAuteurs.edges()))
EdgesAut2 = [orig + ' ' + dest for orig, dest in EdgesAut]
EdgesApplicant = list(iter(GraphApplicant.edges()))
EdgesApplicant2 = [orig + ' ' + dest for orig, dest in EdgesApplicant]
NoeudApplicantInv = list(iter(GraphAuteursAppli))
EdgeApplicantInv = list(iter(GraphAuteursAppli.edges()))
EdgeApplicantInv2 = [orig + ' ' + dest for orig, dest in EdgeApplicantInv]
Autnode_sizes = {aut:sum([1 for truc in EdgesAut if truc [0] == aut or truc [1] == aut]) for aut in dicoAttrsAut.keys()}
Applicantnode_sizes = { appl: sum([1 for truc in EdgesApplicant if \
truc [0] == appl or truc [1] == appl]) for appl in dicoAttrsAppli.keys()}
ApplicantInvnode_sizes = { machin: sum([1 for truc in EdgeApplicantInv if truc [0] == machin or truc [1] == machin]) for machin in list(dicoAttrsAppli.keys())+list(dicoAttrsAut.keys())}
nx.set_node_attributes(GraphApplicant,Applicantnode_sizes, 'size')
nx.set_node_attributes(GraphAuteurs,Autnode_sizes, 'size')
nx.set_node_attributes( GraphBrevetsCitations, dicoAttrsCitedBy) # graph of citing patents of patents from PU
nx.set_node_attributes( GraphBrevetsCitations, dicoAttrs) # graph of citing patents of patents from PU
nx.set_node_attributes( GraphBrevetsReferences, dicoAttrsCitP) # graph of references from patents from PU includes patents and other references
nx.set_node_attributes( GraphBrevetsReferences, dicoAttrs)
nx.set_node_attributes( GraphBrevetsReferences, dicoAttrsOut )
nx.set_node_attributes( GraphBrevetsEquivalents, dicoAttrsEquiv) # graph of equivalents
nx.set_node_attributes( GraphBrevetsEquivalents, dicoAttrs)
nx.set_node_attributes(GraphTechnos, dicoAttrs) # IPC graphs
nx.set_node_attributes(GraphTechnos, dicoAttrsTechno) # IPC graphs
nx.set_node_attributes(GraphTechnosAppli, dicoAttrsTechno)
nx.set_node_attributes(GraphTechnosAppli, dicoAttrsAppli)
nx.set_node_attributes( GraphTechnosAuthor, dicoAttrsTechno)
nx.set_node_attributes( GraphTechnosAuthor, dicoAttrsAut)
nx.set_node_attributes(GraphBrevets,dicoAttrs)
nx.set_node_attributes(GraphBrevets,dicoAttrsAppli)
nx.set_node_attributes(GraphApplicant,dicoAttrsAppli)
nx.set_node_attributes(GraphAuteurs,dicoAttrsAut)
nx.set_node_attributes(GraphAuteursAppli,dicoAttrsAut)
nx.set_node_attributes(GraphAuteursAppli,dicoAttrsAppli)
nx.set_node_attributes(GraphAuteursAppli,Applicantnode_sizes, 'size')
nx.set_node_attributes(GraphAuteursAppli,Autnode_sizes, 'size')
nx.write_gexf(GraphAuteurs, ResultGephiPath+"/"+ndf+"_Inventors.gexf") # GraphAuteurs
nx.write_gexf(GraphApplicant, ResultGephiPath+"/"+ndf+"_Applicant.gexf") #
nx.write_gexf(GraphBrevets, ResultGephiPath+"/"+ndf+"-GraphBrevets.gexf")
nx.write_gexf(GraphAuteursAppli, ResultGephiPath+"/"+ndf+"_ApplicantInventor.gexf")
nx.write_gexf(GraphBrevetsEquivalents, ResultGephiPath+"/"+ndf+"_Equivalents.gexf")
nx.write_gexf(GraphBrevetsReferences, ResultGephiPath+"/"+ndf+"_References.gexf")
nx.write_gexf(GraphBrevetsCitations, ResultGephiPath+"/"+ndf+"_Citations.gexf")
nx.write_gexf (GraphTechnos, ResultGephiPath+"/"+ndf+"_CrossTech.gexf")
nx.write_gexf (GraphTechnosAppli, ResultGephiPath+"/"+ndf+"_Applicants_CrossTech.gexf")
nx.write_gexf (GraphTechnosAuthor, ResultGephiPath+"/"+ndf+"_Inventors_CrossTech.gexf")
visu = 'neato'
for G, network in [(GraphAuteurs, "_Inventors"),
(GraphApplicant, "_Applicants"),
(GraphBrevets, "_LabelApplicants"),
(GraphAuteursAppli, "_ApplicantInventor"),
(GraphBrevetsEquivalents, "_Equivalents"),
(GraphBrevetsReferences, "_References"),
(GraphBrevetsCitations, "_Citations"),
(GraphTechnos, "_CrossTech"),
(GraphTechnosAppli, "_Applicants_CrossTech"),
(GraphTechnosAuthor, "_Inventors_CrossTech") ]:
if len(G) == 0:
continue
Maxdegs = max([G.degree[node] for node in G.nodes()])
zoom = len(G) / Maxdegs
arguDot='-Goverlap="0:prism" -Gsize="1000,800" -GLT=550 -GKsep='+str(zoom)
pos = nx.spring_layout(G,k=3,iterations=20)
# pos = nx.graphviz_layout(G,prog=visu)
# Networks["_CountryCrossTech"] = [configFile.CountryCrossTechNetwork, [ 'IPCR7', "country"]] # not finished yet
# Networks[] = [configFile.CrossTechNetwork, ['IPCR7']] # GraphTechnos
# Networks[] = [configFile.InventorCrossTechNetwork, ['IPCR7', "inventor-nice"]] # GraphTechnosAuthor
# Networks[] = [configFile.ApplicantCrossTechNetwork, ['IPCR7', "applicant-nice"]] # GraphTechnosAppli
# Networks[] = [configFile.ApplicantInventorNetwork, ["applicant-nice", "inventor-nice"]] # GraphAuteursAppli
# Networks[] = [configFile.ApplicantNetwork, ["applicant-nice"]] # GraphApplicant
# Networks[] = [configFile.InventorNetwork, ["inventor-nice"]] # GraphAuteurs
# Networks[] = [configFile.References, [ 'label', 'CitP', "CitO"]] # GraphBrevetsReferences
# Networks[] = [configFile.Citations, [ 'label', "CitedBy"]] # GraphBrevetsCitations
# Networks[] = [configFile.Equivalents, [ 'label', "equivalents"]] # GraphBrevetsEquivalents
# Networks[] = [configFile.Equivalents, [ 'label', "applicant-nice"]] # GraphBrevetsEquivalents#GraphBrevets
factx, facty = 1, 1 # neatto
if len(pos)>0:
MaxPosX = max([pos[k][0] for k in list(pos.keys())])
MaxPosY = max([pos[k][1] for k in list(pos.keys())])
MinPosX = min([pos[k][0] for k in list(pos.keys())])
MinPosY = min([pos[k][1] for k in list(pos.keys())])
GvScreenX = MaxPosX-MinPosX
GvScreenY = MaxPosY-MinPosY
factx = screenX/GvScreenX
facty = screenY/GvScreenY
else: #by the way this is an empty net
MaxPosX = 200
MaxPosY = 100
MinPosX = -200
MinPosY = -100
GvScreenX = MaxPosX-MinPosX
GvScreenY = MaxPosY-MinPosY
factx = screenX/GvScreenX
facty = screenY/GvScreenY
if MinPosY>0:
posx, posy = 0, -400
else:
posx, posy = 0, 0
#one color for one kind of node
size = len (Networks [network][1])
# argu='-Goverlap="9:prism" -Gsize="1000,800" -Gdim=3 -Gdimen=2 -GLT=550 -GKsep='+str(zoom)
# pos=nx.graphviz_layout(G,prog='sfdp', args = argu )
#pos = nx.graphviz_layout(G, prog='dot', args = arguDot )
# pos = nx.spring_layout(G, dim=2, k=3, scale =1, iterations = 800)
# pos = nx.spectral_layout(G, dim=2,scale =1)
# newCoord = project_points(pos[k][0], pos[k][1], pos[k][2], 0, 0, 1)
# Visu['position']= {'x':newCoord[0][0], 'y':newCoord[0][1], 'z':0}
# norme = np.linalg.norm(pos[k])
cmpe = cmap_discretize(matplotlib.cm.jet, int(size))
for k in G.nodes():
#G.node[k]["weight"] = G.node[k]["weight"]['value'] # static net
#G.node[k]["id"] = G.node[k]["id"]['id']
Visu = dict()
Visu['color'] = dict()
#G.node[k]['label'] = Nodes.keys()[k]
#G.node[k]['category'] = Nodes[Nodes.keys()[k]]['category']
if G.nodes[k]['category'] == 'label':
G.nodes[k]['url'] =UrlPatent(k)[0]
Visu['color']['a'] = 1
Visu['color']['r']= int(254)
Visu['color']['g']= int(0)
Visu['color']['b']= int(0)
Visu['shape'] ="diamond"
elif G.nodes[k]['category'] =='CitP':
Visu['color']['a'] = 1
Visu['color']['r']= int(0)
Visu['color']['g']= int(254)
Visu['color']['b']= int(0)
Visu['shape'] ="ellipse"
elif G.nodes[k]['category'] == 'CitO':
# a hack here, trying to find out content in scholar
#https:/scholar.google.fr/scholar?hl=fr&q=pipo+test&btnG=&lr=
Visu['color']['r']= int(0)
Visu['color']['g']= int(0)
Visu['color']['b']= int(254)
Visu['color']['a'] =1
Visu['shape'] ="disc"
#UrlTemp = "https:/scholar.google.com/scholar?q=" + quot(Nodes.keys()[k])
#G.node[k]['url'] = UrlTemp
elif G.nodes[k]['category'] == 'CitedBy':
Visu['color']['a'] = 1
Visu['color']['r']= int(0)
Visu['color']['g']= int(127)
Visu['color']['b']= int(127)
Visu['shape'] ="square"
G.nodes[k]['url'] =UrlPatent(k)[0]
elif G.nodes[k]['category'] == "equivalents":
Visu['color']['a'] = 1
Visu['color']['r']= int(127)
Visu['color']['g']= int(127)
Visu['color']['b']= int(0)
Visu['shape'] ="ellipse"
G.nodes[k]['url'] =UrlPatent(k)[0]
elif G.nodes[k]['category'] == 'Applicant':
#G.node[k]['category'] = 'Applicant'# for readable facility
G.nodes[k]['url'] = UrlApplicantBuild(k)[0]
Visu['color']['a'] = 1
Visu['color']['r']= int(127)
Visu['color']['g']= int(0)
Visu['color']['b']= int(127)
Visu['shape'] ="star"
elif G.nodes[k]['category'] == 'IPCR1' or G.nodes[k]['category'] == 'IPCR3' or G.nodes[k]['category'] == 'IPCR4' or G.nodes[k]['category'] == 'IPCR7' or G.nodes[k]['category'] == 'IPCR11' or G.nodes[k]['category'] == 'CPC':
G.nodes[k]['url'] = UrlIPCRBuild(k)[0]
Visu['color']['a'] = 1
Visu['color']['r']= int(127)
Visu['color']['g']= int(254)
Visu['color']['b']= int(127)
Visu['shape'] ="database"
elif G.nodes[k]['category'] == 'Inventor':
#G.node[k]['category'] = 'inventor'# for readable facility
G.nodes[k]['url'] = UrlInventorBuild(k)[0]
Visu['color']['a'] = 1
Visu['color']['r']= int(127)
Visu['color']['g']= int(127)
Visu['color']['b']= int(254)
Visu['shape'] ="triangleDown"
else:
Visu['color']['a'] = 1
Visu['color']['r']= int(254)
Visu['color']['g']= int(254)
Visu['color']['b']= int(0)
Visu['shape'] ="image"
# if "label" not in mixNet:
# mixNet.append('label')
#factx, facty = 500, 400
# if 'inventor' in G.node[k]['category'] or 'applicant' in G.node[k]['category']:
# categ = G.node[k]['category']+'-nice' # for readable facility
# count = mixNet.index(categ)
# else:
# count = mixNet.index(G.node[k]['category'])
count = Networks [network][1].index(G.nodes[k]['category'])
Visu['position']= {'x':(int(pos[k][0]*factx)+posx), 'y':(int(pos[k][1]*facty)+posy), 'z':0.0}
# Visu['size'] = np.log(int(G.node[k]["weight"])+1)+4#
Visu['color']['a']= count
G.nodes[k]['viz'] =dict()
# Visu['color']['a']= count
# Visu['size'] = (G.node[k]["degree"]*1.0)#(G.node[k]["degree"]*1.0/Maxdegs)*150#(G.node[k]["weight"]) /MaxWeight #addd 1 for viewiong all...
#Visu['size'] = (G.node[k]["degree"]*zoom) +1 #(G.node[k]["weight"]) /MaxWeight #addd 1 for viewiong all...
Visu['size'] = G.degree [k]*10.0/Maxdegs +4
# Visu['size'] = np.log(int(G.node[k]["weight"])+1)*zoom+1#
for cle in list(Visu.keys()):
G.nodes[k]['viz'][cle] = Visu[cle]
# print G.node[k]
# nx.set_node_attributes(G, 'weight', attr_dict)
outputFile = ndf+network + 'JS.gexf'
try:
os.remove(ResultGephiPath+'/'+outputFile)
except:
try:
os.remove(ResultGephiPath+'/'+outputFile)
except:
pass
#
nx.write_gexf(G, ResultGephiPath+'/'+outputFile, version='1.2draft')
fic = open(ResultGephiPath+'/'+outputFile, 'r', encoding = 'utf8')
# Next is a hack to correct the bad writing of the header of the gexf file
# with dynamics properties
fictemp=open(ResultGephiPath+'/'+"Good"+outputFile, 'w', encoding = 'utf8')
ecrit = True
data = fic.read()
# VERY UGLY Hack here !!!!
data = data.replace('ns0:', 'viz:') # may be someone knows how to set namespace in networkx...
data = data.replace('a="None"', '') # may be someone knows why network set the "a" attribute...
for lig in data.split('\n'):
if lig.count('<nodes>'):
ecrit = True
if ecrit:
fictemp.write(lig+'\n')
fictemp.close()
fic.close()
try:
#os.remove(ResultGephiPath+'\\'+ndf+'.gexf')
os.remove(ResultGephiPath+'/'+outputFile)
except:
pass
os.rename(ResultGephiPath+'/'+"Good"+outputFile, ResultGephiPath+'/'+outputFile)
AnnonceLog (Appli = 'p2n_network', texte="Network file writen in " + ResultGephiPath+' directory.\n See file: '+outputFile)
#making the html from model
RenderTemplate(
"Graphe.html",
ResultGephiPath + '/'+outputFile.replace('.gexf','.html'),
TitleNet=network[1:]+' Network for ' + requete,
fichierConfigJS=outputFile.replace('.gexf','') +'Conf.js',
mediaStyle='../../../Patent2Net/media/styles',
mediaJs='../../../Patent2Net/media/js',
)
# making the js from model
# maybe we could adjust node size and other parameters here
RenderTemplate(
"gephiConfig.js",
ResultGephiPath + '/'+outputFile.replace('.gexf','') +'Conf.js',
FicRezo=outputFile,
)
RenderTemplate(
"GraphIndex.html",
configFile.ResultPath+"/GraphIndex"+ projectName+ ".html"
)
# making the js from model
# maybe we could adjust node size and other parameters here
RenderTemplate(
"GraphUploadModel.html",
configFile.ResultPath+"/GraphUpload.html",
request=projectName,
anglOpt = "{{loadingMessage=='' ? 'LOAD GRAPH' : loadingMessage}}"
)
# Graph recupes v2
RenderTemplate(
"graph-recipes.html",
configFile.ResultPath+"/graph-recipes-" + projectName + ".html",
request=projectName
)
secondes = time.time() - start_time
heures, secondes = divmod(secondes, 3600)
minutes, secondes = divmod(secondes, 60)
print ('Duree : ', heures, " h ", minutes, " m et ", secondes, " sec")
AnnonceLog (Appli = 'p2n_network', texte='All patents processed ' + str(total) +" in " + str(heures) + " h" + str(minutes) + " min and " +str(secondes) + " sec")
| StarcoderdataPython |
3437900 | <reponame>jeevan-revaneppa-hirethanad/audio-to-speech-pipeline
# String constants used as configuration keys for the audio-transcription
# stage (values mirror the key names used in the pipeline's config).
CONFIG_NAME = "audio_transcription_config"  # name of this stage's config section
CLEAN_AUDIO_PATH = "remote_clean_audio_file_path"  # key for the remote cleaned-audio path
SHOULD_SKIP_REJECTED = "should_skip_rejected"  # key for the skip-rejected-items flag
LANGUAGE = "language"  # key for the transcript language
AUDIO_LANGUAGE = "audio_language"  # key for the source-audio language
| StarcoderdataPython |
5088928 | import torch
from torch import nn
from torch.nn import functional
def multiplicative(x, data):
    """
    Mix a per-sample data vector into the leading channels of a 5-d tensor.

    The data vector (one row per batch element) is broadcast over the
    temporal and spatial axes and multiplied elementwise into the first
    D channels of ``x``; the remaining C - D channels pass through
    unchanged.

    Input: (N, C_{in}, L, H, W), (N, D)
    Output: (N, C_{in}, L, H, W)
    """
    batch, depth = data.size()
    batch, chans, frames, height, width = x.size()
    assert depth <= chans, "data dims must be less than channel dims"
    # Scale only the first `depth` channels by the data vector.
    scaled = x[:, :depth, :, :, :] * data.view(batch, depth, 1, 1, 1).expand(
        batch, depth, frames, height, width
    )
    return torch.cat([scaled, x[:, depth:, :, :, :]], dim=1)
class AttentiveEncoder(nn.Module):
    """
    Embed a D-dimensional data vector into video frames as a bounded
    additive perturbation.

    An attention network scores each spatio-temporal location against the
    D message dimensions; the message is mixed into the frames through
    those scores, and a second network produces the residual that is
    added (scaled by ``linf_max``) to the input frames.

    Input: (N, 3, L, H, W), (N, D, )
    Output: (N, 3, L, H, W)
    """
    def __init__(self, data_dim, tie_rgb=False, linf_max=0.016,
                 kernel_size=(1, 11, 11), padding=(0, 5, 5)):
        super(AttentiveEncoder, self).__init__()
        # Scale applied to the tanh-bounded residual; since tanh is in
        # [-1, 1], the per-pixel change is bounded by linf_max.
        self.linf_max = linf_max
        self.data_dim = data_dim
        self.kernel_size = kernel_size
        self.padding = padding
        # Emits one channel per message dimension; forward() softmaxes
        # over these channels to get a soft bit-to-location assignment.
        self._attention = nn.Sequential(
            nn.Conv3d(3, 32, kernel_size=kernel_size, padding=padding),
            nn.Tanh(),
            nn.BatchNorm3d(32),
            nn.Conv3d(32, data_dim, kernel_size=kernel_size, padding=padding),
            nn.Tanh(),
            nn.BatchNorm3d(data_dim),
        )
        # Maps frames + mixed-message channel (4 channels total) to the
        # residual; with tie_rgb a single channel is produced, which
        # broadcasts across RGB in the final addition.
        self._conv = nn.Sequential(
            nn.Conv3d(4, 32, kernel_size=kernel_size, padding=padding),
            nn.Tanh(),
            nn.BatchNorm3d(32),
            nn.Conv3d(32, 1 if tie_rgb else 3, kernel_size=kernel_size, padding=padding),
            nn.Tanh(),
        )
    def forward(self, frames, data):
        # Rescale the data from [0, 1] to [-1, 1].
        data = data * 2.0 - 1.0
        x = functional.softmax(self._attention(frames), dim=1)
        # Weight the attention map by the message and collapse the D
        # channels into one mixed-message channel.
        x = torch.sum(multiplicative(x, data), dim=1, keepdim=True)
        x = self._conv(torch.cat([frames, x], dim=1))
        return frames + self.linf_max * x
class AttentiveDecoder(nn.Module):
    """
    Recover the embedded data vector from (possibly perturbed) frames.

    Input: (N, 3, L, H, W)
    Output: (N, D)
    """
    def __init__(self, encoder):
        super(AttentiveDecoder, self).__init__()
        self.data_dim = encoder.data_dim
        # NOTE: the attention module is shared with the encoder (same
        # object, not a copy), so the decoder reads from the same
        # locations the encoder wrote to, and gradients flow through both.
        self._attention = encoder._attention
        self._conv = nn.Sequential(
            nn.Conv3d(3, 32, kernel_size=encoder.kernel_size, padding=encoder.padding, stride=1),
            nn.Tanh(),
            nn.BatchNorm3d(32),
            nn.Conv3d(32, self.data_dim, kernel_size=encoder.kernel_size,
                padding=encoder.padding, stride=1),
        )
    def forward(self, frames):
        N, D, L, H, W = frames.size()
        attention = functional.softmax(self._attention(frames), dim=1)
        # Attention-weighted scores, then averaged over every
        # spatio-temporal location to yield one value per message bit.
        x = self._conv(frames) * attention
        return torch.mean(x.view(N, self.data_dim, -1), dim=2)
6624517 | <reponame>ws0416/tencentcloud-cli-intl-en<filename>tccli/services/gaap/gaap_client.py
# -*- coding: utf-8 -*-
import os
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.gaap.v20180529 import gaap_client as gaap_client_v20180529
from tencentcloud.gaap.v20180529 import models as models_v20180529
def doDescribeProxyGroupList(args, parsed_globals):
    """Invoke the Gaap ``DescribeProxyGroupList`` API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials come from the resolved global CLI parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    request = models.DescribeProxyGroupListRequest()
    request.from_json_string(json.dumps(args))
    result = client.DescribeProxyGroupList(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Python 3.3 may hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doOpenSecurityPolicy(args, parsed_globals):
    """Invoke the Gaap ``OpenSecurityPolicy`` API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials come from the resolved global CLI parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    request = models.OpenSecurityPolicyRequest()
    request.from_json_string(json.dumps(args))
    result = client.OpenSecurityPolicy(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Python 3.3 may hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCertificates(args, parsed_globals):
    """Invoke the Gaap ``DescribeCertificates`` API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials come from the resolved global CLI parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    request = models.DescribeCertificatesRequest()
    request.from_json_string(json.dumps(args))
    result = client.DescribeCertificates(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Python 3.3 may hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateSecurityRules(args, parsed_globals):
    """Invoke the Gaap ``CreateSecurityRules`` API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials come from the resolved global CLI parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    request = models.CreateSecurityRulesRequest()
    request.from_json_string(json.dumps(args))
    result = client.CreateSecurityRules(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Python 3.3 may hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRemoveRealServers(args, parsed_globals):
    """Invoke the Gaap ``RemoveRealServers`` API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials come from the resolved global CLI parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    request = models.RemoveRealServersRequest()
    request.from_json_string(json.dumps(args))
    result = client.RemoveRealServers(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Python 3.3 may hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeHTTPSListeners(args, parsed_globals):
    """Invoke the Gaap ``DescribeHTTPSListeners`` API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials come from the resolved global CLI parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    request = models.DescribeHTTPSListenersRequest()
    request.from_json_string(json.dumps(args))
    result = client.DescribeHTTPSListeners(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Python 3.3 may hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateHTTPSListener(args, parsed_globals):
    """Invoke the Gaap ``CreateHTTPSListener`` API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials come from the resolved global CLI parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    request = models.CreateHTTPSListenerRequest()
    request.from_json_string(json.dumps(args))
    result = client.CreateHTTPSListener(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Python 3.3 may hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteSecurityPolicy(args, parsed_globals):
    """Invoke the Gaap ``DeleteSecurityPolicy`` API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials come from the resolved global CLI parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    request = models.DeleteSecurityPolicyRequest()
    request.from_json_string(json.dumps(args))
    result = client.DeleteSecurityPolicy(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Python 3.3 may hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSetAuthentication(args, parsed_globals):
    """Invoke the Gaap ``SetAuthentication`` API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials come from the resolved global CLI parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    request = models.SetAuthenticationRequest()
    request.from_json_string(json.dumps(args))
    result = client.SetAuthentication(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Python 3.3 may hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteRule(args, parsed_globals):
    """Invoke the Gaap ``DeleteRule`` API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials come from the resolved global CLI parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    request = models.DeleteRuleRequest()
    request.from_json_string(json.dumps(args))
    result = client.DeleteRule(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Python 3.3 may hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteDomainErrorPageInfo(args, parsed_globals):
    """Invoke the Gaap ``DeleteDomainErrorPageInfo`` API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials come from the resolved global CLI parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    request = models.DeleteDomainErrorPageInfoRequest()
    request.from_json_string(json.dumps(args))
    result = client.DeleteDomainErrorPageInfo(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Python 3.3 may hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyCertificate(args, parsed_globals):
    """Invoke the Gaap ``ModifyCertificate`` API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials come from the resolved global CLI parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    request = models.ModifyCertificateRequest()
    request.from_json_string(json.dumps(args))
    result = client.ModifyCertificate(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Python 3.3 may hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxyStatistics(args, parsed_globals):
    """Invoke the Gaap ``DescribeProxyStatistics`` API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials come from the resolved global CLI parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    request = models.DescribeProxyStatisticsRequest()
    request.from_json_string(json.dumps(args))
    result = client.DescribeProxyStatistics(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Python 3.3 may hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateRule(args, parsed_globals):
    """Invoke the Gaap ``CreateRule`` API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials come from the resolved global CLI parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    request = models.CreateRuleRequest()
    request.from_json_string(json.dumps(args))
    result = client.CreateRule(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Python 3.3 may hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDestRegions(args, parsed_globals):
    """Invoke the Gaap ``DescribeDestRegions`` API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials come from the resolved global CLI parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    request = models.DescribeDestRegionsRequest()
    request.from_json_string(json.dumps(args))
    result = client.DescribeDestRegions(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Python 3.3 may hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateDomainErrorPageInfo(args, parsed_globals):
    """Call the Gaap CreateDomainErrorPageInfo API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].CreateDomainErrorPageInfoRequest()
    model.from_json_string(json.dumps(args))
    result = client.CreateDomainErrorPageInfo(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxyGroupStatistics(args, parsed_globals):
    """Call the Gaap DescribeProxyGroupStatistics API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].DescribeProxyGroupStatisticsRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeProxyGroupStatistics(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSecurityPolicyDetail(args, parsed_globals):
    """Call the Gaap DescribeSecurityPolicyDetail API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].DescribeSecurityPolicyDetailRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeSecurityPolicyDetail(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyDomain(args, parsed_globals):
    """Call the Gaap ModifyDomain API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].ModifyDomainRequest()
    model.from_json_string(json.dumps(args))
    result = client.ModifyDomain(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyCertificateAttributes(args, parsed_globals):
    """Call the Gaap ModifyCertificateAttributes API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].ModifyCertificateAttributesRequest()
    model.from_json_string(json.dumps(args))
    result = client.ModifyCertificateAttributes(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCloseProxies(args, parsed_globals):
    """Call the Gaap CloseProxies API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].CloseProxiesRequest()
    model.from_json_string(json.dumps(args))
    result = client.CloseProxies(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doOpenProxies(args, parsed_globals):
    """Call the Gaap OpenProxies API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].OpenProxiesRequest()
    model.from_json_string(json.dumps(args))
    result = client.OpenProxies(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyRealServerName(args, parsed_globals):
    """Call the Gaap ModifyRealServerName API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].ModifyRealServerNameRequest()
    model.from_json_string(json.dumps(args))
    result = client.ModifyRealServerName(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeHTTPListeners(args, parsed_globals):
    """Call the Gaap DescribeHTTPListeners API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].DescribeHTTPListenersRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeHTTPListeners(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCheckProxyCreate(args, parsed_globals):
    """Call the Gaap CheckProxyCreate API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].CheckProxyCreateRequest()
    model.from_json_string(json.dumps(args))
    result = client.CheckProxyCreate(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doOpenProxyGroup(args, parsed_globals):
    """Call the Gaap OpenProxyGroup API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].OpenProxyGroupRequest()
    model.from_json_string(json.dumps(args))
    result = client.OpenProxyGroup(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAccessRegions(args, parsed_globals):
    """Call the Gaap DescribeAccessRegions API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].DescribeAccessRegionsRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeAccessRegions(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteSecurityRules(args, parsed_globals):
    """Call the Gaap DeleteSecurityRules API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].DeleteSecurityRulesRequest()
    model.from_json_string(json.dumps(args))
    result = client.DeleteSecurityRules(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxyGroupDetails(args, parsed_globals):
    """Call the Gaap DescribeProxyGroupDetails API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].DescribeProxyGroupDetailsRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeProxyGroupDetails(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateProxy(args, parsed_globals):
    """Call the Gaap CreateProxy API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].CreateProxyRequest()
    model.from_json_string(json.dumps(args))
    result = client.CreateProxy(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteCertificate(args, parsed_globals):
    """Call the Gaap DeleteCertificate API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].DeleteCertificateRequest()
    model.from_json_string(json.dumps(args))
    result = client.DeleteCertificate(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateSecurityPolicy(args, parsed_globals):
    """Call the Gaap CreateSecurityPolicy API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].CreateSecurityPolicyRequest()
    model.from_json_string(json.dumps(args))
    result = client.CreateSecurityPolicy(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxies(args, parsed_globals):
    """Call the Gaap DescribeProxies API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].DescribeProxiesRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeProxies(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyHTTPListenerAttribute(args, parsed_globals):
    """Call the Gaap ModifyHTTPListenerAttribute API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].ModifyHTTPListenerAttributeRequest()
    model.from_json_string(json.dumps(args))
    result = client.ModifyHTTPListenerAttribute(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyProxiesProject(args, parsed_globals):
    """Call the Gaap ModifyProxiesProject API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].ModifyProxiesProjectRequest()
    model.from_json_string(json.dumps(args))
    result = client.ModifyProxiesProject(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeGroupDomainConfig(args, parsed_globals):
    """Call the Gaap DescribeGroupDomainConfig API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].DescribeGroupDomainConfigRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeGroupDomainConfig(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBindRuleRealServers(args, parsed_globals):
    """Call the Gaap BindRuleRealServers API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].BindRuleRealServersRequest()
    model.from_json_string(json.dumps(args))
    result = client.BindRuleRealServers(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteProxyGroup(args, parsed_globals):
    """Call the Gaap DeleteProxyGroup API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].DeleteProxyGroupRequest()
    model.from_json_string(json.dumps(args))
    result = client.DeleteProxyGroup(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAccessRegionsByDestRegion(args, parsed_globals):
    """Call the Gaap DescribeAccessRegionsByDestRegion API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].DescribeAccessRegionsByDestRegionRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeAccessRegionsByDestRegion(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyHTTPSListenerAttribute(args, parsed_globals):
    """Call the Gaap ModifyHTTPSListenerAttribute API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].ModifyHTTPSListenerAttributeRequest()
    model.from_json_string(json.dumps(args))
    result = client.ModifyHTTPSListenerAttribute(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateDomain(args, parsed_globals):
    """Call the Gaap CreateDomain API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].CreateDomainRequest()
    model.from_json_string(json.dumps(args))
    result = client.CreateDomain(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyRuleAttribute(args, parsed_globals):
    """Call the Gaap ModifyRuleAttribute API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].ModifyRuleAttributeRequest()
    model.from_json_string(json.dumps(args))
    result = client.ModifyRuleAttribute(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCertificateDetail(args, parsed_globals):
    """Call the Gaap DescribeCertificateDetail API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].DescribeCertificateDetailRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeCertificateDetail(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCloseSecurityPolicy(args, parsed_globals):
    """Call the Gaap CloseSecurityPolicy API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].CloseSecurityPolicyRequest()
    model.from_json_string(json.dumps(args))
    result = client.CloseSecurityPolicy(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyGroupDomainConfig(args, parsed_globals):
    """Call the Gaap ModifyGroupDomainConfig API with the parsed CLI arguments and print the result."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # default: 60s
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    # Build the request model from the CLI args and invoke the API.
    model = MODELS_MAP[version].ModifyGroupDomainConfigRequest()
    model.from_json_string(json.dumps(args))
    result = client.ModifyGroupDomainConfig(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # python3.3: to_json_string may yield bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTCPListeners(args, parsed_globals):
    """CLI action: call the GAAP DescribeTCPListeners API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTCPListenersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeTCPListeners(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDomainErrorPageInfo(args, parsed_globals):
    """CLI action: call the GAAP DescribeDomainErrorPageInfo API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDomainErrorPageInfoRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDomainErrorPageInfo(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRealServers(args, parsed_globals):
    """CLI action: call the GAAP DescribeRealServers API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeRealServersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeRealServers(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDomainErrorPageInfoByIds(args, parsed_globals):
    """CLI action: call the GAAP DescribeDomainErrorPageInfoByIds API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDomainErrorPageInfoByIdsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDomainErrorPageInfoByIds(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyUDPListenerAttribute(args, parsed_globals):
    """CLI action: call the GAAP ModifyUDPListenerAttribute API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyUDPListenerAttributeRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyUDPListenerAttribute(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxyAndStatisticsListeners(args, parsed_globals):
    """CLI action: call the GAAP DescribeProxyAndStatisticsListeners API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProxyAndStatisticsListenersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProxyAndStatisticsListeners(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateHTTPListener(args, parsed_globals):
    """CLI action: call the GAAP CreateHTTPListener API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateHTTPListenerRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateHTTPListener(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeUDPListeners(args, parsed_globals):
    """CLI action: call the GAAP DescribeUDPListeners API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeUDPListenersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeUDPListeners(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyProxyConfiguration(args, parsed_globals):
    """CLI action: call the GAAP ModifyProxyConfiguration API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyProxyConfigurationRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyProxyConfiguration(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCloseProxyGroup(args, parsed_globals):
    """CLI action: call the GAAP CloseProxyGroup API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CloseProxyGroupRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CloseProxyGroup(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyTCPListenerAttribute(args, parsed_globals):
    """CLI action: call the GAAP ModifyTCPListenerAttribute API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyTCPListenerAttributeRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyTCPListenerAttribute(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyProxyGroupAttribute(args, parsed_globals):
    """CLI action: call the GAAP ModifyProxyGroupAttribute API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyProxyGroupAttributeRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyProxyGroupAttribute(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeGroupAndStatisticsProxy(args, parsed_globals):
    """CLI action: call the GAAP DescribeGroupAndStatisticsProxy API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeGroupAndStatisticsProxyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeGroupAndStatisticsProxy(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRealServerStatistics(args, parsed_globals):
    """CLI action: call the GAAP DescribeRealServerStatistics API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeRealServerStatisticsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeRealServerStatistics(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBindListenerRealServers(args, parsed_globals):
    """CLI action: call the GAAP BindListenerRealServers API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.BindListenerRealServersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.BindListenerRealServers(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateProxyGroupDomain(args, parsed_globals):
    """CLI action: call the GAAP CreateProxyGroupDomain API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateProxyGroupDomainRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateProxyGroupDomain(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateProxyGroup(args, parsed_globals):
    """CLI action: call the GAAP CreateProxyGroup API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateProxyGroupRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateProxyGroup(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateUDPListeners(args, parsed_globals):
    """CLI action: call the GAAP CreateUDPListeners API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateUDPListenersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateUDPListeners(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRegionAndPrice(args, parsed_globals):
    """CLI action: call the GAAP DescribeRegionAndPrice API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeRegionAndPriceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeRegionAndPrice(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifySecurityRule(args, parsed_globals):
    """CLI action: call the GAAP ModifySecurityRule API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifySecurityRuleRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifySecurityRule(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxyDetail(args, parsed_globals):
    """CLI action: call the GAAP DescribeProxyDetail API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProxyDetailRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProxyDetail(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeListenerStatistics(args, parsed_globals):
    """CLI action: call the GAAP DescribeListenerStatistics API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeListenerStatisticsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeListenerStatistics(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRuleRealServers(args, parsed_globals):
    """CLI action: call the GAAP DescribeRuleRealServers API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeRuleRealServersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeRuleRealServers(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAddRealServers(args, parsed_globals):
    """CLI action: call the GAAP AddRealServers API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.AddRealServersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.AddRealServers(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxiesStatus(args, parsed_globals):
    """CLI action: call the GAAP DescribeProxiesStatus API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProxiesStatusRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProxiesStatus(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSecurityRules(args, parsed_globals):
    """CLI action: call the GAAP DescribeSecurityRules API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeSecurityRulesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeSecurityRules(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteDomain(args, parsed_globals):
    """CLI action: call the GAAP DeleteDomain API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteDomainRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteDomain(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateCertificate(args, parsed_globals):
    """CLI action: call the GAAP CreateCertificate API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateCertificateRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateCertificate(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTCPListeners(args, parsed_globals):
    """CLI action: call the GAAP CreateTCPListeners API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateTCPListenersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateTCPListeners(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRules(args, parsed_globals):
    """CLI action: call the GAAP DescribeRules API and print the JSON response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeRulesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeRules(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: to_json_string may return bytes
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyProxiesAttribute(args, parsed_globals):
    """CLI handler: invoke the gaap ModifyProxiesAttribute API and print the JSON reply.

    :param args: dict of action parameters, serialized into the request model.
    :param parsed_globals: global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyProxiesAttributeRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyProxiesAttribute(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: json.loads() rejects bytes; decode first
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDestroyProxies(args, parsed_globals):
    """CLI handler: invoke the gaap DestroyProxies API and print the JSON reply.

    :param args: dict of action parameters, serialized into the request model.
    :param parsed_globals: global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DestroyProxiesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DestroyProxies(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: json.loads() rejects bytes; decode first
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeResourcesByTag(args, parsed_globals):
    """CLI handler: invoke the gaap DescribeResourcesByTag API and print the JSON reply.

    :param args: dict of action parameters, serialized into the request model.
    :param parsed_globals: global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeResourcesByTagRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeResourcesByTag(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: json.loads() rejects bytes; decode first
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCountryAreaMapping(args, parsed_globals):
    """CLI handler: invoke the gaap DescribeCountryAreaMapping API and print the JSON reply.

    :param args: dict of action parameters, serialized into the request model.
    :param parsed_globals: global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeCountryAreaMappingRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeCountryAreaMapping(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: json.loads() rejects bytes; decode first
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeListenerRealServers(args, parsed_globals):
    """CLI handler: invoke the gaap DescribeListenerRealServers API and print the JSON reply.

    :param args: dict of action parameters, serialized into the request model.
    :param parsed_globals: global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeListenerRealServersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeListenerRealServers(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: json.loads() rejects bytes; decode first
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteListeners(args, parsed_globals):
    """CLI handler: invoke the gaap DeleteListeners API and print the JSON reply.

    :param args: dict of action parameters, serialized into the request model.
    :param parsed_globals: global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteListenersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteListeners(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: json.loads() rejects bytes; decode first
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRealServersStatus(args, parsed_globals):
    """CLI handler: invoke the gaap DescribeRealServersStatus API and print the JSON reply.

    :param args: dict of action parameters, serialized into the request model.
    :param parsed_globals: global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeRealServersStatusRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeRealServersStatus(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: json.loads() rejects bytes; decode first
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRulesByRuleIds(args, parsed_globals):
    """CLI handler: invoke the gaap DescribeRulesByRuleIds API and print the JSON reply.

    :param args: dict of action parameters, serialized into the request model.
    :param parsed_globals: global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeRulesByRuleIdsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeRulesByRuleIds(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: json.loads() rejects bytes; decode first
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doInquiryPriceCreateProxy(args, parsed_globals):
    """CLI handler: invoke the gaap InquiryPriceCreateProxy API and print the JSON reply.

    :param args: dict of action parameters, serialized into the request model.
    :param parsed_globals: global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.InquiryPriceCreateProxyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.InquiryPriceCreateProxy(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # Python 3.3: json.loads() rejects bytes; decode first
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
# API version string -> generated SDK client module.
CLIENT_MAP = {
    "v20180529": gaap_client_v20180529,
}
# API version string -> generated SDK request/response model module.
MODELS_MAP = {
    "v20180529": models_v20180529,
}
# CLI action name -> handler function defined above in this module.
ACTION_MAP = {
    "DescribeProxyGroupList": doDescribeProxyGroupList,
    "OpenSecurityPolicy": doOpenSecurityPolicy,
    "DescribeCertificates": doDescribeCertificates,
    "CreateSecurityRules": doCreateSecurityRules,
    "RemoveRealServers": doRemoveRealServers,
    "DescribeHTTPSListeners": doDescribeHTTPSListeners,
    "CreateHTTPSListener": doCreateHTTPSListener,
    "DeleteSecurityPolicy": doDeleteSecurityPolicy,
    "SetAuthentication": doSetAuthentication,
    "DeleteRule": doDeleteRule,
    "DeleteDomainErrorPageInfo": doDeleteDomainErrorPageInfo,
    "ModifyCertificate": doModifyCertificate,
    "DescribeProxyStatistics": doDescribeProxyStatistics,
    "CreateRule": doCreateRule,
    "DescribeDestRegions": doDescribeDestRegions,
    "CreateDomainErrorPageInfo": doCreateDomainErrorPageInfo,
    "DescribeProxyGroupStatistics": doDescribeProxyGroupStatistics,
    "DescribeSecurityPolicyDetail": doDescribeSecurityPolicyDetail,
    "ModifyDomain": doModifyDomain,
    "ModifyCertificateAttributes": doModifyCertificateAttributes,
    "CloseProxies": doCloseProxies,
    "OpenProxies": doOpenProxies,
    "ModifyRealServerName": doModifyRealServerName,
    "DescribeHTTPListeners": doDescribeHTTPListeners,
    "CheckProxyCreate": doCheckProxyCreate,
    "OpenProxyGroup": doOpenProxyGroup,
    "DescribeAccessRegions": doDescribeAccessRegions,
    "DeleteSecurityRules": doDeleteSecurityRules,
    "DescribeProxyGroupDetails": doDescribeProxyGroupDetails,
    "CreateProxy": doCreateProxy,
    "DeleteCertificate": doDeleteCertificate,
    "CreateSecurityPolicy": doCreateSecurityPolicy,
    "DescribeProxies": doDescribeProxies,
    "ModifyHTTPListenerAttribute": doModifyHTTPListenerAttribute,
    "ModifyProxiesProject": doModifyProxiesProject,
    "DescribeGroupDomainConfig": doDescribeGroupDomainConfig,
    "BindRuleRealServers": doBindRuleRealServers,
    "DeleteProxyGroup": doDeleteProxyGroup,
    "DescribeAccessRegionsByDestRegion": doDescribeAccessRegionsByDestRegion,
    "ModifyHTTPSListenerAttribute": doModifyHTTPSListenerAttribute,
    "CreateDomain": doCreateDomain,
    "ModifyRuleAttribute": doModifyRuleAttribute,
    "DescribeCertificateDetail": doDescribeCertificateDetail,
    "CloseSecurityPolicy": doCloseSecurityPolicy,
    "ModifyGroupDomainConfig": doModifyGroupDomainConfig,
    "DescribeTCPListeners": doDescribeTCPListeners,
    "DescribeDomainErrorPageInfo": doDescribeDomainErrorPageInfo,
    "DescribeRealServers": doDescribeRealServers,
    "DescribeDomainErrorPageInfoByIds": doDescribeDomainErrorPageInfoByIds,
    "ModifyUDPListenerAttribute": doModifyUDPListenerAttribute,
    "DescribeProxyAndStatisticsListeners": doDescribeProxyAndStatisticsListeners,
    "CreateHTTPListener": doCreateHTTPListener,
    "DescribeUDPListeners": doDescribeUDPListeners,
    "ModifyProxyConfiguration": doModifyProxyConfiguration,
    "CloseProxyGroup": doCloseProxyGroup,
    "ModifyTCPListenerAttribute": doModifyTCPListenerAttribute,
    "ModifyProxyGroupAttribute": doModifyProxyGroupAttribute,
    "DescribeGroupAndStatisticsProxy": doDescribeGroupAndStatisticsProxy,
    "DescribeRealServerStatistics": doDescribeRealServerStatistics,
    "BindListenerRealServers": doBindListenerRealServers,
    "CreateProxyGroupDomain": doCreateProxyGroupDomain,
    "CreateProxyGroup": doCreateProxyGroup,
    "CreateUDPListeners": doCreateUDPListeners,
    "DescribeRegionAndPrice": doDescribeRegionAndPrice,
    "ModifySecurityRule": doModifySecurityRule,
    "DescribeProxyDetail": doDescribeProxyDetail,
    "DescribeListenerStatistics": doDescribeListenerStatistics,
    "DescribeRuleRealServers": doDescribeRuleRealServers,
    "AddRealServers": doAddRealServers,
    "DescribeProxiesStatus": doDescribeProxiesStatus,
    "DescribeSecurityRules": doDescribeSecurityRules,
    "DeleteDomain": doDeleteDomain,
    "CreateCertificate": doCreateCertificate,
    "CreateTCPListeners": doCreateTCPListeners,
    "DescribeRules": doDescribeRules,
    "ModifyProxiesAttribute": doModifyProxiesAttribute,
    "DestroyProxies": doDestroyProxies,
    "DescribeResourcesByTag": doDescribeResourcesByTag,
    "DescribeCountryAreaMapping": doDescribeCountryAreaMapping,
    "DescribeListenerRealServers": doDescribeListenerRealServers,
    "DeleteListeners": doDeleteListeners,
    "DescribeRealServersStatus": doDescribeRealServersStatus,
    "DescribeRulesByRuleIds": doDescribeRulesByRuleIds,
    "InquiryPriceCreateProxy": doInquiryPriceCreateProxy,
}
# API versions this command module supports (normalized "vYYYYMMDD" form).
AVAILABLE_VERSION_LIST = [
    "v20180529",
]
def action_caller():
    """Return the mapping of CLI action names to their handler functions."""
    return ACTION_MAP
def parse_global_arg(parsed_globals):
    """Merge CLI globals with profile files and environment variables.

    Resolution order for each missing global option: explicit CLI value,
    then ``~/.tccli/<profile>.credential`` / ``<profile>.configure``, with
    environment variables overriding the files when no --profile was given.
    Returns the completed parameter dict; raises ConfigurationError when a
    required option cannot be resolved.
    """
    g_param = parsed_globals
    is_exist_profile = True
    if not parsed_globals["profile"]:
        # No explicit profile: fall back to "default" and allow env overrides.
        is_exist_profile = False
        g_param["profile"] = "default"
    configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
    is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
    is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
    conf = {}
    cred = {}
    if is_conf_exist:
        conf = Utils.load_json_msg(conf_path)
    if is_cred_exist:
        cred = Utils.load_json_msg(cred_path)
    if not (isinstance(conf, dict) and isinstance(cred, dict)):
        raise ConfigurationError(
            "file: %s or %s is not json format"
            % (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
    if OptionsDefine.Token not in cred:
        cred[OptionsDefine.Token] = None
    if not is_exist_profile:
        # Environment variables win over profile files only when the user
        # did not select a profile explicitly.
        if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
            cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
            cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
            cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
        if os.environ.get(OptionsDefine.ENV_REGION):
            conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
    # Fill any still-unset global option from the credential/configure dicts.
    for param in g_param.keys():
        if g_param[param] is None:
            if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
                if param in cred:
                    g_param[param] = cred[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
            elif param in [OptionsDefine.Region, OptionsDefine.Output]:
                if param in conf:
                    g_param[param] = conf[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
    try:
        # Normalize the service API version to the "vYYYYMMDD" module-key form.
        if g_param[OptionsDefine.ServiceVersion]:
            g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
        else:
            version = conf["gaap"][OptionsDefine.Version]
            g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
        if g_param[OptionsDefine.Endpoint] is None:
            g_param[OptionsDefine.Endpoint] = conf["gaap"][OptionsDefine.Endpoint]
    except Exception as err:
        raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
    if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
        raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
    return g_param
# ---- (extraction artifact removed: boundary between concatenated source files) ----
from .. import Provider as PersonProvider
class Provider(PersonProvider):
    """Lithuanian (lt_LT) person-name provider: locale data consumed by the base class."""

    # Name patterns; the plain "first last" format appears twice, which gives
    # it double the sampling weight relative to "last, first".
    formats = (
        "{{first_name}} {{last_name}}",
        "{{first_name}} {{last_name}}",
        "{{last_name}}, {{first_name}}",
    )
    # Given names, male then female, in one flat tuple.  Some names appear
    # twice (e.g. "Artūras", "Jolanta") — presumably a data-entry artifact;
    # duplicates double those names' sampling weight.  TODO confirm.
    first_names = (
        "Tomas",
        "Lukas",
        "Mantas",
        "Deividas",
        "Arnas",
        "Artūras",
        "Karolis",
        "Dovydas",
        "Dominykas",
        "Darius",
        "Edvinas",
        "Jonas",
        "Martynas",
        "Kajus",
        "Donatas",
        "Andrius",
        "Matas",
        "Rokas",
        "Augustas",
        "Danielius",
        "Mindaugas",
        "Paulius",
        "Marius",
        "Armandas",
        "Edgaras",
        "Jokūbas",
        "Nedas",
        "Tadas",
        "Nerijus",
        "Simonas",
        "Vytautas",
        "Artūras",
        "Robertas",
        "Eimantas",
        "Arijus",
        "Nojus",
        "Egidijus",
        "Aurimas",
        "Emilis",
        "Laurynas",
        "Edvardas",
        "Joris",
        "Pijus",
        "Erikas",
        "Domas",
        "Vilius",
        "Evaldas",
        "Justinas",
        "Aleksandras",
        "Kristupas",
        "Gabrielius",
        "Benas",
        "Gytis",
        "Arminas",
        "Vakris",
        "Tautvydas",
        "Domantas",
        "Justas",
        "Markas",
        "Antanas",
        "Arūnas",
        "Ernestas",
        "Aronas",
        "Vaidas",
        "Ąžuolas",
        "Titas",
        "Giedrius",
        "Ignas",
        "Povilas",
        "Saulius",
        "Julius",
        "Arvydas",
        "Kęstutis",
        "Rytis",
        "Aistis",
        "Gediminas",
        "Algirdas",
        "Naglis",
        "Irmantas",
        "Rolandas",
        "Aivaras",
        "Simas",
        "Faustas",
        "Ramūnas",
        "Šarūnas",
        "Gustas",
        "Tajus",
        "Dainius",
        "Arnoldas",
        "Linas",
        "Rojus",
        "Adomas",
        "Žygimantas",
        "Ričardas",
        "Orestas",
        "Kipras",
        "Juozas",
        "Audrius",
        "Romualdas",
        "Petras",
        # Female given names start here.
        "Eleonora",
        "Raminta",
        "Dovilė",
        "Sandra",
        "Dominyka",
        "Ana",
        "Erika",
        "Kristina",
        "Gintarė",
        "Rūta",
        "Edita",
        "Karina",
        "Živilė",
        "Jolanta",
        "Radvilė",
        "Ramunė",
        "Svetlana",
        "Ugnė",
        "Eglė",
        "Viktorija",
        "Justina",
        "Brigita",
        "Rasa",
        "Marija",
        "Giedrė",
        "Iveta",
        "Sonata",
        "Vitalija",
        "Adrija",
        "Goda",
        "Paulina",
        "Kornelija",
        "Liepa",
        "Vakarė",
        "Milda",
        "Meda",
        "Vaida",
        "Izabelė",
        "Jovita",
        "Irma",
        "Žemyna",
        "Leila",
        "Rimantė",
        "Mantė",
        "Rytė",
        "Perla",
        "Greta",
        "Monika",
        "Ieva",
        "Indrė",
        "Ema",
        "Aurelija",
        "Smiltė",
        "Ingrida",
        "Simona",
        "Amelija",
        "Sigita",
        "Olivija",
        "Laurita",
        "Jorūnė",
        "Leticija",
        "Vigilija",
        "Medėja",
        "Laura",
        "Agnė",
        "Evelina",
        "Kotryna",
        "Lėja",
        "Aušra",
        "Neringa",
        "Gerda",
        "Jurgita",
        "Rusnė",
        "Aušrinė",
        "Rita",
        "Elena",
        "Ineta",
        "Ligita",
        "Vasarė",
        "Vėjūnė",
        "Ignė",
        "Gytė",
        "Ariana",
        "Arielė",
        "Vytė",
        "Eidvilė",
        "Karolina",
        "Miglė",
        "Viltė",
        "Jolanta",
        "Enrika",
        "Aurėja",
        "Vanesa",
        "Darija",
        "Reda",
        "Milana",
        "Rugilė",
        "Diana",
    )
    # Surnames.  NOTE(review): "Vsiliauskas" looks like a typo for
    # "Vasiliauskas", and "Žukauskas" appears twice — confirm against the
    # intended locale data before changing (changes sampling output).
    last_names = (
        "Kazlauskas",
        "Jankauskas",
        "Petrauskas",
        "Pocius",
        "Stankevičius",
        "Vsiliauskas",
        "Žukauskas",
        "Butkus",
        "Paulauskas",
        "Urbonas",
        "Kavaliauskas",
        "Sakalauskas",
        "Žukauskas",
        "Akelis",
        "Ambrasas",
        "Kairys",
        "Kalvaitis",
        "Kalvelis",
        "Kalvėnas",
        "Kaupas",
        "Kiška",
        "Gagys",
        "Gailius",
        "Gailys",
        "Gaižauskas",
        "Gaičiūnas",
        "Galdikas",
        "Gintalas",
        "Ginzburgas",
        "Grinius",
        "Gronskis",
        "Nagys",
        "Naujokas",
        "Narušis",
        "Nausėda",
        "Poška",
        "Povilonis",
    )
# ---- (extraction artifact removed: boundary between concatenated source files) ----
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This script reads Huffman Code table [1] and generates symbol table
# and decoding tables in C language. The resulting code is used in
# lib/nghttp2_hd_huffman.h and lib/nghttp2_hd_huffman_data.c
#
# [1] http://http2.github.io/http2-spec/compression.html
from __future__ import unicode_literals
import re
import sys
import StringIO
# From [1]
HUFFMAN_CODE_TABLE = """\
( 0) |11111111|11000 1ff8 [13]
( 1) |11111111|11111111|1011000 7fffd8 [23]
( 2) |11111111|11111111|11111110|0010 fffffe2 [28]
( 3) |11111111|11111111|11111110|0011 fffffe3 [28]
( 4) |11111111|11111111|11111110|0100 fffffe4 [28]
( 5) |11111111|11111111|11111110|0101 fffffe5 [28]
( 6) |11111111|11111111|11111110|0110 fffffe6 [28]
( 7) |11111111|11111111|11111110|0111 fffffe7 [28]
( 8) |11111111|11111111|11111110|1000 fffffe8 [28]
( 9) |11111111|11111111|11101010 ffffea [24]
( 10) |11111111|11111111|11111111|111100 3ffffffc [30]
( 11) |11111111|11111111|11111110|1001 fffffe9 [28]
( 12) |11111111|11111111|11111110|1010 fffffea [28]
( 13) |11111111|11111111|11111111|111101 3ffffffd [30]
( 14) |11111111|11111111|11111110|1011 fffffeb [28]
( 15) |11111111|11111111|11111110|1100 fffffec [28]
( 16) |11111111|11111111|11111110|1101 fffffed [28]
( 17) |11111111|11111111|11111110|1110 fffffee [28]
( 18) |11111111|11111111|11111110|1111 fffffef [28]
( 19) |11111111|11111111|11111111|0000 ffffff0 [28]
( 20) |11111111|11111111|11111111|0001 ffffff1 [28]
( 21) |11111111|11111111|11111111|0010 ffffff2 [28]
( 22) |11111111|11111111|11111111|111110 3ffffffe [30]
( 23) |11111111|11111111|11111111|0011 ffffff3 [28]
( 24) |11111111|11111111|11111111|0100 ffffff4 [28]
( 25) |11111111|11111111|11111111|0101 ffffff5 [28]
( 26) |11111111|11111111|11111111|0110 ffffff6 [28]
( 27) |11111111|11111111|11111111|0111 ffffff7 [28]
( 28) |11111111|11111111|11111111|1000 ffffff8 [28]
( 29) |11111111|11111111|11111111|1001 ffffff9 [28]
( 30) |11111111|11111111|11111111|1010 ffffffa [28]
( 31) |11111111|11111111|11111111|1011 ffffffb [28]
' ' ( 32) |010100 14 [ 6]
'!' ( 33) |11111110|00 3f8 [10]
'"' ( 34) |11111110|01 3f9 [10]
'#' ( 35) |11111111|1010 ffa [12]
'$' ( 36) |11111111|11001 1ff9 [13]
'%' ( 37) |010101 15 [ 6]
'&' ( 38) |11111000 f8 [ 8]
''' ( 39) |11111111|010 7fa [11]
'(' ( 40) |11111110|10 3fa [10]
')' ( 41) |11111110|11 3fb [10]
'*' ( 42) |11111001 f9 [ 8]
'+' ( 43) |11111111|011 7fb [11]
',' ( 44) |11111010 fa [ 8]
'-' ( 45) |010110 16 [ 6]
'.' ( 46) |010111 17 [ 6]
'/' ( 47) |011000 18 [ 6]
'0' ( 48) |00000 0 [ 5]
'1' ( 49) |00001 1 [ 5]
'2' ( 50) |00010 2 [ 5]
'3' ( 51) |011001 19 [ 6]
'4' ( 52) |011010 1a [ 6]
'5' ( 53) |011011 1b [ 6]
'6' ( 54) |011100 1c [ 6]
'7' ( 55) |011101 1d [ 6]
'8' ( 56) |011110 1e [ 6]
'9' ( 57) |011111 1f [ 6]
':' ( 58) |1011100 5c [ 7]
';' ( 59) |11111011 fb [ 8]
'<' ( 60) |11111111|1111100 7ffc [15]
'=' ( 61) |100000 20 [ 6]
'>' ( 62) |11111111|1011 ffb [12]
'?' ( 63) |11111111|00 3fc [10]
'@' ( 64) |11111111|11010 1ffa [13]
'A' ( 65) |100001 21 [ 6]
'B' ( 66) |1011101 5d [ 7]
'C' ( 67) |1011110 5e [ 7]
'D' ( 68) |1011111 5f [ 7]
'E' ( 69) |1100000 60 [ 7]
'F' ( 70) |1100001 61 [ 7]
'G' ( 71) |1100010 62 [ 7]
'H' ( 72) |1100011 63 [ 7]
'I' ( 73) |1100100 64 [ 7]
'J' ( 74) |1100101 65 [ 7]
'K' ( 75) |1100110 66 [ 7]
'L' ( 76) |1100111 67 [ 7]
'M' ( 77) |1101000 68 [ 7]
'N' ( 78) |1101001 69 [ 7]
'O' ( 79) |1101010 6a [ 7]
'P' ( 80) |1101011 6b [ 7]
'Q' ( 81) |1101100 6c [ 7]
'R' ( 82) |1101101 6d [ 7]
'S' ( 83) |1101110 6e [ 7]
'T' ( 84) |1101111 6f [ 7]
'U' ( 85) |1110000 70 [ 7]
'V' ( 86) |1110001 71 [ 7]
'W' ( 87) |1110010 72 [ 7]
'X' ( 88) |11111100 fc [ 8]
'Y' ( 89) |1110011 73 [ 7]
'Z' ( 90) |11111101 fd [ 8]
'[' ( 91) |11111111|11011 1ffb [13]
'\' ( 92) |11111111|11111110|000 7fff0 [19]
']' ( 93) |11111111|11100 1ffc [13]
'^' ( 94) |11111111|111100 3ffc [14]
'_' ( 95) |100010 22 [ 6]
'`' ( 96) |11111111|1111101 7ffd [15]
'a' ( 97) |00011 3 [ 5]
'b' ( 98) |100011 23 [ 6]
'c' ( 99) |00100 4 [ 5]
'd' (100) |100100 24 [ 6]
'e' (101) |00101 5 [ 5]
'f' (102) |100101 25 [ 6]
'g' (103) |100110 26 [ 6]
'h' (104) |100111 27 [ 6]
'i' (105) |00110 6 [ 5]
'j' (106) |1110100 74 [ 7]
'k' (107) |1110101 75 [ 7]
'l' (108) |101000 28 [ 6]
'm' (109) |101001 29 [ 6]
'n' (110) |101010 2a [ 6]
'o' (111) |00111 7 [ 5]
'p' (112) |101011 2b [ 6]
'q' (113) |1110110 76 [ 7]
'r' (114) |101100 2c [ 6]
's' (115) |01000 8 [ 5]
't' (116) |01001 9 [ 5]
'u' (117) |101101 2d [ 6]
'v' (118) |1110111 77 [ 7]
'w' (119) |1111000 78 [ 7]
'x' (120) |1111001 79 [ 7]
'y' (121) |1111010 7a [ 7]
'z' (122) |1111011 7b [ 7]
'{' (123) |11111111|1111110 7ffe [15]
'|' (124) |11111111|100 7fc [11]
'}' (125) |11111111|111101 3ffd [14]
'~' (126) |11111111|11101 1ffd [13]
(127) |11111111|11111111|11111111|1100 ffffffc [28]
(128) |11111111|11111110|0110 fffe6 [20]
(129) |11111111|11111111|010010 3fffd2 [22]
(130) |11111111|11111110|0111 fffe7 [20]
(131) |11111111|11111110|1000 fffe8 [20]
(132) |11111111|11111111|010011 3fffd3 [22]
(133) |11111111|11111111|010100 3fffd4 [22]
(134) |11111111|11111111|010101 3fffd5 [22]
(135) |11111111|11111111|1011001 7fffd9 [23]
(136) |11111111|11111111|010110 3fffd6 [22]
(137) |11111111|11111111|1011010 7fffda [23]
(138) |11111111|11111111|1011011 7fffdb [23]
(139) |11111111|11111111|1011100 7fffdc [23]
(140) |11111111|11111111|1011101 7fffdd [23]
(141) |11111111|11111111|1011110 7fffde [23]
(142) |11111111|11111111|11101011 ffffeb [24]
(143) |11111111|11111111|1011111 7fffdf [23]
(144) |11111111|11111111|11101100 ffffec [24]
(145) |11111111|11111111|11101101 ffffed [24]
(146) |11111111|11111111|010111 3fffd7 [22]
(147) |11111111|11111111|1100000 7fffe0 [23]
(148) |11111111|11111111|11101110 ffffee [24]
(149) |11111111|11111111|1100001 7fffe1 [23]
(150) |11111111|11111111|1100010 7fffe2 [23]
(151) |11111111|11111111|1100011 7fffe3 [23]
(152) |11111111|11111111|1100100 7fffe4 [23]
(153) |11111111|11111110|11100 1fffdc [21]
(154) |11111111|11111111|011000 3fffd8 [22]
(155) |11111111|11111111|1100101 7fffe5 [23]
(156) |11111111|11111111|011001 3fffd9 [22]
(157) |11111111|11111111|1100110 7fffe6 [23]
(158) |11111111|11111111|1100111 7fffe7 [23]
(159) |11111111|11111111|11101111 ffffef [24]
(160) |11111111|11111111|011010 3fffda [22]
(161) |11111111|11111110|11101 1fffdd [21]
(162) |11111111|11111110|1001 fffe9 [20]
(163) |11111111|11111111|011011 3fffdb [22]
(164) |11111111|11111111|011100 3fffdc [22]
(165) |11111111|11111111|1101000 7fffe8 [23]
(166) |11111111|11111111|1101001 7fffe9 [23]
(167) |11111111|11111110|11110 1fffde [21]
(168) |11111111|11111111|1101010 7fffea [23]
(169) |11111111|11111111|011101 3fffdd [22]
(170) |11111111|11111111|011110 3fffde [22]
(171) |11111111|11111111|11110000 fffff0 [24]
(172) |11111111|11111110|11111 1fffdf [21]
(173) |11111111|11111111|011111 3fffdf [22]
(174) |11111111|11111111|1101011 7fffeb [23]
(175) |11111111|11111111|1101100 7fffec [23]
(176) |11111111|11111111|00000 1fffe0 [21]
(177) |11111111|11111111|00001 1fffe1 [21]
(178) |11111111|11111111|100000 3fffe0 [22]
(179) |11111111|11111111|00010 1fffe2 [21]
(180) |11111111|11111111|1101101 7fffed [23]
(181) |11111111|11111111|100001 3fffe1 [22]
(182) |11111111|11111111|1101110 7fffee [23]
(183) |11111111|11111111|1101111 7fffef [23]
(184) |11111111|11111110|1010 fffea [20]
(185) |11111111|11111111|100010 3fffe2 [22]
(186) |11111111|11111111|100011 3fffe3 [22]
(187) |11111111|11111111|100100 3fffe4 [22]
(188) |11111111|11111111|1110000 7ffff0 [23]
(189) |11111111|11111111|100101 3fffe5 [22]
(190) |11111111|11111111|100110 3fffe6 [22]
(191) |11111111|11111111|1110001 7ffff1 [23]
(192) |11111111|11111111|11111000|00 3ffffe0 [26]
(193) |11111111|11111111|11111000|01 3ffffe1 [26]
(194) |11111111|11111110|1011 fffeb [20]
(195) |11111111|11111110|001 7fff1 [19]
(196) |11111111|11111111|100111 3fffe7 [22]
(197) |11111111|11111111|1110010 7ffff2 [23]
(198) |11111111|11111111|101000 3fffe8 [22]
(199) |11111111|11111111|11110110|0 1ffffec [25]
(200) |11111111|11111111|11111000|10 3ffffe2 [26]
(201) |11111111|11111111|11111000|11 3ffffe3 [26]
(202) |11111111|11111111|11111001|00 3ffffe4 [26]
(203) |11111111|11111111|11111011|110 7ffffde [27]
(204) |11111111|11111111|11111011|111 7ffffdf [27]
(205) |11111111|11111111|11111001|01 3ffffe5 [26]
(206) |11111111|11111111|11110001 fffff1 [24]
(207) |11111111|11111111|11110110|1 1ffffed [25]
(208) |11111111|11111110|010 7fff2 [19]
(209) |11111111|11111111|00011 1fffe3 [21]
(210) |11111111|11111111|11111001|10 3ffffe6 [26]
(211) |11111111|11111111|11111100|000 7ffffe0 [27]
(212) |11111111|11111111|11111100|001 7ffffe1 [27]
(213) |11111111|11111111|11111001|11 3ffffe7 [26]
(214) |11111111|11111111|11111100|010 7ffffe2 [27]
(215) |11111111|11111111|11110010 fffff2 [24]
(216) |11111111|11111111|00100 1fffe4 [21]
(217) |11111111|11111111|00101 1fffe5 [21]
(218) |11111111|11111111|11111010|00 3ffffe8 [26]
(219) |11111111|11111111|11111010|01 3ffffe9 [26]
(220) |11111111|11111111|11111111|1101 ffffffd [28]
(221) |11111111|11111111|11111100|011 7ffffe3 [27]
(222) |11111111|11111111|11111100|100 7ffffe4 [27]
(223) |11111111|11111111|11111100|101 7ffffe5 [27]
(224) |11111111|11111110|1100 fffec [20]
(225) |11111111|11111111|11110011 fffff3 [24]
(226) |11111111|11111110|1101 fffed [20]
(227) |11111111|11111111|00110 1fffe6 [21]
(228) |11111111|11111111|101001 3fffe9 [22]
(229) |11111111|11111111|00111 1fffe7 [21]
(230) |11111111|11111111|01000 1fffe8 [21]
(231) |11111111|11111111|1110011 7ffff3 [23]
(232) |11111111|11111111|101010 3fffea [22]
(233) |11111111|11111111|101011 3fffeb [22]
(234) |11111111|11111111|11110111|0 1ffffee [25]
(235) |11111111|11111111|11110111|1 1ffffef [25]
(236) |11111111|11111111|11110100 fffff4 [24]
(237) |11111111|11111111|11110101 fffff5 [24]
(238) |11111111|11111111|11111010|10 3ffffea [26]
(239) |11111111|11111111|1110100 7ffff4 [23]
(240) |11111111|11111111|11111010|11 3ffffeb [26]
(241) |11111111|11111111|11111100|110 7ffffe6 [27]
(242) |11111111|11111111|11111011|00 3ffffec [26]
(243) |11111111|11111111|11111011|01 3ffffed [26]
(244) |11111111|11111111|11111100|111 7ffffe7 [27]
(245) |11111111|11111111|11111101|000 7ffffe8 [27]
(246) |11111111|11111111|11111101|001 7ffffe9 [27]
(247) |11111111|11111111|11111101|010 7ffffea [27]
(248) |11111111|11111111|11111101|011 7ffffeb [27]
(249) |11111111|11111111|11111111|1110 ffffffe [28]
(250) |11111111|11111111|11111101|100 7ffffec [27]
(251) |11111111|11111111|11111101|101 7ffffed [27]
(252) |11111111|11111111|11111101|110 7ffffee [27]
(253) |11111111|11111111|11111101|111 7ffffef [27]
(254) |11111111|11111111|11111110|000 7fffff0 [27]
(255) |11111111|11111111|11111011|10 3ffffee [26]
EOS (256) |11111111|11111111|11111111|111111 3fffffff [30]
"""
class Node:
    """One node of the canonical Huffman decoding tree."""

    def __init__(self, term=None):
        # Decoded symbol for a leaf; None on internal nodes.
        self.term = term
        # State id assigned later to internal nodes only.
        self.id = None
        # True when the decoder may legally stop in this state.
        self.accept = False
        # Child links: left is the 0-bit edge, right the 1-bit edge.
        self.left = None
        self.right = None
        # Per-node transition entries, filled in by the table builder.
        self.trans = []
class Context:
    """Holds the Huffman tree root and hands out sequential state ids."""

    def __init__(self):
        self.next_id_ = 0   # next unused decoder-state id
        self.root = Node()  # root of the decoding tree

    def next_id(self):
        """Return the current id and advance the counter."""
        allocated = self.next_id_
        self.next_id_ += 1
        return allocated
def _add(node, sym, bits):
    """Insert symbol *sym* into the tree below *node* along the path *bits*.

    *bits* is a string of '0'/'1' characters; a '0' descends left, a '1'
    descends right, creating children on demand.  The final node becomes a
    leaf holding *sym*.
    """
    if not bits:
        node.term = sym
        return
    if bits[0] == '0':
        if node.left is None:
            node.left = Node()
        child = node.left
    else:
        if node.right is None:
            node.right = Node()
        child = node.right
    _add(child, sym, bits[1:])
def huffman_tree_add(ctx, sym, bits):
    """Add symbol *sym* with code string *bits* to the tree rooted at ctx.root."""
    _add(ctx.root, sym, bits)
def _set_node_id(ctx, node, prefix):
    """Assign decoder-state ids (and accept flags) to every internal node.

    *prefix* is the list of bits consumed since the last decoded symbol.
    """
    # Leaves carry a terminal symbol and need no state id.
    if node.term is not None:
        return
    # A state reached via at most seven consecutive 1-bits may legally end
    # the input (presumably the all-ones EOS-prefix padding — see the EOS
    # row of the code table), so mark it accepting.
    if len(prefix) <= 7 and all(bit == 1 for bit in prefix):
        node.accept = True
    node.id = ctx.next_id()
    _set_node_id(ctx, node.left, prefix + [0])
    _set_node_id(ctx, node.right, prefix + [1])
def huffman_tree_set_node_id(ctx):
    """Number every internal node of the tree, starting from the root."""
    _set_node_id(ctx, ctx.root, [])
def _traverse(node, sym, start_node, root, left):
    """Explore all bit strings of length *left* from *node*, recording the
    outcome of each 4-bit input chunk as a (next-node, symbol) pair on
    *start_node*.trans.

    *sym* is the symbol decoded so far on this path (at most one per 4-bit
    chunk, since the shortest code in the table is 5 bits), or None.
    """
    if left == 0:
        # Consumed 4 bits: record the transition.  Symbol 256 is EOS, which
        # must never appear in valid input, so it is recorded as a failure
        # (node=None) with no symbol.
        if sym == 256:
            sym = None
            node = None
        start_node.trans.append((node, sym))
        return
    if node.term is not None:
        # Reached a leaf mid-chunk: restart from the root for the next code.
        node = root
    def go(node):
        if node.term is not None:
            # Landing on a leaf decodes its symbol; the assert guarantees we
            # never decode two symbols within one 4-bit chunk.
            assert sym is None
            nsym = node.term
        else:
            nsym = sym
        _traverse(node, nsym, start_node, root, left - 1)
    go(node.left)
    go(node.right)
def _build_transition_table(ctx, node):
    """Fill node.trans (16 entries, one per input nibble) for every node."""
    if node is None:
        return
    _traverse(node, None, node, ctx.root, 4)
    _build_transition_table(ctx, node.left)
    _build_transition_table(ctx, node.right)
def huffman_tree_build_transition_table(ctx):
    """Build the nibble-indexed transition tables for the whole tree."""
    _build_transition_table(ctx, ctx.root)
# Decoder-state flags emitted into the generated C tables.
NGHTTP2_HUFF_ACCEPTED = 1       # state may legally end the input
NGHTTP2_HUFF_SYM = 1 << 1       # transition emits a decoded symbol
NGHTTP2_HUFF_FAIL = 1 << 2      # decoding failure (e.g. EOS in the input)
def _print_transition_table(node):
    """Emit this node's 16-entry C transition table, then recurse.

    Python 2 print statements; output is C source for
    lib/nghttp2_hd_huffman_data.c.
    """
    # Leaves have no state of their own; skip them.
    if node.term is not None:
        return
    print '/* {} */'.format(node.id)
    print '{'
    for nd, sym in node.trans:
        flags = 0
        if sym is None:
            out = 0
        else:
            out = sym
            flags |= NGHTTP2_HUFF_SYM
        if nd is None:
            # No next node: the transition was recorded as a failure (EOS).
            id = 0
            flags |= NGHTTP2_HUFF_FAIL
        else:
            id = nd.id
            if id is None:
                # if nd.id is None, it is a leaf node
                id = 0
                flags |= NGHTTP2_HUFF_ACCEPTED
            elif nd.accept:
                flags |= NGHTTP2_HUFF_ACCEPTED
        print '  {{{}, 0x{:02x}, {}}},'.format(id, flags, out)
    print '},'
    _print_transition_table(node.left)
    _print_transition_table(node.right)
def huffman_tree_print_transition_table(ctx):
    # Print the whole huff_decode_table body, starting from the root state.
    _print_transition_table(ctx.root)
if __name__ == '__main__':
    ctx = Context()
    # (nbits, code) per symbol; 257 entries (bytes 0-255 plus EOS = 256).
    symbol_tbl = [(None, 0) for i in range(257)]
    # Parse the Huffman code table pasted in HUFFMAN_CODE_TABLE; each
    # matched line yields: (symbol)  bit-pattern  hex-code  [nbits].
    for line in StringIO.StringIO(HUFFMAN_CODE_TABLE):
        m = re.match(
            r'.*\(\s*(\d+)\)\s+([|01]+)\s+(\S+)\s+\[\s*(\d+)\].*', line)
        if m:
            sym = int(m.group(1))
            bits = re.sub(r'\|', '', m.group(2))
            code = m.group(3)
            nbits = int(m.group(4))
            if len(code) > 8:
                # 8 hex digits == 4 bytes, the widest code we can store.
                raise Error('Code is more than 4 bytes long')
            assert(len(bits) == nbits)
            symbol_tbl[sym] = (nbits, code)
            huffman_tree_add(ctx, sym, bits)
    huffman_tree_set_node_id(ctx)
    huffman_tree_build_transition_table(ctx)
    # All prints below use the single-argument function-call form so the
    # script produces identical output under both Python 2 and Python 3.
    print('''\
typedef struct {
uint32_t nbits;
uint32_t code;
} nghttp2_huff_sym;
''')
    print('''\
const nghttp2_huff_sym huff_sym_table[] = {''')
    for i in range(257):
        print('''\
{{ {}, 0x{}u }}{}\
'''.format(symbol_tbl[i][0], symbol_tbl[i][1], ',' if i < 256 else ''))
    print('};')
    print('')
    print('''\
enum {{
NGHTTP2_HUFF_ACCEPTED = {},
NGHTTP2_HUFF_SYM = {},
NGHTTP2_HUFF_FAIL = {},
}} nghttp2_huff_decode_flag;
'''.format(NGHTTP2_HUFF_ACCEPTED, NGHTTP2_HUFF_SYM, NGHTTP2_HUFF_FAIL))
    print('''\
typedef struct {
uint8_t state;
uint8_t flags;
uint8_t sym;
} nghttp2_huff_decode;
''')
    print('''\
const nghttp2_huff_decode huff_decode_table[][16] = {''')
    huffman_tree_print_transition_table(ctx)
    print('};')
| StarcoderdataPython |
# Reads N, Q, an array A (1-indexed via a leading 0), then Q queries.
# (Removed dataset-id residue fused onto the first line and the dead
# commented-out debug prints.)
N, Q = map(int, input().split())
A = [0] + list(map(int, input().split()))
for i in range(Q):
    t, x, y = map(int, input().split())
    if t == 1:
        # Type-1 query: XOR-update element x in place.
        A[x] = A[x] ^ y
    if t == 2:
        # Type-2 query.
        # NOTE(review): A[x:-1] drops the LAST element of A, and the loop
        # below prints a pairwise (not cumulative) XOR at every step —
        # both look suspicious for a typical range-XOR problem; confirm
        # against the original problem statement.  The placement of the
        # print inside the loop is the most natural reading of the
        # (indentation-stripped) original — TODO confirm.
        xor_list = A[x:-1] + [y]
        for j in range(len(xor_list) - 1):
            res = xor_list[j] ^ xor_list[j + 1]
            print(res)
3222764 | #!/usr/bin/env python
from unittest import TestLoader, TestSuite, TextTestRunner
import test_main
import test_minimal
import test_formula
import test_json
import test_use_case
import test_styles
# All test modules whose tests should be collected into one suite.
modules = [
    test_main,
    test_minimal,
    test_formula,
    test_json,
    test_use_case,
    test_styles,
]

loader = TestLoader()

if __name__ == "__main__":
    # Collect every module's tests into a single suite and run it.
    suite = TestSuite()
    suite.addTests(loader.loadTestsFromModule(m) for m in modules)
    TextTestRunner(verbosity=1).run(suite)
| StarcoderdataPython |
6662803 | import random
def quick_sort_helper(a, beg, end):
    """Sort a[beg:end + 1] in place with Lomuto partitioning around a
    randomly chosen pivot."""
    size = end - beg + 1
    if size <= 1:
        return
    if size == 2:
        # Tiny range: a single compare-and-swap finishes the job.
        if a[beg] > a[end]:
            a[beg], a[end] = a[end], a[beg]
        return
    # Pick a random pivot and move it to the front of the range.
    choice = random.randint(beg, end)
    pivot = a[choice]
    if choice != beg:
        a[beg], a[choice] = a[choice], a[beg]
    # Partition: everything < pivot ends up in a[beg + 1 .. boundary].
    boundary = beg
    for scan in range(beg + 1, end + 1):
        if a[scan] < pivot:
            boundary += 1
            a[boundary], a[scan] = a[scan], a[boundary]
    # Put the pivot between the two partitions and recurse on each side.
    a[beg], a[boundary] = a[boundary], a[beg]
    quick_sort_helper(a, beg, boundary - 1)
    quick_sort_helper(a, boundary + 1, end)
def quick_sort(a):
    """Sort list `a` in place using randomized quicksort."""
    quick_sort_helper(a, 0, len(a) - 1)
import pytest
@pytest.mark.parametrize("i", range(20))
def test_quick_sort(i):
    """Randomized check: quick_sort must agree with sorted() elementwise."""
    size = random.randint(10, 30)
    data = [random.randint(0, 1000) for _ in range(size)]
    expected = sorted(data)
    quick_sort(data)
    print(data)
    print(expected)
    for pos in range(size):
        assert data[pos] == expected[pos]
@pytest.mark.parametrize("i", range(20))
def test_quick_sort_rng(i):
    """Same randomized check, but driven by the OS-backed SystemRandom."""
    rng = random.SystemRandom()
    size = rng.randint(10, 30)
    data = [rng.randint(0, 1000) for _ in range(size)]
    expected = sorted(data)
    quick_sort(data)
    print(data)
    print(expected)
    for pos in range(size):
        assert data[pos] == expected[pos]
# NOTE(review): pytest.main() at module level runs the whole test session
# whenever this file is imported, not just when executed directly; this
# would normally sit behind an `if __name__ == "__main__":` guard.
pytest.main()
| StarcoderdataPython |
390871 | from Levenshtein import distance
import pandas as pd
def compute_lev(x, y):
    """Levenshtein distance between `x` and `y`.

    Falls back to len(y) when `x` is not a valid string (e.g. a NaN cell
    from a pandas column), which `distance` rejects with a TypeError.
    """
    try:
        result = distance(x, y)
    except TypeError:
        result = len(y)
    return result
def compute_lev_norm(x, y):
    """Distance `y` normalized by the length of `x`.

    Returns 1 when the division is impossible (e.g. `x` is NaN and has no
    length), mirroring the fallback behaviour of compute_lev.
    """
    try:
        ratio = y / len(x)
    except TypeError:
        ratio = 1
    return ratio
def compute_lev_table_from_airr(airr, target_antibody):
    """Build a per-sequence Levenshtein-distance table against a target antibody.

    For each framework/CDR region (fwr1..fwr4, cdr1..cdr3, in the original
    column order) the table gets a raw distance column (``<region>_aa_lev``)
    and a length-normalized column (``<region>_aa_lev_norm``), followed by
    the aggregate VH columns.

    Fixes: the 14 near-identical copy-pasted column blocks are collapsed
    into one loop, and normalization now goes through compute_lev_norm so a
    NaN region string normalizes to 1 instead of raising TypeError —
    consistent with the NaN fallback already present in compute_lev.

    Parameters
    ----------
    airr : pandas.DataFrame
        AIRR-format table with ``<region>_aa`` string columns (may contain NaN).
    target_antibody : mapping
        Maps ``<region>_aa`` to an object exposing ``get_aa()`` returning
        the target amino-acid string for that region.

    Returns
    -------
    pandas.DataFrame
        One row per input row, indexed like ``airr``.
    """
    lev_df = pd.DataFrame()
    # One raw + one normalized column per region, preserving column order.
    for region in ("fwr1", "cdr1", "fwr2", "cdr2", "fwr3", "cdr3", "fwr4"):
        col = region + "_aa"
        target_aa = target_antibody[col].get_aa()
        lev_df[col + "_lev"] = airr[col].apply(
            lambda x, t=target_aa: compute_lev(x, t)
        )
        lev_df[col + "_lev_norm"] = list(
            map(compute_lev_norm, airr[col], lev_df[col + "_lev"])
        )
    # Aggregate VH columns (fwr4 is excluded from the sums, as before).
    lev_df["vh_fwr_lev_sum"] = (
        lev_df["fwr1_aa_lev"] + lev_df["fwr2_aa_lev"] + lev_df["fwr3_aa_lev"]
    )
    lev_df["vh_fwr_lev_norm"] = (
        lev_df["fwr1_aa_lev_norm"] + lev_df["fwr2_aa_lev_norm"] + lev_df["fwr3_aa_lev_norm"]
    )
    lev_df["vh_cdr_lev_sum"] = lev_df["cdr1_aa_lev"] + lev_df["cdr2_aa_lev"]
    lev_df["vh_cdr_lev_norm"] = lev_df["cdr1_aa_lev_norm"] + lev_df["cdr2_aa_lev_norm"]
    lev_df["vh_cdr_lev_sum_w_cdr3"] = (
        lev_df["cdr1_aa_lev"] + lev_df["cdr2_aa_lev"] + lev_df["cdr3_aa_lev"]
    )
    lev_df["vh_cdr_lev_norm_w_cdr3"] = (
        lev_df["cdr1_aa_lev_norm"] + lev_df["cdr2_aa_lev_norm"] + lev_df["cdr3_aa_lev_norm"]
    )
    lev_df["vh_sum"] = lev_df["vh_fwr_lev_sum"] + lev_df["vh_cdr_lev_sum"]
    lev_df["vh_norm"] = lev_df["vh_fwr_lev_norm"] + lev_df["vh_cdr_lev_norm"]
    # Align with the input rows.
    lev_df.index = airr.index
    return lev_df
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.