file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
BratsCRFNeighborWhole.py | # -*- coding: utf-8 -*-
"""
Created on Tue Feb 2 11:10:34 2016
@author: rsk
"""
import numpy as np
import pystruct
import re
import time
import nibabel as nib
import cPickle
import gzip
import os
import sys
import matplotlib.cm as cm
import matplotlib.pyplot as plt
from math import *
import itertools
os.chdir("/home/bmi/CRF")
from pystruct.models import GraphCRF, LatentNodeCRF
from pystruct.learners import NSlackSSVM, OneSlackSSVM, LatentSSVM, FrankWolfeSSVM
from pystruct.datasets import make_simple_2x2
from pystruct.utils import make_grid_edges, plot_grid
from sklearn.metrics import confusion_matrix,precision_score,recall_score,f1_score
from collections import Counter
from CRFUtils import *
from BratsCheckPredictions import *
import math
from math import *
train_path="/media/bmi/MyPassport/new_n4/Recon_2013_data/N4_zscore_training_t1_t1c_hist_match"
test_path="/media/bmi/MyPassport/new_n4/Recon_2013_data/N4_zscore_testing_t1_t1c_hist_match"
#train_path="/media/bmi/MyPassport/n4_entire/Recon_2013_data/training_longitudnal"
#test_path="/media/bmi/MyPassport/n4_entire/Recon_2013_data/testing_longitudnal"
#train_path="/home/rsk/Documents/PyStruct/data/train"
#test_path="/home/rsk/Documents/PyStruct/data/test"
#%%
#################################################################################################
#Training the model
def trainModel_Neighbor(num_iter=5,inference="qpbo",trainer="NSlack",num_train=2,num_test=1,C=0.1,edges="180x180_dist1_diag0",inputs=[1,1,1,1,1,1],features="post+img+pred",neighbor=-1,directed=False,savePred=False):
padding=(30,30,30,30)
if directed==True:
features +="+directed"
resultsDir = os.getcwd()+'/CRFResults'
nameLen = len(os.listdir(resultsDir))
edgeFeature = edges
filename=str(nameLen)+"_CRF_iter_"+str(num_iter)+"_"+inference+"_"+trainer+"_"+features+"_"+str(num_train)+"_"+str(num_test)+"_"+edgeFeature
print "Loading training slices"
start = time.clock()
train =extractSlices2(train_path,num_train,padding,neighbor=neighbor,inputs=inputs)
end= time.clock()
train_load_time = (end-start)/60.0
[trainLayers,trainTruth,sliceShape] = train
print "Training slices loaded in %f" % (train_load_time)
n_features= len(trainLayers[0][0,0])
print "Layer shape is : "
print trainLayers[0].shape
print "Training the model"
edges= np.load("/home/bmi/CRF/edges/"+edges+".npy")
G = [edges for x in trainLayers]
print trainLayers[0].shape
trainLayers = np.array( [x.reshape((sliceShape[0]*sliceShape[1],n_features)) for x in trainLayers] )
trainTruth = np.array( [x.reshape((sliceShape[0]*sliceShape[1],)).astype(int) for x in trainTruth] )
if inference=='ogm':
crf = GraphCRF(inference_method=('ogm',{'alg':'fm'}),directed=directed)
else:
crf = GraphCRF(inference_method=inference,directed=directed)
if trainer=="Frank":
svm = FrankWolfeSSVM(model = crf,max_iter=num_iter,C=C,n_jobs=6,verbose=1)
elif trainer=="NSlack":
svm = NSlackSSVM(model = crf,max_iter=num_iter,C=C,n_jobs=-1,verbose=1)
else:
svm = OneSlackSSVM(model = crf,max_iter=num_iter,C=C,n_jobs=-1,verbose=1)
start = time.clock()
asdf = zip(trainLayers,G)
svm.fit(asdf,trainTruth)
end = time.clock()
train_time = (end-start)/60.0
print "The training took %f" % (train_time)
print "Model parameter size :"
print svm.w.shape
print "making predictions on train data"
predTrain = svm.predict(asdf)
trainDice=[]
for i in range(len(trainLayers)):
diceScore = accuracy(predTrain[i],trainTruth[i])
trainDice.append(diceScore)
meanTrainDice = sum(trainDice)/len(trainLayers)
del trainLayers,trainTruth
################################################################################################
overallDicePerPatient=[] # For overall test Dice
extDicePerPatient=[]
PatientTruthLayers=[]
PatientPredLayers=[]
PREC=[]
RECALL=[]
F1=[]
LayerwiseDiceTotal=[]
testResultFile = open(os.getcwd()+"/CRFResults/"+filename+".csv",'a')
testResultFile.write("folderName,numLayers, Overall Dice, precision , recall, extDice"+"\n")
counter=0
print "Loading the test slices"
for folder in os.listdir(test_path):
path = test_path + "/" + folder
layerDiceScores=''
data = extractTestSlices2(path,padding,neighbor=neighbor,inputs=inputs)
if data!=0:
[testLayers,testTruth,sliceShape,startSlice,endSlice] = data
# trueTestLayers=testLayers | testLayers = np.array( [x.reshape((sliceShape[0]*sliceShape[1],n_features)) for x in testLayers] )
testTruth = np.array( [x.reshape((sliceShape[0]*sliceShape[1],)).astype(int) for x in testTruth] )
asdfTest = zip(testLayers,GTest)
predTest = svm.predict(asdfTest)
LayerwiseDice=[]
for i in range(len(testLayers)):
diceScore = accuracy(predTest[i],testTruth[i])
layerDiceScores+=","+str(diceScore)
if math.isnan(diceScore):
if sum(predTest[i])==0 and sum(testTruth[i])==0:
LayerwiseDice.append(1.0)
continue
LayerwiseDice.append(diceScore)
LayerwiseDiceTotal.append(LayerwiseDice)
### Imputing the predicted pixels into full volume
if savePred==True:
finalPatientPred = np.zeros((240,240,150))
finalPatientTruth = np.zeros((240,240,150))
predInsert = np.dstack(tuple([x.reshape(180,180) for x in predTest]))
truthInsert = np.dstack(tuple([x.reshape(180,180) for x in testTruth]))
finalPatientPred[30:(240-30),30:(240-30),startSlice:endSlice] = predInsert
finalPatientTruth[30:(240-30),30:(240-30),startSlice:endSlice] = truthInsert
finalPatientPred = finalPatientPred.astype('int')
# print "saving at "+ path+"/"+filename+"whole"
np.save(path+"/"+folder+filename+"whole",finalPatientPred)
# print "predInsert shape"
# print predInsert.shape
# finalPatientPred = np.reshape(finalPatientPred,(240*240*150,)).astype('int')
# finalPatientTruth = np.reshape(finalPatientTruth,(240*240*150,)).astype('int')
#
# print "Counters"
# print Counter(list(np.hstack(testTruth)))
# print Counter(list(finalPatientTruth))
# print confusion_matrix(np.hstack(predTest),np.hstack(testTruth))
# print confusion_matrix(finalPatientPred,finalPatientTruth)
overallTestDice = accuracy(np.hstack(predTest),np.hstack(testTruth))
extDice = np.mean ( np.array(LayerwiseDice)[ range(10) + range(len(LayerwiseDice)-10, len(LayerwiseDice)) ] )
prec,recall,f1 = precision_score(np.hstack(testTruth),np.hstack(predTest)) , recall_score(np.hstack(testTruth),np.hstack(predTest)) , f1_score(np.hstack(testTruth),np.hstack(predTest))
print "Patient %d : Overall test DICE for %s is : %f and extDice is %f"%(counter,folder,overallTestDice,extDice)
print "Precision : %f Recall : %f F1 : %f " %(prec,recall,f1)
print "__________________________________________"
# testResultFile.write(folder+","+str(len(testLayers))+","+str(meanTestDice)+","+str(overallTestDice) ","+str(np.max(testDice)) +","+ str(np.min(testDice))+"\n" )
testResultFile.write(folder+","+str(len(testLayers)) + ","+ str(overallTestDice) + ","+str(prec)+","+str(recall)+","+str(extDice)+layerDiceScores+"\n" )
overallDicePerPatient.append(overallTestDice)
extDicePerPatient.append(extDice)
PREC.append(prec), RECALL.append(recall) , F1.append(f1)
PatientTruthLayers.append(testTruth)
PatientPredLayers.append(predTest)
counter+=1
if counter==num_test and num_test!=-1:
break
######################################################################################################
print "Done testing slices"
overallDice = sum(overallDicePerPatient)/len(PatientTruthLayers)
overallPrec = sum(PREC)/len(PatientTruthLayers)
overallRecall = sum(RECALL)/len(PatientTruthLayers)
overallExtDice = np.mean(extDicePerPatient)
print "Overall DICE : %f Precision : %f Recall : %f extDice : %f "%(overallDice,overallPrec,overallRecall,overallExtDice)
print "############################################"
# testOutput=np.array([PatientPredLayers,PatientTruthLayers,trueTestLayers])
testOutput=np.array([PatientPredLayers,PatientTruthLayers])
########### Saving the models ######################################################################
# print "Saving the model"
# modelDir = os.getcwd()+"/CRFModel/"
# svmModel = open(modelDir+filename+"_model"+".pkl",'wb')
# cPickle.dump(svm,svmModel,protocol=cPickle.HIGHEST_PROTOCOL)
# svmModel.close()
#
# print "saving the predictions"
# predFileTest = open(os.getcwd()+"/CRFPred/"+filename+"_pred.pkl",'wb')
# cPickle.dump(testOutput,predFileTest,protocol=cPickle.HIGHEST_PROTOCOL)
# predFileTest.close()
#Saving layerWise PatientScore
layerDataLog = open(os.getcwd()+"/CRFModel/"+filename+"_layer.pkl",'wb')
cPickle.dump(LayerwiseDiceTotal,layerDataLog,protocol = cPickle.HIGHEST_PROTOCOL)
layerDataLog.close()
resultLog = os.getcwd()+"/CRFResults/TestResultFinal.csv"
resultFile = open(resultLog,'a')
resultFile.write(time.ctime()+","+str(num_iter)+","+str(num_train)+","+str(num_test)+","+inference+","+
trainer+","+str(C)+","+str(train_time)+","+str(meanTrainDice)+","+str(overallDice)+","+
str(np.std(overallDicePerPatient))+","+edgeFeature+","+"None"+","+features+","+filename +","+ str(overallPrec) +","+ str(overallRecall) +","+ str(overallExtDice)+","+"Flair(5)+T2(9)-Without last 4 train Layers"+"\n")
resultFile.close()
testResultFile.close()
return | GTest = [edges for x in testLayers] | random_line_split |
BratsCRFNeighborWhole.py | # -*- coding: utf-8 -*-
"""
Created on Tue Feb 2 11:10:34 2016
@author: rsk
"""
import numpy as np
import pystruct
import re
import time
import nibabel as nib
import cPickle
import gzip
import os
import sys
import matplotlib.cm as cm
import matplotlib.pyplot as plt
from math import *
import itertools
os.chdir("/home/bmi/CRF")
from pystruct.models import GraphCRF, LatentNodeCRF
from pystruct.learners import NSlackSSVM, OneSlackSSVM, LatentSSVM, FrankWolfeSSVM
from pystruct.datasets import make_simple_2x2
from pystruct.utils import make_grid_edges, plot_grid
from sklearn.metrics import confusion_matrix,precision_score,recall_score,f1_score
from collections import Counter
from CRFUtils import *
from BratsCheckPredictions import *
import math
from math import *
train_path="/media/bmi/MyPassport/new_n4/Recon_2013_data/N4_zscore_training_t1_t1c_hist_match"
test_path="/media/bmi/MyPassport/new_n4/Recon_2013_data/N4_zscore_testing_t1_t1c_hist_match"
#train_path="/media/bmi/MyPassport/n4_entire/Recon_2013_data/training_longitudnal"
#test_path="/media/bmi/MyPassport/n4_entire/Recon_2013_data/testing_longitudnal"
#train_path="/home/rsk/Documents/PyStruct/data/train"
#test_path="/home/rsk/Documents/PyStruct/data/test"
#%%
#################################################################################################
#Training the model
def trainModel_Neighbor(num_iter=5,inference="qpbo",trainer="NSlack",num_train=2,num_test=1,C=0.1,edges="180x180_dist1_diag0",inputs=[1,1,1,1,1,1],features="post+img+pred",neighbor=-1,directed=False,savePred=False):
| padding=(30,30,30,30)
if directed==True:
features +="+directed"
resultsDir = os.getcwd()+'/CRFResults'
nameLen = len(os.listdir(resultsDir))
edgeFeature = edges
filename=str(nameLen)+"_CRF_iter_"+str(num_iter)+"_"+inference+"_"+trainer+"_"+features+"_"+str(num_train)+"_"+str(num_test)+"_"+edgeFeature
print "Loading training slices"
start = time.clock()
train =extractSlices2(train_path,num_train,padding,neighbor=neighbor,inputs=inputs)
end= time.clock()
train_load_time = (end-start)/60.0
[trainLayers,trainTruth,sliceShape] = train
print "Training slices loaded in %f" % (train_load_time)
n_features= len(trainLayers[0][0,0])
print "Layer shape is : "
print trainLayers[0].shape
print "Training the model"
edges= np.load("/home/bmi/CRF/edges/"+edges+".npy")
G = [edges for x in trainLayers]
print trainLayers[0].shape
trainLayers = np.array( [x.reshape((sliceShape[0]*sliceShape[1],n_features)) for x in trainLayers] )
trainTruth = np.array( [x.reshape((sliceShape[0]*sliceShape[1],)).astype(int) for x in trainTruth] )
if inference=='ogm':
crf = GraphCRF(inference_method=('ogm',{'alg':'fm'}),directed=directed)
else:
crf = GraphCRF(inference_method=inference,directed=directed)
if trainer=="Frank":
svm = FrankWolfeSSVM(model = crf,max_iter=num_iter,C=C,n_jobs=6,verbose=1)
elif trainer=="NSlack":
svm = NSlackSSVM(model = crf,max_iter=num_iter,C=C,n_jobs=-1,verbose=1)
else:
svm = OneSlackSSVM(model = crf,max_iter=num_iter,C=C,n_jobs=-1,verbose=1)
start = time.clock()
asdf = zip(trainLayers,G)
svm.fit(asdf,trainTruth)
end = time.clock()
train_time = (end-start)/60.0
print "The training took %f" % (train_time)
print "Model parameter size :"
print svm.w.shape
print "making predictions on train data"
predTrain = svm.predict(asdf)
trainDice=[]
for i in range(len(trainLayers)):
diceScore = accuracy(predTrain[i],trainTruth[i])
trainDice.append(diceScore)
meanTrainDice = sum(trainDice)/len(trainLayers)
del trainLayers,trainTruth
################################################################################################
overallDicePerPatient=[] # For overall test Dice
extDicePerPatient=[]
PatientTruthLayers=[]
PatientPredLayers=[]
PREC=[]
RECALL=[]
F1=[]
LayerwiseDiceTotal=[]
testResultFile = open(os.getcwd()+"/CRFResults/"+filename+".csv",'a')
testResultFile.write("folderName,numLayers, Overall Dice, precision , recall, extDice"+"\n")
counter=0
print "Loading the test slices"
for folder in os.listdir(test_path):
path = test_path + "/" + folder
layerDiceScores=''
data = extractTestSlices2(path,padding,neighbor=neighbor,inputs=inputs)
if data!=0:
[testLayers,testTruth,sliceShape,startSlice,endSlice] = data
# trueTestLayers=testLayers
GTest = [edges for x in testLayers]
testLayers = np.array( [x.reshape((sliceShape[0]*sliceShape[1],n_features)) for x in testLayers] )
testTruth = np.array( [x.reshape((sliceShape[0]*sliceShape[1],)).astype(int) for x in testTruth] )
asdfTest = zip(testLayers,GTest)
predTest = svm.predict(asdfTest)
LayerwiseDice=[]
for i in range(len(testLayers)):
diceScore = accuracy(predTest[i],testTruth[i])
layerDiceScores+=","+str(diceScore)
if math.isnan(diceScore):
if sum(predTest[i])==0 and sum(testTruth[i])==0:
LayerwiseDice.append(1.0)
continue
LayerwiseDice.append(diceScore)
LayerwiseDiceTotal.append(LayerwiseDice)
### Imputing the predicted pixels into full volume
if savePred==True:
finalPatientPred = np.zeros((240,240,150))
finalPatientTruth = np.zeros((240,240,150))
predInsert = np.dstack(tuple([x.reshape(180,180) for x in predTest]))
truthInsert = np.dstack(tuple([x.reshape(180,180) for x in testTruth]))
finalPatientPred[30:(240-30),30:(240-30),startSlice:endSlice] = predInsert
finalPatientTruth[30:(240-30),30:(240-30),startSlice:endSlice] = truthInsert
finalPatientPred = finalPatientPred.astype('int')
# print "saving at "+ path+"/"+filename+"whole"
np.save(path+"/"+folder+filename+"whole",finalPatientPred)
# print "predInsert shape"
# print predInsert.shape
# finalPatientPred = np.reshape(finalPatientPred,(240*240*150,)).astype('int')
# finalPatientTruth = np.reshape(finalPatientTruth,(240*240*150,)).astype('int')
#
# print "Counters"
# print Counter(list(np.hstack(testTruth)))
# print Counter(list(finalPatientTruth))
# print confusion_matrix(np.hstack(predTest),np.hstack(testTruth))
# print confusion_matrix(finalPatientPred,finalPatientTruth)
overallTestDice = accuracy(np.hstack(predTest),np.hstack(testTruth))
extDice = np.mean ( np.array(LayerwiseDice)[ range(10) + range(len(LayerwiseDice)-10, len(LayerwiseDice)) ] )
prec,recall,f1 = precision_score(np.hstack(testTruth),np.hstack(predTest)) , recall_score(np.hstack(testTruth),np.hstack(predTest)) , f1_score(np.hstack(testTruth),np.hstack(predTest))
print "Patient %d : Overall test DICE for %s is : %f and extDice is %f"%(counter,folder,overallTestDice,extDice)
print "Precision : %f Recall : %f F1 : %f " %(prec,recall,f1)
print "__________________________________________"
# testResultFile.write(folder+","+str(len(testLayers))+","+str(meanTestDice)+","+str(overallTestDice) ","+str(np.max(testDice)) +","+ str(np.min(testDice))+"\n" )
testResultFile.write(folder+","+str(len(testLayers)) + ","+ str(overallTestDice) + ","+str(prec)+","+str(recall)+","+str(extDice)+layerDiceScores+"\n" )
overallDicePerPatient.append(overallTestDice)
extDicePerPatient.append(extDice)
PREC.append(prec), RECALL.append(recall) , F1.append(f1)
PatientTruthLayers.append(testTruth)
PatientPredLayers.append(predTest)
counter+=1
if counter==num_test and num_test!=-1:
break
######################################################################################################
print "Done testing slices"
overallDice = sum(overallDicePerPatient)/len(PatientTruthLayers)
overallPrec = sum(PREC)/len(PatientTruthLayers)
overallRecall = sum(RECALL)/len(PatientTruthLayers)
overallExtDice = np.mean(extDicePerPatient)
print "Overall DICE : %f Precision : %f Recall : %f extDice : %f "%(overallDice,overallPrec,overallRecall,overallExtDice)
print "############################################"
# testOutput=np.array([PatientPredLayers,PatientTruthLayers,trueTestLayers])
testOutput=np.array([PatientPredLayers,PatientTruthLayers])
########### Saving the models ######################################################################
# print "Saving the model"
# modelDir = os.getcwd()+"/CRFModel/"
# svmModel = open(modelDir+filename+"_model"+".pkl",'wb')
# cPickle.dump(svm,svmModel,protocol=cPickle.HIGHEST_PROTOCOL)
# svmModel.close()
#
# print "saving the predictions"
# predFileTest = open(os.getcwd()+"/CRFPred/"+filename+"_pred.pkl",'wb')
# cPickle.dump(testOutput,predFileTest,protocol=cPickle.HIGHEST_PROTOCOL)
# predFileTest.close()
#Saving layerWise PatientScore
layerDataLog = open(os.getcwd()+"/CRFModel/"+filename+"_layer.pkl",'wb')
cPickle.dump(LayerwiseDiceTotal,layerDataLog,protocol = cPickle.HIGHEST_PROTOCOL)
layerDataLog.close()
resultLog = os.getcwd()+"/CRFResults/TestResultFinal.csv"
resultFile = open(resultLog,'a')
resultFile.write(time.ctime()+","+str(num_iter)+","+str(num_train)+","+str(num_test)+","+inference+","+
trainer+","+str(C)+","+str(train_time)+","+str(meanTrainDice)+","+str(overallDice)+","+
str(np.std(overallDicePerPatient))+","+edgeFeature+","+"None"+","+features+","+filename +","+ str(overallPrec) +","+ str(overallRecall) +","+ str(overallExtDice)+","+"Flair(5)+T2(9)-Without last 4 train Layers"+"\n")
resultFile.close()
testResultFile.close()
return | identifier_body | |
BratsCRFNeighborWhole.py | # -*- coding: utf-8 -*-
"""
Created on Tue Feb 2 11:10:34 2016
@author: rsk
"""
import numpy as np
import pystruct
import re
import time
import nibabel as nib
import cPickle
import gzip
import os
import sys
import matplotlib.cm as cm
import matplotlib.pyplot as plt
from math import *
import itertools
os.chdir("/home/bmi/CRF")
from pystruct.models import GraphCRF, LatentNodeCRF
from pystruct.learners import NSlackSSVM, OneSlackSSVM, LatentSSVM, FrankWolfeSSVM
from pystruct.datasets import make_simple_2x2
from pystruct.utils import make_grid_edges, plot_grid
from sklearn.metrics import confusion_matrix,precision_score,recall_score,f1_score
from collections import Counter
from CRFUtils import *
from BratsCheckPredictions import *
import math
from math import *
train_path="/media/bmi/MyPassport/new_n4/Recon_2013_data/N4_zscore_training_t1_t1c_hist_match"
test_path="/media/bmi/MyPassport/new_n4/Recon_2013_data/N4_zscore_testing_t1_t1c_hist_match"
#train_path="/media/bmi/MyPassport/n4_entire/Recon_2013_data/training_longitudnal"
#test_path="/media/bmi/MyPassport/n4_entire/Recon_2013_data/testing_longitudnal"
#train_path="/home/rsk/Documents/PyStruct/data/train"
#test_path="/home/rsk/Documents/PyStruct/data/test"
#%%
#################################################################################################
#Training the model
def | (num_iter=5,inference="qpbo",trainer="NSlack",num_train=2,num_test=1,C=0.1,edges="180x180_dist1_diag0",inputs=[1,1,1,1,1,1],features="post+img+pred",neighbor=-1,directed=False,savePred=False):
padding=(30,30,30,30)
if directed==True:
features +="+directed"
resultsDir = os.getcwd()+'/CRFResults'
nameLen = len(os.listdir(resultsDir))
edgeFeature = edges
filename=str(nameLen)+"_CRF_iter_"+str(num_iter)+"_"+inference+"_"+trainer+"_"+features+"_"+str(num_train)+"_"+str(num_test)+"_"+edgeFeature
print "Loading training slices"
start = time.clock()
train =extractSlices2(train_path,num_train,padding,neighbor=neighbor,inputs=inputs)
end= time.clock()
train_load_time = (end-start)/60.0
[trainLayers,trainTruth,sliceShape] = train
print "Training slices loaded in %f" % (train_load_time)
n_features= len(trainLayers[0][0,0])
print "Layer shape is : "
print trainLayers[0].shape
print "Training the model"
edges= np.load("/home/bmi/CRF/edges/"+edges+".npy")
G = [edges for x in trainLayers]
print trainLayers[0].shape
trainLayers = np.array( [x.reshape((sliceShape[0]*sliceShape[1],n_features)) for x in trainLayers] )
trainTruth = np.array( [x.reshape((sliceShape[0]*sliceShape[1],)).astype(int) for x in trainTruth] )
if inference=='ogm':
crf = GraphCRF(inference_method=('ogm',{'alg':'fm'}),directed=directed)
else:
crf = GraphCRF(inference_method=inference,directed=directed)
if trainer=="Frank":
svm = FrankWolfeSSVM(model = crf,max_iter=num_iter,C=C,n_jobs=6,verbose=1)
elif trainer=="NSlack":
svm = NSlackSSVM(model = crf,max_iter=num_iter,C=C,n_jobs=-1,verbose=1)
else:
svm = OneSlackSSVM(model = crf,max_iter=num_iter,C=C,n_jobs=-1,verbose=1)
start = time.clock()
asdf = zip(trainLayers,G)
svm.fit(asdf,trainTruth)
end = time.clock()
train_time = (end-start)/60.0
print "The training took %f" % (train_time)
print "Model parameter size :"
print svm.w.shape
print "making predictions on train data"
predTrain = svm.predict(asdf)
trainDice=[]
for i in range(len(trainLayers)):
diceScore = accuracy(predTrain[i],trainTruth[i])
trainDice.append(diceScore)
meanTrainDice = sum(trainDice)/len(trainLayers)
del trainLayers,trainTruth
################################################################################################
overallDicePerPatient=[] # For overall test Dice
extDicePerPatient=[]
PatientTruthLayers=[]
PatientPredLayers=[]
PREC=[]
RECALL=[]
F1=[]
LayerwiseDiceTotal=[]
testResultFile = open(os.getcwd()+"/CRFResults/"+filename+".csv",'a')
testResultFile.write("folderName,numLayers, Overall Dice, precision , recall, extDice"+"\n")
counter=0
print "Loading the test slices"
for folder in os.listdir(test_path):
path = test_path + "/" + folder
layerDiceScores=''
data = extractTestSlices2(path,padding,neighbor=neighbor,inputs=inputs)
if data!=0:
[testLayers,testTruth,sliceShape,startSlice,endSlice] = data
# trueTestLayers=testLayers
GTest = [edges for x in testLayers]
testLayers = np.array( [x.reshape((sliceShape[0]*sliceShape[1],n_features)) for x in testLayers] )
testTruth = np.array( [x.reshape((sliceShape[0]*sliceShape[1],)).astype(int) for x in testTruth] )
asdfTest = zip(testLayers,GTest)
predTest = svm.predict(asdfTest)
LayerwiseDice=[]
for i in range(len(testLayers)):
diceScore = accuracy(predTest[i],testTruth[i])
layerDiceScores+=","+str(diceScore)
if math.isnan(diceScore):
if sum(predTest[i])==0 and sum(testTruth[i])==0:
LayerwiseDice.append(1.0)
continue
LayerwiseDice.append(diceScore)
LayerwiseDiceTotal.append(LayerwiseDice)
### Imputing the predicted pixels into full volume
if savePred==True:
finalPatientPred = np.zeros((240,240,150))
finalPatientTruth = np.zeros((240,240,150))
predInsert = np.dstack(tuple([x.reshape(180,180) for x in predTest]))
truthInsert = np.dstack(tuple([x.reshape(180,180) for x in testTruth]))
finalPatientPred[30:(240-30),30:(240-30),startSlice:endSlice] = predInsert
finalPatientTruth[30:(240-30),30:(240-30),startSlice:endSlice] = truthInsert
finalPatientPred = finalPatientPred.astype('int')
# print "saving at "+ path+"/"+filename+"whole"
np.save(path+"/"+folder+filename+"whole",finalPatientPred)
# print "predInsert shape"
# print predInsert.shape
# finalPatientPred = np.reshape(finalPatientPred,(240*240*150,)).astype('int')
# finalPatientTruth = np.reshape(finalPatientTruth,(240*240*150,)).astype('int')
#
# print "Counters"
# print Counter(list(np.hstack(testTruth)))
# print Counter(list(finalPatientTruth))
# print confusion_matrix(np.hstack(predTest),np.hstack(testTruth))
# print confusion_matrix(finalPatientPred,finalPatientTruth)
overallTestDice = accuracy(np.hstack(predTest),np.hstack(testTruth))
extDice = np.mean ( np.array(LayerwiseDice)[ range(10) + range(len(LayerwiseDice)-10, len(LayerwiseDice)) ] )
prec,recall,f1 = precision_score(np.hstack(testTruth),np.hstack(predTest)) , recall_score(np.hstack(testTruth),np.hstack(predTest)) , f1_score(np.hstack(testTruth),np.hstack(predTest))
print "Patient %d : Overall test DICE for %s is : %f and extDice is %f"%(counter,folder,overallTestDice,extDice)
print "Precision : %f Recall : %f F1 : %f " %(prec,recall,f1)
print "__________________________________________"
# testResultFile.write(folder+","+str(len(testLayers))+","+str(meanTestDice)+","+str(overallTestDice) ","+str(np.max(testDice)) +","+ str(np.min(testDice))+"\n" )
testResultFile.write(folder+","+str(len(testLayers)) + ","+ str(overallTestDice) + ","+str(prec)+","+str(recall)+","+str(extDice)+layerDiceScores+"\n" )
overallDicePerPatient.append(overallTestDice)
extDicePerPatient.append(extDice)
PREC.append(prec), RECALL.append(recall) , F1.append(f1)
PatientTruthLayers.append(testTruth)
PatientPredLayers.append(predTest)
counter+=1
if counter==num_test and num_test!=-1:
break
######################################################################################################
print "Done testing slices"
overallDice = sum(overallDicePerPatient)/len(PatientTruthLayers)
overallPrec = sum(PREC)/len(PatientTruthLayers)
overallRecall = sum(RECALL)/len(PatientTruthLayers)
overallExtDice = np.mean(extDicePerPatient)
print "Overall DICE : %f Precision : %f Recall : %f extDice : %f "%(overallDice,overallPrec,overallRecall,overallExtDice)
print "############################################"
# testOutput=np.array([PatientPredLayers,PatientTruthLayers,trueTestLayers])
testOutput=np.array([PatientPredLayers,PatientTruthLayers])
########### Saving the models ######################################################################
# print "Saving the model"
# modelDir = os.getcwd()+"/CRFModel/"
# svmModel = open(modelDir+filename+"_model"+".pkl",'wb')
# cPickle.dump(svm,svmModel,protocol=cPickle.HIGHEST_PROTOCOL)
# svmModel.close()
#
# print "saving the predictions"
# predFileTest = open(os.getcwd()+"/CRFPred/"+filename+"_pred.pkl",'wb')
# cPickle.dump(testOutput,predFileTest,protocol=cPickle.HIGHEST_PROTOCOL)
# predFileTest.close()
#Saving layerWise PatientScore
layerDataLog = open(os.getcwd()+"/CRFModel/"+filename+"_layer.pkl",'wb')
cPickle.dump(LayerwiseDiceTotal,layerDataLog,protocol = cPickle.HIGHEST_PROTOCOL)
layerDataLog.close()
resultLog = os.getcwd()+"/CRFResults/TestResultFinal.csv"
resultFile = open(resultLog,'a')
resultFile.write(time.ctime()+","+str(num_iter)+","+str(num_train)+","+str(num_test)+","+inference+","+
trainer+","+str(C)+","+str(train_time)+","+str(meanTrainDice)+","+str(overallDice)+","+
str(np.std(overallDicePerPatient))+","+edgeFeature+","+"None"+","+features+","+filename +","+ str(overallPrec) +","+ str(overallRecall) +","+ str(overallExtDice)+","+"Flair(5)+T2(9)-Without last 4 train Layers"+"\n")
resultFile.close()
testResultFile.close()
return | trainModel_Neighbor | identifier_name |
BratsCRFNeighborWhole.py | # -*- coding: utf-8 -*-
"""
Created on Tue Feb 2 11:10:34 2016
@author: rsk
"""
import numpy as np
import pystruct
import re
import time
import nibabel as nib
import cPickle
import gzip
import os
import sys
import matplotlib.cm as cm
import matplotlib.pyplot as plt
from math import *
import itertools
os.chdir("/home/bmi/CRF")
from pystruct.models import GraphCRF, LatentNodeCRF
from pystruct.learners import NSlackSSVM, OneSlackSSVM, LatentSSVM, FrankWolfeSSVM
from pystruct.datasets import make_simple_2x2
from pystruct.utils import make_grid_edges, plot_grid
from sklearn.metrics import confusion_matrix,precision_score,recall_score,f1_score
from collections import Counter
from CRFUtils import *
from BratsCheckPredictions import *
import math
from math import *
train_path="/media/bmi/MyPassport/new_n4/Recon_2013_data/N4_zscore_training_t1_t1c_hist_match"
test_path="/media/bmi/MyPassport/new_n4/Recon_2013_data/N4_zscore_testing_t1_t1c_hist_match"
#train_path="/media/bmi/MyPassport/n4_entire/Recon_2013_data/training_longitudnal"
#test_path="/media/bmi/MyPassport/n4_entire/Recon_2013_data/testing_longitudnal"
#train_path="/home/rsk/Documents/PyStruct/data/train"
#test_path="/home/rsk/Documents/PyStruct/data/test"
#%%
#################################################################################################
#Training the model
def trainModel_Neighbor(num_iter=5,inference="qpbo",trainer="NSlack",num_train=2,num_test=1,C=0.1,edges="180x180_dist1_diag0",inputs=[1,1,1,1,1,1],features="post+img+pred",neighbor=-1,directed=False,savePred=False):
padding=(30,30,30,30)
if directed==True:
features +="+directed"
resultsDir = os.getcwd()+'/CRFResults'
nameLen = len(os.listdir(resultsDir))
edgeFeature = edges
filename=str(nameLen)+"_CRF_iter_"+str(num_iter)+"_"+inference+"_"+trainer+"_"+features+"_"+str(num_train)+"_"+str(num_test)+"_"+edgeFeature
print "Loading training slices"
start = time.clock()
train =extractSlices2(train_path,num_train,padding,neighbor=neighbor,inputs=inputs)
end= time.clock()
train_load_time = (end-start)/60.0
[trainLayers,trainTruth,sliceShape] = train
print "Training slices loaded in %f" % (train_load_time)
n_features= len(trainLayers[0][0,0])
print "Layer shape is : "
print trainLayers[0].shape
print "Training the model"
edges= np.load("/home/bmi/CRF/edges/"+edges+".npy")
G = [edges for x in trainLayers]
print trainLayers[0].shape
trainLayers = np.array( [x.reshape((sliceShape[0]*sliceShape[1],n_features)) for x in trainLayers] )
trainTruth = np.array( [x.reshape((sliceShape[0]*sliceShape[1],)).astype(int) for x in trainTruth] )
if inference=='ogm':
crf = GraphCRF(inference_method=('ogm',{'alg':'fm'}),directed=directed)
else:
|
if trainer=="Frank":
svm = FrankWolfeSSVM(model = crf,max_iter=num_iter,C=C,n_jobs=6,verbose=1)
elif trainer=="NSlack":
svm = NSlackSSVM(model = crf,max_iter=num_iter,C=C,n_jobs=-1,verbose=1)
else:
svm = OneSlackSSVM(model = crf,max_iter=num_iter,C=C,n_jobs=-1,verbose=1)
start = time.clock()
asdf = zip(trainLayers,G)
svm.fit(asdf,trainTruth)
end = time.clock()
train_time = (end-start)/60.0
print "The training took %f" % (train_time)
print "Model parameter size :"
print svm.w.shape
print "making predictions on train data"
predTrain = svm.predict(asdf)
trainDice=[]
for i in range(len(trainLayers)):
diceScore = accuracy(predTrain[i],trainTruth[i])
trainDice.append(diceScore)
meanTrainDice = sum(trainDice)/len(trainLayers)
del trainLayers,trainTruth
################################################################################################
overallDicePerPatient=[] # For overall test Dice
extDicePerPatient=[]
PatientTruthLayers=[]
PatientPredLayers=[]
PREC=[]
RECALL=[]
F1=[]
LayerwiseDiceTotal=[]
testResultFile = open(os.getcwd()+"/CRFResults/"+filename+".csv",'a')
testResultFile.write("folderName,numLayers, Overall Dice, precision , recall, extDice"+"\n")
counter=0
print "Loading the test slices"
for folder in os.listdir(test_path):
path = test_path + "/" + folder
layerDiceScores=''
data = extractTestSlices2(path,padding,neighbor=neighbor,inputs=inputs)
if data!=0:
[testLayers,testTruth,sliceShape,startSlice,endSlice] = data
# trueTestLayers=testLayers
GTest = [edges for x in testLayers]
testLayers = np.array( [x.reshape((sliceShape[0]*sliceShape[1],n_features)) for x in testLayers] )
testTruth = np.array( [x.reshape((sliceShape[0]*sliceShape[1],)).astype(int) for x in testTruth] )
asdfTest = zip(testLayers,GTest)
predTest = svm.predict(asdfTest)
LayerwiseDice=[]
for i in range(len(testLayers)):
diceScore = accuracy(predTest[i],testTruth[i])
layerDiceScores+=","+str(diceScore)
if math.isnan(diceScore):
if sum(predTest[i])==0 and sum(testTruth[i])==0:
LayerwiseDice.append(1.0)
continue
LayerwiseDice.append(diceScore)
LayerwiseDiceTotal.append(LayerwiseDice)
### Imputing the predicted pixels into full volume
if savePred==True:
finalPatientPred = np.zeros((240,240,150))
finalPatientTruth = np.zeros((240,240,150))
predInsert = np.dstack(tuple([x.reshape(180,180) for x in predTest]))
truthInsert = np.dstack(tuple([x.reshape(180,180) for x in testTruth]))
finalPatientPred[30:(240-30),30:(240-30),startSlice:endSlice] = predInsert
finalPatientTruth[30:(240-30),30:(240-30),startSlice:endSlice] = truthInsert
finalPatientPred = finalPatientPred.astype('int')
# print "saving at "+ path+"/"+filename+"whole"
np.save(path+"/"+folder+filename+"whole",finalPatientPred)
# print "predInsert shape"
# print predInsert.shape
# finalPatientPred = np.reshape(finalPatientPred,(240*240*150,)).astype('int')
# finalPatientTruth = np.reshape(finalPatientTruth,(240*240*150,)).astype('int')
#
# print "Counters"
# print Counter(list(np.hstack(testTruth)))
# print Counter(list(finalPatientTruth))
# print confusion_matrix(np.hstack(predTest),np.hstack(testTruth))
# print confusion_matrix(finalPatientPred,finalPatientTruth)
overallTestDice = accuracy(np.hstack(predTest),np.hstack(testTruth))
extDice = np.mean ( np.array(LayerwiseDice)[ range(10) + range(len(LayerwiseDice)-10, len(LayerwiseDice)) ] )
prec,recall,f1 = precision_score(np.hstack(testTruth),np.hstack(predTest)) , recall_score(np.hstack(testTruth),np.hstack(predTest)) , f1_score(np.hstack(testTruth),np.hstack(predTest))
print "Patient %d : Overall test DICE for %s is : %f and extDice is %f"%(counter,folder,overallTestDice,extDice)
print "Precision : %f Recall : %f F1 : %f " %(prec,recall,f1)
print "__________________________________________"
# testResultFile.write(folder+","+str(len(testLayers))+","+str(meanTestDice)+","+str(overallTestDice) ","+str(np.max(testDice)) +","+ str(np.min(testDice))+"\n" )
testResultFile.write(folder+","+str(len(testLayers)) + ","+ str(overallTestDice) + ","+str(prec)+","+str(recall)+","+str(extDice)+layerDiceScores+"\n" )
overallDicePerPatient.append(overallTestDice)
extDicePerPatient.append(extDice)
PREC.append(prec), RECALL.append(recall) , F1.append(f1)
PatientTruthLayers.append(testTruth)
PatientPredLayers.append(predTest)
counter+=1
if counter==num_test and num_test!=-1:
break
######################################################################################################
print "Done testing slices"
overallDice = sum(overallDicePerPatient)/len(PatientTruthLayers)
overallPrec = sum(PREC)/len(PatientTruthLayers)
overallRecall = sum(RECALL)/len(PatientTruthLayers)
overallExtDice = np.mean(extDicePerPatient)
print "Overall DICE : %f Precision : %f Recall : %f extDice : %f "%(overallDice,overallPrec,overallRecall,overallExtDice)
print "############################################"
# testOutput=np.array([PatientPredLayers,PatientTruthLayers,trueTestLayers])
testOutput=np.array([PatientPredLayers,PatientTruthLayers])
########### Saving the models ######################################################################
# print "Saving the model"
# modelDir = os.getcwd()+"/CRFModel/"
# svmModel = open(modelDir+filename+"_model"+".pkl",'wb')
# cPickle.dump(svm,svmModel,protocol=cPickle.HIGHEST_PROTOCOL)
# svmModel.close()
#
# print "saving the predictions"
# predFileTest = open(os.getcwd()+"/CRFPred/"+filename+"_pred.pkl",'wb')
# cPickle.dump(testOutput,predFileTest,protocol=cPickle.HIGHEST_PROTOCOL)
# predFileTest.close()
#Saving layerWise PatientScore
layerDataLog = open(os.getcwd()+"/CRFModel/"+filename+"_layer.pkl",'wb')
cPickle.dump(LayerwiseDiceTotal,layerDataLog,protocol = cPickle.HIGHEST_PROTOCOL)
layerDataLog.close()
resultLog = os.getcwd()+"/CRFResults/TestResultFinal.csv"
resultFile = open(resultLog,'a')
resultFile.write(time.ctime()+","+str(num_iter)+","+str(num_train)+","+str(num_test)+","+inference+","+
trainer+","+str(C)+","+str(train_time)+","+str(meanTrainDice)+","+str(overallDice)+","+
str(np.std(overallDicePerPatient))+","+edgeFeature+","+"None"+","+features+","+filename +","+ str(overallPrec) +","+ str(overallRecall) +","+ str(overallExtDice)+","+"Flair(5)+T2(9)-Without last 4 train Layers"+"\n")
resultFile.close()
testResultFile.close()
return | crf = GraphCRF(inference_method=inference,directed=directed) | conditional_block |
playlist.py | import os
import os.path
import random
import re
import settings
import launch
# maintains library of all 'things' to play and management of playlists
VIDEO_DIR = os.path.join(settings.media_path, 'video')
if not os.path.exists(VIDEO_DIR):
assert False, 'media dir %s not found' % VIDEO_DIR
class Content(object):
def __init__(self, sketch, name=None, **kwargs):
# sketch to run in the java build
self.sketch = sketch
# name of this content option; must be unique; defaults to sketch name
self.name = (name or sketch)
# custom params to provide to the sketch
self.params = kwargs.get('params', {})
# true if content is only meant to be run manually, not part of a playlist
self.manual = kwargs.get('manual', False)
# if sketch can only run on certain geometries, list of compatible geometries
self.geometries = kwargs.get('geometries', None)
# if true, stretch content to fit the viewport
self.stretch_aspect = kwargs.get('stretch_aspect', False)
# a function (placement -> bool) that overrides the built-in placement selection
self.placement_filter = kwargs.get('placement_filter')
## audio settings ##
# true if content responds to audio input
self.sound_reactive = kwargs.get('sound_reactive', False)
# if sound_reactive, false if content can still be shown without audio input available
self.sound_required = kwargs.get('sound_required', True)
# relative volume adjustment for audio input (to give more/less responsiveness)
self.volume_adjust = kwargs.get('volume_adjust', 1.)
# true if sketch has audio out (that we actually want to hear)
self.has_audio = kwargs.get('has_audio', False)
# true if content can use kinect
self.kinect_enabled = kwargs.get('kinect_enabled', False)
# if kinect_enabled, false if content still works without connect
self.kinect_required = kwargs.get('kinect_required', True)
self.server_side_parameters = kwargs.get('server_side_parameters', [])
## sketch-dependent parameters ##
# video
# length of video in seconds -- set automatically
self.duration = self.get_video_duration()
# how to play the video:
# - 'shuffle': play a random excerpt for the specific runtime
# - 'full': play the video start to finish
self.play_mode = kwargs.get('play_mode', 'shuffle')
# screencast
# command to launch program to be cast
self.cmdline = kwargs.get('cmdline')
# a hook to further configure/interact with the program after it's launched
self.post_launch = kwargs.get('post_launch')
if set(kwargs.keys()) - set(self.__dict__):
assert False, 'unrecognized arg'
def get_video_duration(self):
if not self.sketch == 'video':
return
vid = self.params['path']
try:
duration = float(os.popen('mediainfo --Inform="Video;%%Duration%%" "%s"' % vid).readlines()[0].strip())/1000.
except RuntimeError:
print 'could not read duration of %s' % vid
duration = 0
return duration
def to_json_info(self):
info = dict((k, getattr(self, k)) for k in ('name', 'sound_reactive', 'has_audio', 'kinect_enabled'))
if self.play_mode == 'full':
info['duration'] = self.duration
return info
# placement filter that ensures crisp alignment with lsdome panel/pixel geometry
def pixel_exact(p):
# forcing zero-rotation is sufficient for lsdome, and achieves the same spirit for prometheus (which
# doesn't have a concept of 'pixel exact') while still allowing for some variation in wing overlap
return getattr(p, 'rot', 0) == 0 and p.is_1to1
# like pixel_exact, but stretch to fit full canvas (so not 'exact', but still 'aligned')
def align_but_stretch(p):
return getattr(p, 'rot', 0) == 0 and p.stretch
_all_content = None
def all_content():
global _all_content
if not _all_content:
_all_content = [
Content('black', '[util] black (note: keeps running and using cpu)', manual=True),
Content('gridtest', '[util] uvw grid test', geometries=['lsdome'], manual=True),
Content('fctest', '[util] fc topology test', params={'fcconfig': fadecandy_config()}),
Content('layouttest', '[util] cartesian test (mouse)', manual=True, placement_filter=pixel_exact),
Content('binary', '[util] binary decomp', manual=True),
Content('cloud'),
Content('dontknow'),
Content('harmonics', geometries=['lsdome']),
Content('moire'),
Content('rings'),
Content('tube'),
Content('twinkle'),
Content('fft', sound_reactive=True),
Content('pixelflock', sound_reactive=True, sound_required=False, kinect_enabled=True, kinect_required=False),
Content('kinectdepth', 'kinectdepth', kinect_enabled=True,
placement_filter=align_but_stretch),
Content('screencast', 'projectm', cmdline='projectM-pulseaudio', sound_reactive=True, volume_adjust=1.5,
server_side_parameters=projectm_parameters(),
post_launch=lambda manager: projectm_control(manager, 'next'), # get off the default pattern
),
Content('stream', 'hdmi-in', manual=True, stretch_aspect=True, params={
'camera': 'FHD Capture: FHD Capture',
}),
Content('kaleidoscope', geometries=['lsdome'], placement_filter=pixel_exact, params={'scale': 2.}),
Content('kaleidoscope', geometries=['prometheus'], params={'scale': 3.2}),
Content('imgkaleidoscope', 'hearts', geometries=['lsdome'], placement_filter=pixel_exact, params={
'image': "res/img/hearts.jpg",
'scale': 1.,
'source_scale': 1.3,
'speed': .25,
}),
Content('video', 'video:chrissy_poi_zoom', geometries=['lsdome'], params={
'path': os.path.join(VIDEO_DIR, 'hayley_chrissy_fire_spinning.mp4'),
}, placement_filter=lambda p: p.name == 'poi (01-10 21:44)'),
]
_all_content.extend(load_videos())
_all_content = [c for c in _all_content if not c.geometries or settings.geometry in c.geometries]
_all_content = [c for c in _all_content if not (c.kinect_enabled and c.kinect_required) or settings.kinect]
for c in _all_content:
if c.kinect_enabled and settings.kinect and not c.placement_filter:
# when kinect used, ensure display lines up with camera
c.placement_filter = pixel_exact
assert len(set(c.name for c in _all_content)) == len(_all_content), 'content names not unique'
_all_content = dict((c.name, c) for c in _all_content)
return _all_content
def load_videos():
vids = [f.strip() for f in os.popen('find "%s" -type f' % VIDEO_DIR).readlines()]
for vid in vids:
# TODO placement restrictions? joan of arc require mirror mode?
# do special things for certain videos -- should probably make this more maintainable
args = {}
if 'knife' in vid:
args['play_mode'] = 'full'
if any(k in vid for k in ('knife', 'flood')):
args['has_audio'] = True
yield Content('video', 'video:%s' % os.path.relpath(vid, VIDEO_DIR), stretch_aspect=True, params={
'path': vid,
}, **args)
def fadecandy_config():
if settings.geometry == 'lsdome':
fcconfig = 'lsdome_%spanel.json' % settings.num_panels
elif settings.geometry == 'prometheus':
fcconfig = 'prometheus_wing.json'
return os.path.join(settings.repo_root, 'src/config/fadecandy', fcconfig)
def projectm_control(mgr, command):
interaction = {
'next': 'key r',
'toggle-lock': 'key l',
}[command]
launch.gui_interaction(mgr.content.window_id, interaction)
def projectm_parameters():
|
def game_content(rom):
try:
args = launch.launch_emulator(rom)
except:
return None
name = os.path.splitext(os.path.relpath(os.path.abspath(rom), settings.roms_path))[0]
return Content('screencast', name, cmdline=args['cmd'], params=args.get('params', {}), stretch_aspect=True, has_audio=True)
_games_content = None
def load_games(filt):
def all_roms_path_files():
for dirpath, _, filenames in os.walk(settings.roms_path):
for f in filenames:
yield os.path.join(dirpath, f)
global _games_content
if not _games_content:
_games_content = filter(None, map(game_content, all_roms_path_files()))
_games_content = dict((c.name, c) for c in _games_content)
print len(_games_content), 'roms'
if filt == 'favs':
return filter_games_favorites(_games_content)
else:
return filter_games(_games_content, filt)
def filter_games(all_games, filt):
def name_to_search_key(name):
name = os.path.split(name)[1]
name = name.split('(')[0]
words = name.lower().split()
words = [re.sub('[^a-z0-9]', '', w) for w in words]
return filter(None, words)
def match_key(query, key):
return all(any(kw.startswith(qw) for kw in key) for qw in query)
return dict((k, v) for k, v in all_games.iteritems() if match_key(name_to_search_key(filt), name_to_search_key(k)))
def filter_games_favorites(all_games):
with open(settings.rom_favorites) as f:
favs = set(os.path.splitext(g.strip())[0] for g in f.readlines())
return dict((k, v) for k, v in all_games.iteritems() if k in favs)
class Playlist(object):
def __init__(self, name, choices):
self.name = name
# a mapping of content to relative likelihood
self.choices = choices if type(choices) == type({}) else dict((c, 1.) for c in choices)
self.last_played = None
def _all_choices_except_last_played(self):
for choice in self.choices.keys():
if choice == self.last_played and len(self.choices) > 1:
continue
yield choice
def get_likelihood(self, choice):
return self.choices[choice]
# TODO reduce likelihood of previous N selections
def get_next(self):
total_likelihood = sum(self.get_likelihood(choice) for choice in self._all_choices_except_last_played())
rand = random.uniform(0, total_likelihood)
cumulative_likelihood = 0
choice = None
for ch in self._all_choices_except_last_played():
cumulative_likelihood += self.get_likelihood(ch)
if cumulative_likelihood > rand:
choice = ch
break
self.last_played = choice
return choice
def to_json(self):
return {
'name': self.name,
'items': sorted(c.name for c in self.choices.keys()),
}
def load_playlists():
base = Playlist('(almost) everything', (c for c in all_content().values() if not c.manual))
nosound = Playlist('no sound-reactive', (c for c in base.choices.keys() if not c.sound_reactive or not c.sound_required))
playlists = [base, nosound]
playlists.extend(load_playlist_files())
return dict((pl.name, pl) for pl in playlists)
def load_playlist_files():
playlist_files = os.listdir(settings.playlists_dir)
for filename in playlist_files:
name, ext = os.path.splitext(filename)
if ext != '.playlist':
continue
path = os.path.join(settings.playlists_dir, filename)
with open(path) as f:
entries = filter(None, (ln.strip() for ln in f.readlines()))
def parse_entry(entry):
parts = entry.split('|')
try:
return (all_content()[parts[0]], float(parts[1]) if len(parts) > 1 else 1.)
except KeyError:
print 'content "%s" not available for playlist "%s"' % (parts[0], name)
yield Playlist(name, dict(filter(None, map(parse_entry, entries))))
| import animations
class ProjectMNextPatternAction(animations.Parameter):
def param_def(self):
return {
'name': 'next pattern',
'isAction': True,
}
def handle_input_event(self, type, val):
if type != 'press':
return
projectm_control(self.manager, 'next')
def _update_value(self, val):
pass
return [ProjectMNextPatternAction] | identifier_body |
playlist.py | import os
import os.path
import random
import re
import settings
import launch
# maintains library of all 'things' to play and management of playlists
VIDEO_DIR = os.path.join(settings.media_path, 'video')
if not os.path.exists(VIDEO_DIR):
assert False, 'media dir %s not found' % VIDEO_DIR
class Content(object):
def __init__(self, sketch, name=None, **kwargs):
# sketch to run in the java build
self.sketch = sketch
# name of this content option; must be unique; defaults to sketch name
self.name = (name or sketch)
# custom params to provide to the sketch
self.params = kwargs.get('params', {})
# true if content is only meant to be run manually, not part of a playlist
self.manual = kwargs.get('manual', False)
# if sketch can only run on certain geometries, list of compatible geometries
self.geometries = kwargs.get('geometries', None)
# if true, stretch content to fit the viewport
self.stretch_aspect = kwargs.get('stretch_aspect', False)
# a function (placement -> bool) that overrides the built-in placement selection
self.placement_filter = kwargs.get('placement_filter')
## audio settings ##
# true if content responds to audio input
self.sound_reactive = kwargs.get('sound_reactive', False)
# if sound_reactive, false if content can still be shown without audio input available
self.sound_required = kwargs.get('sound_required', True)
# relative volume adjustment for audio input (to give more/less responsiveness)
self.volume_adjust = kwargs.get('volume_adjust', 1.)
# true if sketch has audio out (that we actually want to hear)
self.has_audio = kwargs.get('has_audio', False)
# true if content can use kinect
self.kinect_enabled = kwargs.get('kinect_enabled', False)
# if kinect_enabled, false if content still works without connect
self.kinect_required = kwargs.get('kinect_required', True)
self.server_side_parameters = kwargs.get('server_side_parameters', [])
## sketch-dependent parameters ##
# video
# length of video in seconds -- set automatically
self.duration = self.get_video_duration()
# how to play the video:
# - 'shuffle': play a random excerpt for the specific runtime
# - 'full': play the video start to finish
self.play_mode = kwargs.get('play_mode', 'shuffle')
# screencast
# command to launch program to be cast
self.cmdline = kwargs.get('cmdline')
# a hook to further configure/interact with the program after it's launched
self.post_launch = kwargs.get('post_launch')
if set(kwargs.keys()) - set(self.__dict__):
assert False, 'unrecognized arg'
def get_video_duration(self):
if not self.sketch == 'video':
return
vid = self.params['path']
try:
duration = float(os.popen('mediainfo --Inform="Video;%%Duration%%" "%s"' % vid).readlines()[0].strip())/1000.
except RuntimeError:
print 'could not read duration of %s' % vid
duration = 0
return duration
def to_json_info(self):
info = dict((k, getattr(self, k)) for k in ('name', 'sound_reactive', 'has_audio', 'kinect_enabled'))
if self.play_mode == 'full':
info['duration'] = self.duration
return info
# placement filter that ensures crisp alignment with lsdome panel/pixel geometry
def pixel_exact(p):
# forcing zero-rotation is sufficient for lsdome, and achieves the same spirit for prometheus (which
# doesn't have a concept of 'pixel exact') while still allowing for some variation in wing overlap
return getattr(p, 'rot', 0) == 0 and p.is_1to1
# like pixel_exact, but stretch to fit full canvas (so not 'exact', but still 'aligned')
def align_but_stretch(p):
return getattr(p, 'rot', 0) == 0 and p.stretch
_all_content = None
def all_content():
global _all_content
if not _all_content:
_all_content = [
Content('black', '[util] black (note: keeps running and using cpu)', manual=True),
Content('gridtest', '[util] uvw grid test', geometries=['lsdome'], manual=True),
Content('fctest', '[util] fc topology test', params={'fcconfig': fadecandy_config()}),
Content('layouttest', '[util] cartesian test (mouse)', manual=True, placement_filter=pixel_exact),
Content('binary', '[util] binary decomp', manual=True),
Content('cloud'),
Content('dontknow'),
Content('harmonics', geometries=['lsdome']),
Content('moire'),
Content('rings'),
Content('tube'),
Content('twinkle'),
Content('fft', sound_reactive=True),
Content('pixelflock', sound_reactive=True, sound_required=False, kinect_enabled=True, kinect_required=False),
Content('kinectdepth', 'kinectdepth', kinect_enabled=True,
placement_filter=align_but_stretch),
Content('screencast', 'projectm', cmdline='projectM-pulseaudio', sound_reactive=True, volume_adjust=1.5,
server_side_parameters=projectm_parameters(),
post_launch=lambda manager: projectm_control(manager, 'next'), # get off the default pattern
), | Content('kaleidoscope', geometries=['lsdome'], placement_filter=pixel_exact, params={'scale': 2.}),
Content('kaleidoscope', geometries=['prometheus'], params={'scale': 3.2}),
Content('imgkaleidoscope', 'hearts', geometries=['lsdome'], placement_filter=pixel_exact, params={
'image': "res/img/hearts.jpg",
'scale': 1.,
'source_scale': 1.3,
'speed': .25,
}),
Content('video', 'video:chrissy_poi_zoom', geometries=['lsdome'], params={
'path': os.path.join(VIDEO_DIR, 'hayley_chrissy_fire_spinning.mp4'),
}, placement_filter=lambda p: p.name == 'poi (01-10 21:44)'),
]
_all_content.extend(load_videos())
_all_content = [c for c in _all_content if not c.geometries or settings.geometry in c.geometries]
_all_content = [c for c in _all_content if not (c.kinect_enabled and c.kinect_required) or settings.kinect]
for c in _all_content:
if c.kinect_enabled and settings.kinect and not c.placement_filter:
# when kinect used, ensure display lines up with camera
c.placement_filter = pixel_exact
assert len(set(c.name for c in _all_content)) == len(_all_content), 'content names not unique'
_all_content = dict((c.name, c) for c in _all_content)
return _all_content
def load_videos():
vids = [f.strip() for f in os.popen('find "%s" -type f' % VIDEO_DIR).readlines()]
for vid in vids:
# TODO placement restrictions? joan of arc require mirror mode?
# do special things for certain videos -- should probably make this more maintainable
args = {}
if 'knife' in vid:
args['play_mode'] = 'full'
if any(k in vid for k in ('knife', 'flood')):
args['has_audio'] = True
yield Content('video', 'video:%s' % os.path.relpath(vid, VIDEO_DIR), stretch_aspect=True, params={
'path': vid,
}, **args)
def fadecandy_config():
if settings.geometry == 'lsdome':
fcconfig = 'lsdome_%spanel.json' % settings.num_panels
elif settings.geometry == 'prometheus':
fcconfig = 'prometheus_wing.json'
return os.path.join(settings.repo_root, 'src/config/fadecandy', fcconfig)
def projectm_control(mgr, command):
interaction = {
'next': 'key r',
'toggle-lock': 'key l',
}[command]
launch.gui_interaction(mgr.content.window_id, interaction)
def projectm_parameters():
import animations
class ProjectMNextPatternAction(animations.Parameter):
def param_def(self):
return {
'name': 'next pattern',
'isAction': True,
}
def handle_input_event(self, type, val):
if type != 'press':
return
projectm_control(self.manager, 'next')
def _update_value(self, val):
pass
return [ProjectMNextPatternAction]
def game_content(rom):
try:
args = launch.launch_emulator(rom)
except:
return None
name = os.path.splitext(os.path.relpath(os.path.abspath(rom), settings.roms_path))[0]
return Content('screencast', name, cmdline=args['cmd'], params=args.get('params', {}), stretch_aspect=True, has_audio=True)
_games_content = None
def load_games(filt):
def all_roms_path_files():
for dirpath, _, filenames in os.walk(settings.roms_path):
for f in filenames:
yield os.path.join(dirpath, f)
global _games_content
if not _games_content:
_games_content = filter(None, map(game_content, all_roms_path_files()))
_games_content = dict((c.name, c) for c in _games_content)
print len(_games_content), 'roms'
if filt == 'favs':
return filter_games_favorites(_games_content)
else:
return filter_games(_games_content, filt)
def filter_games(all_games, filt):
def name_to_search_key(name):
name = os.path.split(name)[1]
name = name.split('(')[0]
words = name.lower().split()
words = [re.sub('[^a-z0-9]', '', w) for w in words]
return filter(None, words)
def match_key(query, key):
return all(any(kw.startswith(qw) for kw in key) for qw in query)
return dict((k, v) for k, v in all_games.iteritems() if match_key(name_to_search_key(filt), name_to_search_key(k)))
def filter_games_favorites(all_games):
with open(settings.rom_favorites) as f:
favs = set(os.path.splitext(g.strip())[0] for g in f.readlines())
return dict((k, v) for k, v in all_games.iteritems() if k in favs)
class Playlist(object):
def __init__(self, name, choices):
self.name = name
# a mapping of content to relative likelihood
self.choices = choices if type(choices) == type({}) else dict((c, 1.) for c in choices)
self.last_played = None
def _all_choices_except_last_played(self):
for choice in self.choices.keys():
if choice == self.last_played and len(self.choices) > 1:
continue
yield choice
def get_likelihood(self, choice):
return self.choices[choice]
# TODO reduce likelihood of previous N selections
def get_next(self):
total_likelihood = sum(self.get_likelihood(choice) for choice in self._all_choices_except_last_played())
rand = random.uniform(0, total_likelihood)
cumulative_likelihood = 0
choice = None
for ch in self._all_choices_except_last_played():
cumulative_likelihood += self.get_likelihood(ch)
if cumulative_likelihood > rand:
choice = ch
break
self.last_played = choice
return choice
def to_json(self):
return {
'name': self.name,
'items': sorted(c.name for c in self.choices.keys()),
}
def load_playlists():
base = Playlist('(almost) everything', (c for c in all_content().values() if not c.manual))
nosound = Playlist('no sound-reactive', (c for c in base.choices.keys() if not c.sound_reactive or not c.sound_required))
playlists = [base, nosound]
playlists.extend(load_playlist_files())
return dict((pl.name, pl) for pl in playlists)
def load_playlist_files():
playlist_files = os.listdir(settings.playlists_dir)
for filename in playlist_files:
name, ext = os.path.splitext(filename)
if ext != '.playlist':
continue
path = os.path.join(settings.playlists_dir, filename)
with open(path) as f:
entries = filter(None, (ln.strip() for ln in f.readlines()))
def parse_entry(entry):
parts = entry.split('|')
try:
return (all_content()[parts[0]], float(parts[1]) if len(parts) > 1 else 1.)
except KeyError:
print 'content "%s" not available for playlist "%s"' % (parts[0], name)
yield Playlist(name, dict(filter(None, map(parse_entry, entries)))) | Content('stream', 'hdmi-in', manual=True, stretch_aspect=True, params={
'camera': 'FHD Capture: FHD Capture',
}),
| random_line_split |
playlist.py | import os
import os.path
import random
import re
import settings
import launch
# maintains library of all 'things' to play and management of playlists
VIDEO_DIR = os.path.join(settings.media_path, 'video')
if not os.path.exists(VIDEO_DIR):
assert False, 'media dir %s not found' % VIDEO_DIR
class Content(object):
def __init__(self, sketch, name=None, **kwargs):
# sketch to run in the java build
self.sketch = sketch
# name of this content option; must be unique; defaults to sketch name
self.name = (name or sketch)
# custom params to provide to the sketch
self.params = kwargs.get('params', {})
# true if content is only meant to be run manually, not part of a playlist
self.manual = kwargs.get('manual', False)
# if sketch can only run on certain geometries, list of compatible geometries
self.geometries = kwargs.get('geometries', None)
# if true, stretch content to fit the viewport
self.stretch_aspect = kwargs.get('stretch_aspect', False)
# a function (placement -> bool) that overrides the built-in placement selection
self.placement_filter = kwargs.get('placement_filter')
## audio settings ##
# true if content responds to audio input
self.sound_reactive = kwargs.get('sound_reactive', False)
# if sound_reactive, false if content can still be shown without audio input available
self.sound_required = kwargs.get('sound_required', True)
# relative volume adjustment for audio input (to give more/less responsiveness)
self.volume_adjust = kwargs.get('volume_adjust', 1.)
# true if sketch has audio out (that we actually want to hear)
self.has_audio = kwargs.get('has_audio', False)
# true if content can use kinect
self.kinect_enabled = kwargs.get('kinect_enabled', False)
# if kinect_enabled, false if content still works without connect
self.kinect_required = kwargs.get('kinect_required', True)
self.server_side_parameters = kwargs.get('server_side_parameters', [])
## sketch-dependent parameters ##
# video
# length of video in seconds -- set automatically
self.duration = self.get_video_duration()
# how to play the video:
# - 'shuffle': play a random excerpt for the specific runtime
# - 'full': play the video start to finish
self.play_mode = kwargs.get('play_mode', 'shuffle')
# screencast
# command to launch program to be cast
self.cmdline = kwargs.get('cmdline')
# a hook to further configure/interact with the program after it's launched
self.post_launch = kwargs.get('post_launch')
if set(kwargs.keys()) - set(self.__dict__):
assert False, 'unrecognized arg'
def get_video_duration(self):
if not self.sketch == 'video':
return
vid = self.params['path']
try:
duration = float(os.popen('mediainfo --Inform="Video;%%Duration%%" "%s"' % vid).readlines()[0].strip())/1000.
except RuntimeError:
print 'could not read duration of %s' % vid
duration = 0
return duration
def to_json_info(self):
info = dict((k, getattr(self, k)) for k in ('name', 'sound_reactive', 'has_audio', 'kinect_enabled'))
if self.play_mode == 'full':
info['duration'] = self.duration
return info
# placement filter that ensures crisp alignment with lsdome panel/pixel geometry
def pixel_exact(p):
# forcing zero-rotation is sufficient for lsdome, and achieves the same spirit for prometheus (which
# doesn't have a concept of 'pixel exact') while still allowing for some variation in wing overlap
return getattr(p, 'rot', 0) == 0 and p.is_1to1
# like pixel_exact, but stretch to fit full canvas (so not 'exact', but still 'aligned')
def align_but_stretch(p):
return getattr(p, 'rot', 0) == 0 and p.stretch
_all_content = None
def all_content():
global _all_content
if not _all_content:
|
return _all_content
def load_videos():
vids = [f.strip() for f in os.popen('find "%s" -type f' % VIDEO_DIR).readlines()]
for vid in vids:
# TODO placement restrictions? joan of arc require mirror mode?
# do special things for certain videos -- should probably make this more maintainable
args = {}
if 'knife' in vid:
args['play_mode'] = 'full'
if any(k in vid for k in ('knife', 'flood')):
args['has_audio'] = True
yield Content('video', 'video:%s' % os.path.relpath(vid, VIDEO_DIR), stretch_aspect=True, params={
'path': vid,
}, **args)
def fadecandy_config():
if settings.geometry == 'lsdome':
fcconfig = 'lsdome_%spanel.json' % settings.num_panels
elif settings.geometry == 'prometheus':
fcconfig = 'prometheus_wing.json'
return os.path.join(settings.repo_root, 'src/config/fadecandy', fcconfig)
def projectm_control(mgr, command):
interaction = {
'next': 'key r',
'toggle-lock': 'key l',
}[command]
launch.gui_interaction(mgr.content.window_id, interaction)
def projectm_parameters():
import animations
class ProjectMNextPatternAction(animations.Parameter):
def param_def(self):
return {
'name': 'next pattern',
'isAction': True,
}
def handle_input_event(self, type, val):
if type != 'press':
return
projectm_control(self.manager, 'next')
def _update_value(self, val):
pass
return [ProjectMNextPatternAction]
def game_content(rom):
try:
args = launch.launch_emulator(rom)
except:
return None
name = os.path.splitext(os.path.relpath(os.path.abspath(rom), settings.roms_path))[0]
return Content('screencast', name, cmdline=args['cmd'], params=args.get('params', {}), stretch_aspect=True, has_audio=True)
_games_content = None
def load_games(filt):
def all_roms_path_files():
for dirpath, _, filenames in os.walk(settings.roms_path):
for f in filenames:
yield os.path.join(dirpath, f)
global _games_content
if not _games_content:
_games_content = filter(None, map(game_content, all_roms_path_files()))
_games_content = dict((c.name, c) for c in _games_content)
print len(_games_content), 'roms'
if filt == 'favs':
return filter_games_favorites(_games_content)
else:
return filter_games(_games_content, filt)
def filter_games(all_games, filt):
def name_to_search_key(name):
name = os.path.split(name)[1]
name = name.split('(')[0]
words = name.lower().split()
words = [re.sub('[^a-z0-9]', '', w) for w in words]
return filter(None, words)
def match_key(query, key):
return all(any(kw.startswith(qw) for kw in key) for qw in query)
return dict((k, v) for k, v in all_games.iteritems() if match_key(name_to_search_key(filt), name_to_search_key(k)))
def filter_games_favorites(all_games):
with open(settings.rom_favorites) as f:
favs = set(os.path.splitext(g.strip())[0] for g in f.readlines())
return dict((k, v) for k, v in all_games.iteritems() if k in favs)
class Playlist(object):
def __init__(self, name, choices):
self.name = name
# a mapping of content to relative likelihood
self.choices = choices if type(choices) == type({}) else dict((c, 1.) for c in choices)
self.last_played = None
def _all_choices_except_last_played(self):
for choice in self.choices.keys():
if choice == self.last_played and len(self.choices) > 1:
continue
yield choice
def get_likelihood(self, choice):
return self.choices[choice]
# TODO reduce likelihood of previous N selections
def get_next(self):
total_likelihood = sum(self.get_likelihood(choice) for choice in self._all_choices_except_last_played())
rand = random.uniform(0, total_likelihood)
cumulative_likelihood = 0
choice = None
for ch in self._all_choices_except_last_played():
cumulative_likelihood += self.get_likelihood(ch)
if cumulative_likelihood > rand:
choice = ch
break
self.last_played = choice
return choice
def to_json(self):
return {
'name': self.name,
'items': sorted(c.name for c in self.choices.keys()),
}
def load_playlists():
base = Playlist('(almost) everything', (c for c in all_content().values() if not c.manual))
nosound = Playlist('no sound-reactive', (c for c in base.choices.keys() if not c.sound_reactive or not c.sound_required))
playlists = [base, nosound]
playlists.extend(load_playlist_files())
return dict((pl.name, pl) for pl in playlists)
def load_playlist_files():
playlist_files = os.listdir(settings.playlists_dir)
for filename in playlist_files:
name, ext = os.path.splitext(filename)
if ext != '.playlist':
continue
path = os.path.join(settings.playlists_dir, filename)
with open(path) as f:
entries = filter(None, (ln.strip() for ln in f.readlines()))
def parse_entry(entry):
parts = entry.split('|')
try:
return (all_content()[parts[0]], float(parts[1]) if len(parts) > 1 else 1.)
except KeyError:
print 'content "%s" not available for playlist "%s"' % (parts[0], name)
yield Playlist(name, dict(filter(None, map(parse_entry, entries))))
| _all_content = [
Content('black', '[util] black (note: keeps running and using cpu)', manual=True),
Content('gridtest', '[util] uvw grid test', geometries=['lsdome'], manual=True),
Content('fctest', '[util] fc topology test', params={'fcconfig': fadecandy_config()}),
Content('layouttest', '[util] cartesian test (mouse)', manual=True, placement_filter=pixel_exact),
Content('binary', '[util] binary decomp', manual=True),
Content('cloud'),
Content('dontknow'),
Content('harmonics', geometries=['lsdome']),
Content('moire'),
Content('rings'),
Content('tube'),
Content('twinkle'),
Content('fft', sound_reactive=True),
Content('pixelflock', sound_reactive=True, sound_required=False, kinect_enabled=True, kinect_required=False),
Content('kinectdepth', 'kinectdepth', kinect_enabled=True,
placement_filter=align_but_stretch),
Content('screencast', 'projectm', cmdline='projectM-pulseaudio', sound_reactive=True, volume_adjust=1.5,
server_side_parameters=projectm_parameters(),
post_launch=lambda manager: projectm_control(manager, 'next'), # get off the default pattern
),
Content('stream', 'hdmi-in', manual=True, stretch_aspect=True, params={
'camera': 'FHD Capture: FHD Capture',
}),
Content('kaleidoscope', geometries=['lsdome'], placement_filter=pixel_exact, params={'scale': 2.}),
Content('kaleidoscope', geometries=['prometheus'], params={'scale': 3.2}),
Content('imgkaleidoscope', 'hearts', geometries=['lsdome'], placement_filter=pixel_exact, params={
'image': "res/img/hearts.jpg",
'scale': 1.,
'source_scale': 1.3,
'speed': .25,
}),
Content('video', 'video:chrissy_poi_zoom', geometries=['lsdome'], params={
'path': os.path.join(VIDEO_DIR, 'hayley_chrissy_fire_spinning.mp4'),
}, placement_filter=lambda p: p.name == 'poi (01-10 21:44)'),
]
_all_content.extend(load_videos())
_all_content = [c for c in _all_content if not c.geometries or settings.geometry in c.geometries]
_all_content = [c for c in _all_content if not (c.kinect_enabled and c.kinect_required) or settings.kinect]
for c in _all_content:
if c.kinect_enabled and settings.kinect and not c.placement_filter:
# when kinect used, ensure display lines up with camera
c.placement_filter = pixel_exact
assert len(set(c.name for c in _all_content)) == len(_all_content), 'content names not unique'
_all_content = dict((c.name, c) for c in _all_content) | conditional_block |
playlist.py | import os
import os.path
import random
import re
import settings
import launch
# maintains library of all 'things' to play and management of playlists
VIDEO_DIR = os.path.join(settings.media_path, 'video')
if not os.path.exists(VIDEO_DIR):
assert False, 'media dir %s not found' % VIDEO_DIR
class Content(object):
def __init__(self, sketch, name=None, **kwargs):
# sketch to run in the java build
self.sketch = sketch
# name of this content option; must be unique; defaults to sketch name
self.name = (name or sketch)
# custom params to provide to the sketch
self.params = kwargs.get('params', {})
# true if content is only meant to be run manually, not part of a playlist
self.manual = kwargs.get('manual', False)
# if sketch can only run on certain geometries, list of compatible geometries
self.geometries = kwargs.get('geometries', None)
# if true, stretch content to fit the viewport
self.stretch_aspect = kwargs.get('stretch_aspect', False)
# a function (placement -> bool) that overrides the built-in placement selection
self.placement_filter = kwargs.get('placement_filter')
## audio settings ##
# true if content responds to audio input
self.sound_reactive = kwargs.get('sound_reactive', False)
# if sound_reactive, false if content can still be shown without audio input available
self.sound_required = kwargs.get('sound_required', True)
# relative volume adjustment for audio input (to give more/less responsiveness)
self.volume_adjust = kwargs.get('volume_adjust', 1.)
# true if sketch has audio out (that we actually want to hear)
self.has_audio = kwargs.get('has_audio', False)
# true if content can use kinect
self.kinect_enabled = kwargs.get('kinect_enabled', False)
# if kinect_enabled, false if content still works without connect
self.kinect_required = kwargs.get('kinect_required', True)
self.server_side_parameters = kwargs.get('server_side_parameters', [])
## sketch-dependent parameters ##
# video
# length of video in seconds -- set automatically
self.duration = self.get_video_duration()
# how to play the video:
# - 'shuffle': play a random excerpt for the specific runtime
# - 'full': play the video start to finish
self.play_mode = kwargs.get('play_mode', 'shuffle')
# screencast
# command to launch program to be cast
self.cmdline = kwargs.get('cmdline')
# a hook to further configure/interact with the program after it's launched
self.post_launch = kwargs.get('post_launch')
if set(kwargs.keys()) - set(self.__dict__):
assert False, 'unrecognized arg'
def get_video_duration(self):
if not self.sketch == 'video':
return
vid = self.params['path']
try:
duration = float(os.popen('mediainfo --Inform="Video;%%Duration%%" "%s"' % vid).readlines()[0].strip())/1000.
except RuntimeError:
print 'could not read duration of %s' % vid
duration = 0
return duration
def to_json_info(self):
info = dict((k, getattr(self, k)) for k in ('name', 'sound_reactive', 'has_audio', 'kinect_enabled'))
if self.play_mode == 'full':
info['duration'] = self.duration
return info
# placement filter that ensures crisp alignment with lsdome panel/pixel geometry
def pixel_exact(p):
# forcing zero-rotation is sufficient for lsdome, and achieves the same spirit for prometheus (which
# doesn't have a concept of 'pixel exact') while still allowing for some variation in wing overlap
return getattr(p, 'rot', 0) == 0 and p.is_1to1
# like pixel_exact, but stretch to fit full canvas (so not 'exact', but still 'aligned')
def align_but_stretch(p):
return getattr(p, 'rot', 0) == 0 and p.stretch
_all_content = None
def all_content():
global _all_content
if not _all_content:
_all_content = [
Content('black', '[util] black (note: keeps running and using cpu)', manual=True),
Content('gridtest', '[util] uvw grid test', geometries=['lsdome'], manual=True),
Content('fctest', '[util] fc topology test', params={'fcconfig': fadecandy_config()}),
Content('layouttest', '[util] cartesian test (mouse)', manual=True, placement_filter=pixel_exact),
Content('binary', '[util] binary decomp', manual=True),
Content('cloud'),
Content('dontknow'),
Content('harmonics', geometries=['lsdome']),
Content('moire'),
Content('rings'),
Content('tube'),
Content('twinkle'),
Content('fft', sound_reactive=True),
Content('pixelflock', sound_reactive=True, sound_required=False, kinect_enabled=True, kinect_required=False),
Content('kinectdepth', 'kinectdepth', kinect_enabled=True,
placement_filter=align_but_stretch),
Content('screencast', 'projectm', cmdline='projectM-pulseaudio', sound_reactive=True, volume_adjust=1.5,
server_side_parameters=projectm_parameters(),
post_launch=lambda manager: projectm_control(manager, 'next'), # get off the default pattern
),
Content('stream', 'hdmi-in', manual=True, stretch_aspect=True, params={
'camera': 'FHD Capture: FHD Capture',
}),
Content('kaleidoscope', geometries=['lsdome'], placement_filter=pixel_exact, params={'scale': 2.}),
Content('kaleidoscope', geometries=['prometheus'], params={'scale': 3.2}),
Content('imgkaleidoscope', 'hearts', geometries=['lsdome'], placement_filter=pixel_exact, params={
'image': "res/img/hearts.jpg",
'scale': 1.,
'source_scale': 1.3,
'speed': .25,
}),
Content('video', 'video:chrissy_poi_zoom', geometries=['lsdome'], params={
'path': os.path.join(VIDEO_DIR, 'hayley_chrissy_fire_spinning.mp4'),
}, placement_filter=lambda p: p.name == 'poi (01-10 21:44)'),
]
_all_content.extend(load_videos())
_all_content = [c for c in _all_content if not c.geometries or settings.geometry in c.geometries]
_all_content = [c for c in _all_content if not (c.kinect_enabled and c.kinect_required) or settings.kinect]
for c in _all_content:
if c.kinect_enabled and settings.kinect and not c.placement_filter:
# when kinect used, ensure display lines up with camera
c.placement_filter = pixel_exact
assert len(set(c.name for c in _all_content)) == len(_all_content), 'content names not unique'
_all_content = dict((c.name, c) for c in _all_content)
return _all_content
def load_videos():
vids = [f.strip() for f in os.popen('find "%s" -type f' % VIDEO_DIR).readlines()]
for vid in vids:
# TODO placement restrictions? joan of arc require mirror mode?
# do special things for certain videos -- should probably make this more maintainable
args = {}
if 'knife' in vid:
args['play_mode'] = 'full'
if any(k in vid for k in ('knife', 'flood')):
args['has_audio'] = True
yield Content('video', 'video:%s' % os.path.relpath(vid, VIDEO_DIR), stretch_aspect=True, params={
'path': vid,
}, **args)
def fadecandy_config():
if settings.geometry == 'lsdome':
fcconfig = 'lsdome_%spanel.json' % settings.num_panels
elif settings.geometry == 'prometheus':
fcconfig = 'prometheus_wing.json'
return os.path.join(settings.repo_root, 'src/config/fadecandy', fcconfig)
def projectm_control(mgr, command):
interaction = {
'next': 'key r',
'toggle-lock': 'key l',
}[command]
launch.gui_interaction(mgr.content.window_id, interaction)
def projectm_parameters():
import animations
class ProjectMNextPatternAction(animations.Parameter):
def param_def(self):
return {
'name': 'next pattern',
'isAction': True,
}
def handle_input_event(self, type, val):
if type != 'press':
return
projectm_control(self.manager, 'next')
def _update_value(self, val):
pass
return [ProjectMNextPatternAction]
def game_content(rom):
try:
args = launch.launch_emulator(rom)
except:
return None
name = os.path.splitext(os.path.relpath(os.path.abspath(rom), settings.roms_path))[0]
return Content('screencast', name, cmdline=args['cmd'], params=args.get('params', {}), stretch_aspect=True, has_audio=True)
_games_content = None
def load_games(filt):
def all_roms_path_files():
for dirpath, _, filenames in os.walk(settings.roms_path):
for f in filenames:
yield os.path.join(dirpath, f)
global _games_content
if not _games_content:
_games_content = filter(None, map(game_content, all_roms_path_files()))
_games_content = dict((c.name, c) for c in _games_content)
print len(_games_content), 'roms'
if filt == 'favs':
return filter_games_favorites(_games_content)
else:
return filter_games(_games_content, filt)
def filter_games(all_games, filt):
def name_to_search_key(name):
name = os.path.split(name)[1]
name = name.split('(')[0]
words = name.lower().split()
words = [re.sub('[^a-z0-9]', '', w) for w in words]
return filter(None, words)
def match_key(query, key):
return all(any(kw.startswith(qw) for kw in key) for qw in query)
return dict((k, v) for k, v in all_games.iteritems() if match_key(name_to_search_key(filt), name_to_search_key(k)))
def filter_games_favorites(all_games):
with open(settings.rom_favorites) as f:
favs = set(os.path.splitext(g.strip())[0] for g in f.readlines())
return dict((k, v) for k, v in all_games.iteritems() if k in favs)
class Playlist(object):
def __init__(self, name, choices):
self.name = name
# a mapping of content to relative likelihood
self.choices = choices if type(choices) == type({}) else dict((c, 1.) for c in choices)
self.last_played = None
def _all_choices_except_last_played(self):
for choice in self.choices.keys():
if choice == self.last_played and len(self.choices) > 1:
continue
yield choice
def get_likelihood(self, choice):
return self.choices[choice]
# TODO reduce likelihood of previous N selections
def get_next(self):
total_likelihood = sum(self.get_likelihood(choice) for choice in self._all_choices_except_last_played())
rand = random.uniform(0, total_likelihood)
cumulative_likelihood = 0
choice = None
for ch in self._all_choices_except_last_played():
cumulative_likelihood += self.get_likelihood(ch)
if cumulative_likelihood > rand:
choice = ch
break
self.last_played = choice
return choice
def to_json(self):
return {
'name': self.name,
'items': sorted(c.name for c in self.choices.keys()),
}
def load_playlists():
base = Playlist('(almost) everything', (c for c in all_content().values() if not c.manual))
nosound = Playlist('no sound-reactive', (c for c in base.choices.keys() if not c.sound_reactive or not c.sound_required))
playlists = [base, nosound]
playlists.extend(load_playlist_files())
return dict((pl.name, pl) for pl in playlists)
def load_playlist_files():
playlist_files = os.listdir(settings.playlists_dir)
for filename in playlist_files:
name, ext = os.path.splitext(filename)
if ext != '.playlist':
continue
path = os.path.join(settings.playlists_dir, filename)
with open(path) as f:
entries = filter(None, (ln.strip() for ln in f.readlines()))
def | (entry):
parts = entry.split('|')
try:
return (all_content()[parts[0]], float(parts[1]) if len(parts) > 1 else 1.)
except KeyError:
print 'content "%s" not available for playlist "%s"' % (parts[0], name)
yield Playlist(name, dict(filter(None, map(parse_entry, entries))))
| parse_entry | identifier_name |
rcu.rs | //! Reset and clock unit
use crate::pac::RCU;
use riscv::interrupt;
use crate::time::Hertz;
use core::cmp;
/// Extension trait that sets up the `RCU` peripheral
pub trait RcuExt {
/// Configure the clocks of the `RCU` peripheral
fn configure(self) -> UnconfiguredRcu;
}
impl RcuExt for RCU {
fn configure(self) -> UnconfiguredRcu {
UnconfiguredRcu::new(self)
}
}
/// Configured RCU peripheral
pub struct Rcu {
/// Frozen clock frequencies
pub clocks: Clocks,
pub(crate) regs: RCU,
}
pub struct UnconfiguredRcu {
hxtal: Option<u32>,
sysclk: Option<u32>,
regs: RCU,
}
impl UnconfiguredRcu {
fn new(rcu: RCU) -> Self {
Self {
hxtal: None,
sysclk: None,
regs: rcu,
}
}
/// Uses an external oscillator instead of IRC8M (internal RC oscillator) as the high-speed
/// clock source. Will result in a hang if an external oscillator is not connected or it fails
/// to start.
pub fn ext_hf_clock(mut self, freq: impl Into<Hertz>) -> Self {
let freq = freq.into().0;
assert!(4_000_000 <= freq && freq <= 32_000_000);
self.hxtal = Some(freq);
self
}
/// Sets the desired frequency for the SYSCLK clock
pub fn sysclk(mut self, freq: impl Into<Hertz>) -> Self {
let freq = freq.into().0;
assert!(freq <= 108_000_000);
self.sysclk = Some(freq);
self
}
/// Freezes clock configuration, making it effective
pub fn freeze(self) -> Rcu {
const IRC8M: u32 = 8_000_000;
let target_sysclk = self.sysclk.unwrap_or(IRC8M);
let (scs_bits, use_pll) = match (self.hxtal, target_sysclk) {
(Some(freq), sysclk) if freq == sysclk => (0b01, false),
(None, sysclk) if IRC8M == sysclk => (0b00, false),
_ => (0b10, true),
};
let pllsel_bit;
let predv0_bits;
let pllmf_bits;
if use_pll {
let pllmf;
if let Some(hxtal_freq) = self.hxtal {
// Use external clock + divider
pllsel_bit = true;
let calculate_pll = |source: u32, target: u32| -> Option<(u8, u8)> {
const PLL_IN_MIN: u32 = 600_000;
let div_max = cmp::min(16, source / PLL_IN_MIN);
for d in 1..=div_max {
let pllsource = source / d;
let pllm = target / pllsource;
if pllm < 2 || pllm == 15 || pllm > 32{
continue;
}
let actual_freq = pllsource * pllm;
if actual_freq == target {
return Some((d as u8, pllm as u8));
}
}
None
};
let (d, m) = calculate_pll(hxtal_freq, target_sysclk).expect("invalid sysclk value");
predv0_bits = d - 1;
pllmf = m;
} else {
// IRC8M/2 is used as an input clock
pllsel_bit = false;
let pllsource = IRC8M / 2;
let m = target_sysclk / pllsource;
let m = cmp::max(2, cmp::min(m, 32));
assert_ne!(m, 15, "invalid sysclk value");
let actual_sysclk = pllsource * m;
assert_eq!(target_sysclk, actual_sysclk, "invalid sysclk value");
predv0_bits = 0;
pllmf = m as u8;
}
pllmf_bits = match pllmf {
2..=14 => pllmf - 2,
16..=32 => pllmf - 1,
_ => unreachable!("invalid pll multiplier"),
};
} else {
pllsel_bit = false;
predv0_bits = 0;
pllmf_bits = 0;
}
// Switch to the internal clock
let rcu = unsafe { &*crate::pac::RCU::ptr() };
rcu.ctl.modify(|_, w| w.irc8men().set_bit()); // Enable IRC8M oscillator
while rcu.ctl.read().irc8mstb().bit_is_clear() {} // Wait for oscillator to stabilize
rcu.cfg0.modify(|_, w| unsafe { w.scs().bits(0b00) }); // Switch to the internal oscillator
rcu.ctl.modify(|_, w| w.pllen().clear_bit()); // Disable PLL
// Set bus prescalers
rcu.cfg0.modify(|_, w| unsafe { w.ahbpsc().bits(0b0000) }); // CK_SYS
rcu.cfg0.modify(|_, w| unsafe { w.apb1psc().bits(0b100) }); // CK_AHB / 2
rcu.cfg0.modify(|_, w| unsafe { w.apb2psc().bits(0b000) }); // CK_AHB
let apb1_psc = 2;
let apb2_psc = 1;
if self.hxtal.is_some() {
// Enable external oscillator
rcu.ctl.modify(|_, w| w.hxtalen().set_bit());
// Wait for oscillator to stabilize
while rcu.ctl.read().hxtalstb().bit_is_clear() {}
// Select HXTAL as prescaler input source clock
rcu.cfg1.modify(|_, w| w.predv0sel().clear_bit());
// Configure the prescaler
rcu.cfg1.modify(|_, w| unsafe { w.predv0().bits(predv0_bits) });
}
if use_pll {
// Configure PLL input selector
rcu.cfg0.modify(|_, w| w.pllsel().bit(pllsel_bit));
// Configure PLL multiplier
rcu.cfg0.modify(|_, w| unsafe { w
.pllmf_4().bit(pllmf_bits & 0x10 != 0)
.pllmf_3_0().bits(pllmf_bits & 0xf)
});
// Enable PLL
rcu.ctl.modify(|_, w| w.pllen().set_bit());
// Wait for PLL to stabilize
while rcu.ctl.read().pllstb().bit_is_clear() {}
} else {
// Disable PLL
rcu.ctl.modify(|_, w| w.pllen().clear_bit());
}
// Switch to the configured clock source
rcu.cfg0.modify(|_, w| unsafe { w.scs().bits(scs_bits) });
let usbclk_valid;
if use_pll {
let pllclk = target_sysclk;
let (valid, pr) = match pllclk {
48_000_000 => (true, 0b01), // pllclk / 1
72_000_000 => (true, 0b00), // pllclk / 1.5
96_000_000 => (true, 0b11), // pllclk / 2
_ => (false, 0),
};
usbclk_valid = valid;
// Configure USB prescaler
rcu.cfg0.modify(|_, w| unsafe { w.usbfspsc().bits(pr) });
} else {
usbclk_valid = false;
}
let clocks = Clocks {
sysclk: Hertz(target_sysclk),
apb1_psc,
apb2_psc,
usbclk_valid
};
Rcu {
clocks,
regs: self.regs
}
}
}
#[derive(Copy, Clone)]
pub struct Clocks {
sysclk: Hertz,
apb1_psc: u8,
apb2_psc: u8,
usbclk_valid: bool,
}
impl Clocks {
/// Returns the system (core) frequency
pub const fn sysclk(&self) -> Hertz {
self.sysclk
}
/// Returns the frequency of the AHB
pub const fn hclk(&self) -> Hertz {
self.sysclk
}
/// Returns the frequency of the APB1
pub const fn pclk1(&self) -> Hertz {
Hertz(self.sysclk.0 / self.apb1_psc as u32)
}
/// Returns the frequency of the APB2
pub const fn pclk2(&self) -> Hertz {
Hertz(self.sysclk.0 / self.apb2_psc as u32)
}
/// Returns the frequency of the SysTick timer
pub const fn systick(&self) -> Hertz {
Hertz(self.sysclk.0 / 4)
}
/// Returns the frequency of the TIMER0 base clock
pub fn timer0(&self) -> Hertz {
let pclk2 = self.pclk2();
if self.apb2_psc == 1 {
pclk2
} else {
Hertz(pclk2.0 * 2)
}
}
/// Returns the frequency of the TIMER1..6 base clock
pub fn timerx(&self) -> Hertz {
let pclk1 = self.pclk1();
if self.apb1_psc == 1 {
pclk1
} else {
Hertz(pclk1.0 * 2)
}
}
/// Returns whether the USBCLK clock frequency is valid for the USB peripheral
pub const fn usbclk_valid(&self) -> bool {
self.usbclk_valid
}
}
macro_rules! base_freq {
($($PER:ident => $func:ident,)+) => {
$(
impl BaseFrequency for crate::pac::$PER {
#[inline(always)]
fn base_frequency(rcu: &Rcu) -> Hertz {
rcu.clocks.$func()
}
}
)+
}
}
base_freq! {
ADC0 => pclk2,
ADC1 => pclk2,
I2C0 => pclk1,
I2C1 => pclk1,
SPI0 => pclk2,
SPI1 => pclk1,
SPI2 => pclk1,
TIMER0 => timer0,
TIMER1 => timerx,
TIMER2 => timerx,
TIMER3 => timerx,
TIMER4 => timerx,
TIMER5 => timerx,
TIMER6 => timerx,
UART3 => pclk1,
UART4 => pclk1,
USART0 => pclk2,
USART1 => pclk1,
USART2 => pclk1,
}
pub(crate) mod closed_traits {
use super::Rcu;
use crate::time::Hertz;
/// Enable/disable peripheral
pub trait Enable {
fn enable(rcu: &mut Rcu);
fn disable(rcu: &mut Rcu);
}
/// Reset peripheral
pub trait Reset {
fn reset(rcu: &mut Rcu);
}
pub trait BaseFrequency {
fn base_frequency(rcu: &Rcu) -> Hertz;
}
}
pub(crate) use closed_traits::*;
macro_rules! bus_enable {
($PER:ident => ($apben:ident, $peren:ident)) => {
impl Enable for crate::pac::$PER {
#[inline(always)]
fn enable(rcu: &mut Rcu) {
interrupt::free(|_| {
rcu.regs.$apben.modify(|_, w| w.$peren().set_bit());
});
}
#[inline(always)]
fn disable(rcu: &mut Rcu) {
interrupt::free(|_| {
rcu.regs.$apben.modify(|_, w| w.$peren().clear_bit());
});
}
}
}
}
macro_rules! bus {
($($PER:ident => ($apben:ident, $apbrst:ident, $peren:ident, $perrst:ident),)+) => {
$(
bus_enable!($PER => ($apben, $peren));
impl Reset for crate::pac::$PER {
#[inline(always)]
fn reset(rcu: &mut Rcu) {
interrupt::free(|_| {
rcu.regs.$apbrst.modify(|_, w| w.$perrst().set_bit());
rcu.regs.$apbrst.modify(|_, w| w.$perrst().clear_bit());
});
}
}
)+
}
}
bus! {
ADC0 => (apb2en, apb2rst, adc0en, adc0rst),
ADC1 => (apb2en, apb2rst, adc1en, adc1rst),
AFIO => (apb2en, apb2rst, afen, afrst),
BKP => (apb1en, apb1rst, bkpien, bkpirst),
CAN0 => (apb1en, apb1rst, can0en, can0rst),
CAN1 => (apb1en, apb1rst, can1en, can1rst),
DAC => (apb1en, apb1rst, dacen, dacrst),
GPIOA => (apb2en, apb2rst, paen, parst),
GPIOB => (apb2en, apb2rst, pben, pbrst),
GPIOC => (apb2en, apb2rst, pcen, pcrst),
GPIOD => (apb2en, apb2rst, pden, pdrst),
GPIOE => (apb2en, apb2rst, peen, perst),
I2C0 => (apb1en, apb1rst, i2c0en, i2c0rst),
I2C1 => (apb1en, apb1rst, i2c1en, i2c1rst),
PMU => (apb1en, apb1rst, pmuen, pmurst),
SPI0 => (apb2en, apb2rst, spi0en, spi0rst),
SPI1 => (apb1en, apb1rst, spi1en, spi1rst),
SPI2 => (apb1en, apb1rst, spi2en, spi2rst),
TIMER0 => (apb2en, apb2rst, timer0en, timer0rst),
TIMER1 => (apb1en, apb1rst, timer1en, timer1rst),
TIMER2 => (apb1en, apb1rst, timer2en, timer2rst),
TIMER3 => (apb1en, apb1rst, timer3en, timer3rst),
TIMER4 => (apb1en, apb1rst, timer4en, timer4rst),
TIMER5 => (apb1en, apb1rst, timer5en, timer5rst),
TIMER6 => (apb1en, apb1rst, timer6en, timer6rst),
UART3 => (apb1en, apb1rst, uart3en, uart3rst),
UART4 => (apb1en, apb1rst, uart4en, uart4rst),
USART0 => (apb2en, apb2rst, usart0en, usart0rst),
USART1 => (apb1en, apb1rst, usart1en, usart1rst),
USART2 => (apb1en, apb1rst, usart2en, usart2rst),
USBFS_GLOBAL => (ahben, ahbrst, usbfsen, usbfsrst),
WWDGT => (apb1en, apb1rst, wwdgten, wwdgtrst), | bus_enable!(DMA1 => (ahben, dma1en));
bus_enable!(EXMC => (ahben, exmcen)); | }
bus_enable!(CRC => (ahben, crcen));
bus_enable!(DMA0 => (ahben, dma0en)); | random_line_split |
rcu.rs | //! Reset and clock unit
use crate::pac::RCU;
use riscv::interrupt;
use crate::time::Hertz;
use core::cmp;
/// Extension trait that sets up the `RCU` peripheral
pub trait RcuExt {
/// Configure the clocks of the `RCU` peripheral
fn configure(self) -> UnconfiguredRcu;
}
impl RcuExt for RCU {
fn configure(self) -> UnconfiguredRcu {
UnconfiguredRcu::new(self)
}
}
/// Configured RCU peripheral
pub struct Rcu {
/// Frozen clock frequencies
pub clocks: Clocks,
pub(crate) regs: RCU,
}
pub struct UnconfiguredRcu {
hxtal: Option<u32>,
sysclk: Option<u32>,
regs: RCU,
}
impl UnconfiguredRcu {
fn new(rcu: RCU) -> Self {
Self {
hxtal: None,
sysclk: None,
regs: rcu,
}
}
/// Uses an external oscillator instead of IRC8M (internal RC oscillator) as the high-speed
/// clock source. Will result in a hang if an external oscillator is not connected or it fails
/// to start.
pub fn ext_hf_clock(mut self, freq: impl Into<Hertz>) -> Self {
let freq = freq.into().0;
assert!(4_000_000 <= freq && freq <= 32_000_000);
self.hxtal = Some(freq);
self
}
/// Sets the desired frequency for the SYSCLK clock
pub fn sysclk(mut self, freq: impl Into<Hertz>) -> Self {
let freq = freq.into().0;
assert!(freq <= 108_000_000);
self.sysclk = Some(freq);
self
}
/// Freezes clock configuration, making it effective
pub fn freeze(self) -> Rcu {
const IRC8M: u32 = 8_000_000;
let target_sysclk = self.sysclk.unwrap_or(IRC8M);
let (scs_bits, use_pll) = match (self.hxtal, target_sysclk) {
(Some(freq), sysclk) if freq == sysclk => (0b01, false),
(None, sysclk) if IRC8M == sysclk => (0b00, false),
_ => (0b10, true),
};
let pllsel_bit;
let predv0_bits;
let pllmf_bits;
if use_pll {
let pllmf;
if let Some(hxtal_freq) = self.hxtal {
// Use external clock + divider
pllsel_bit = true;
let calculate_pll = |source: u32, target: u32| -> Option<(u8, u8)> {
const PLL_IN_MIN: u32 = 600_000;
let div_max = cmp::min(16, source / PLL_IN_MIN);
for d in 1..=div_max {
let pllsource = source / d;
let pllm = target / pllsource;
if pllm < 2 || pllm == 15 || pllm > 32{
continue;
}
let actual_freq = pllsource * pllm;
if actual_freq == target {
return Some((d as u8, pllm as u8));
}
}
None
};
let (d, m) = calculate_pll(hxtal_freq, target_sysclk).expect("invalid sysclk value");
predv0_bits = d - 1;
pllmf = m;
} else {
// IRC8M/2 is used as an input clock
pllsel_bit = false;
let pllsource = IRC8M / 2;
let m = target_sysclk / pllsource;
let m = cmp::max(2, cmp::min(m, 32));
assert_ne!(m, 15, "invalid sysclk value");
let actual_sysclk = pllsource * m;
assert_eq!(target_sysclk, actual_sysclk, "invalid sysclk value");
predv0_bits = 0;
pllmf = m as u8;
}
pllmf_bits = match pllmf {
2..=14 => pllmf - 2,
16..=32 => pllmf - 1,
_ => unreachable!("invalid pll multiplier"),
};
} else {
pllsel_bit = false;
predv0_bits = 0;
pllmf_bits = 0;
}
// Switch to the internal clock
let rcu = unsafe { &*crate::pac::RCU::ptr() };
rcu.ctl.modify(|_, w| w.irc8men().set_bit()); // Enable IRC8M oscillator
while rcu.ctl.read().irc8mstb().bit_is_clear() {} // Wait for oscillator to stabilize
rcu.cfg0.modify(|_, w| unsafe { w.scs().bits(0b00) }); // Switch to the internal oscillator
rcu.ctl.modify(|_, w| w.pllen().clear_bit()); // Disable PLL
// Set bus prescalers
rcu.cfg0.modify(|_, w| unsafe { w.ahbpsc().bits(0b0000) }); // CK_SYS
rcu.cfg0.modify(|_, w| unsafe { w.apb1psc().bits(0b100) }); // CK_AHB / 2
rcu.cfg0.modify(|_, w| unsafe { w.apb2psc().bits(0b000) }); // CK_AHB
let apb1_psc = 2;
let apb2_psc = 1;
if self.hxtal.is_some() {
// Enable external oscillator
rcu.ctl.modify(|_, w| w.hxtalen().set_bit());
// Wait for oscillator to stabilize
while rcu.ctl.read().hxtalstb().bit_is_clear() {}
// Select HXTAL as prescaler input source clock
rcu.cfg1.modify(|_, w| w.predv0sel().clear_bit());
// Configure the prescaler
rcu.cfg1.modify(|_, w| unsafe { w.predv0().bits(predv0_bits) });
}
if use_pll {
// Configure PLL input selector
rcu.cfg0.modify(|_, w| w.pllsel().bit(pllsel_bit));
// Configure PLL multiplier
rcu.cfg0.modify(|_, w| unsafe { w
.pllmf_4().bit(pllmf_bits & 0x10 != 0)
.pllmf_3_0().bits(pllmf_bits & 0xf)
});
// Enable PLL
rcu.ctl.modify(|_, w| w.pllen().set_bit());
// Wait for PLL to stabilize
while rcu.ctl.read().pllstb().bit_is_clear() {}
} else {
// Disable PLL
rcu.ctl.modify(|_, w| w.pllen().clear_bit());
}
// Switch to the configured clock source
rcu.cfg0.modify(|_, w| unsafe { w.scs().bits(scs_bits) });
let usbclk_valid;
if use_pll {
let pllclk = target_sysclk;
let (valid, pr) = match pllclk {
48_000_000 => (true, 0b01), // pllclk / 1
72_000_000 => (true, 0b00), // pllclk / 1.5
96_000_000 => (true, 0b11), // pllclk / 2
_ => (false, 0),
};
usbclk_valid = valid;
// Configure USB prescaler
rcu.cfg0.modify(|_, w| unsafe { w.usbfspsc().bits(pr) });
} else {
usbclk_valid = false;
}
let clocks = Clocks {
sysclk: Hertz(target_sysclk),
apb1_psc,
apb2_psc,
usbclk_valid
};
Rcu {
clocks,
regs: self.regs
}
}
}
#[derive(Copy, Clone)]
pub struct Clocks {
sysclk: Hertz,
apb1_psc: u8,
apb2_psc: u8,
usbclk_valid: bool,
}
impl Clocks {
/// Returns the system (core) frequency
pub const fn sysclk(&self) -> Hertz {
self.sysclk
}
/// Returns the frequency of the AHB
pub const fn hclk(&self) -> Hertz {
self.sysclk
}
/// Returns the frequency of the APB1
pub const fn pclk1(&self) -> Hertz {
Hertz(self.sysclk.0 / self.apb1_psc as u32)
}
/// Returns the frequency of the APB2
pub const fn pclk2(&self) -> Hertz {
Hertz(self.sysclk.0 / self.apb2_psc as u32)
}
/// Returns the frequency of the SysTick timer
pub const fn systick(&self) -> Hertz {
Hertz(self.sysclk.0 / 4)
}
/// Returns the frequency of the TIMER0 base clock
pub fn timer0(&self) -> Hertz |
/// Returns the frequency of the TIMER1..6 base clock
pub fn timerx(&self) -> Hertz {
let pclk1 = self.pclk1();
if self.apb1_psc == 1 {
pclk1
} else {
Hertz(pclk1.0 * 2)
}
}
/// Returns whether the USBCLK clock frequency is valid for the USB peripheral
pub const fn usbclk_valid(&self) -> bool {
self.usbclk_valid
}
}
macro_rules! base_freq {
($($PER:ident => $func:ident,)+) => {
$(
impl BaseFrequency for crate::pac::$PER {
#[inline(always)]
fn base_frequency(rcu: &Rcu) -> Hertz {
rcu.clocks.$func()
}
}
)+
}
}
base_freq! {
ADC0 => pclk2,
ADC1 => pclk2,
I2C0 => pclk1,
I2C1 => pclk1,
SPI0 => pclk2,
SPI1 => pclk1,
SPI2 => pclk1,
TIMER0 => timer0,
TIMER1 => timerx,
TIMER2 => timerx,
TIMER3 => timerx,
TIMER4 => timerx,
TIMER5 => timerx,
TIMER6 => timerx,
UART3 => pclk1,
UART4 => pclk1,
USART0 => pclk2,
USART1 => pclk1,
USART2 => pclk1,
}
pub(crate) mod closed_traits {
use super::Rcu;
use crate::time::Hertz;
/// Enable/disable peripheral
pub trait Enable {
fn enable(rcu: &mut Rcu);
fn disable(rcu: &mut Rcu);
}
/// Reset peripheral
pub trait Reset {
fn reset(rcu: &mut Rcu);
}
pub trait BaseFrequency {
fn base_frequency(rcu: &Rcu) -> Hertz;
}
}
pub(crate) use closed_traits::*;
macro_rules! bus_enable {
($PER:ident => ($apben:ident, $peren:ident)) => {
impl Enable for crate::pac::$PER {
#[inline(always)]
fn enable(rcu: &mut Rcu) {
interrupt::free(|_| {
rcu.regs.$apben.modify(|_, w| w.$peren().set_bit());
});
}
#[inline(always)]
fn disable(rcu: &mut Rcu) {
interrupt::free(|_| {
rcu.regs.$apben.modify(|_, w| w.$peren().clear_bit());
});
}
}
}
}
macro_rules! bus {
($($PER:ident => ($apben:ident, $apbrst:ident, $peren:ident, $perrst:ident),)+) => {
$(
bus_enable!($PER => ($apben, $peren));
impl Reset for crate::pac::$PER {
#[inline(always)]
fn reset(rcu: &mut Rcu) {
interrupt::free(|_| {
rcu.regs.$apbrst.modify(|_, w| w.$perrst().set_bit());
rcu.regs.$apbrst.modify(|_, w| w.$perrst().clear_bit());
});
}
}
)+
}
}
bus! {
ADC0 => (apb2en, apb2rst, adc0en, adc0rst),
ADC1 => (apb2en, apb2rst, adc1en, adc1rst),
AFIO => (apb2en, apb2rst, afen, afrst),
BKP => (apb1en, apb1rst, bkpien, bkpirst),
CAN0 => (apb1en, apb1rst, can0en, can0rst),
CAN1 => (apb1en, apb1rst, can1en, can1rst),
DAC => (apb1en, apb1rst, dacen, dacrst),
GPIOA => (apb2en, apb2rst, paen, parst),
GPIOB => (apb2en, apb2rst, pben, pbrst),
GPIOC => (apb2en, apb2rst, pcen, pcrst),
GPIOD => (apb2en, apb2rst, pden, pdrst),
GPIOE => (apb2en, apb2rst, peen, perst),
I2C0 => (apb1en, apb1rst, i2c0en, i2c0rst),
I2C1 => (apb1en, apb1rst, i2c1en, i2c1rst),
PMU => (apb1en, apb1rst, pmuen, pmurst),
SPI0 => (apb2en, apb2rst, spi0en, spi0rst),
SPI1 => (apb1en, apb1rst, spi1en, spi1rst),
SPI2 => (apb1en, apb1rst, spi2en, spi2rst),
TIMER0 => (apb2en, apb2rst, timer0en, timer0rst),
TIMER1 => (apb1en, apb1rst, timer1en, timer1rst),
TIMER2 => (apb1en, apb1rst, timer2en, timer2rst),
TIMER3 => (apb1en, apb1rst, timer3en, timer3rst),
TIMER4 => (apb1en, apb1rst, timer4en, timer4rst),
TIMER5 => (apb1en, apb1rst, timer5en, timer5rst),
TIMER6 => (apb1en, apb1rst, timer6en, timer6rst),
UART3 => (apb1en, apb1rst, uart3en, uart3rst),
UART4 => (apb1en, apb1rst, uart4en, uart4rst),
USART0 => (apb2en, apb2rst, usart0en, usart0rst),
USART1 => (apb1en, apb1rst, usart1en, usart1rst),
USART2 => (apb1en, apb1rst, usart2en, usart2rst),
USBFS_GLOBAL => (ahben, ahbrst, usbfsen, usbfsrst),
WWDGT => (apb1en, apb1rst, wwdgten, wwdgtrst),
}
bus_enable!(CRC => (ahben, crcen));
bus_enable!(DMA0 => (ahben, dma0en));
bus_enable!(DMA1 => (ahben, dma1en));
bus_enable!(EXMC => (ahben, exmcen));
| {
let pclk2 = self.pclk2();
if self.apb2_psc == 1 {
pclk2
} else {
Hertz(pclk2.0 * 2)
}
} | identifier_body |
rcu.rs | //! Reset and clock unit
use crate::pac::RCU;
use riscv::interrupt;
use crate::time::Hertz;
use core::cmp;
/// Extension trait that sets up the `RCU` peripheral
pub trait RcuExt {
/// Configure the clocks of the `RCU` peripheral
fn configure(self) -> UnconfiguredRcu;
}
impl RcuExt for RCU {
fn configure(self) -> UnconfiguredRcu {
UnconfiguredRcu::new(self)
}
}
/// Configured RCU peripheral
pub struct Rcu {
/// Frozen clock frequencies
pub clocks: Clocks,
pub(crate) regs: RCU,
}
pub struct UnconfiguredRcu {
hxtal: Option<u32>,
sysclk: Option<u32>,
regs: RCU,
}
impl UnconfiguredRcu {
fn new(rcu: RCU) -> Self {
Self {
hxtal: None,
sysclk: None,
regs: rcu,
}
}
/// Uses an external oscillator instead of IRC8M (internal RC oscillator) as the high-speed
/// clock source. Will result in a hang if an external oscillator is not connected or it fails
/// to start.
pub fn ext_hf_clock(mut self, freq: impl Into<Hertz>) -> Self {
let freq = freq.into().0;
assert!(4_000_000 <= freq && freq <= 32_000_000);
self.hxtal = Some(freq);
self
}
/// Sets the desired frequency for the SYSCLK clock
pub fn sysclk(mut self, freq: impl Into<Hertz>) -> Self {
let freq = freq.into().0;
assert!(freq <= 108_000_000);
self.sysclk = Some(freq);
self
}
/// Freezes clock configuration, making it effective
pub fn freeze(self) -> Rcu {
const IRC8M: u32 = 8_000_000;
let target_sysclk = self.sysclk.unwrap_or(IRC8M);
let (scs_bits, use_pll) = match (self.hxtal, target_sysclk) {
(Some(freq), sysclk) if freq == sysclk => (0b01, false),
(None, sysclk) if IRC8M == sysclk => (0b00, false),
_ => (0b10, true),
};
let pllsel_bit;
let predv0_bits;
let pllmf_bits;
if use_pll {
let pllmf;
if let Some(hxtal_freq) = self.hxtal {
// Use external clock + divider
pllsel_bit = true;
let calculate_pll = |source: u32, target: u32| -> Option<(u8, u8)> {
const PLL_IN_MIN: u32 = 600_000;
let div_max = cmp::min(16, source / PLL_IN_MIN);
for d in 1..=div_max {
let pllsource = source / d;
let pllm = target / pllsource;
if pllm < 2 || pllm == 15 || pllm > 32{
continue;
}
let actual_freq = pllsource * pllm;
if actual_freq == target {
return Some((d as u8, pllm as u8));
}
}
None
};
let (d, m) = calculate_pll(hxtal_freq, target_sysclk).expect("invalid sysclk value");
predv0_bits = d - 1;
pllmf = m;
} else {
// IRC8M/2 is used as an input clock
pllsel_bit = false;
let pllsource = IRC8M / 2;
let m = target_sysclk / pllsource;
let m = cmp::max(2, cmp::min(m, 32));
assert_ne!(m, 15, "invalid sysclk value");
let actual_sysclk = pllsource * m;
assert_eq!(target_sysclk, actual_sysclk, "invalid sysclk value");
predv0_bits = 0;
pllmf = m as u8;
}
pllmf_bits = match pllmf {
2..=14 => pllmf - 2,
16..=32 => pllmf - 1,
_ => unreachable!("invalid pll multiplier"),
};
} else {
pllsel_bit = false;
predv0_bits = 0;
pllmf_bits = 0;
}
// Switch to the internal clock
let rcu = unsafe { &*crate::pac::RCU::ptr() };
rcu.ctl.modify(|_, w| w.irc8men().set_bit()); // Enable IRC8M oscillator
while rcu.ctl.read().irc8mstb().bit_is_clear() {} // Wait for oscillator to stabilize
rcu.cfg0.modify(|_, w| unsafe { w.scs().bits(0b00) }); // Switch to the internal oscillator
rcu.ctl.modify(|_, w| w.pllen().clear_bit()); // Disable PLL
// Set bus prescalers
rcu.cfg0.modify(|_, w| unsafe { w.ahbpsc().bits(0b0000) }); // CK_SYS
rcu.cfg0.modify(|_, w| unsafe { w.apb1psc().bits(0b100) }); // CK_AHB / 2
rcu.cfg0.modify(|_, w| unsafe { w.apb2psc().bits(0b000) }); // CK_AHB
let apb1_psc = 2;
let apb2_psc = 1;
if self.hxtal.is_some() {
// Enable external oscillator
rcu.ctl.modify(|_, w| w.hxtalen().set_bit());
// Wait for oscillator to stabilize
while rcu.ctl.read().hxtalstb().bit_is_clear() {}
// Select HXTAL as prescaler input source clock
rcu.cfg1.modify(|_, w| w.predv0sel().clear_bit());
// Configure the prescaler
rcu.cfg1.modify(|_, w| unsafe { w.predv0().bits(predv0_bits) });
}
if use_pll {
// Configure PLL input selector
rcu.cfg0.modify(|_, w| w.pllsel().bit(pllsel_bit));
// Configure PLL multiplier
rcu.cfg0.modify(|_, w| unsafe { w
.pllmf_4().bit(pllmf_bits & 0x10 != 0)
.pllmf_3_0().bits(pllmf_bits & 0xf)
});
// Enable PLL
rcu.ctl.modify(|_, w| w.pllen().set_bit());
// Wait for PLL to stabilize
while rcu.ctl.read().pllstb().bit_is_clear() {}
} else {
// Disable PLL
rcu.ctl.modify(|_, w| w.pllen().clear_bit());
}
// Switch to the configured clock source
rcu.cfg0.modify(|_, w| unsafe { w.scs().bits(scs_bits) });
let usbclk_valid;
if use_pll {
let pllclk = target_sysclk;
let (valid, pr) = match pllclk {
48_000_000 => (true, 0b01), // pllclk / 1
72_000_000 => (true, 0b00), // pllclk / 1.5
96_000_000 => (true, 0b11), // pllclk / 2
_ => (false, 0),
};
usbclk_valid = valid;
// Configure USB prescaler
rcu.cfg0.modify(|_, w| unsafe { w.usbfspsc().bits(pr) });
} else {
usbclk_valid = false;
}
let clocks = Clocks {
sysclk: Hertz(target_sysclk),
apb1_psc,
apb2_psc,
usbclk_valid
};
Rcu {
clocks,
regs: self.regs
}
}
}
#[derive(Copy, Clone)]
pub struct Clocks {
sysclk: Hertz,
apb1_psc: u8,
apb2_psc: u8,
usbclk_valid: bool,
}
impl Clocks {
/// Returns the system (core) frequency
pub const fn sysclk(&self) -> Hertz {
self.sysclk
}
/// Returns the frequency of the AHB
pub const fn hclk(&self) -> Hertz {
self.sysclk
}
/// Returns the frequency of the APB1
pub const fn pclk1(&self) -> Hertz {
Hertz(self.sysclk.0 / self.apb1_psc as u32)
}
/// Returns the frequency of the APB2
pub const fn pclk2(&self) -> Hertz {
Hertz(self.sysclk.0 / self.apb2_psc as u32)
}
/// Returns the frequency of the SysTick timer
pub const fn | (&self) -> Hertz {
Hertz(self.sysclk.0 / 4)
}
/// Returns the frequency of the TIMER0 base clock
pub fn timer0(&self) -> Hertz {
let pclk2 = self.pclk2();
if self.apb2_psc == 1 {
pclk2
} else {
Hertz(pclk2.0 * 2)
}
}
/// Returns the frequency of the TIMER1..6 base clock
pub fn timerx(&self) -> Hertz {
let pclk1 = self.pclk1();
if self.apb1_psc == 1 {
pclk1
} else {
Hertz(pclk1.0 * 2)
}
}
/// Returns whether the USBCLK clock frequency is valid for the USB peripheral
pub const fn usbclk_valid(&self) -> bool {
self.usbclk_valid
}
}
macro_rules! base_freq {
($($PER:ident => $func:ident,)+) => {
$(
impl BaseFrequency for crate::pac::$PER {
#[inline(always)]
fn base_frequency(rcu: &Rcu) -> Hertz {
rcu.clocks.$func()
}
}
)+
}
}
base_freq! {
ADC0 => pclk2,
ADC1 => pclk2,
I2C0 => pclk1,
I2C1 => pclk1,
SPI0 => pclk2,
SPI1 => pclk1,
SPI2 => pclk1,
TIMER0 => timer0,
TIMER1 => timerx,
TIMER2 => timerx,
TIMER3 => timerx,
TIMER4 => timerx,
TIMER5 => timerx,
TIMER6 => timerx,
UART3 => pclk1,
UART4 => pclk1,
USART0 => pclk2,
USART1 => pclk1,
USART2 => pclk1,
}
pub(crate) mod closed_traits {
use super::Rcu;
use crate::time::Hertz;
/// Enable/disable peripheral
pub trait Enable {
fn enable(rcu: &mut Rcu);
fn disable(rcu: &mut Rcu);
}
/// Reset peripheral
pub trait Reset {
fn reset(rcu: &mut Rcu);
}
pub trait BaseFrequency {
fn base_frequency(rcu: &Rcu) -> Hertz;
}
}
pub(crate) use closed_traits::*;
macro_rules! bus_enable {
($PER:ident => ($apben:ident, $peren:ident)) => {
impl Enable for crate::pac::$PER {
#[inline(always)]
fn enable(rcu: &mut Rcu) {
interrupt::free(|_| {
rcu.regs.$apben.modify(|_, w| w.$peren().set_bit());
});
}
#[inline(always)]
fn disable(rcu: &mut Rcu) {
interrupt::free(|_| {
rcu.regs.$apben.modify(|_, w| w.$peren().clear_bit());
});
}
}
}
}
macro_rules! bus {
($($PER:ident => ($apben:ident, $apbrst:ident, $peren:ident, $perrst:ident),)+) => {
$(
bus_enable!($PER => ($apben, $peren));
impl Reset for crate::pac::$PER {
#[inline(always)]
fn reset(rcu: &mut Rcu) {
interrupt::free(|_| {
rcu.regs.$apbrst.modify(|_, w| w.$perrst().set_bit());
rcu.regs.$apbrst.modify(|_, w| w.$perrst().clear_bit());
});
}
}
)+
}
}
bus! {
ADC0 => (apb2en, apb2rst, adc0en, adc0rst),
ADC1 => (apb2en, apb2rst, adc1en, adc1rst),
AFIO => (apb2en, apb2rst, afen, afrst),
BKP => (apb1en, apb1rst, bkpien, bkpirst),
CAN0 => (apb1en, apb1rst, can0en, can0rst),
CAN1 => (apb1en, apb1rst, can1en, can1rst),
DAC => (apb1en, apb1rst, dacen, dacrst),
GPIOA => (apb2en, apb2rst, paen, parst),
GPIOB => (apb2en, apb2rst, pben, pbrst),
GPIOC => (apb2en, apb2rst, pcen, pcrst),
GPIOD => (apb2en, apb2rst, pden, pdrst),
GPIOE => (apb2en, apb2rst, peen, perst),
I2C0 => (apb1en, apb1rst, i2c0en, i2c0rst),
I2C1 => (apb1en, apb1rst, i2c1en, i2c1rst),
PMU => (apb1en, apb1rst, pmuen, pmurst),
SPI0 => (apb2en, apb2rst, spi0en, spi0rst),
SPI1 => (apb1en, apb1rst, spi1en, spi1rst),
SPI2 => (apb1en, apb1rst, spi2en, spi2rst),
TIMER0 => (apb2en, apb2rst, timer0en, timer0rst),
TIMER1 => (apb1en, apb1rst, timer1en, timer1rst),
TIMER2 => (apb1en, apb1rst, timer2en, timer2rst),
TIMER3 => (apb1en, apb1rst, timer3en, timer3rst),
TIMER4 => (apb1en, apb1rst, timer4en, timer4rst),
TIMER5 => (apb1en, apb1rst, timer5en, timer5rst),
TIMER6 => (apb1en, apb1rst, timer6en, timer6rst),
UART3 => (apb1en, apb1rst, uart3en, uart3rst),
UART4 => (apb1en, apb1rst, uart4en, uart4rst),
USART0 => (apb2en, apb2rst, usart0en, usart0rst),
USART1 => (apb1en, apb1rst, usart1en, usart1rst),
USART2 => (apb1en, apb1rst, usart2en, usart2rst),
USBFS_GLOBAL => (ahben, ahbrst, usbfsen, usbfsrst),
WWDGT => (apb1en, apb1rst, wwdgten, wwdgtrst),
}
bus_enable!(CRC => (ahben, crcen));
bus_enable!(DMA0 => (ahben, dma0en));
bus_enable!(DMA1 => (ahben, dma1en));
bus_enable!(EXMC => (ahben, exmcen));
| systick | identifier_name |
rcu.rs | //! Reset and clock unit
use crate::pac::RCU;
use riscv::interrupt;
use crate::time::Hertz;
use core::cmp;
/// Extension trait that sets up the `RCU` peripheral
pub trait RcuExt {
/// Configure the clocks of the `RCU` peripheral
fn configure(self) -> UnconfiguredRcu;
}
impl RcuExt for RCU {
fn configure(self) -> UnconfiguredRcu {
UnconfiguredRcu::new(self)
}
}
/// Configured RCU peripheral
pub struct Rcu {
/// Frozen clock frequencies
pub clocks: Clocks,
pub(crate) regs: RCU,
}
pub struct UnconfiguredRcu {
hxtal: Option<u32>,
sysclk: Option<u32>,
regs: RCU,
}
impl UnconfiguredRcu {
fn new(rcu: RCU) -> Self {
Self {
hxtal: None,
sysclk: None,
regs: rcu,
}
}
/// Uses an external oscillator instead of IRC8M (internal RC oscillator) as the high-speed
/// clock source. Will result in a hang if an external oscillator is not connected or it fails
/// to start.
pub fn ext_hf_clock(mut self, freq: impl Into<Hertz>) -> Self {
let freq = freq.into().0;
assert!(4_000_000 <= freq && freq <= 32_000_000);
self.hxtal = Some(freq);
self
}
/// Sets the desired frequency for the SYSCLK clock
pub fn sysclk(mut self, freq: impl Into<Hertz>) -> Self {
let freq = freq.into().0;
assert!(freq <= 108_000_000);
self.sysclk = Some(freq);
self
}
/// Freezes clock configuration, making it effective
pub fn freeze(self) -> Rcu {
const IRC8M: u32 = 8_000_000;
let target_sysclk = self.sysclk.unwrap_or(IRC8M);
let (scs_bits, use_pll) = match (self.hxtal, target_sysclk) {
(Some(freq), sysclk) if freq == sysclk => (0b01, false),
(None, sysclk) if IRC8M == sysclk => (0b00, false),
_ => (0b10, true),
};
let pllsel_bit;
let predv0_bits;
let pllmf_bits;
if use_pll | else {
pllsel_bit = false;
predv0_bits = 0;
pllmf_bits = 0;
}
// Switch to the internal clock
let rcu = unsafe { &*crate::pac::RCU::ptr() };
rcu.ctl.modify(|_, w| w.irc8men().set_bit()); // Enable IRC8M oscillator
while rcu.ctl.read().irc8mstb().bit_is_clear() {} // Wait for oscillator to stabilize
rcu.cfg0.modify(|_, w| unsafe { w.scs().bits(0b00) }); // Switch to the internal oscillator
rcu.ctl.modify(|_, w| w.pllen().clear_bit()); // Disable PLL
// Set bus prescalers
rcu.cfg0.modify(|_, w| unsafe { w.ahbpsc().bits(0b0000) }); // CK_SYS
rcu.cfg0.modify(|_, w| unsafe { w.apb1psc().bits(0b100) }); // CK_AHB / 2
rcu.cfg0.modify(|_, w| unsafe { w.apb2psc().bits(0b000) }); // CK_AHB
let apb1_psc = 2;
let apb2_psc = 1;
if self.hxtal.is_some() {
// Enable external oscillator
rcu.ctl.modify(|_, w| w.hxtalen().set_bit());
// Wait for oscillator to stabilize
while rcu.ctl.read().hxtalstb().bit_is_clear() {}
// Select HXTAL as prescaler input source clock
rcu.cfg1.modify(|_, w| w.predv0sel().clear_bit());
// Configure the prescaler
rcu.cfg1.modify(|_, w| unsafe { w.predv0().bits(predv0_bits) });
}
if use_pll {
// Configure PLL input selector
rcu.cfg0.modify(|_, w| w.pllsel().bit(pllsel_bit));
// Configure PLL multiplier
rcu.cfg0.modify(|_, w| unsafe { w
.pllmf_4().bit(pllmf_bits & 0x10 != 0)
.pllmf_3_0().bits(pllmf_bits & 0xf)
});
// Enable PLL
rcu.ctl.modify(|_, w| w.pllen().set_bit());
// Wait for PLL to stabilize
while rcu.ctl.read().pllstb().bit_is_clear() {}
} else {
// Disable PLL
rcu.ctl.modify(|_, w| w.pllen().clear_bit());
}
// Switch to the configured clock source
rcu.cfg0.modify(|_, w| unsafe { w.scs().bits(scs_bits) });
let usbclk_valid;
if use_pll {
let pllclk = target_sysclk;
let (valid, pr) = match pllclk {
48_000_000 => (true, 0b01), // pllclk / 1
72_000_000 => (true, 0b00), // pllclk / 1.5
96_000_000 => (true, 0b11), // pllclk / 2
_ => (false, 0),
};
usbclk_valid = valid;
// Configure USB prescaler
rcu.cfg0.modify(|_, w| unsafe { w.usbfspsc().bits(pr) });
} else {
usbclk_valid = false;
}
let clocks = Clocks {
sysclk: Hertz(target_sysclk),
apb1_psc,
apb2_psc,
usbclk_valid
};
Rcu {
clocks,
regs: self.regs
}
}
}
#[derive(Copy, Clone)]
pub struct Clocks {
sysclk: Hertz,
apb1_psc: u8,
apb2_psc: u8,
usbclk_valid: bool,
}
impl Clocks {
/// Returns the system (core) frequency
pub const fn sysclk(&self) -> Hertz {
self.sysclk
}
/// Returns the frequency of the AHB
pub const fn hclk(&self) -> Hertz {
self.sysclk
}
/// Returns the frequency of the APB1
pub const fn pclk1(&self) -> Hertz {
Hertz(self.sysclk.0 / self.apb1_psc as u32)
}
/// Returns the frequency of the APB2
pub const fn pclk2(&self) -> Hertz {
Hertz(self.sysclk.0 / self.apb2_psc as u32)
}
/// Returns the frequency of the SysTick timer
pub const fn systick(&self) -> Hertz {
Hertz(self.sysclk.0 / 4)
}
/// Returns the frequency of the TIMER0 base clock
pub fn timer0(&self) -> Hertz {
let pclk2 = self.pclk2();
if self.apb2_psc == 1 {
pclk2
} else {
Hertz(pclk2.0 * 2)
}
}
/// Returns the frequency of the TIMER1..6 base clock
pub fn timerx(&self) -> Hertz {
let pclk1 = self.pclk1();
if self.apb1_psc == 1 {
pclk1
} else {
Hertz(pclk1.0 * 2)
}
}
/// Returns whether the USBCLK clock frequency is valid for the USB peripheral
pub const fn usbclk_valid(&self) -> bool {
self.usbclk_valid
}
}
macro_rules! base_freq {
($($PER:ident => $func:ident,)+) => {
$(
impl BaseFrequency for crate::pac::$PER {
#[inline(always)]
fn base_frequency(rcu: &Rcu) -> Hertz {
rcu.clocks.$func()
}
}
)+
}
}
base_freq! {
ADC0 => pclk2,
ADC1 => pclk2,
I2C0 => pclk1,
I2C1 => pclk1,
SPI0 => pclk2,
SPI1 => pclk1,
SPI2 => pclk1,
TIMER0 => timer0,
TIMER1 => timerx,
TIMER2 => timerx,
TIMER3 => timerx,
TIMER4 => timerx,
TIMER5 => timerx,
TIMER6 => timerx,
UART3 => pclk1,
UART4 => pclk1,
USART0 => pclk2,
USART1 => pclk1,
USART2 => pclk1,
}
pub(crate) mod closed_traits {
use super::Rcu;
use crate::time::Hertz;
/// Enable/disable peripheral
pub trait Enable {
fn enable(rcu: &mut Rcu);
fn disable(rcu: &mut Rcu);
}
/// Reset peripheral
pub trait Reset {
fn reset(rcu: &mut Rcu);
}
pub trait BaseFrequency {
fn base_frequency(rcu: &Rcu) -> Hertz;
}
}
pub(crate) use closed_traits::*;
macro_rules! bus_enable {
($PER:ident => ($apben:ident, $peren:ident)) => {
impl Enable for crate::pac::$PER {
#[inline(always)]
fn enable(rcu: &mut Rcu) {
interrupt::free(|_| {
rcu.regs.$apben.modify(|_, w| w.$peren().set_bit());
});
}
#[inline(always)]
fn disable(rcu: &mut Rcu) {
interrupt::free(|_| {
rcu.regs.$apben.modify(|_, w| w.$peren().clear_bit());
});
}
}
}
}
macro_rules! bus {
($($PER:ident => ($apben:ident, $apbrst:ident, $peren:ident, $perrst:ident),)+) => {
$(
bus_enable!($PER => ($apben, $peren));
impl Reset for crate::pac::$PER {
#[inline(always)]
fn reset(rcu: &mut Rcu) {
interrupt::free(|_| {
rcu.regs.$apbrst.modify(|_, w| w.$perrst().set_bit());
rcu.regs.$apbrst.modify(|_, w| w.$perrst().clear_bit());
});
}
}
)+
}
}
bus! {
ADC0 => (apb2en, apb2rst, adc0en, adc0rst),
ADC1 => (apb2en, apb2rst, adc1en, adc1rst),
AFIO => (apb2en, apb2rst, afen, afrst),
BKP => (apb1en, apb1rst, bkpien, bkpirst),
CAN0 => (apb1en, apb1rst, can0en, can0rst),
CAN1 => (apb1en, apb1rst, can1en, can1rst),
DAC => (apb1en, apb1rst, dacen, dacrst),
GPIOA => (apb2en, apb2rst, paen, parst),
GPIOB => (apb2en, apb2rst, pben, pbrst),
GPIOC => (apb2en, apb2rst, pcen, pcrst),
GPIOD => (apb2en, apb2rst, pden, pdrst),
GPIOE => (apb2en, apb2rst, peen, perst),
I2C0 => (apb1en, apb1rst, i2c0en, i2c0rst),
I2C1 => (apb1en, apb1rst, i2c1en, i2c1rst),
PMU => (apb1en, apb1rst, pmuen, pmurst),
SPI0 => (apb2en, apb2rst, spi0en, spi0rst),
SPI1 => (apb1en, apb1rst, spi1en, spi1rst),
SPI2 => (apb1en, apb1rst, spi2en, spi2rst),
TIMER0 => (apb2en, apb2rst, timer0en, timer0rst),
TIMER1 => (apb1en, apb1rst, timer1en, timer1rst),
TIMER2 => (apb1en, apb1rst, timer2en, timer2rst),
TIMER3 => (apb1en, apb1rst, timer3en, timer3rst),
TIMER4 => (apb1en, apb1rst, timer4en, timer4rst),
TIMER5 => (apb1en, apb1rst, timer5en, timer5rst),
TIMER6 => (apb1en, apb1rst, timer6en, timer6rst),
UART3 => (apb1en, apb1rst, uart3en, uart3rst),
UART4 => (apb1en, apb1rst, uart4en, uart4rst),
USART0 => (apb2en, apb2rst, usart0en, usart0rst),
USART1 => (apb1en, apb1rst, usart1en, usart1rst),
USART2 => (apb1en, apb1rst, usart2en, usart2rst),
USBFS_GLOBAL => (ahben, ahbrst, usbfsen, usbfsrst),
WWDGT => (apb1en, apb1rst, wwdgten, wwdgtrst),
}
bus_enable!(CRC => (ahben, crcen));
bus_enable!(DMA0 => (ahben, dma0en));
bus_enable!(DMA1 => (ahben, dma1en));
bus_enable!(EXMC => (ahben, exmcen));
| {
let pllmf;
if let Some(hxtal_freq) = self.hxtal {
// Use external clock + divider
pllsel_bit = true;
let calculate_pll = |source: u32, target: u32| -> Option<(u8, u8)> {
const PLL_IN_MIN: u32 = 600_000;
let div_max = cmp::min(16, source / PLL_IN_MIN);
for d in 1..=div_max {
let pllsource = source / d;
let pllm = target / pllsource;
if pllm < 2 || pllm == 15 || pllm > 32{
continue;
}
let actual_freq = pllsource * pllm;
if actual_freq == target {
return Some((d as u8, pllm as u8));
}
}
None
};
let (d, m) = calculate_pll(hxtal_freq, target_sysclk).expect("invalid sysclk value");
predv0_bits = d - 1;
pllmf = m;
} else {
// IRC8M/2 is used as an input clock
pllsel_bit = false;
let pllsource = IRC8M / 2;
let m = target_sysclk / pllsource;
let m = cmp::max(2, cmp::min(m, 32));
assert_ne!(m, 15, "invalid sysclk value");
let actual_sysclk = pllsource * m;
assert_eq!(target_sysclk, actual_sysclk, "invalid sysclk value");
predv0_bits = 0;
pllmf = m as u8;
}
pllmf_bits = match pllmf {
2..=14 => pllmf - 2,
16..=32 => pllmf - 1,
_ => unreachable!("invalid pll multiplier"),
};
} | conditional_block |
types.go | package parse
import (
"encoding/json"
"errors"
"fmt"
"io"
"strconv"
"strings"
"time"
"github.com/danjacques/hangouts-migrate/util"
)
type EventType string
const (
EventTypeRenameConversation EventType = "RENAME_CONVERSATION"
EventTypeAddUser = "ADD_USER"
EventTypeRegularChatMessage = "REGULAR_CHAT_MESSAGE"
)
type EmbedItemType string
const (
EmbedItemPlusPhoto EmbedItemType = "PLUS_PHOTO"
EmbedItemPlaceV2 = "PLACE_V2"
EmbedItemThingV2 = "THING_V2"
EmbedItemThing = "THING"
)
type ParticipantRegistry struct {
allParticipants []*ParticipantData
participantsByGaiaID map[string]*ParticipantData
participantsByChatID map[string]*ParticipantData
}
func (reg *ParticipantRegistry) Register(data *ParticipantData) {
if id := data.ID.GaiaID; id != "" {
if reg.participantsByGaiaID == nil {
reg.participantsByGaiaID = make(map[string]*ParticipantData)
}
reg.participantsByGaiaID[id] = data
}
if id := data.ID.ChatID; id != "" {
if reg.participantsByChatID == nil {
reg.participantsByChatID = make(map[string]*ParticipantData)
}
reg.participantsByChatID[id] = data
}
reg.allParticipants = append(reg.allParticipants, data)
}
func (reg *ParticipantRegistry) AllParticipants() []*ParticipantData {
return reg.allParticipants
}
func (reg *ParticipantRegistry) ForID(pid *ParticipantID) *ParticipantData {
if pid.GaiaID != "" {
if v := reg.participantsByGaiaID[pid.GaiaID]; v != nil {
return v
}
}
if pid.ChatID != "" {
if v := reg.participantsByChatID[pid.ChatID]; v != nil {
return v
}
}
return nil
}
type Conversation struct {
Conversation *ConversationEntry `json:"conversation"`
EventsMessage json.RawMessage `json:"events"`
initialized bool
reg ParticipantRegistry
events []json.RawMessage
decodedEvents []*Event
}
func (ce *Conversation) initialize() error {
if ce.initialized {
return nil
}
if err := json.Unmarshal([]byte(ce.EventsMessage), &ce.events); err != nil {
return err
}
ce.decodedEvents = make([]*Event, len(ce.events))
for _, pd := range ce.Conversation.ConversationInfo.ParticipantData {
ce.reg.Register(pd)
}
ce.initialized = true
return nil
}
func (ce *Conversation) ParticipantRegistry() *ParticipantRegistry { return &ce.reg }
func (ce *Conversation) EventsSize() int {
return len(ce.events)
}
func (ce *Conversation) ResolveAll() error {
for i := 0; i < len(ce.events); i++ {
_, err := ce.Event(i)
if err != nil {
return err
}
}
return nil
}
func (ce *Conversation) Event(i int) (*Event, error) {
if i < 0 || i >= len(ce.events) {
return nil, errors.New("Index out of bounds")
}
if ce.decodedEvents[i] == nil {
var event Event
if err := json.Unmarshal(ce.events[i], &event); err != nil {
return nil, err
}
ce.decodedEvents[i] = &event
}
return ce.decodedEvents[i], nil
}
type ConversationEntry struct {
ConversationInfo *ConversationInfo `json:"conversation"`
}
type SingleID struct {
ID string `json:"id"`
}
type ConversationInfo struct {
ID *SingleID `json:"id"`
Type string `json:"type"`
Name string `json:"name"`
CurrentParticipant []*ParticipantID `json:"current_participant"`
ParticipantData []*ParticipantData `json:"participant_data"`
}
type ParticipantID struct {
GaiaID string `json:"gaia_id"`
ChatID string `json:"chat_id"`
}
func (pid *ParticipantID) String() string |
// Matches returns true if pid's Gaia or Chat ID are both populated and match
// the equivalent values in other.
func (pid *ParticipantID) Matches(other *ParticipantID) bool {
if pid.GaiaID != "" && pid.GaiaID == other.GaiaID {
return true
}
if pid.ChatID != "" && pid.ChatID == other.ChatID {
return true
}
return false
}
type ParticipantData struct {
ID ParticipantID `json:"id"`
FallbackName string `json:"fallback_name"`
ParticipantType string `json:"participant_type"`
DomainID string `json:"domain_id"`
}
func (pd *ParticipantData) DisplayName() string {
return pd.FallbackName
}
type MessageContentSegment struct {
Type string `json:"type"`
Text string `json:"text"`
Formatting struct {
Bold bool `json:"bold"`
Italics bool `json:"italics"`
Strikethrough bool `json:"strikethrough"`
Underline bool `json:"underline"`
} `json:"formatting"`
LinkData *struct {
LinkTarget string `json:"link_target"`
} `json:"link_data"`
}
type Thumbnail struct {
URL string `json:"url"`
ImageURL string `json:"image_url"`
WidthPx int64 `json:"width_px"`
HeightPx int64 `json:"height_px"`
}
type PlusPhoto struct {
Thumbnail *Thumbnail `json:"thumbnail"`
OwnerObfuscatedID string `json:"owner_obfuscated_id"`
AlbumID string `json:"album_id"`
PhotoID string `json:"photo_id"`
URL string `json:"url"`
OriginalContentURL string `json:"original_content_url"`
MediaType string `json:"media_type"`
}
type GeoCoordinatesV2 struct {
Latitude float64 `json:"latitude"`
Longitude float64 `json:"longitude"`
}
type PostalAddressV2 struct {
StreetAddress string `json:"street_address"`
Name string `json:"name"`
AddressCountry string `json:"address_country"`
AddressLocality string `json:"address_locality"`
AddressRegion string `json:"address_region"`
PostalCode string `json:"postal_code"`
}
type ImageObjectV2 struct {
URL string `json:"url"`
}
type PlaceV2 struct {
URL string `json:"url"`
Name string `json:"name"`
Address *struct {
PostalAddressV2 PostalAddressV2 `json:"postal_address_v2"`
} `json:"address"`
Geo *struct {
GeoCoordinatesV2 GeoCoordinatesV2 `json:"geo_coordinates_v2"`
} `json:"geo"`
ID string `json:"id"`
ImageObjectV2 *ImageObjectV2 `json:"image_object_v2"`
}
type RepresentativeImage struct {
Type []EmbedItemType `json:"type"`
ID string `json:"id"`
ImageObjectV2 *ImageObjectV2 `json:"image_object_v2"`
}
type ThingV2 struct {
URL string `json:"url"`
Name string `json:"name"`
RepresentativeImage *RepresentativeImage `json:"representative_image"`
}
type EmbedItem struct {
Type []EmbedItemType `json:"type"`
ID string `json:"id"`
PlusPhoto *PlusPhoto `json:"plus_photo"`
PlaceV2 *PlaceV2 `json:"place_v2"`
ThingV2 *ThingV2 `json:"thing_v2"`
ImageObjectV2 *ImageObjectV2 `json:"image_object_v2"`
}
func (ei *EmbedItem) Key() string {
if pp := ei.PlusPhoto; pp != nil {
return fmt.Sprintf("%s:%s", pp.AlbumID, pp.PhotoID)
}
if p := ei.ThingV2; p != nil {
// Use a hash of the Thing's URL.
return util.HashForKey(p.URL)
}
return ei.ID
}
type MessageContentAttachment struct {
EmbedItem *EmbedItem `json:"embed_item"`
ID string `json:"id"`
}
type MessageContent struct {
Segment []*MessageContentSegment `json:"segment"`
Attachment []*MessageContentAttachment `json:"attachment"`
}
type ChatMessage struct {
MessageContent *MessageContent `json:"message_content"`
}
type ConversationRename struct {
NewName string `json:"new_name"`
OldName string `json:"old_name"`
}
type MembershipChange struct {
Type string `json:"type"`
ParticipantID []*ParticipantID `json:"participant_id"`
LeaveReason string `json:"leave_reason"`
}
type Event struct {
ConversationID *SingleID `json:"conversation_id"`
SenderID *ParticipantID `json:"sender_id"`
Timestamp string `json:"timestamp"`
ConversationRename *ConversationRename `json:"conversation_rename"`
ChatMessage *ChatMessage `json:"chat_message"`
MembershipChange *MembershipChange `json:"membership_change"`
EventID string `json:"event_id"`
EventType EventType `json:"event_type"`
}
func (e *Event) Time() (time.Time, error) {
// Timestamp is in microseconds from epoch.
micros, err := strconv.ParseInt(e.Timestamp, 10, 64)
if err != nil {
return time.Time{}, err
}
return time.Unix(0, micros*1000), nil
}
func (e *Event) Description(reg *ParticipantRegistry) (string, error) {
var parts []string
// Time
switch t, err := e.Time(); err {
case nil:
parts = append(parts, t.In(time.Local).Format(time.RFC3339Nano))
default:
parts = append(parts, fmt.Sprintf("Timestmap Error (%s)", e.Timestamp))
}
if sid := e.SenderID; sid != nil {
var pd *ParticipantData
if reg != nil {
pd = reg.ForID(sid)
}
if pd != nil {
parts = append(parts, fmt.Sprintf("Sender: %s", pd.DisplayName()))
} else {
parts = append(parts, fmt.Sprintf("Sender (UNKNOWN): %s", sid))
}
}
if r := e.ConversationRename; r != nil {
parts = append(parts, fmt.Sprintf("Rename from %q to %q", r.OldName, r.NewName))
}
if r := e.ChatMessage; r != nil {
if mc := r.MessageContent; mc != nil {
for _, s := range mc.Segment {
parts = append(parts, s.Text)
}
}
}
return strings.Join(parts, "\n"), nil
}
func (e *Event) AllWords() []string {
var words []string
if r := e.ChatMessage; r != nil {
if mc := r.MessageContent; mc != nil {
for _, s := range mc.Segment {
words = append(words, strings.Fields(s.Text)...)
}
}
}
return words
}
type Root struct {
Conversations []*Conversation `json:"conversations"`
conversationIDMap map[string]*Conversation
conversationNameMap map[string]string
}
func (r *Root) Decode(reader io.Reader) error {
dec := json.NewDecoder(reader)
if err := dec.Decode(r); err != nil {
return err
}
r.conversationIDMap = make(map[string]*Conversation, len(r.Conversations))
r.conversationNameMap = make(map[string]string, len(r.Conversations))
for _, ce := range r.Conversations {
if c := ce.Conversation; c != nil {
if info := c.ConversationInfo; info != nil {
r.conversationIDMap[info.ID.ID] = ce
r.conversationNameMap[info.Name] = info.ID.ID
}
}
}
return nil
}
func (r *Root) GetConversationMap() map[string]string { return r.conversationNameMap }
func (r *Root) GetConversation(id string) (*Conversation, error) {
c := r.conversationIDMap[id]
if c == nil {
return nil, errors.New("unknown conversation ID")
}
if err := c.initialize(); err != nil {
return nil, err
}
return c, nil
}
| {
return fmt.Sprintf("gaia:%s/chat:%s", pid.GaiaID, pid.ChatID)
} | identifier_body |
types.go | package parse
import (
"encoding/json"
"errors"
"fmt"
"io"
"strconv"
"strings"
"time"
"github.com/danjacques/hangouts-migrate/util"
)
type EventType string
const (
EventTypeRenameConversation EventType = "RENAME_CONVERSATION"
EventTypeAddUser = "ADD_USER"
EventTypeRegularChatMessage = "REGULAR_CHAT_MESSAGE"
)
type EmbedItemType string
const (
EmbedItemPlusPhoto EmbedItemType = "PLUS_PHOTO"
EmbedItemPlaceV2 = "PLACE_V2"
EmbedItemThingV2 = "THING_V2"
EmbedItemThing = "THING"
)
type ParticipantRegistry struct {
allParticipants []*ParticipantData
participantsByGaiaID map[string]*ParticipantData
participantsByChatID map[string]*ParticipantData
}
func (reg *ParticipantRegistry) Register(data *ParticipantData) {
if id := data.ID.GaiaID; id != "" {
if reg.participantsByGaiaID == nil {
reg.participantsByGaiaID = make(map[string]*ParticipantData)
}
reg.participantsByGaiaID[id] = data
}
if id := data.ID.ChatID; id != "" {
if reg.participantsByChatID == nil {
reg.participantsByChatID = make(map[string]*ParticipantData)
}
reg.participantsByChatID[id] = data
}
reg.allParticipants = append(reg.allParticipants, data)
}
func (reg *ParticipantRegistry) AllParticipants() []*ParticipantData {
return reg.allParticipants
}
func (reg *ParticipantRegistry) ForID(pid *ParticipantID) *ParticipantData {
if pid.GaiaID != "" {
if v := reg.participantsByGaiaID[pid.GaiaID]; v != nil {
return v
}
}
if pid.ChatID != "" {
if v := reg.participantsByChatID[pid.ChatID]; v != nil {
return v
}
}
return nil
}
type Conversation struct {
Conversation *ConversationEntry `json:"conversation"`
EventsMessage json.RawMessage `json:"events"`
initialized bool
reg ParticipantRegistry
events []json.RawMessage
decodedEvents []*Event
}
func (ce *Conversation) initialize() error {
if ce.initialized {
return nil
}
if err := json.Unmarshal([]byte(ce.EventsMessage), &ce.events); err != nil {
return err
}
ce.decodedEvents = make([]*Event, len(ce.events))
for _, pd := range ce.Conversation.ConversationInfo.ParticipantData {
ce.reg.Register(pd)
}
ce.initialized = true
return nil
}
func (ce *Conversation) ParticipantRegistry() *ParticipantRegistry { return &ce.reg }
func (ce *Conversation) EventsSize() int {
return len(ce.events)
}
func (ce *Conversation) ResolveAll() error {
for i := 0; i < len(ce.events); i++ {
_, err := ce.Event(i)
if err != nil {
return err
}
}
return nil
}
func (ce *Conversation) Event(i int) (*Event, error) {
if i < 0 || i >= len(ce.events) {
return nil, errors.New("Index out of bounds")
}
if ce.decodedEvents[i] == nil {
var event Event
if err := json.Unmarshal(ce.events[i], &event); err != nil {
return nil, err
}
ce.decodedEvents[i] = &event
}
return ce.decodedEvents[i], nil
}
type ConversationEntry struct {
ConversationInfo *ConversationInfo `json:"conversation"`
}
type SingleID struct {
ID string `json:"id"`
}
type ConversationInfo struct {
ID *SingleID `json:"id"`
Type string `json:"type"`
Name string `json:"name"`
CurrentParticipant []*ParticipantID `json:"current_participant"`
ParticipantData []*ParticipantData `json:"participant_data"`
}
type ParticipantID struct {
GaiaID string `json:"gaia_id"`
ChatID string `json:"chat_id"`
}
func (pid *ParticipantID) String() string {
return fmt.Sprintf("gaia:%s/chat:%s", pid.GaiaID, pid.ChatID)
}
// Matches returns true if pid's Gaia or Chat ID are both populated and match
// the equivalent values in other.
func (pid *ParticipantID) Matches(other *ParticipantID) bool {
if pid.GaiaID != "" && pid.GaiaID == other.GaiaID {
return true
}
if pid.ChatID != "" && pid.ChatID == other.ChatID {
return true
}
return false
}
type ParticipantData struct {
ID ParticipantID `json:"id"`
FallbackName string `json:"fallback_name"`
ParticipantType string `json:"participant_type"`
DomainID string `json:"domain_id"`
}
func (pd *ParticipantData) DisplayName() string {
return pd.FallbackName
}
type MessageContentSegment struct {
Type string `json:"type"`
Text string `json:"text"`
Formatting struct {
Bold bool `json:"bold"`
Italics bool `json:"italics"`
Strikethrough bool `json:"strikethrough"`
Underline bool `json:"underline"`
} `json:"formatting"`
LinkData *struct {
LinkTarget string `json:"link_target"`
} `json:"link_data"`
}
type Thumbnail struct {
URL string `json:"url"`
ImageURL string `json:"image_url"`
WidthPx int64 `json:"width_px"`
HeightPx int64 `json:"height_px"`
}
type PlusPhoto struct {
Thumbnail *Thumbnail `json:"thumbnail"`
OwnerObfuscatedID string `json:"owner_obfuscated_id"`
AlbumID string `json:"album_id"`
PhotoID string `json:"photo_id"`
URL string `json:"url"`
OriginalContentURL string `json:"original_content_url"`
MediaType string `json:"media_type"`
}
type GeoCoordinatesV2 struct {
Latitude float64 `json:"latitude"`
Longitude float64 `json:"longitude"`
}
type PostalAddressV2 struct {
StreetAddress string `json:"street_address"`
Name string `json:"name"`
AddressCountry string `json:"address_country"`
AddressLocality string `json:"address_locality"`
AddressRegion string `json:"address_region"`
PostalCode string `json:"postal_code"`
}
type ImageObjectV2 struct {
URL string `json:"url"`
}
type PlaceV2 struct {
URL string `json:"url"`
Name string `json:"name"`
Address *struct {
PostalAddressV2 PostalAddressV2 `json:"postal_address_v2"`
} `json:"address"`
Geo *struct {
GeoCoordinatesV2 GeoCoordinatesV2 `json:"geo_coordinates_v2"`
} `json:"geo"`
ID string `json:"id"`
ImageObjectV2 *ImageObjectV2 `json:"image_object_v2"`
}
type RepresentativeImage struct {
Type []EmbedItemType `json:"type"`
ID string `json:"id"`
ImageObjectV2 *ImageObjectV2 `json:"image_object_v2"`
}
type ThingV2 struct {
URL string `json:"url"`
Name string `json:"name"`
RepresentativeImage *RepresentativeImage `json:"representative_image"`
}
type EmbedItem struct {
Type []EmbedItemType `json:"type"`
ID string `json:"id"`
PlusPhoto *PlusPhoto `json:"plus_photo"`
PlaceV2 *PlaceV2 `json:"place_v2"`
ThingV2 *ThingV2 `json:"thing_v2"`
ImageObjectV2 *ImageObjectV2 `json:"image_object_v2"`
}
func (ei *EmbedItem) | () string {
if pp := ei.PlusPhoto; pp != nil {
return fmt.Sprintf("%s:%s", pp.AlbumID, pp.PhotoID)
}
if p := ei.ThingV2; p != nil {
// Use a hash of the Thing's URL.
return util.HashForKey(p.URL)
}
return ei.ID
}
type MessageContentAttachment struct {
EmbedItem *EmbedItem `json:"embed_item"`
ID string `json:"id"`
}
type MessageContent struct {
Segment []*MessageContentSegment `json:"segment"`
Attachment []*MessageContentAttachment `json:"attachment"`
}
type ChatMessage struct {
MessageContent *MessageContent `json:"message_content"`
}
type ConversationRename struct {
NewName string `json:"new_name"`
OldName string `json:"old_name"`
}
type MembershipChange struct {
Type string `json:"type"`
ParticipantID []*ParticipantID `json:"participant_id"`
LeaveReason string `json:"leave_reason"`
}
type Event struct {
ConversationID *SingleID `json:"conversation_id"`
SenderID *ParticipantID `json:"sender_id"`
Timestamp string `json:"timestamp"`
ConversationRename *ConversationRename `json:"conversation_rename"`
ChatMessage *ChatMessage `json:"chat_message"`
MembershipChange *MembershipChange `json:"membership_change"`
EventID string `json:"event_id"`
EventType EventType `json:"event_type"`
}
func (e *Event) Time() (time.Time, error) {
// Timestamp is in microseconds from epoch.
micros, err := strconv.ParseInt(e.Timestamp, 10, 64)
if err != nil {
return time.Time{}, err
}
return time.Unix(0, micros*1000), nil
}
func (e *Event) Description(reg *ParticipantRegistry) (string, error) {
var parts []string
// Time
switch t, err := e.Time(); err {
case nil:
parts = append(parts, t.In(time.Local).Format(time.RFC3339Nano))
default:
parts = append(parts, fmt.Sprintf("Timestmap Error (%s)", e.Timestamp))
}
if sid := e.SenderID; sid != nil {
var pd *ParticipantData
if reg != nil {
pd = reg.ForID(sid)
}
if pd != nil {
parts = append(parts, fmt.Sprintf("Sender: %s", pd.DisplayName()))
} else {
parts = append(parts, fmt.Sprintf("Sender (UNKNOWN): %s", sid))
}
}
if r := e.ConversationRename; r != nil {
parts = append(parts, fmt.Sprintf("Rename from %q to %q", r.OldName, r.NewName))
}
if r := e.ChatMessage; r != nil {
if mc := r.MessageContent; mc != nil {
for _, s := range mc.Segment {
parts = append(parts, s.Text)
}
}
}
return strings.Join(parts, "\n"), nil
}
func (e *Event) AllWords() []string {
var words []string
if r := e.ChatMessage; r != nil {
if mc := r.MessageContent; mc != nil {
for _, s := range mc.Segment {
words = append(words, strings.Fields(s.Text)...)
}
}
}
return words
}
type Root struct {
Conversations []*Conversation `json:"conversations"`
conversationIDMap map[string]*Conversation
conversationNameMap map[string]string
}
func (r *Root) Decode(reader io.Reader) error {
dec := json.NewDecoder(reader)
if err := dec.Decode(r); err != nil {
return err
}
r.conversationIDMap = make(map[string]*Conversation, len(r.Conversations))
r.conversationNameMap = make(map[string]string, len(r.Conversations))
for _, ce := range r.Conversations {
if c := ce.Conversation; c != nil {
if info := c.ConversationInfo; info != nil {
r.conversationIDMap[info.ID.ID] = ce
r.conversationNameMap[info.Name] = info.ID.ID
}
}
}
return nil
}
func (r *Root) GetConversationMap() map[string]string { return r.conversationNameMap }
func (r *Root) GetConversation(id string) (*Conversation, error) {
c := r.conversationIDMap[id]
if c == nil {
return nil, errors.New("unknown conversation ID")
}
if err := c.initialize(); err != nil {
return nil, err
}
return c, nil
}
| Key | identifier_name |
types.go | package parse
import (
"encoding/json"
"errors"
"fmt"
"io"
"strconv"
"strings"
"time"
"github.com/danjacques/hangouts-migrate/util"
)
type EventType string
const (
EventTypeRenameConversation EventType = "RENAME_CONVERSATION"
EventTypeAddUser = "ADD_USER"
EventTypeRegularChatMessage = "REGULAR_CHAT_MESSAGE"
)
type EmbedItemType string
const (
EmbedItemPlusPhoto EmbedItemType = "PLUS_PHOTO"
EmbedItemPlaceV2 = "PLACE_V2"
EmbedItemThingV2 = "THING_V2"
EmbedItemThing = "THING"
)
type ParticipantRegistry struct {
allParticipants []*ParticipantData
participantsByGaiaID map[string]*ParticipantData
participantsByChatID map[string]*ParticipantData
}
func (reg *ParticipantRegistry) Register(data *ParticipantData) {
if id := data.ID.GaiaID; id != "" {
if reg.participantsByGaiaID == nil {
reg.participantsByGaiaID = make(map[string]*ParticipantData)
}
reg.participantsByGaiaID[id] = data
}
if id := data.ID.ChatID; id != "" {
if reg.participantsByChatID == nil {
reg.participantsByChatID = make(map[string]*ParticipantData)
}
reg.participantsByChatID[id] = data
}
reg.allParticipants = append(reg.allParticipants, data)
}
func (reg *ParticipantRegistry) AllParticipants() []*ParticipantData {
return reg.allParticipants
}
func (reg *ParticipantRegistry) ForID(pid *ParticipantID) *ParticipantData {
if pid.GaiaID != "" {
if v := reg.participantsByGaiaID[pid.GaiaID]; v != nil {
return v
}
}
if pid.ChatID != "" {
if v := reg.participantsByChatID[pid.ChatID]; v != nil {
return v
}
}
return nil
}
type Conversation struct {
Conversation *ConversationEntry `json:"conversation"`
EventsMessage json.RawMessage `json:"events"`
initialized bool
reg ParticipantRegistry
events []json.RawMessage
decodedEvents []*Event
}
func (ce *Conversation) initialize() error {
if ce.initialized {
return nil
}
if err := json.Unmarshal([]byte(ce.EventsMessage), &ce.events); err != nil {
return err
}
ce.decodedEvents = make([]*Event, len(ce.events))
for _, pd := range ce.Conversation.ConversationInfo.ParticipantData {
ce.reg.Register(pd)
}
ce.initialized = true
return nil
}
func (ce *Conversation) ParticipantRegistry() *ParticipantRegistry { return &ce.reg }
func (ce *Conversation) EventsSize() int {
return len(ce.events)
}
func (ce *Conversation) ResolveAll() error {
for i := 0; i < len(ce.events); i++ {
_, err := ce.Event(i)
if err != nil {
return err
}
}
return nil
}
func (ce *Conversation) Event(i int) (*Event, error) {
if i < 0 || i >= len(ce.events) {
return nil, errors.New("Index out of bounds")
}
if ce.decodedEvents[i] == nil |
return ce.decodedEvents[i], nil
}
type ConversationEntry struct {
ConversationInfo *ConversationInfo `json:"conversation"`
}
type SingleID struct {
ID string `json:"id"`
}
type ConversationInfo struct {
ID *SingleID `json:"id"`
Type string `json:"type"`
Name string `json:"name"`
CurrentParticipant []*ParticipantID `json:"current_participant"`
ParticipantData []*ParticipantData `json:"participant_data"`
}
type ParticipantID struct {
GaiaID string `json:"gaia_id"`
ChatID string `json:"chat_id"`
}
func (pid *ParticipantID) String() string {
return fmt.Sprintf("gaia:%s/chat:%s", pid.GaiaID, pid.ChatID)
}
// Matches returns true if pid's Gaia or Chat ID are both populated and match
// the equivalent values in other.
func (pid *ParticipantID) Matches(other *ParticipantID) bool {
if pid.GaiaID != "" && pid.GaiaID == other.GaiaID {
return true
}
if pid.ChatID != "" && pid.ChatID == other.ChatID {
return true
}
return false
}
type ParticipantData struct {
ID ParticipantID `json:"id"`
FallbackName string `json:"fallback_name"`
ParticipantType string `json:"participant_type"`
DomainID string `json:"domain_id"`
}
func (pd *ParticipantData) DisplayName() string {
return pd.FallbackName
}
type MessageContentSegment struct {
Type string `json:"type"`
Text string `json:"text"`
Formatting struct {
Bold bool `json:"bold"`
Italics bool `json:"italics"`
Strikethrough bool `json:"strikethrough"`
Underline bool `json:"underline"`
} `json:"formatting"`
LinkData *struct {
LinkTarget string `json:"link_target"`
} `json:"link_data"`
}
type Thumbnail struct {
URL string `json:"url"`
ImageURL string `json:"image_url"`
WidthPx int64 `json:"width_px"`
HeightPx int64 `json:"height_px"`
}
type PlusPhoto struct {
Thumbnail *Thumbnail `json:"thumbnail"`
OwnerObfuscatedID string `json:"owner_obfuscated_id"`
AlbumID string `json:"album_id"`
PhotoID string `json:"photo_id"`
URL string `json:"url"`
OriginalContentURL string `json:"original_content_url"`
MediaType string `json:"media_type"`
}
type GeoCoordinatesV2 struct {
Latitude float64 `json:"latitude"`
Longitude float64 `json:"longitude"`
}
type PostalAddressV2 struct {
StreetAddress string `json:"street_address"`
Name string `json:"name"`
AddressCountry string `json:"address_country"`
AddressLocality string `json:"address_locality"`
AddressRegion string `json:"address_region"`
PostalCode string `json:"postal_code"`
}
type ImageObjectV2 struct {
URL string `json:"url"`
}
type PlaceV2 struct {
URL string `json:"url"`
Name string `json:"name"`
Address *struct {
PostalAddressV2 PostalAddressV2 `json:"postal_address_v2"`
} `json:"address"`
Geo *struct {
GeoCoordinatesV2 GeoCoordinatesV2 `json:"geo_coordinates_v2"`
} `json:"geo"`
ID string `json:"id"`
ImageObjectV2 *ImageObjectV2 `json:"image_object_v2"`
}
type RepresentativeImage struct {
Type []EmbedItemType `json:"type"`
ID string `json:"id"`
ImageObjectV2 *ImageObjectV2 `json:"image_object_v2"`
}
type ThingV2 struct {
URL string `json:"url"`
Name string `json:"name"`
RepresentativeImage *RepresentativeImage `json:"representative_image"`
}
type EmbedItem struct {
Type []EmbedItemType `json:"type"`
ID string `json:"id"`
PlusPhoto *PlusPhoto `json:"plus_photo"`
PlaceV2 *PlaceV2 `json:"place_v2"`
ThingV2 *ThingV2 `json:"thing_v2"`
ImageObjectV2 *ImageObjectV2 `json:"image_object_v2"`
}
func (ei *EmbedItem) Key() string {
if pp := ei.PlusPhoto; pp != nil {
return fmt.Sprintf("%s:%s", pp.AlbumID, pp.PhotoID)
}
if p := ei.ThingV2; p != nil {
// Use a hash of the Thing's URL.
return util.HashForKey(p.URL)
}
return ei.ID
}
type MessageContentAttachment struct {
EmbedItem *EmbedItem `json:"embed_item"`
ID string `json:"id"`
}
type MessageContent struct {
Segment []*MessageContentSegment `json:"segment"`
Attachment []*MessageContentAttachment `json:"attachment"`
}
type ChatMessage struct {
MessageContent *MessageContent `json:"message_content"`
}
type ConversationRename struct {
NewName string `json:"new_name"`
OldName string `json:"old_name"`
}
type MembershipChange struct {
Type string `json:"type"`
ParticipantID []*ParticipantID `json:"participant_id"`
LeaveReason string `json:"leave_reason"`
}
type Event struct {
ConversationID *SingleID `json:"conversation_id"`
SenderID *ParticipantID `json:"sender_id"`
Timestamp string `json:"timestamp"`
ConversationRename *ConversationRename `json:"conversation_rename"`
ChatMessage *ChatMessage `json:"chat_message"`
MembershipChange *MembershipChange `json:"membership_change"`
EventID string `json:"event_id"`
EventType EventType `json:"event_type"`
}
func (e *Event) Time() (time.Time, error) {
// Timestamp is in microseconds from epoch.
micros, err := strconv.ParseInt(e.Timestamp, 10, 64)
if err != nil {
return time.Time{}, err
}
return time.Unix(0, micros*1000), nil
}
func (e *Event) Description(reg *ParticipantRegistry) (string, error) {
var parts []string
// Time
switch t, err := e.Time(); err {
case nil:
parts = append(parts, t.In(time.Local).Format(time.RFC3339Nano))
default:
parts = append(parts, fmt.Sprintf("Timestmap Error (%s)", e.Timestamp))
}
if sid := e.SenderID; sid != nil {
var pd *ParticipantData
if reg != nil {
pd = reg.ForID(sid)
}
if pd != nil {
parts = append(parts, fmt.Sprintf("Sender: %s", pd.DisplayName()))
} else {
parts = append(parts, fmt.Sprintf("Sender (UNKNOWN): %s", sid))
}
}
if r := e.ConversationRename; r != nil {
parts = append(parts, fmt.Sprintf("Rename from %q to %q", r.OldName, r.NewName))
}
if r := e.ChatMessage; r != nil {
if mc := r.MessageContent; mc != nil {
for _, s := range mc.Segment {
parts = append(parts, s.Text)
}
}
}
return strings.Join(parts, "\n"), nil
}
func (e *Event) AllWords() []string {
var words []string
if r := e.ChatMessage; r != nil {
if mc := r.MessageContent; mc != nil {
for _, s := range mc.Segment {
words = append(words, strings.Fields(s.Text)...)
}
}
}
return words
}
type Root struct {
Conversations []*Conversation `json:"conversations"`
conversationIDMap map[string]*Conversation
conversationNameMap map[string]string
}
func (r *Root) Decode(reader io.Reader) error {
dec := json.NewDecoder(reader)
if err := dec.Decode(r); err != nil {
return err
}
r.conversationIDMap = make(map[string]*Conversation, len(r.Conversations))
r.conversationNameMap = make(map[string]string, len(r.Conversations))
for _, ce := range r.Conversations {
if c := ce.Conversation; c != nil {
if info := c.ConversationInfo; info != nil {
r.conversationIDMap[info.ID.ID] = ce
r.conversationNameMap[info.Name] = info.ID.ID
}
}
}
return nil
}
func (r *Root) GetConversationMap() map[string]string { return r.conversationNameMap }
func (r *Root) GetConversation(id string) (*Conversation, error) {
c := r.conversationIDMap[id]
if c == nil {
return nil, errors.New("unknown conversation ID")
}
if err := c.initialize(); err != nil {
return nil, err
}
return c, nil
}
| {
var event Event
if err := json.Unmarshal(ce.events[i], &event); err != nil {
return nil, err
}
ce.decodedEvents[i] = &event
} | conditional_block |
types.go | package parse
import (
"encoding/json"
"errors"
"fmt"
"io"
"strconv"
"strings"
"time"
"github.com/danjacques/hangouts-migrate/util"
)
type EventType string
const (
EventTypeRenameConversation EventType = "RENAME_CONVERSATION"
EventTypeAddUser = "ADD_USER"
EventTypeRegularChatMessage = "REGULAR_CHAT_MESSAGE"
)
type EmbedItemType string
const (
EmbedItemPlusPhoto EmbedItemType = "PLUS_PHOTO"
EmbedItemPlaceV2 = "PLACE_V2"
EmbedItemThingV2 = "THING_V2"
EmbedItemThing = "THING"
)
type ParticipantRegistry struct {
allParticipants []*ParticipantData
participantsByGaiaID map[string]*ParticipantData
participantsByChatID map[string]*ParticipantData
}
func (reg *ParticipantRegistry) Register(data *ParticipantData) {
if id := data.ID.GaiaID; id != "" {
if reg.participantsByGaiaID == nil {
reg.participantsByGaiaID = make(map[string]*ParticipantData)
}
reg.participantsByGaiaID[id] = data
}
if id := data.ID.ChatID; id != "" {
if reg.participantsByChatID == nil {
reg.participantsByChatID = make(map[string]*ParticipantData)
}
reg.participantsByChatID[id] = data
}
reg.allParticipants = append(reg.allParticipants, data)
}
func (reg *ParticipantRegistry) AllParticipants() []*ParticipantData {
return reg.allParticipants
}
func (reg *ParticipantRegistry) ForID(pid *ParticipantID) *ParticipantData {
if pid.GaiaID != "" {
if v := reg.participantsByGaiaID[pid.GaiaID]; v != nil {
return v
}
}
if pid.ChatID != "" {
if v := reg.participantsByChatID[pid.ChatID]; v != nil {
return v
}
}
return nil
}
type Conversation struct {
Conversation *ConversationEntry `json:"conversation"`
EventsMessage json.RawMessage `json:"events"`
initialized bool
reg ParticipantRegistry
events []json.RawMessage
decodedEvents []*Event
}
func (ce *Conversation) initialize() error {
if ce.initialized {
return nil
}
if err := json.Unmarshal([]byte(ce.EventsMessage), &ce.events); err != nil {
return err
}
ce.decodedEvents = make([]*Event, len(ce.events))
for _, pd := range ce.Conversation.ConversationInfo.ParticipantData {
ce.reg.Register(pd)
}
ce.initialized = true
return nil
}
func (ce *Conversation) ParticipantRegistry() *ParticipantRegistry { return &ce.reg }
func (ce *Conversation) EventsSize() int {
return len(ce.events)
}
func (ce *Conversation) ResolveAll() error {
for i := 0; i < len(ce.events); i++ {
_, err := ce.Event(i)
if err != nil {
return err
}
}
return nil
}
func (ce *Conversation) Event(i int) (*Event, error) {
if i < 0 || i >= len(ce.events) {
return nil, errors.New("Index out of bounds")
}
if ce.decodedEvents[i] == nil {
var event Event
if err := json.Unmarshal(ce.events[i], &event); err != nil {
return nil, err
}
ce.decodedEvents[i] = &event
}
return ce.decodedEvents[i], nil
}
type ConversationEntry struct {
ConversationInfo *ConversationInfo `json:"conversation"`
}
type SingleID struct {
ID string `json:"id"`
}
type ConversationInfo struct {
ID *SingleID `json:"id"`
Type string `json:"type"`
Name string `json:"name"`
CurrentParticipant []*ParticipantID `json:"current_participant"`
ParticipantData []*ParticipantData `json:"participant_data"`
}
type ParticipantID struct {
GaiaID string `json:"gaia_id"`
ChatID string `json:"chat_id"`
}
func (pid *ParticipantID) String() string {
return fmt.Sprintf("gaia:%s/chat:%s", pid.GaiaID, pid.ChatID)
}
// Matches returns true if pid's Gaia or Chat ID are both populated and match
// the equivalent values in other.
func (pid *ParticipantID) Matches(other *ParticipantID) bool {
if pid.GaiaID != "" && pid.GaiaID == other.GaiaID {
return true
}
if pid.ChatID != "" && pid.ChatID == other.ChatID {
return true
}
return false
}
type ParticipantData struct {
ID ParticipantID `json:"id"`
FallbackName string `json:"fallback_name"`
ParticipantType string `json:"participant_type"`
DomainID string `json:"domain_id"`
}
func (pd *ParticipantData) DisplayName() string {
return pd.FallbackName
}
type MessageContentSegment struct {
Type string `json:"type"`
Text string `json:"text"`
Formatting struct {
Bold bool `json:"bold"`
Italics bool `json:"italics"`
Strikethrough bool `json:"strikethrough"`
Underline bool `json:"underline"`
} `json:"formatting"`
LinkData *struct {
LinkTarget string `json:"link_target"`
} `json:"link_data"`
}
type Thumbnail struct {
URL string `json:"url"`
ImageURL string `json:"image_url"`
WidthPx int64 `json:"width_px"`
HeightPx int64 `json:"height_px"`
}
type PlusPhoto struct {
Thumbnail *Thumbnail `json:"thumbnail"`
OwnerObfuscatedID string `json:"owner_obfuscated_id"`
AlbumID string `json:"album_id"`
PhotoID string `json:"photo_id"`
URL string `json:"url"`
OriginalContentURL string `json:"original_content_url"`
MediaType string `json:"media_type"`
}
type GeoCoordinatesV2 struct {
Latitude float64 `json:"latitude"`
Longitude float64 `json:"longitude"`
}
type PostalAddressV2 struct {
StreetAddress string `json:"street_address"`
Name string `json:"name"`
AddressCountry string `json:"address_country"`
AddressLocality string `json:"address_locality"`
AddressRegion string `json:"address_region"`
PostalCode string `json:"postal_code"`
}
type ImageObjectV2 struct {
URL string `json:"url"`
}
type PlaceV2 struct {
URL string `json:"url"`
Name string `json:"name"`
Address *struct {
PostalAddressV2 PostalAddressV2 `json:"postal_address_v2"`
} `json:"address"`
Geo *struct {
GeoCoordinatesV2 GeoCoordinatesV2 `json:"geo_coordinates_v2"`
} `json:"geo"`
ID string `json:"id"`
ImageObjectV2 *ImageObjectV2 `json:"image_object_v2"`
}
type RepresentativeImage struct {
Type []EmbedItemType `json:"type"`
ID string `json:"id"`
ImageObjectV2 *ImageObjectV2 `json:"image_object_v2"`
}
type ThingV2 struct {
URL string `json:"url"`
Name string `json:"name"`
RepresentativeImage *RepresentativeImage `json:"representative_image"`
}
type EmbedItem struct {
Type []EmbedItemType `json:"type"`
ID string `json:"id"`
PlusPhoto *PlusPhoto `json:"plus_photo"`
PlaceV2 *PlaceV2 `json:"place_v2"`
ThingV2 *ThingV2 `json:"thing_v2"`
ImageObjectV2 *ImageObjectV2 `json:"image_object_v2"`
}
func (ei *EmbedItem) Key() string {
if pp := ei.PlusPhoto; pp != nil {
return fmt.Sprintf("%s:%s", pp.AlbumID, pp.PhotoID)
}
if p := ei.ThingV2; p != nil {
// Use a hash of the Thing's URL.
return util.HashForKey(p.URL)
}
return ei.ID
}
type MessageContentAttachment struct {
EmbedItem *EmbedItem `json:"embed_item"` |
type MessageContent struct {
Segment []*MessageContentSegment `json:"segment"`
Attachment []*MessageContentAttachment `json:"attachment"`
}
type ChatMessage struct {
MessageContent *MessageContent `json:"message_content"`
}
type ConversationRename struct {
NewName string `json:"new_name"`
OldName string `json:"old_name"`
}
type MembershipChange struct {
Type string `json:"type"`
ParticipantID []*ParticipantID `json:"participant_id"`
LeaveReason string `json:"leave_reason"`
}
type Event struct {
ConversationID *SingleID `json:"conversation_id"`
SenderID *ParticipantID `json:"sender_id"`
Timestamp string `json:"timestamp"`
ConversationRename *ConversationRename `json:"conversation_rename"`
ChatMessage *ChatMessage `json:"chat_message"`
MembershipChange *MembershipChange `json:"membership_change"`
EventID string `json:"event_id"`
EventType EventType `json:"event_type"`
}
func (e *Event) Time() (time.Time, error) {
// Timestamp is in microseconds from epoch.
micros, err := strconv.ParseInt(e.Timestamp, 10, 64)
if err != nil {
return time.Time{}, err
}
return time.Unix(0, micros*1000), nil
}
func (e *Event) Description(reg *ParticipantRegistry) (string, error) {
var parts []string
// Time
switch t, err := e.Time(); err {
case nil:
parts = append(parts, t.In(time.Local).Format(time.RFC3339Nano))
default:
parts = append(parts, fmt.Sprintf("Timestmap Error (%s)", e.Timestamp))
}
if sid := e.SenderID; sid != nil {
var pd *ParticipantData
if reg != nil {
pd = reg.ForID(sid)
}
if pd != nil {
parts = append(parts, fmt.Sprintf("Sender: %s", pd.DisplayName()))
} else {
parts = append(parts, fmt.Sprintf("Sender (UNKNOWN): %s", sid))
}
}
if r := e.ConversationRename; r != nil {
parts = append(parts, fmt.Sprintf("Rename from %q to %q", r.OldName, r.NewName))
}
if r := e.ChatMessage; r != nil {
if mc := r.MessageContent; mc != nil {
for _, s := range mc.Segment {
parts = append(parts, s.Text)
}
}
}
return strings.Join(parts, "\n"), nil
}
func (e *Event) AllWords() []string {
var words []string
if r := e.ChatMessage; r != nil {
if mc := r.MessageContent; mc != nil {
for _, s := range mc.Segment {
words = append(words, strings.Fields(s.Text)...)
}
}
}
return words
}
type Root struct {
Conversations []*Conversation `json:"conversations"`
conversationIDMap map[string]*Conversation
conversationNameMap map[string]string
}
func (r *Root) Decode(reader io.Reader) error {
dec := json.NewDecoder(reader)
if err := dec.Decode(r); err != nil {
return err
}
r.conversationIDMap = make(map[string]*Conversation, len(r.Conversations))
r.conversationNameMap = make(map[string]string, len(r.Conversations))
for _, ce := range r.Conversations {
if c := ce.Conversation; c != nil {
if info := c.ConversationInfo; info != nil {
r.conversationIDMap[info.ID.ID] = ce
r.conversationNameMap[info.Name] = info.ID.ID
}
}
}
return nil
}
func (r *Root) GetConversationMap() map[string]string { return r.conversationNameMap }
func (r *Root) GetConversation(id string) (*Conversation, error) {
c := r.conversationIDMap[id]
if c == nil {
return nil, errors.New("unknown conversation ID")
}
if err := c.initialize(); err != nil {
return nil, err
}
return c, nil
} | ID string `json:"id"`
} | random_line_split |
lib.rs | #![doc(html_root_url = "https://docs.rs/broadword/0.2.2")]
//! Broadword operations treat a `u64` as a parallel vector of eight `u8`s or `i8`s.
//! This module also provides a population count function [`count_ones`](fn.count_ones.html) and a
//! select function [`select1`](fn.select1.html).
//!
//! The algorithms here are from [Sebastiano Vigna, “Broadword Implementation of
//! Rank/Select Queries,”](http://sux.di.unimi.it/paper.pdf) but with several changes from
//! that work:
//!
//! - Vigna uses a 17-digit (68-bit) constant “0x0F0F0F0F0F0F0F0F0.” I believe
//! the correct constant is these 64 bits: 0x0F0F_0F0F_0F0F_0F0F.
//!
//! - Arithmetic operations are assumed to wrap on overflow. If this
//! were not the case, Algorithm 1 ([count_ones](fn.count_ones.html))
//! would overflow its last line, when multiplying by L₈.
//!
//! - Line 2 of Algorithm 2 should read
//!
//! ```
//! # let mut s: u64 = 0;
//! s = (s & 0x3333_3333_3333_3333) + ((s >> 2) & 0x3333_3333_3333_3333);
//! ```
//!
//! In the paper, the shifted `s` appears as `x`.
#[cfg(test)]
#[macro_use]
extern crate quickcheck;
/// Has the lowest bit of every byte set: `0x0101_0101_0101_0101`.
pub const L8: u64 = 0x0101_0101_0101_0101;
/// Has the highest bit of every byte set: `0x8080_8080_8080_8080`.
pub const H8: u64 = 0x8080_8080_8080_8080;
/// Counts the number of ones in a `u64`.
///
/// Branchless. Uses the broadword algorithm from Vigna.
///
/// # Examples
///
/// ```
/// use broadword::count_ones;
///
/// assert_eq!( count_ones(0x0000_0000_0000_0000), 0 );
/// assert_eq!( count_ones(0x0000_0001_0000_0000), 1 );
/// assert_eq!( count_ones(0x0000_0001_0400_0000), 2 );
/// assert_eq!( count_ones(0x0000_0001_0600_0000), 3 );
/// assert_eq!( count_ones(0x3333_0001_0600_0000), 11 );
/// ```
#[inline]
pub fn count_ones(mut x: u64) -> usize {
x = x - ((x & 0xAAAA_AAAA_AAAA_AAAA) >> 1);
x = (x & 0x3333_3333_3333_3333) + ((x >> 2) & 0x3333_3333_3333_3333);
x = (x + (x >> 4)) & 0x0F0F_0F0F_0F0F_0F0F;
(x.wrapping_mul(L8) >> 56) as usize
}
/// Finds the index of the `r`th one bit in `x`.
///
/// Uses the broadword algorithm from Vigna.
/// Note that bits are numbered from least-significant to most.
///
/// # Examples
///
/// ```
/// use broadword::select1;
///
/// assert_eq!( select1(0, 0x0000_0000_0000_0000), None );
/// assert_eq!( select1(0, 0x0000_0000_0000_0001), Some(0) );
/// assert_eq!( select1(0, 0x0000_0000_0000_0002), Some(1) );
/// assert_eq!( select1(0, 0x0000_0000_0000_0004), Some(2) );
/// assert_eq!( select1(2, 0x0000_0000_0000_0004), None );
/// assert_eq!( select1(2, 0x0000_1010_1010_0114), Some(8) );
/// assert_eq!( select1(3, 0x0000_1010_1010_0114), Some(20) );
/// assert_eq!( select1(4, 0x0000_1010_1010_0114), Some(28) );
/// ```
#[inline]
pub fn select1(r: usize, x: u64) -> Option<usize> {
let result = select1_raw(r, x);
if result == 72 {None} else {Some(result)}
}
/// Finds the index of the `r`th one bit in `x`, returning 72 when not found.
///
/// Branchless. Uses the broadword algorithm from Vigna.
/// Note that bits are numbered from least-significant to most.
#[inline]
#[allow(clippy::many_single_char_names)]
pub fn select1_raw(r: usize, x: u64) -> usize {
let r = r as u64;
let mut s = x - ((x & 0xAAAA_AAAA_AAAA_AAAA) >> 1);
s = (s & 0x3333_3333_3333_3333) + ((s >> 2) & 0x3333_3333_3333_3333);
s = ((s + (s >> 4)) & 0x0F0F_0F0F_0F0F_0F0F).wrapping_mul(L8);
let b = (i_le8(s, r.wrapping_mul(L8)) >> 7).wrapping_mul(L8)>> 53 & !7;
let l = r - ((s << 8).wrapping_shr(b as u32) & 0xFF);
s = (u_nz8((x.wrapping_shr(b as u32) & 0xFF)
.wrapping_mul(L8) & 0x8040_2010_0804_0201) >> 7)
.wrapping_mul(L8);
(b + ((i_le8(s, l.wrapping_mul(L8)) >> 7).wrapping_mul(L8) >> 56)) as usize
}
/// Parallel ≤, treating a `u64` as a vector of 8 `u8`s.
///
/// Branchless.
///
/// # Examples
///
/// ```
/// use broadword::u_le8;
///
/// assert_eq!( u_le8(0x03_03_04_17_92_A0_A0_A1,
/// 0x04_03_03_92_17_A0_A0_A0),
/// 0x80_80_00_80_00_80_80_00 );
/// ```
#[inline]
pub fn u_le8(x: u64, y: u64) -> u64 {
((((y | H8) - (x & !H8)) | (x ^ y)) ^ (x & !y)) & H8
}
/// Parallel ≤, treating a `u64` as a vector of 8 `i8`s.
///
/// Branchless.
///
/// # Examples
///
/// ```
/// use broadword::i_le8;
///
/// assert_eq!( i_le8(0x03_03_04_00_FF_A0_A0_A1,
/// 0x04_03_03_FF_00_A0_A0_A0),
/// 0x80_80_00_00_80_80_80_00 );
/// ```
#[inline]
pub fn i_le8(x: u64, y: u64) -> u64 {
(((y | H8) - (x & !H8)) ^ x ^ y) & H8
}
/// Parallel >0, treating a `u64` as a vector of 8 `u8`s.
///
/// Branchless.
///
/// # Examples
///
/// ```
/// use broadword::u_nz8;
///
/// assert_eq!( u_nz8(0x00_01_A9_40_20_17_00_06),
/// 0x00_80_80_80_80_80_00_80 );
#[inline]
pub fn u_nz8(x: u64) -> u64 {
(((x | H8) - L8) | x) & H8
}
#[cfg(test)]
#[allow(clippy::many_single_char_names)]
mod test {
use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
use quickcheck::TestResult;
use super::*;
#[test]
fn count_ones_0() {
assert_eq!(0, count_ones(0));
}
#[test]
fn count_ones_1() {
assert_eq!(1, count_ones(1));
}
#[test]
fn count_ones_0000_0000_0000_0010() {
assert_eq!(1, count_ones(0x0000_0000_0000_0010));
}
#[test]
fn count_ones_1000_0000_0000_0000() {
assert_eq!(1, count_ones(0x1000_0000_0000_0000));
}
#[test]
fn count_ones_ffff_ffff_ffff_ffff() {
assert_eq!(64, count_ones(0xFFFF_FFFF_FFFF_FFFF));
}
fn count_ones_prop_base(word: u64) -> bool {
count_ones(word) == word.count_ones() as usize
}
quickcheck! {
fn count_ones_prop(word: u64) -> bool {
count_ones_prop_base(word)
}
fn count_ones_prop_hash(word: u64) -> bool {
count_ones_prop_base(hash(&word))
}
}
#[test]
fn select1_0_0() {
assert_eq!(None, select1(0, 0));
}
#[test]
fn select1_0_1() {
assert_eq!(Some(0), select1(0, 1));
}
#[test]
fn select1_0_2() {
assert_eq!(Some(1), select1(0, 2));
}
#[test]
fn select1_0_3() {
assert_eq!(Some(0), select1(0, 3));
}
#[test]
fn select1_1_2() {
assert_eq!(None, select1(1, 2));
}
#[test]
fn select1_1_3() {
assert_eq!(Some(1), select1(1, 3));
}
#[test]
fn select1_3_13() {
assert_eq!(None, select1(3, 0b1101));
}
fn select1_slow(r: usize, x: u64) -> Option<usize> {
let mut count = 0;
for index in 0 .. 64 {
if (x >> index) & 1 == 1 {
count += 1;
}
if count == r + 1 {
return Some(index);
}
}
None
}
fn select1_prop_base(r: u8, x: u64) -> TestResult {
if r > 64 { return TestResult::discard(); }
TestResult::from_bool(
select1(r as usize, x) == select1_slow(r as usize, x))
}
quickcheck! {
fn select1_prop(r: u8, x: u64) -> TestResult {
select1_prop_base(r, x)
}
fn select1_prop_hash(r: u8, x: u64) -> TestResult {
select1_prop_base(r, hash(&x))
}
}
fn get_bits(x: u64, i: u8, n: u8) -> u64 {
let mask = if n == 64 {!0} else {(1 << n) - 1};
(x >> i) & mask | }
quickcheck! {
fn u_nz8_prop(argument: (u64, u64, u64, u64)) -> bool {
let n = hash(&argument);
let r = u_nz8(n);
for i in 0..8 {
let ni = get_bits(n, 8 * i, 8);
let ri = get_bits(r, 8 * i, 8);
if (ni != 0) != (ri == 0x80) {
return false;
}
}
true
}
}
#[test]
fn u_nz8_works() {
assert_eq!(b(0, 0, 0, 0, 0, 0, 0, 0),
u_nz8(u(0, 0, 0, 0, 0, 0, 0, 0)));
assert_eq!(b( 1, 1, 0, 1, 0, 1, 1, 1),
u_nz8(u(45, 12, 0, 129, 0, 3, 80, 1)));
assert_eq!(b(1, 1, 1, 1, 1, 1, 1, 1),
u_nz8(u(1, 2, 3, 4, 5, 6, 7, 8)));
assert_eq!(b( 1, 1, 1, 1, 0, 1, 1, 1),
u_nz8(0xFF_FF_FF_FF_00_FF_FF_FF));
}
fn u_le8_prop_base(n: u64, m: u64) -> bool {
let r = u_le8(n, m);
for i in 0..8 {
let ni = get_bits(n, 8 * i, 8);
let mi = get_bits(m, 8 * i, 8);
let ri = get_bits(r, 8 * i, 8);
if (ni <= mi) != (ri == 0x80) {
return false;
}
}
true
}
quickcheck! {
fn u_le8_prop(n: u64, m: u64) -> bool {
u_le8_prop_base(n, m)
}
fn u_le8_prop_hashed(n: (u64, u64, u64, u64),
m: (u64, u64, u64, u64)) -> bool {
let n = hash(&n);
let m = hash(&m);
u_le8_prop_base(n, m)
}
}
#[test]
fn le8_works() {
assert_eq!(b( 1, 1, 1, 1, 0, 0, 0, 0),
i_le8(i(0, 0, 0, 0, 0, 0, 0, 0),
i( 3, 2, 1, 0, -1, -2, -3, -4)));
assert_eq!(b( 0, 0, 0, 1, 1, 1, 1, 1),
i_le8(i(3, 2, 1, 0, -1, -2, -3, -4),
i( 0, 0, 0, 0, 0, 0, 0, 0)));
assert_eq!(b( 0, 0, 1, 1, 1, 1, 1, 1),
i_le8(i(19, 18, 17, 16, 15, 0, -1, -2),
i(17, 17, 17, 17, 17, 17, 17, 17)));
assert_eq!(b( 1, 1, 0, 0, 0, 0, 0, 0),
i_le8(i(-9, -8, -7, 0, 1, 2, 3, 4),
i(-8, -8, -8, -8, -8, -8, -8, -8)));
assert_eq!(b( 0, 1, 0, 1, 1, 0, 1, 0),
i_le8(i(8, 3, 46, 0, 0, 0, -6, -1),
i( 7, 3, 24, 1, 0, -9, 5, -2)));
}
#[test]
fn u_le8_works() {
assert_eq!(b( 1, 1, 1, 1, 1, 1, 1, 1),
u_le8(u( 0, 0, 0, 0, 0, 0, 0, 0),
u( 7, 6, 5, 4, 3, 2, 1, 0)));
assert_eq!(b( 1, 0, 0, 0, 0, 0, 0, 0),
u_le8(u( 0, 1, 2, 3, 4, 5, 6, 7),
u( 0, 0, 0, 0, 0, 0, 0, 0)));
assert_eq!(b( 0, 0, 1, 1, 1, 1, 1, 1),
u_le8(u(19, 18, 17, 16, 15, 14, 13, 12),
u(17, 17, 17, 17, 17, 17, 17, 17)));
assert_eq!(b( 0, 1, 0, 1, 1, 0, 1, 0),
u_le8(u( 8, 3, 46, 0, 0, 9, 3, 2),
u( 7, 3, 24, 1, 0, 0, 5, 1)));
}
/// Helpers for creating u64s.
fn b(a: u64, b: u64, c: u64, d: u64,
e: u64, f: u64, g: u64, h: u64) -> u64 {
(a << 63) | (b << 55) | (c << 47) | (d << 39) |
(e << 31) | (f << 23) | (g << 15) | (h << 7)
}
fn u(a: u8, b: u8, c: u8, d: u8,
e: u8, f: u8, g: u8, h: u8) -> u64 {
((a as u64) << 56)
| ((b as u64) << 48)
| ((c as u64) << 40)
| ((d as u64) << 32)
| ((e as u64) << 24)
| ((f as u64) << 16)
| ((g as u64) << 8)
| (h as u64)
}
fn i(a: i8, b: i8, c: i8, d: i8,
e: i8, f: i8, g: i8, h: i8) -> u64 {
u(a as u8, b as u8, c as u8, d as u8,
e as u8, f as u8, g as u8, h as u8)
}
fn hash<T: Hash>(t: &T) -> u64 {
let mut s = DefaultHasher::new();
t.hash(&mut s);
s.finish()
}
} | random_line_split | |
lib.rs | #![doc(html_root_url = "https://docs.rs/broadword/0.2.2")]
//! Broadword operations treat a `u64` as a parallel vector of eight `u8`s or `i8`s.
//! This module also provides a population count function [`count_ones`](fn.count_ones.html) and a
//! select function [`select1`](fn.select1.html).
//!
//! The algorithms here are from [Sebastiano Vigna, “Broadword Implementation of
//! Rank/Select Queries,”](http://sux.di.unimi.it/paper.pdf) but with several changes from
//! that work:
//!
//! - Vigna uses a 17-digit (68-bit) constant “0x0F0F0F0F0F0F0F0F0.” I believe
//! the correct constant is these 64 bits: 0x0F0F_0F0F_0F0F_0F0F.
//!
//! - Arithmetic operations are assumed to wrap on overflow. If this
//! were not the case, Algorithm 1 ([count_ones](fn.count_ones.html))
//! would overflow its last line, when multiplying by L₈.
//!
//! - Line 2 of Algorithm 2 should read
//!
//! ```
//! # let mut s: u64 = 0;
//! s = (s & 0x3333_3333_3333_3333) + ((s >> 2) & 0x3333_3333_3333_3333);
//! ```
//!
//! In the paper, the shifted `s` appears as `x`.
#[cfg(test)]
#[macro_use]
extern crate quickcheck;
/// Has the lowest bit of every byte set: `0x0101_0101_0101_0101`.
pub const L8: u64 = 0x0101_0101_0101_0101;
/// Has the highest bit of every byte set: `0x8080_8080_8080_8080`.
pub const H8: u64 = 0x8080_8080_8080_8080;
/// Counts the number of ones in a `u64`.
///
/// Branchless. Uses the broadword algorithm from Vigna.
///
/// # Examples
///
/// ```
/// use broadword::count_ones;
///
/// assert_eq!( count_ones(0x0000_0000_0000_0000), 0 );
/// assert_eq!( count_ones(0x0000_0001_0000_0000), 1 );
/// assert_eq!( count_ones(0x0000_0001_0400_0000), 2 );
/// assert_eq!( count_ones(0x0000_0001_0600_0000), 3 );
/// assert_eq!( count_ones(0x3333_0001_0600_0000), 11 );
/// ```
#[inline]
pub fn count_ones(mut x: u64) -> usize {
x = x - ((x & 0xAAAA_AAAA_AAAA_AAAA) >> 1);
x = (x & 0x3333_3333_3333_3333) + ((x >> 2) & 0x3333_3333_3333_3333);
x = (x + (x >> 4)) & 0x0F0F_0F0F_0F0F_0F0F;
(x.wrapping_mul(L8) >> 56) as usize
}
/// Finds the index of the `r`th one bit in `x`.
///
/// Uses the broadword algorithm from Vigna.
/// Note that bits are numbered from least-significant to most.
///
/// # Examples
///
/// ```
/// use broadword::select1;
///
/// assert_eq!( select1(0, 0x0000_0000_0000_0000), None );
/// assert_eq!( select1(0, 0x0000_0000_0000_0001), Some(0) );
/// assert_eq!( select1(0, 0x0000_0000_0000_0002), Some(1) );
/// assert_eq!( select1(0, 0x0000_0000_0000_0004), Some(2) );
/// assert_eq!( select1(2, 0x0000_0000_0000_0004), None );
/// assert_eq!( select1(2, 0x0000_1010_1010_0114), Some(8) );
/// assert_eq!( select1(3, 0x0000_1010_1010_0114), Some(20) );
/// assert_eq!( select1(4, 0x0000_1010_1010_0114), Some(28) );
/// ```
#[inline]
pub fn select1(r: usize, x: u64) -> Option<usize> {
let result = select1_raw(r, x);
if result == 72 {None} else {Some(result)}
}
/// Finds the index of the `r`th one bit in `x`, returning 72 when not found.
///
/// Branchless. Uses the broadword algorithm from Vigna.
/// Note that bits are numbered from least-significant to most.
#[inline]
#[allow(clippy::many_single_char_names)]
pub fn select1_raw(r: usize, x: u64) -> usize {
let r = r as u64;
let mut s = x - ((x & 0xAAAA_AAAA_AAAA_AAAA) >> 1);
s = (s & 0x3333_3333_3333_3333) + ((s >> 2) & 0x3333_3333_3333_3333);
s = ((s + (s >> 4)) & 0x0F0F_0F0F_0F0F_0F0F).wrapping_mul(L8);
let b = (i_le8(s, r.wrapping_mul(L8)) >> 7).wrapping_mul(L8)>> 53 & !7;
let l = r - ((s << 8).wrapping_shr(b as u32) & 0xFF);
s = (u_nz8((x.wrapping_shr(b as u32) & 0xFF)
.wrapping_mul(L8) & 0x8040_2010_0804_0201) >> 7)
.wrapping_mul(L8);
(b + ((i_le8(s, l.wrapping_mul(L8)) >> 7).wrapping_mul(L8) >> 56)) as usize
}
/// Parallel ≤, treating a `u64` as a vector of 8 `u8`s.
///
/// Branchless.
///
/// # Examples
///
/// ```
/// use broadword::u_le8;
///
/// assert_eq!( u_le8(0x03_03_04_17_92_A0_A0_A1,
/// 0x04_03_03_92_17_A0_A0_A0),
/// 0x80_80_00_80_00_80_80_00 );
/// ```
#[inline]
pub fn u_le8(x: u64, y: u64) -> u64 {
((((y | H8) - (x & !H8)) | (x ^ y)) ^ (x & !y)) & H8
}
/// Parallel ≤, treating a `u64` as a vector of 8 `i8`s.
///
/// Branchless.
///
/// # Examples
///
/// ```
/// use broadword::i_le8;
///
/// assert_eq!( i_le8(0x03_03_04_00_FF_A0_A0_A1,
/// 0x04_03_03_FF_00_A0_A0_A0),
/// 0x80_80_00_00_80_80_80_00 );
/// ```
#[inline]
pub fn i_le8(x: u64, y: u64) -> u64 {
(((y | H8) - (x & !H8)) ^ x ^ y) & H8
}
/// Parallel >0, treating a `u64` as a vector of 8 `u8`s.
///
/// Branchless.
///
/// # Examples
///
/// ```
/// use broadword::u_nz8;
///
/// assert_eq!( u_nz8(0x00_01_A9_40_20_17_00_06),
/// 0x00_80_80_80_80_80_00_80 );
#[inline]
pub fn u_nz8(x: u64) -> u64 {
(((x | H8) - L8) | x) & H8
}
#[cfg(test)]
#[allow(clippy::many_single_char_names)]
mod test {
use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
use quickcheck::TestResult;
use super::*;
#[test]
fn count_ones_0() {
assert_eq!(0, count_ones(0));
}
#[test]
fn count_ones_1() {
assert_eq!(1, count_ones(1));
}
#[test]
fn count_ones_0000_0000_0000_0010() {
assert_eq!(1, count_ones(0x0000_0000_0000_0010));
}
#[test]
fn count_ones_1000_0000_0000_0000() {
assert_eq!(1, count_ones(0x1000_0000_0000_0000));
}
#[test]
fn count_ones_ffff_ffff_ffff_ffff() {
assert_eq!(64, count_ones(0xFFFF_FFFF_FFFF_FFFF));
}
fn count_ones_prop_base(word: u64) -> bool {
count_ones(word) == word.count_ones() as usize
}
quickcheck! {
fn count_ones_prop(word: u64) -> bool {
count_ones_prop_base(word)
}
fn count_ones_prop_hash(word: u64) -> bool {
count_ones_prop_base(hash(&word))
}
}
#[test]
fn select1_0_0() {
assert_eq!(None, select1(0, 0));
}
#[test]
fn select1_0_1() {
assert_eq!(Some(0), select1(0, 1));
}
#[test]
fn select1_0_2() {
assert_eq!(Some(1), select1(0, 2));
}
#[test]
fn select1_0_3() {
assert_eq!(Some(0), select1(0, 3));
}
#[test]
fn select1_1_2() {
assert_eq!(None, select1(1, 2));
}
#[test]
fn select1_1_3() {
assert_eq!(Some(1), select1(1, 3));
}
#[test]
fn select1_3_13() {
assert_eq!(None, select1(3, 0b1101));
}
fn select1_slow(r: usize, x: u64) -> Option<usize> {
let mut count = 0;
for index in 0 .. 64 {
if (x >> index) & 1 == 1 {
count += 1;
}
if count == r + 1 {
return Some(index);
}
}
None
}
fn select1_prop_base(r: u8, x: u64) -> TestResult {
if r > 64 { return TestResult::discard(); }
TestResult::from_bool(
select1(r as usize, x) == select1_slow(r as usize, x))
}
quickcheck! {
fn select1_prop(r: u8, x: u64) -> TestResult {
select1_prop_base(r, x)
}
fn select1_prop_hash(r: u8, x: u64) -> TestResult {
select1_prop_base(r, hash(&x))
}
}
fn get_bits(x: u64, i: u8, n: u8) -> u64 {
let mask = if n == 64 {!0} else {(1 << n) - 1};
(x >> i) & mask
}
quickcheck! {
fn u_nz8_prop(argument: (u64, u64, u64, u64)) -> bool {
let n = hash(&argument);
let r = u_nz8(n);
for i in 0..8 {
let ni = get_bits(n, 8 * i, 8);
let ri = get_bits(r, 8 * i, 8);
if (ni != 0) != (ri == 0x80) {
return false;
}
}
true
}
}
#[test]
fn u_nz8_works() {
assert_eq!(b(0, 0, 0, 0, 0, 0, 0, 0),
u_nz8(u(0, 0, 0, 0, 0, 0, 0, 0)));
assert_eq!(b( 1, 1, 0, 1, 0, 1, 1, 1),
u_nz8(u(45, 12, 0, 129, 0, 3, 80, 1)));
assert_eq!(b(1, 1, 1, 1, 1, 1, 1, 1),
u_nz8(u(1, 2, 3, 4, 5, 6, 7, 8)));
assert_eq!(b( 1, 1, 1, 1, 0, 1, 1, 1),
u_nz8(0xFF_FF_FF_FF_00_FF_FF_FF));
}
fn u_le8_prop_base(n: u64, m: u64) -> bool {
let r = u_le8(n, m);
for i in 0..8 {
let ni = get_bits(n, 8 * i, 8);
let mi = get_bits(m, 8 * i, 8);
let ri = get_bits(r, 8 * i, 8);
if (ni <= mi) != (ri == 0x80) {
return false;
}
}
true
}
quickcheck! {
fn u_le8_prop(n: u64, m: u64) -> bool {
u_le8_prop_base(n, m)
}
fn u_le8_prop_hashed(n: (u64, u64, u64, u64),
m: (u64, u64, u64, u64)) -> bool {
let n = hash(&n);
let m = hash(&m);
u_le8_prop_base(n, m)
}
}
#[test]
fn le8_works() {
assert_eq!(b( 1, 1, 1, 1, 0, 0, 0, 0),
i_le8(i(0, 0, 0, 0, 0, 0, 0, 0),
i( 3, 2, 1, 0, -1, -2, -3, -4)));
assert_eq!(b( 0, 0, 0, 1, 1, 1, 1, 1),
i_le8(i(3, 2, 1, 0, -1, -2, -3, -4),
i( 0, 0, 0, 0, 0, 0, 0, 0)));
assert_eq!(b( 0, 0, 1, 1, 1, 1, 1, 1),
i_le8(i(19, 18, 17, 16, 15, 0, -1, -2),
i(17, 17, 17, 17, 17, 17, 17, 17)));
assert_eq!(b( 1, 1, 0, 0, 0, 0, 0, 0),
i_le8(i(-9, -8, -7, 0, 1, 2, 3, 4),
i(-8, -8, -8, -8, -8, -8, -8, -8)));
assert_eq!(b( 0, 1, 0, 1, 1, 0, 1, 0),
i_le8(i(8, 3, 46, 0, 0, 0, -6, -1),
i( 7, 3, 24, 1, 0, -9, 5, -2)));
}
#[test]
fn u_le8_works() {
assert_eq!(b( 1, 1, 1, 1, 1, 1, 1, 1),
u_le8(u( 0, 0, 0, 0, 0, 0, 0, 0),
u( 7, 6, 5, 4, 3, 2, 1, 0)));
assert_eq!(b( 1, 0, 0, 0, 0, 0, 0, 0),
u_le8(u( 0, 1, 2, 3, 4, 5, 6, 7),
u( 0, 0, 0, 0, 0, 0, 0, 0)));
assert_eq!(b( 0, 0, 1, 1, 1, 1, 1, 1),
u_le8(u(19, 18, 17, 16, 15, 14, 13, 12),
u(17, 17, 17, 17, 17, 17, 17, 17)));
assert_eq!(b( 0, 1, 0, 1, 1, 0, 1, 0),
u_le8(u( 8, 3, 46, 0, 0, 9, 3, 2),
u( 7, 3, 24, 1, 0, 0, 5, 1)));
}
/// Helpers for creating u64s.
fn b(a: u64, b: u64, c: u64, d: u64,
e: u64, f: u64, g: u64, h: u64) -> u64 {
(a << 63) | (b << 55) | (c << 47) | (d << 39) |
(e << 31) | (f << 23) | (g << 15) | (h << 7)
}
fn u(a: u8, b: u8, c: u8, d: u8,
e: u8, f: u8, g: u8, h: u8) -> u64 {
((a | i8, b: i8, c: i8, d: i8,
e: i8, f: i8, g: i8, h: i8) -> u64 {
u(a as u8, b as u8, c as u8, d as u8,
e as u8, f as u8, g as u8, h as u8)
}
fn hash<T: Hash>(t: &T) -> u64 {
let mut s = DefaultHasher::new();
t.hash(&mut s);
s.finish()
}
}
| as u64) << 56)
| ((b as u64) << 48)
| ((c as u64) << 40)
| ((d as u64) << 32)
| ((e as u64) << 24)
| ((f as u64) << 16)
| ((g as u64) << 8)
| (h as u64)
}
fn i(a: | identifier_body |
lib.rs | #![doc(html_root_url = "https://docs.rs/broadword/0.2.2")]
//! Broadword operations treat a `u64` as a parallel vector of eight `u8`s or `i8`s.
//! This module also provides a population count function [`count_ones`](fn.count_ones.html) and a
//! select function [`select1`](fn.select1.html).
//!
//! The algorithms here are from [Sebastiano Vigna, “Broadword Implementation of
//! Rank/Select Queries,”](http://sux.di.unimi.it/paper.pdf) but with several changes from
//! that work:
//!
//! - Vigna uses a 17-digit (68-bit) constant “0x0F0F0F0F0F0F0F0F0.” I believe
//! the correct constant is these 64 bits: 0x0F0F_0F0F_0F0F_0F0F.
//!
//! - Arithmetic operations are assumed to wrap on overflow. If this
//! were not the case, Algorithm 1 ([count_ones](fn.count_ones.html))
//! would overflow its last line, when multiplying by L₈.
//!
//! - Line 2 of Algorithm 2 should read
//!
//! ```
//! # let mut s: u64 = 0;
//! s = (s & 0x3333_3333_3333_3333) + ((s >> 2) & 0x3333_3333_3333_3333);
//! ```
//!
//! In the paper, the shifted `s` appears as `x`.
#[cfg(test)]
#[macro_use]
extern crate quickcheck;
/// Has the lowest bit of every byte set: `0x0101_0101_0101_0101`.
pub const L8: u64 = 0x0101_0101_0101_0101;
/// Has the highest bit of every byte set: `0x8080_8080_8080_8080`.
pub const H8: u64 = 0x8080_8080_8080_8080;
/// Counts the number of ones in a `u64`.
///
/// Branchless. Uses the broadword algorithm from Vigna.
///
/// # Examples
///
/// ```
/// use broadword::count_ones;
///
/// assert_eq!( count_ones(0x0000_0000_0000_0000), 0 );
/// assert_eq!( count_ones(0x0000_0001_0000_0000), 1 );
/// assert_eq!( count_ones(0x0000_0001_0400_0000), 2 );
/// assert_eq!( count_ones(0x0000_0001_0600_0000), 3 );
/// assert_eq!( count_ones(0x3333_0001_0600_0000), 11 );
/// ```
#[inline]
pub fn count_ones(mut x: u64) -> usize {
x = x - ((x & 0xAAAA_AAAA_AAAA_AAAA) >> 1);
x = (x & 0x3333_3333_3333_3333) + ((x >> 2) & 0x3333_3333_3333_3333);
x = (x + (x >> 4)) & 0x0F0F_0F0F_0F0F_0F0F;
(x.wrapping_mul(L8) >> 56) as usize
}
/// Finds the index of the `r`th one bit in `x`.
///
/// Uses the broadword algorithm from Vigna.
/// Note that bits are numbered from least-significant to most.
///
/// # Examples
///
/// ```
/// use broadword::select1;
///
/// assert_eq!( select1(0, 0x0000_0000_0000_0000), None );
/// assert_eq!( select1(0, 0x0000_0000_0000_0001), Some(0) );
/// assert_eq!( select1(0, 0x0000_0000_0000_0002), Some(1) );
/// assert_eq!( select1(0, 0x0000_0000_0000_0004), Some(2) );
/// assert_eq!( select1(2, 0x0000_0000_0000_0004), None );
/// assert_eq!( select1(2, 0x0000_1010_1010_0114), Some(8) );
/// assert_eq!( select1(3, 0x0000_1010_1010_0114), Some(20) );
/// assert_eq!( select1(4, 0x0000_1010_1010_0114), Some(28) );
/// ```
#[inline]
pub fn select1(r: usize, x: u64) -> Option<usize> {
let result = select1_raw(r, x);
if result == 72 {None} else {Some(result)}
}
/// Finds the index of the `r`th one bit in `x`, returning 72 when not found.
///
/// Branchless. Uses the broadword algorithm from Vigna.
/// Note that bits are numbered from least-significant to most.
#[inline]
#[allow(clippy::many_single_char_names)]
pub fn select1_raw(r: usize, x: u64) -> usize {
let r = r as u64;
let mut s = x - ((x & 0xAAAA_AAAA_AAAA_AAAA) >> 1);
s = (s & 0x3333_3333_3333_3333) + ((s >> 2) & 0x3333_3333_3333_3333);
s = ((s + (s >> 4)) & 0x0F0F_0F0F_0F0F_0F0F).wrapping_mul(L8);
let b = (i_le8(s, r.wrapping_mul(L8)) >> 7).wrapping_mul(L8)>> 53 & !7;
let l = r - ((s << 8).wrapping_shr(b as u32) & 0xFF);
s = (u_nz8((x.wrapping_shr(b as u32) & 0xFF)
.wrapping_mul(L8) & 0x8040_2010_0804_0201) >> 7)
.wrapping_mul(L8);
(b + ((i_le8(s, l.wrapping_mul(L8)) >> 7).wrapping_mul(L8) >> 56)) as usize
}
/// Parallel ≤, treating a `u64` as a vector of 8 `u8`s.
///
/// Branchless.
///
/// # Examples
///
/// ```
/// use broadword::u_le8;
///
/// assert_eq!( u_le8(0x03_03_04_17_92_A0_A0_A1,
/// 0x04_03_03_92_17_A0_A0_A0),
/// 0x80_80_00_80_00_80_80_00 );
/// ```
#[inline]
pub fn u_le8(x: u64, y: u64) -> u64 {
((((y | H8) - (x & !H8)) | (x ^ y)) ^ (x & !y)) & H8
}
/// Parallel ≤, treating a `u64` as a vector of 8 `i8`s.
///
/// Branchless.
///
/// # Examples
///
/// ```
/// use broadword::i_le8;
///
/// assert_eq!( i_le8(0x03_03_04_00_FF_A0_A0_A1,
/// 0x04_03_03_FF_00_A0_A0_A0),
/// 0x80_80_00_00_80_80_80_00 );
/// ```
#[inline]
pub fn i_le8(x: u64, y: u64) -> u64 {
(((y | H8) - (x & !H8)) ^ x ^ y) & H8
}
/// Parallel >0, treating a `u64` as a vector of 8 `u8`s.
///
/// Branchless.
///
/// # Examples
///
/// ```
/// use broadword::u_nz8;
///
/// assert_eq!( u_nz8(0x00_01_A9_40_20_17_00_06),
/// 0x00_80_80_80_80_80_00_80 );
#[inline]
pub fn u_nz8(x: u64) -> u64 {
(((x | H8) - L8) | x) & H8
}
#[cfg(test)]
#[allow(clippy::many_single_char_names)]
mod test {
use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
use quickcheck::TestResult;
use super::*;
#[test]
fn count_ones_0() {
assert_eq!(0, count_ones(0));
}
#[test]
fn count_ones_1() {
assert_eq!(1, count_ones(1));
}
#[test]
fn count_ones_0000_0000_0000_0010() {
assert_eq!(1, count_ones(0x0000_0000_0000_0010));
}
#[test]
fn count_ones_1000_0000_0000_0000() {
assert_eq!(1, count_ones(0x1000_0000_0000_0000));
}
#[test]
fn count_ones_fff | ssert_eq!(64, count_ones(0xFFFF_FFFF_FFFF_FFFF));
}
fn count_ones_prop_base(word: u64) -> bool {
count_ones(word) == word.count_ones() as usize
}
quickcheck! {
fn count_ones_prop(word: u64) -> bool {
count_ones_prop_base(word)
}
fn count_ones_prop_hash(word: u64) -> bool {
count_ones_prop_base(hash(&word))
}
}
#[test]
fn select1_0_0() {
assert_eq!(None, select1(0, 0));
}
#[test]
fn select1_0_1() {
assert_eq!(Some(0), select1(0, 1));
}
#[test]
fn select1_0_2() {
assert_eq!(Some(1), select1(0, 2));
}
#[test]
fn select1_0_3() {
assert_eq!(Some(0), select1(0, 3));
}
#[test]
fn select1_1_2() {
assert_eq!(None, select1(1, 2));
}
#[test]
fn select1_1_3() {
assert_eq!(Some(1), select1(1, 3));
}
#[test]
fn select1_3_13() {
assert_eq!(None, select1(3, 0b1101));
}
fn select1_slow(r: usize, x: u64) -> Option<usize> {
let mut count = 0;
for index in 0 .. 64 {
if (x >> index) & 1 == 1 {
count += 1;
}
if count == r + 1 {
return Some(index);
}
}
None
}
fn select1_prop_base(r: u8, x: u64) -> TestResult {
if r > 64 { return TestResult::discard(); }
TestResult::from_bool(
select1(r as usize, x) == select1_slow(r as usize, x))
}
quickcheck! {
fn select1_prop(r: u8, x: u64) -> TestResult {
select1_prop_base(r, x)
}
fn select1_prop_hash(r: u8, x: u64) -> TestResult {
select1_prop_base(r, hash(&x))
}
}
fn get_bits(x: u64, i: u8, n: u8) -> u64 {
let mask = if n == 64 {!0} else {(1 << n) - 1};
(x >> i) & mask
}
quickcheck! {
fn u_nz8_prop(argument: (u64, u64, u64, u64)) -> bool {
let n = hash(&argument);
let r = u_nz8(n);
for i in 0..8 {
let ni = get_bits(n, 8 * i, 8);
let ri = get_bits(r, 8 * i, 8);
if (ni != 0) != (ri == 0x80) {
return false;
}
}
true
}
}
#[test]
fn u_nz8_works() {
assert_eq!(b(0, 0, 0, 0, 0, 0, 0, 0),
u_nz8(u(0, 0, 0, 0, 0, 0, 0, 0)));
assert_eq!(b( 1, 1, 0, 1, 0, 1, 1, 1),
u_nz8(u(45, 12, 0, 129, 0, 3, 80, 1)));
assert_eq!(b(1, 1, 1, 1, 1, 1, 1, 1),
u_nz8(u(1, 2, 3, 4, 5, 6, 7, 8)));
assert_eq!(b( 1, 1, 1, 1, 0, 1, 1, 1),
u_nz8(0xFF_FF_FF_FF_00_FF_FF_FF));
}
fn u_le8_prop_base(n: u64, m: u64) -> bool {
let r = u_le8(n, m);
for i in 0..8 {
let ni = get_bits(n, 8 * i, 8);
let mi = get_bits(m, 8 * i, 8);
let ri = get_bits(r, 8 * i, 8);
if (ni <= mi) != (ri == 0x80) {
return false;
}
}
true
}
quickcheck! {
fn u_le8_prop(n: u64, m: u64) -> bool {
u_le8_prop_base(n, m)
}
fn u_le8_prop_hashed(n: (u64, u64, u64, u64),
m: (u64, u64, u64, u64)) -> bool {
let n = hash(&n);
let m = hash(&m);
u_le8_prop_base(n, m)
}
}
#[test]
fn le8_works() {
assert_eq!(b( 1, 1, 1, 1, 0, 0, 0, 0),
i_le8(i(0, 0, 0, 0, 0, 0, 0, 0),
i( 3, 2, 1, 0, -1, -2, -3, -4)));
assert_eq!(b( 0, 0, 0, 1, 1, 1, 1, 1),
i_le8(i(3, 2, 1, 0, -1, -2, -3, -4),
i( 0, 0, 0, 0, 0, 0, 0, 0)));
assert_eq!(b( 0, 0, 1, 1, 1, 1, 1, 1),
i_le8(i(19, 18, 17, 16, 15, 0, -1, -2),
i(17, 17, 17, 17, 17, 17, 17, 17)));
assert_eq!(b( 1, 1, 0, 0, 0, 0, 0, 0),
i_le8(i(-9, -8, -7, 0, 1, 2, 3, 4),
i(-8, -8, -8, -8, -8, -8, -8, -8)));
assert_eq!(b( 0, 1, 0, 1, 1, 0, 1, 0),
i_le8(i(8, 3, 46, 0, 0, 0, -6, -1),
i( 7, 3, 24, 1, 0, -9, 5, -2)));
}
#[test]
fn u_le8_works() {
assert_eq!(b( 1, 1, 1, 1, 1, 1, 1, 1),
u_le8(u( 0, 0, 0, 0, 0, 0, 0, 0),
u( 7, 6, 5, 4, 3, 2, 1, 0)));
assert_eq!(b( 1, 0, 0, 0, 0, 0, 0, 0),
u_le8(u( 0, 1, 2, 3, 4, 5, 6, 7),
u( 0, 0, 0, 0, 0, 0, 0, 0)));
assert_eq!(b( 0, 0, 1, 1, 1, 1, 1, 1),
u_le8(u(19, 18, 17, 16, 15, 14, 13, 12),
u(17, 17, 17, 17, 17, 17, 17, 17)));
assert_eq!(b( 0, 1, 0, 1, 1, 0, 1, 0),
u_le8(u( 8, 3, 46, 0, 0, 9, 3, 2),
u( 7, 3, 24, 1, 0, 0, 5, 1)));
}
/// Helpers for creating u64s.
fn b(a: u64, b: u64, c: u64, d: u64,
e: u64, f: u64, g: u64, h: u64) -> u64 {
(a << 63) | (b << 55) | (c << 47) | (d << 39) |
(e << 31) | (f << 23) | (g << 15) | (h << 7)
}
fn u(a: u8, b: u8, c: u8, d: u8,
e: u8, f: u8, g: u8, h: u8) -> u64 {
((a as u64) << 56)
| ((b as u64) << 48)
| ((c as u64) << 40)
| ((d as u64) << 32)
| ((e as u64) << 24)
| ((f as u64) << 16)
| ((g as u64) << 8)
| (h as u64)
}
fn i(a: i8, b: i8, c: i8, d: i8,
e: i8, f: i8, g: i8, h: i8) -> u64 {
u(a as u8, b as u8, c as u8, d as u8,
e as u8, f as u8, g as u8, h as u8)
}
fn hash<T: Hash>(t: &T) -> u64 {
let mut s = DefaultHasher::new();
t.hash(&mut s);
s.finish()
}
}
| f_ffff_ffff_ffff() {
a | identifier_name |
lib.rs | #![doc(html_root_url = "https://docs.rs/broadword/0.2.2")]
//! Broadword operations treat a `u64` as a parallel vector of eight `u8`s or `i8`s.
//! This module also provides a population count function [`count_ones`](fn.count_ones.html) and a
//! select function [`select1`](fn.select1.html).
//!
//! The algorithms here are from [Sebastiano Vigna, “Broadword Implementation of
//! Rank/Select Queries,”](http://sux.di.unimi.it/paper.pdf) but with several changes from
//! that work:
//!
//! - Vigna uses a 17-digit (68-bit) constant “0x0F0F0F0F0F0F0F0F0.” I believe
//! the correct constant is these 64 bits: 0x0F0F_0F0F_0F0F_0F0F.
//!
//! - Arithmetic operations are assumed to wrap on overflow. If this
//! were not the case, Algorithm 1 ([count_ones](fn.count_ones.html))
//! would overflow its last line, when multiplying by L₈.
//!
//! - Line 2 of Algorithm 2 should read
//!
//! ```
//! # let mut s: u64 = 0;
//! s = (s & 0x3333_3333_3333_3333) + ((s >> 2) & 0x3333_3333_3333_3333);
//! ```
//!
//! In the paper, the shifted `s` appears as `x`.
#[cfg(test)]
#[macro_use]
extern crate quickcheck;
/// Has the lowest bit of every byte set: `0x0101_0101_0101_0101`.
pub const L8: u64 = 0x0101_0101_0101_0101;
/// Has the highest bit of every byte set: `0x8080_8080_8080_8080`.
pub const H8: u64 = 0x8080_8080_8080_8080;
/// Counts the number of ones in a `u64`.
///
/// Branchless. Uses the broadword algorithm from Vigna.
///
/// # Examples
///
/// ```
/// use broadword::count_ones;
///
/// assert_eq!( count_ones(0x0000_0000_0000_0000), 0 );
/// assert_eq!( count_ones(0x0000_0001_0000_0000), 1 );
/// assert_eq!( count_ones(0x0000_0001_0400_0000), 2 );
/// assert_eq!( count_ones(0x0000_0001_0600_0000), 3 );
/// assert_eq!( count_ones(0x3333_0001_0600_0000), 11 );
/// ```
#[inline]
pub fn count_ones(mut x: u64) -> usize {
x = x - ((x & 0xAAAA_AAAA_AAAA_AAAA) >> 1);
x = (x & 0x3333_3333_3333_3333) + ((x >> 2) & 0x3333_3333_3333_3333);
x = (x + (x >> 4)) & 0x0F0F_0F0F_0F0F_0F0F;
(x.wrapping_mul(L8) >> 56) as usize
}
/// Finds the index of the `r`th one bit in `x`.
///
/// Uses the broadword algorithm from Vigna.
/// Note that bits are numbered from least-significant to most.
///
/// # Examples
///
/// ```
/// use broadword::select1;
///
/// assert_eq!( select1(0, 0x0000_0000_0000_0000), None );
/// assert_eq!( select1(0, 0x0000_0000_0000_0001), Some(0) );
/// assert_eq!( select1(0, 0x0000_0000_0000_0002), Some(1) );
/// assert_eq!( select1(0, 0x0000_0000_0000_0004), Some(2) );
/// assert_eq!( select1(2, 0x0000_0000_0000_0004), None );
/// assert_eq!( select1(2, 0x0000_1010_1010_0114), Some(8) );
/// assert_eq!( select1(3, 0x0000_1010_1010_0114), Some(20) );
/// assert_eq!( select1(4, 0x0000_1010_1010_0114), Some(28) );
/// ```
#[inline]
pub fn select1(r: usize, x: u64) -> Option<usize> {
let result = select1_raw(r, x);
if result == 72 {None} else {Some(resu | nds the index of the `r`th one bit in `x`, returning 72 when not found.
///
/// Branchless. Uses the broadword algorithm from Vigna.
/// Note that bits are numbered from least-significant to most.
#[inline]
#[allow(clippy::many_single_char_names)]
pub fn select1_raw(r: usize, x: u64) -> usize {
let r = r as u64;
let mut s = x - ((x & 0xAAAA_AAAA_AAAA_AAAA) >> 1);
s = (s & 0x3333_3333_3333_3333) + ((s >> 2) & 0x3333_3333_3333_3333);
s = ((s + (s >> 4)) & 0x0F0F_0F0F_0F0F_0F0F).wrapping_mul(L8);
let b = (i_le8(s, r.wrapping_mul(L8)) >> 7).wrapping_mul(L8)>> 53 & !7;
let l = r - ((s << 8).wrapping_shr(b as u32) & 0xFF);
s = (u_nz8((x.wrapping_shr(b as u32) & 0xFF)
.wrapping_mul(L8) & 0x8040_2010_0804_0201) >> 7)
.wrapping_mul(L8);
(b + ((i_le8(s, l.wrapping_mul(L8)) >> 7).wrapping_mul(L8) >> 56)) as usize
}
/// Parallel ≤, treating a `u64` as a vector of 8 `u8`s.
///
/// Branchless.
///
/// # Examples
///
/// ```
/// use broadword::u_le8;
///
/// assert_eq!( u_le8(0x03_03_04_17_92_A0_A0_A1,
/// 0x04_03_03_92_17_A0_A0_A0),
/// 0x80_80_00_80_00_80_80_00 );
/// ```
#[inline]
pub fn u_le8(x: u64, y: u64) -> u64 {
((((y | H8) - (x & !H8)) | (x ^ y)) ^ (x & !y)) & H8
}
/// Parallel ≤, treating a `u64` as a vector of 8 `i8`s.
///
/// Branchless.
///
/// # Examples
///
/// ```
/// use broadword::i_le8;
///
/// assert_eq!( i_le8(0x03_03_04_00_FF_A0_A0_A1,
/// 0x04_03_03_FF_00_A0_A0_A0),
/// 0x80_80_00_00_80_80_80_00 );
/// ```
#[inline]
pub fn i_le8(x: u64, y: u64) -> u64 {
(((y | H8) - (x & !H8)) ^ x ^ y) & H8
}
/// Parallel >0, treating a `u64` as a vector of 8 `u8`s.
///
/// Branchless.
///
/// # Examples
///
/// ```
/// use broadword::u_nz8;
///
/// assert_eq!( u_nz8(0x00_01_A9_40_20_17_00_06),
/// 0x00_80_80_80_80_80_00_80 );
#[inline]
pub fn u_nz8(x: u64) -> u64 {
(((x | H8) - L8) | x) & H8
}
#[cfg(test)]
#[allow(clippy::many_single_char_names)]
mod test {
use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
use quickcheck::TestResult;
use super::*;
#[test]
fn count_ones_0() {
assert_eq!(0, count_ones(0));
}
#[test]
fn count_ones_1() {
assert_eq!(1, count_ones(1));
}
#[test]
fn count_ones_0000_0000_0000_0010() {
assert_eq!(1, count_ones(0x0000_0000_0000_0010));
}
#[test]
fn count_ones_1000_0000_0000_0000() {
assert_eq!(1, count_ones(0x1000_0000_0000_0000));
}
#[test]
fn count_ones_ffff_ffff_ffff_ffff() {
assert_eq!(64, count_ones(0xFFFF_FFFF_FFFF_FFFF));
}
fn count_ones_prop_base(word: u64) -> bool {
count_ones(word) == word.count_ones() as usize
}
quickcheck! {
fn count_ones_prop(word: u64) -> bool {
count_ones_prop_base(word)
}
fn count_ones_prop_hash(word: u64) -> bool {
count_ones_prop_base(hash(&word))
}
}
#[test]
fn select1_0_0() {
assert_eq!(None, select1(0, 0));
}
#[test]
fn select1_0_1() {
assert_eq!(Some(0), select1(0, 1));
}
#[test]
fn select1_0_2() {
assert_eq!(Some(1), select1(0, 2));
}
#[test]
fn select1_0_3() {
assert_eq!(Some(0), select1(0, 3));
}
#[test]
fn select1_1_2() {
assert_eq!(None, select1(1, 2));
}
#[test]
fn select1_1_3() {
assert_eq!(Some(1), select1(1, 3));
}
#[test]
fn select1_3_13() {
assert_eq!(None, select1(3, 0b1101));
}
fn select1_slow(r: usize, x: u64) -> Option<usize> {
let mut count = 0;
for index in 0 .. 64 {
if (x >> index) & 1 == 1 {
count += 1;
}
if count == r + 1 {
return Some(index);
}
}
None
}
fn select1_prop_base(r: u8, x: u64) -> TestResult {
if r > 64 { return TestResult::discard(); }
TestResult::from_bool(
select1(r as usize, x) == select1_slow(r as usize, x))
}
quickcheck! {
fn select1_prop(r: u8, x: u64) -> TestResult {
select1_prop_base(r, x)
}
fn select1_prop_hash(r: u8, x: u64) -> TestResult {
select1_prop_base(r, hash(&x))
}
}
fn get_bits(x: u64, i: u8, n: u8) -> u64 {
let mask = if n == 64 {!0} else {(1 << n) - 1};
(x >> i) & mask
}
quickcheck! {
fn u_nz8_prop(argument: (u64, u64, u64, u64)) -> bool {
let n = hash(&argument);
let r = u_nz8(n);
for i in 0..8 {
let ni = get_bits(n, 8 * i, 8);
let ri = get_bits(r, 8 * i, 8);
if (ni != 0) != (ri == 0x80) {
return false;
}
}
true
}
}
#[test]
fn u_nz8_works() {
assert_eq!(b(0, 0, 0, 0, 0, 0, 0, 0),
u_nz8(u(0, 0, 0, 0, 0, 0, 0, 0)));
assert_eq!(b( 1, 1, 0, 1, 0, 1, 1, 1),
u_nz8(u(45, 12, 0, 129, 0, 3, 80, 1)));
assert_eq!(b(1, 1, 1, 1, 1, 1, 1, 1),
u_nz8(u(1, 2, 3, 4, 5, 6, 7, 8)));
assert_eq!(b( 1, 1, 1, 1, 0, 1, 1, 1),
u_nz8(0xFF_FF_FF_FF_00_FF_FF_FF));
}
fn u_le8_prop_base(n: u64, m: u64) -> bool {
let r = u_le8(n, m);
for i in 0..8 {
let ni = get_bits(n, 8 * i, 8);
let mi = get_bits(m, 8 * i, 8);
let ri = get_bits(r, 8 * i, 8);
if (ni <= mi) != (ri == 0x80) {
return false;
}
}
true
}
quickcheck! {
fn u_le8_prop(n: u64, m: u64) -> bool {
u_le8_prop_base(n, m)
}
fn u_le8_prop_hashed(n: (u64, u64, u64, u64),
m: (u64, u64, u64, u64)) -> bool {
let n = hash(&n);
let m = hash(&m);
u_le8_prop_base(n, m)
}
}
#[test]
fn le8_works() {
assert_eq!(b( 1, 1, 1, 1, 0, 0, 0, 0),
i_le8(i(0, 0, 0, 0, 0, 0, 0, 0),
i( 3, 2, 1, 0, -1, -2, -3, -4)));
assert_eq!(b( 0, 0, 0, 1, 1, 1, 1, 1),
i_le8(i(3, 2, 1, 0, -1, -2, -3, -4),
i( 0, 0, 0, 0, 0, 0, 0, 0)));
assert_eq!(b( 0, 0, 1, 1, 1, 1, 1, 1),
i_le8(i(19, 18, 17, 16, 15, 0, -1, -2),
i(17, 17, 17, 17, 17, 17, 17, 17)));
assert_eq!(b( 1, 1, 0, 0, 0, 0, 0, 0),
i_le8(i(-9, -8, -7, 0, 1, 2, 3, 4),
i(-8, -8, -8, -8, -8, -8, -8, -8)));
assert_eq!(b( 0, 1, 0, 1, 1, 0, 1, 0),
i_le8(i(8, 3, 46, 0, 0, 0, -6, -1),
i( 7, 3, 24, 1, 0, -9, 5, -2)));
}
#[test]
fn u_le8_works() {
assert_eq!(b( 1, 1, 1, 1, 1, 1, 1, 1),
u_le8(u( 0, 0, 0, 0, 0, 0, 0, 0),
u( 7, 6, 5, 4, 3, 2, 1, 0)));
assert_eq!(b( 1, 0, 0, 0, 0, 0, 0, 0),
u_le8(u( 0, 1, 2, 3, 4, 5, 6, 7),
u( 0, 0, 0, 0, 0, 0, 0, 0)));
assert_eq!(b( 0, 0, 1, 1, 1, 1, 1, 1),
u_le8(u(19, 18, 17, 16, 15, 14, 13, 12),
u(17, 17, 17, 17, 17, 17, 17, 17)));
assert_eq!(b( 0, 1, 0, 1, 1, 0, 1, 0),
u_le8(u( 8, 3, 46, 0, 0, 9, 3, 2),
u( 7, 3, 24, 1, 0, 0, 5, 1)));
}
/// Helpers for creating u64s.
fn b(a: u64, b: u64, c: u64, d: u64,
e: u64, f: u64, g: u64, h: u64) -> u64 {
(a << 63) | (b << 55) | (c << 47) | (d << 39) |
(e << 31) | (f << 23) | (g << 15) | (h << 7)
}
fn u(a: u8, b: u8, c: u8, d: u8,
e: u8, f: u8, g: u8, h: u8) -> u64 {
((a as u64) << 56)
| ((b as u64) << 48)
| ((c as u64) << 40)
| ((d as u64) << 32)
| ((e as u64) << 24)
| ((f as u64) << 16)
| ((g as u64) << 8)
| (h as u64)
}
fn i(a: i8, b: i8, c: i8, d: i8,
e: i8, f: i8, g: i8, h: i8) -> u64 {
u(a as u8, b as u8, c as u8, d as u8,
e as u8, f as u8, g as u8, h as u8)
}
fn hash<T: Hash>(t: &T) -> u64 {
let mut s = DefaultHasher::new();
t.hash(&mut s);
s.finish()
}
}
| lt)}
}
/// Fi | conditional_block |
uart.rs | //! Universal asynchronous receiver/transmitter with EasyDMA (UARTE)
//!
//! The driver provides only tranmission functionlity
//!
//! Author
//! -------------------
//!
//! * Author: Niklas Adolfsson <niklasadolfsson1@gmail.com>
//! * Date: March 10 2018
use core::cell::Cell;
use kernel;
use kernel::common::regs::{ReadOnly, ReadWrite, WriteOnly};
use nrf5x::pinmux;
const UARTE_BASE: u32 = 0x40002000;
static mut BYTE: u8 = 0;
#[repr(C)]
struct UarteRegisters {
pub task_startrx: WriteOnly<u32, Task::Register>, // 0x000
pub task_stoprx: WriteOnly<u32, Task::Register>, // 0x004
pub task_starttx: WriteOnly<u32, Task::Register>, // 0x008
pub task_stoptx: WriteOnly<u32, Task::Register>, // 0x00c
_reserved1: [u32; 7], // 0x010-0x02c
pub task_flush_rx: WriteOnly<u32, Task::Register>, // 0x02c
_reserved2: [u32; 52], // 0x030-0x100
pub event_cts: ReadWrite<u32, Event::Register>, // 0x100-0x104
pub event_ncts: ReadWrite<u32, Event::Register>, // 0x104-0x108
_reserved3: [u32; 2], // 0x108-0x110
pub event_endrx: ReadWrite<u32, Event::Register>, // 0x110-0x114
_reserved4: [u32; 3], // 0x114-0x120
pub event_endtx: ReadWrite<u32, Event::Register>, // 0x120-0x124
pub event_error: ReadWrite<u32, Event::Register>, // 0x124-0x128
_reserved6: [u32; 7], // 0x128-0x144
pub event_rxto: ReadWrite<u32, Event::Register>, // 0x144-0x148
_reserved7: [u32; 1], // 0x148-0x14C
pub event_rxstarted: ReadWrite<u32, Event::Register>, // 0x14C-0x150
pub event_txstarted: ReadWrite<u32, Event::Register>, // 0x150-0x154
_reserved8: [u32; 1], // 0x154-0x158
pub event_txstopped: ReadWrite<u32, Event::Register>, // 0x158-0x15c
_reserved9: [u32; 41], // 0x15c-0x200
pub shorts: ReadWrite<u32, Shorts::Register>, // 0x200-0x204
_reserved10: [u32; 64], // 0x204-0x304
pub intenset: ReadWrite<u32, Interrupt::Register>, // 0x304-0x308
pub intenclr: ReadWrite<u32, Interrupt::Register>, // 0x308-0x30C
_reserved11: [u32; 93], // 0x30C-0x480
pub errorsrc: ReadWrite<u32, ErrorSrc::Register>, // 0x480-0x484
_reserved12: [u32; 31], // 0x484-0x500
pub enable: ReadWrite<u32, Uart::Register>, // 0x500-0x504
_reserved13: [u32; 1], // 0x504-0x508
pub pselrts: ReadWrite<u32, Psel::Register>, // 0x508-0x50c
pub pseltxd: ReadWrite<u32, Psel::Register>, // 0x50c-0x510
pub pselcts: ReadWrite<u32, Psel::Register>, // 0x510-0x514
pub pselrxd: ReadWrite<u32, Psel::Register>, // 0x514-0x518
_reserved14: [u32; 3], // 0x518-0x524
pub baudrate: ReadWrite<u32, Baudrate::Register>, // 0x524-0x528
_reserved15: [u32; 3], // 0x528-0x534
pub rxd_ptr: ReadWrite<u32, Pointer::Register>, // 0x534-0x538
pub rxd_maxcnt: ReadWrite<u32, Counter::Register>, // 0x538-0x53c
pub rxd_amount: ReadOnly<u32, Counter::Register>, // 0x53c-0x540
_reserved16: [u32; 1], // 0x540-0x544
pub txd_ptr: ReadWrite<u32, Pointer::Register>, // 0x544-0x548
pub txd_maxcnt: ReadWrite<u32, Counter::Register>, // 0x548-0x54c
pub txd_amount: ReadOnly<u32, Counter::Register>, // 0x54c-0x550
_reserved17: [u32; 7], // 0x550-0x56C
pub config: ReadWrite<u32, Config::Register>, // 0x56C-0x570
}
#[cfg_attr(rustfmt, rustfmt_skip)]
register_bitfields! [u32,
/// Start task
Task [
ENABLE OFFSET(0) NUMBITS(1)
],
/// Read event
Event [
READY OFFSET(0) NUMBITS(1)
],
/// Shortcuts
Shorts [
// Shortcut between ENDRX and STARTRX
ENDRX_STARTRX OFFSET(5) NUMBITS(1),
// Shortcut between ENDRX and STOPRX
ENDRX_STOPRX OFFSET(6) NUMBITS(1)
],
/// UART Interrupts
Interrupt [
CTS OFFSET(0) NUMBITS(1),
NCTS OFFSET(1) NUMBITS(1),
ENDRX OFFSET(4) NUMBITS(1),
ENDTX OFFSET(8) NUMBITS(1),
ERROR OFFSET(9) NUMBITS(1),
RXTO OFFSET(17) NUMBITS(1),
RXSTARTED OFFSET(19) NUMBITS(1),
TXSTARTED OFFSET(20) NUMBITS(1),
TXSTOPPED OFFSET(22) NUMBITS(1)
],
/// UART Errors
ErrorSrc [
OVERRUN OFFSET(0) NUMBITS(1),
PARITY OFFSET(1) NUMBITS(1),
FRAMING OFFSET(2) NUMBITS(1),
BREAK OFFSET(3) NUMBITS(1)
],
/// Enable UART
Uart [
ENABLE OFFSET(0) NUMBITS(4) [
ON = 8,
OFF = 0
]
],
/// Pin select
Psel [
// Pin number
PIN OFFSET(0) NUMBITS(5),
// Connect/Disconnect
CONNECT OFFSET(31) NUMBITS(1)
],
/// Baudrate
Baudrate [
BAUDRAUTE OFFSET(0) NUMBITS(32)
],
/// DMA pointer
Pointer [
POINTER OFFSET(0) NUMBITS(32)
],
/// Counter value
Counter [
COUNTER OFFSET(0) NUMBITS(8)
],
/// Configuration of parity and flow control
Config [
HWFC OFFSET(0) NUMBITS(1),
PARITY OFFSET(1) NUMBITS(3)
]
];
/// UARTE
// It should never be instanced outside this module but because a static mutable reference to it
// is exported outside this module it must be `pub`
pub struct | {
regs: *const UarteRegisters,
client: Cell<Option<&'static kernel::hil::uart::Client>>,
buffer: kernel::common::take_cell::TakeCell<'static, [u8]>,
remaining_bytes: Cell<usize>,
offset: Cell<usize>,
}
#[derive(Copy, Clone)]
pub struct UARTParams {
pub baud_rate: u32,
}
/// UARTE0 handle
// This should only be accessed by the reset_handler on startup
pub static mut UARTE0: Uarte = Uarte::new();
impl Uarte {
/// Constructor
pub const fn new() -> Uarte {
Uarte {
regs: UARTE_BASE as *const UarteRegisters,
client: Cell::new(None),
buffer: kernel::common::take_cell::TakeCell::empty(),
remaining_bytes: Cell::new(0),
offset: Cell::new(0),
}
}
/// Configure which pins the UART should use for txd, rxd, cts and rts
pub fn configure(
&self,
txd: pinmux::Pinmux,
rxd: pinmux::Pinmux,
cts: pinmux::Pinmux,
rts: pinmux::Pinmux,
) {
let regs = unsafe { &*self.regs };
regs.pseltxd.write(Psel::PIN.val(txd.into()));
regs.pselrxd.write(Psel::PIN.val(rxd.into()));
regs.pselcts.write(Psel::PIN.val(cts.into()));
regs.pselrts.write(Psel::PIN.val(rts.into()));
}
fn set_baud_rate(&self, baud_rate: u32) {
let regs = unsafe { &*self.regs };
match baud_rate {
1200 => regs.baudrate.set(0x0004F000),
2400 => regs.baudrate.set(0x0009D000),
4800 => regs.baudrate.set(0x0013B000),
9600 => regs.baudrate.set(0x00275000),
14400 => regs.baudrate.set(0x003AF000),
19200 => regs.baudrate.set(0x004EA000),
28800 => regs.baudrate.set(0x0075C000),
38400 => regs.baudrate.set(0x009D0000),
57600 => regs.baudrate.set(0x00EB0000),
76800 => regs.baudrate.set(0x013A9000),
115200 => regs.baudrate.set(0x01D60000),
230400 => regs.baudrate.set(0x03B00000),
250000 => regs.baudrate.set(0x04000000),
460800 => regs.baudrate.set(0x07400000),
921600 => regs.baudrate.set(0x0F000000),
1000000 => regs.baudrate.set(0x10000000),
_ => regs.baudrate.set(0x01D60000), //setting default to 115200
}
}
// Enable UART peripheral, this need to disabled for low power applications
fn enable_uart(&self) {
let regs = unsafe { &*self.regs };
regs.enable.write(Uart::ENABLE::ON);
}
#[allow(dead_code)]
fn disable_uart(&self) {
let regs = unsafe { &*self.regs };
regs.enable.write(Uart::ENABLE::OFF);
}
#[allow(dead_code)]
fn enable_rx_interrupts(&self) {
let regs = unsafe { &*self.regs };
regs.intenset.write(Interrupt::ENDRX::SET);
}
fn enable_tx_interrupts(&self) {
let regs = unsafe { &*self.regs };
regs.intenset.write(Interrupt::ENDTX::SET);
}
#[allow(dead_code)]
fn disable_rx_interrupts(&self) {
let regs = unsafe { &*self.regs };
regs.intenclr.write(Interrupt::ENDRX::SET);
}
fn disable_tx_interrupts(&self) {
let regs = unsafe { &*self.regs };
regs.intenclr.write(Interrupt::ENDTX::SET);
}
/// UART interrupt handler that only listens to `tx_end` events
#[inline(never)]
pub fn handle_interrupt(&mut self) {
// disable interrupts
self.disable_tx_interrupts();
let regs = unsafe { &*self.regs };
if self.tx_ready() {
regs.event_endtx.write(Event::READY::CLEAR);
let tx_bytes = regs.txd_amount.get() as usize;
let rem = self.remaining_bytes.get();
// More bytes transmitted than requested `return silently`
// Cause probably a hardware fault
// FIXME: Progate error to the capsule
if tx_bytes > rem {
debug!("error more bytes than requested\r\n");
return;
}
self.remaining_bytes.set(rem - tx_bytes);
self.offset.set(tx_bytes);
if self.remaining_bytes.get() == 0 {
// Signal client write done
self.client.get().map(|client| {
self.buffer.take().map(|buffer| {
client.transmit_complete(buffer, kernel::hil::uart::Error::CommandComplete);
});
});
}
// Not all bytes have been transmitted then update offset and continue transmitting
else {
self.set_dma_pointer_to_buffer();
regs.task_starttx.write(Task::ENABLE::SET);
self.enable_tx_interrupts();
}
}
}
/// Transmit one byte at the time and the client is resposible for polling
/// This is used by the panic handler
pub unsafe fn send_byte(&self, byte: u8) {
let regs = &*self.regs;
self.remaining_bytes.set(1);
regs.event_endtx.write(Event::READY::CLEAR);
// precaution: copy value into variable with static lifetime
BYTE = byte;
regs.txd_ptr.set((&BYTE as *const u8) as u32);
regs.txd_maxcnt.write(Counter::COUNTER.val(1));
regs.task_starttx.write(Task::ENABLE::SET);
}
/// Check if the UART tranmission is done
pub fn tx_ready(&self) -> bool {
let regs = unsafe { &*self.regs };
regs.event_endtx.is_set(Event::READY)
}
fn set_dma_pointer_to_buffer(&self) {
let regs = unsafe { &*self.regs };
self.buffer.map(|buffer| {
regs.txd_ptr
.set(buffer[self.offset.get()..].as_ptr() as u32);
});
}
}
impl kernel::hil::uart::UART for Uarte {
fn set_client(&self, client: &'static kernel::hil::uart::Client) {
self.client.set(Some(client));
}
fn init(&self, params: kernel::hil::uart::UARTParams) {
self.enable_uart();
self.set_baud_rate(params.baud_rate);
}
fn transmit(&self, tx_data: &'static mut [u8], tx_len: usize) {
let regs = unsafe { &*self.regs };
if tx_len == 0 {
return;
}
self.remaining_bytes.set(tx_len);
self.offset.set(0);
self.buffer.replace(tx_data);
self.set_dma_pointer_to_buffer();
regs.txd_maxcnt.write(Counter::COUNTER.val(tx_len as u32));
regs.task_stoptx.write(Task::ENABLE::SET);
regs.task_starttx.write(Task::ENABLE::SET);
self.enable_tx_interrupts();
}
#[allow(unused)]
fn receive(&self, rx_buffer: &'static mut [u8], rx_len: usize) {
unimplemented!()
}
}
| Uarte | identifier_name |
uart.rs | //! Universal asynchronous receiver/transmitter with EasyDMA (UARTE)
//!
//! The driver provides only tranmission functionlity
//!
//! Author
//! -------------------
//!
//! * Author: Niklas Adolfsson <niklasadolfsson1@gmail.com>
//! * Date: March 10 2018
use core::cell::Cell;
use kernel;
use kernel::common::regs::{ReadOnly, ReadWrite, WriteOnly};
use nrf5x::pinmux;
const UARTE_BASE: u32 = 0x40002000;
static mut BYTE: u8 = 0;
#[repr(C)]
struct UarteRegisters {
pub task_startrx: WriteOnly<u32, Task::Register>, // 0x000
pub task_stoprx: WriteOnly<u32, Task::Register>, // 0x004
pub task_starttx: WriteOnly<u32, Task::Register>, // 0x008
pub task_stoptx: WriteOnly<u32, Task::Register>, // 0x00c
_reserved1: [u32; 7], // 0x010-0x02c
pub task_flush_rx: WriteOnly<u32, Task::Register>, // 0x02c
_reserved2: [u32; 52], // 0x030-0x100
pub event_cts: ReadWrite<u32, Event::Register>, // 0x100-0x104
pub event_ncts: ReadWrite<u32, Event::Register>, // 0x104-0x108
_reserved3: [u32; 2], // 0x108-0x110
pub event_endrx: ReadWrite<u32, Event::Register>, // 0x110-0x114
_reserved4: [u32; 3], // 0x114-0x120
pub event_endtx: ReadWrite<u32, Event::Register>, // 0x120-0x124
pub event_error: ReadWrite<u32, Event::Register>, // 0x124-0x128
_reserved6: [u32; 7], // 0x128-0x144
pub event_rxto: ReadWrite<u32, Event::Register>, // 0x144-0x148
_reserved7: [u32; 1], // 0x148-0x14C
pub event_rxstarted: ReadWrite<u32, Event::Register>, // 0x14C-0x150
pub event_txstarted: ReadWrite<u32, Event::Register>, // 0x150-0x154
_reserved8: [u32; 1], // 0x154-0x158
pub event_txstopped: ReadWrite<u32, Event::Register>, // 0x158-0x15c
_reserved9: [u32; 41], // 0x15c-0x200
pub shorts: ReadWrite<u32, Shorts::Register>, // 0x200-0x204
_reserved10: [u32; 64], // 0x204-0x304
pub intenset: ReadWrite<u32, Interrupt::Register>, // 0x304-0x308
pub intenclr: ReadWrite<u32, Interrupt::Register>, // 0x308-0x30C
_reserved11: [u32; 93], // 0x30C-0x480 | pub pselrts: ReadWrite<u32, Psel::Register>, // 0x508-0x50c
pub pseltxd: ReadWrite<u32, Psel::Register>, // 0x50c-0x510
pub pselcts: ReadWrite<u32, Psel::Register>, // 0x510-0x514
pub pselrxd: ReadWrite<u32, Psel::Register>, // 0x514-0x518
_reserved14: [u32; 3], // 0x518-0x524
pub baudrate: ReadWrite<u32, Baudrate::Register>, // 0x524-0x528
_reserved15: [u32; 3], // 0x528-0x534
pub rxd_ptr: ReadWrite<u32, Pointer::Register>, // 0x534-0x538
pub rxd_maxcnt: ReadWrite<u32, Counter::Register>, // 0x538-0x53c
pub rxd_amount: ReadOnly<u32, Counter::Register>, // 0x53c-0x540
_reserved16: [u32; 1], // 0x540-0x544
pub txd_ptr: ReadWrite<u32, Pointer::Register>, // 0x544-0x548
pub txd_maxcnt: ReadWrite<u32, Counter::Register>, // 0x548-0x54c
pub txd_amount: ReadOnly<u32, Counter::Register>, // 0x54c-0x550
_reserved17: [u32; 7], // 0x550-0x56C
pub config: ReadWrite<u32, Config::Register>, // 0x56C-0x570
}
#[cfg_attr(rustfmt, rustfmt_skip)]
register_bitfields! [u32,
/// Start task
Task [
ENABLE OFFSET(0) NUMBITS(1)
],
/// Read event
Event [
READY OFFSET(0) NUMBITS(1)
],
/// Shortcuts
Shorts [
// Shortcut between ENDRX and STARTRX
ENDRX_STARTRX OFFSET(5) NUMBITS(1),
// Shortcut between ENDRX and STOPRX
ENDRX_STOPRX OFFSET(6) NUMBITS(1)
],
/// UART Interrupts
Interrupt [
CTS OFFSET(0) NUMBITS(1),
NCTS OFFSET(1) NUMBITS(1),
ENDRX OFFSET(4) NUMBITS(1),
ENDTX OFFSET(8) NUMBITS(1),
ERROR OFFSET(9) NUMBITS(1),
RXTO OFFSET(17) NUMBITS(1),
RXSTARTED OFFSET(19) NUMBITS(1),
TXSTARTED OFFSET(20) NUMBITS(1),
TXSTOPPED OFFSET(22) NUMBITS(1)
],
/// UART Errors
ErrorSrc [
OVERRUN OFFSET(0) NUMBITS(1),
PARITY OFFSET(1) NUMBITS(1),
FRAMING OFFSET(2) NUMBITS(1),
BREAK OFFSET(3) NUMBITS(1)
],
/// Enable UART
Uart [
ENABLE OFFSET(0) NUMBITS(4) [
ON = 8,
OFF = 0
]
],
/// Pin select
Psel [
// Pin number
PIN OFFSET(0) NUMBITS(5),
// Connect/Disconnect
CONNECT OFFSET(31) NUMBITS(1)
],
/// Baudrate
Baudrate [
BAUDRAUTE OFFSET(0) NUMBITS(32)
],
/// DMA pointer
Pointer [
POINTER OFFSET(0) NUMBITS(32)
],
/// Counter value
Counter [
COUNTER OFFSET(0) NUMBITS(8)
],
/// Configuration of parity and flow control
Config [
HWFC OFFSET(0) NUMBITS(1),
PARITY OFFSET(1) NUMBITS(3)
]
];
/// UARTE
// It should never be instanced outside this module but because a static mutable reference to it
// is exported outside this module it must be `pub`
pub struct Uarte {
regs: *const UarteRegisters,
client: Cell<Option<&'static kernel::hil::uart::Client>>,
buffer: kernel::common::take_cell::TakeCell<'static, [u8]>,
remaining_bytes: Cell<usize>,
offset: Cell<usize>,
}
#[derive(Copy, Clone)]
pub struct UARTParams {
pub baud_rate: u32,
}
/// UARTE0 handle
// This should only be accessed by the reset_handler on startup
pub static mut UARTE0: Uarte = Uarte::new();
impl Uarte {
/// Constructor
pub const fn new() -> Uarte {
Uarte {
regs: UARTE_BASE as *const UarteRegisters,
client: Cell::new(None),
buffer: kernel::common::take_cell::TakeCell::empty(),
remaining_bytes: Cell::new(0),
offset: Cell::new(0),
}
}
/// Configure which pins the UART should use for txd, rxd, cts and rts
pub fn configure(
&self,
txd: pinmux::Pinmux,
rxd: pinmux::Pinmux,
cts: pinmux::Pinmux,
rts: pinmux::Pinmux,
) {
let regs = unsafe { &*self.regs };
regs.pseltxd.write(Psel::PIN.val(txd.into()));
regs.pselrxd.write(Psel::PIN.val(rxd.into()));
regs.pselcts.write(Psel::PIN.val(cts.into()));
regs.pselrts.write(Psel::PIN.val(rts.into()));
}
fn set_baud_rate(&self, baud_rate: u32) {
let regs = unsafe { &*self.regs };
match baud_rate {
1200 => regs.baudrate.set(0x0004F000),
2400 => regs.baudrate.set(0x0009D000),
4800 => regs.baudrate.set(0x0013B000),
9600 => regs.baudrate.set(0x00275000),
14400 => regs.baudrate.set(0x003AF000),
19200 => regs.baudrate.set(0x004EA000),
28800 => regs.baudrate.set(0x0075C000),
38400 => regs.baudrate.set(0x009D0000),
57600 => regs.baudrate.set(0x00EB0000),
76800 => regs.baudrate.set(0x013A9000),
115200 => regs.baudrate.set(0x01D60000),
230400 => regs.baudrate.set(0x03B00000),
250000 => regs.baudrate.set(0x04000000),
460800 => regs.baudrate.set(0x07400000),
921600 => regs.baudrate.set(0x0F000000),
1000000 => regs.baudrate.set(0x10000000),
_ => regs.baudrate.set(0x01D60000), //setting default to 115200
}
}
// Enable UART peripheral, this need to disabled for low power applications
fn enable_uart(&self) {
let regs = unsafe { &*self.regs };
regs.enable.write(Uart::ENABLE::ON);
}
#[allow(dead_code)]
fn disable_uart(&self) {
let regs = unsafe { &*self.regs };
regs.enable.write(Uart::ENABLE::OFF);
}
#[allow(dead_code)]
fn enable_rx_interrupts(&self) {
let regs = unsafe { &*self.regs };
regs.intenset.write(Interrupt::ENDRX::SET);
}
fn enable_tx_interrupts(&self) {
let regs = unsafe { &*self.regs };
regs.intenset.write(Interrupt::ENDTX::SET);
}
#[allow(dead_code)]
fn disable_rx_interrupts(&self) {
let regs = unsafe { &*self.regs };
regs.intenclr.write(Interrupt::ENDRX::SET);
}
fn disable_tx_interrupts(&self) {
let regs = unsafe { &*self.regs };
regs.intenclr.write(Interrupt::ENDTX::SET);
}
/// UART interrupt handler that only listens to `tx_end` events
#[inline(never)]
pub fn handle_interrupt(&mut self) {
// disable interrupts
self.disable_tx_interrupts();
let regs = unsafe { &*self.regs };
if self.tx_ready() {
regs.event_endtx.write(Event::READY::CLEAR);
let tx_bytes = regs.txd_amount.get() as usize;
let rem = self.remaining_bytes.get();
// More bytes transmitted than requested `return silently`
// Cause probably a hardware fault
// FIXME: Progate error to the capsule
if tx_bytes > rem {
debug!("error more bytes than requested\r\n");
return;
}
self.remaining_bytes.set(rem - tx_bytes);
self.offset.set(tx_bytes);
if self.remaining_bytes.get() == 0 {
// Signal client write done
self.client.get().map(|client| {
self.buffer.take().map(|buffer| {
client.transmit_complete(buffer, kernel::hil::uart::Error::CommandComplete);
});
});
}
// Not all bytes have been transmitted then update offset and continue transmitting
else {
self.set_dma_pointer_to_buffer();
regs.task_starttx.write(Task::ENABLE::SET);
self.enable_tx_interrupts();
}
}
}
/// Transmit one byte at the time and the client is resposible for polling
/// This is used by the panic handler
pub unsafe fn send_byte(&self, byte: u8) {
let regs = &*self.regs;
self.remaining_bytes.set(1);
regs.event_endtx.write(Event::READY::CLEAR);
// precaution: copy value into variable with static lifetime
BYTE = byte;
regs.txd_ptr.set((&BYTE as *const u8) as u32);
regs.txd_maxcnt.write(Counter::COUNTER.val(1));
regs.task_starttx.write(Task::ENABLE::SET);
}
/// Check if the UART tranmission is done
pub fn tx_ready(&self) -> bool {
let regs = unsafe { &*self.regs };
regs.event_endtx.is_set(Event::READY)
}
fn set_dma_pointer_to_buffer(&self) {
let regs = unsafe { &*self.regs };
self.buffer.map(|buffer| {
regs.txd_ptr
.set(buffer[self.offset.get()..].as_ptr() as u32);
});
}
}
impl kernel::hil::uart::UART for Uarte {
fn set_client(&self, client: &'static kernel::hil::uart::Client) {
self.client.set(Some(client));
}
fn init(&self, params: kernel::hil::uart::UARTParams) {
self.enable_uart();
self.set_baud_rate(params.baud_rate);
}
fn transmit(&self, tx_data: &'static mut [u8], tx_len: usize) {
let regs = unsafe { &*self.regs };
if tx_len == 0 {
return;
}
self.remaining_bytes.set(tx_len);
self.offset.set(0);
self.buffer.replace(tx_data);
self.set_dma_pointer_to_buffer();
regs.txd_maxcnt.write(Counter::COUNTER.val(tx_len as u32));
regs.task_stoptx.write(Task::ENABLE::SET);
regs.task_starttx.write(Task::ENABLE::SET);
self.enable_tx_interrupts();
}
#[allow(unused)]
fn receive(&self, rx_buffer: &'static mut [u8], rx_len: usize) {
unimplemented!()
}
} | pub errorsrc: ReadWrite<u32, ErrorSrc::Register>, // 0x480-0x484
_reserved12: [u32; 31], // 0x484-0x500
pub enable: ReadWrite<u32, Uart::Register>, // 0x500-0x504
_reserved13: [u32; 1], // 0x504-0x508 | random_line_split |
uart.rs | //! Universal asynchronous receiver/transmitter with EasyDMA (UARTE)
//!
//! The driver provides only tranmission functionlity
//!
//! Author
//! -------------------
//!
//! * Author: Niklas Adolfsson <niklasadolfsson1@gmail.com>
//! * Date: March 10 2018
use core::cell::Cell;
use kernel;
use kernel::common::regs::{ReadOnly, ReadWrite, WriteOnly};
use nrf5x::pinmux;
const UARTE_BASE: u32 = 0x40002000;
static mut BYTE: u8 = 0;
#[repr(C)]
struct UarteRegisters {
pub task_startrx: WriteOnly<u32, Task::Register>, // 0x000
pub task_stoprx: WriteOnly<u32, Task::Register>, // 0x004
pub task_starttx: WriteOnly<u32, Task::Register>, // 0x008
pub task_stoptx: WriteOnly<u32, Task::Register>, // 0x00c
_reserved1: [u32; 7], // 0x010-0x02c
pub task_flush_rx: WriteOnly<u32, Task::Register>, // 0x02c
_reserved2: [u32; 52], // 0x030-0x100
pub event_cts: ReadWrite<u32, Event::Register>, // 0x100-0x104
pub event_ncts: ReadWrite<u32, Event::Register>, // 0x104-0x108
_reserved3: [u32; 2], // 0x108-0x110
pub event_endrx: ReadWrite<u32, Event::Register>, // 0x110-0x114
_reserved4: [u32; 3], // 0x114-0x120
pub event_endtx: ReadWrite<u32, Event::Register>, // 0x120-0x124
pub event_error: ReadWrite<u32, Event::Register>, // 0x124-0x128
_reserved6: [u32; 7], // 0x128-0x144
pub event_rxto: ReadWrite<u32, Event::Register>, // 0x144-0x148
_reserved7: [u32; 1], // 0x148-0x14C
pub event_rxstarted: ReadWrite<u32, Event::Register>, // 0x14C-0x150
pub event_txstarted: ReadWrite<u32, Event::Register>, // 0x150-0x154
_reserved8: [u32; 1], // 0x154-0x158
pub event_txstopped: ReadWrite<u32, Event::Register>, // 0x158-0x15c
_reserved9: [u32; 41], // 0x15c-0x200
pub shorts: ReadWrite<u32, Shorts::Register>, // 0x200-0x204
_reserved10: [u32; 64], // 0x204-0x304
pub intenset: ReadWrite<u32, Interrupt::Register>, // 0x304-0x308
pub intenclr: ReadWrite<u32, Interrupt::Register>, // 0x308-0x30C
_reserved11: [u32; 93], // 0x30C-0x480
pub errorsrc: ReadWrite<u32, ErrorSrc::Register>, // 0x480-0x484
_reserved12: [u32; 31], // 0x484-0x500
pub enable: ReadWrite<u32, Uart::Register>, // 0x500-0x504
_reserved13: [u32; 1], // 0x504-0x508
pub pselrts: ReadWrite<u32, Psel::Register>, // 0x508-0x50c
pub pseltxd: ReadWrite<u32, Psel::Register>, // 0x50c-0x510
pub pselcts: ReadWrite<u32, Psel::Register>, // 0x510-0x514
pub pselrxd: ReadWrite<u32, Psel::Register>, // 0x514-0x518
_reserved14: [u32; 3], // 0x518-0x524
pub baudrate: ReadWrite<u32, Baudrate::Register>, // 0x524-0x528
_reserved15: [u32; 3], // 0x528-0x534
pub rxd_ptr: ReadWrite<u32, Pointer::Register>, // 0x534-0x538
pub rxd_maxcnt: ReadWrite<u32, Counter::Register>, // 0x538-0x53c
pub rxd_amount: ReadOnly<u32, Counter::Register>, // 0x53c-0x540
_reserved16: [u32; 1], // 0x540-0x544
pub txd_ptr: ReadWrite<u32, Pointer::Register>, // 0x544-0x548
pub txd_maxcnt: ReadWrite<u32, Counter::Register>, // 0x548-0x54c
pub txd_amount: ReadOnly<u32, Counter::Register>, // 0x54c-0x550
_reserved17: [u32; 7], // 0x550-0x56C
pub config: ReadWrite<u32, Config::Register>, // 0x56C-0x570
}
#[cfg_attr(rustfmt, rustfmt_skip)]
register_bitfields! [u32,
/// Start task
Task [
ENABLE OFFSET(0) NUMBITS(1)
],
/// Read event
Event [
READY OFFSET(0) NUMBITS(1)
],
/// Shortcuts
Shorts [
// Shortcut between ENDRX and STARTRX
ENDRX_STARTRX OFFSET(5) NUMBITS(1),
// Shortcut between ENDRX and STOPRX
ENDRX_STOPRX OFFSET(6) NUMBITS(1)
],
/// UART Interrupts
Interrupt [
CTS OFFSET(0) NUMBITS(1),
NCTS OFFSET(1) NUMBITS(1),
ENDRX OFFSET(4) NUMBITS(1),
ENDTX OFFSET(8) NUMBITS(1),
ERROR OFFSET(9) NUMBITS(1),
RXTO OFFSET(17) NUMBITS(1),
RXSTARTED OFFSET(19) NUMBITS(1),
TXSTARTED OFFSET(20) NUMBITS(1),
TXSTOPPED OFFSET(22) NUMBITS(1)
],
/// UART Errors
ErrorSrc [
OVERRUN OFFSET(0) NUMBITS(1),
PARITY OFFSET(1) NUMBITS(1),
FRAMING OFFSET(2) NUMBITS(1),
BREAK OFFSET(3) NUMBITS(1)
],
/// Enable UART
Uart [
ENABLE OFFSET(0) NUMBITS(4) [
ON = 8,
OFF = 0
]
],
/// Pin select
Psel [
// Pin number
PIN OFFSET(0) NUMBITS(5),
// Connect/Disconnect
CONNECT OFFSET(31) NUMBITS(1)
],
/// Baudrate
Baudrate [
BAUDRAUTE OFFSET(0) NUMBITS(32)
],
/// DMA pointer
Pointer [
POINTER OFFSET(0) NUMBITS(32)
],
/// Counter value
Counter [
COUNTER OFFSET(0) NUMBITS(8)
],
/// Configuration of parity and flow control
Config [
HWFC OFFSET(0) NUMBITS(1),
PARITY OFFSET(1) NUMBITS(3)
]
];
/// UARTE
// It should never be instanced outside this module but because a static mutable reference to it
// is exported outside this module it must be `pub`
pub struct Uarte {
regs: *const UarteRegisters,
client: Cell<Option<&'static kernel::hil::uart::Client>>,
buffer: kernel::common::take_cell::TakeCell<'static, [u8]>,
remaining_bytes: Cell<usize>,
offset: Cell<usize>,
}
#[derive(Copy, Clone)]
pub struct UARTParams {
pub baud_rate: u32,
}
/// UARTE0 handle
// This should only be accessed by the reset_handler on startup
pub static mut UARTE0: Uarte = Uarte::new();
impl Uarte {
/// Constructor
pub const fn new() -> Uarte {
Uarte {
regs: UARTE_BASE as *const UarteRegisters,
client: Cell::new(None),
buffer: kernel::common::take_cell::TakeCell::empty(),
remaining_bytes: Cell::new(0),
offset: Cell::new(0),
}
}
/// Configure which pins the UART should use for txd, rxd, cts and rts
pub fn configure(
&self,
txd: pinmux::Pinmux,
rxd: pinmux::Pinmux,
cts: pinmux::Pinmux,
rts: pinmux::Pinmux,
) {
let regs = unsafe { &*self.regs };
regs.pseltxd.write(Psel::PIN.val(txd.into()));
regs.pselrxd.write(Psel::PIN.val(rxd.into()));
regs.pselcts.write(Psel::PIN.val(cts.into()));
regs.pselrts.write(Psel::PIN.val(rts.into()));
}
fn set_baud_rate(&self, baud_rate: u32) {
let regs = unsafe { &*self.regs };
match baud_rate {
1200 => regs.baudrate.set(0x0004F000),
2400 => regs.baudrate.set(0x0009D000),
4800 => regs.baudrate.set(0x0013B000),
9600 => regs.baudrate.set(0x00275000),
14400 => regs.baudrate.set(0x003AF000),
19200 => regs.baudrate.set(0x004EA000),
28800 => regs.baudrate.set(0x0075C000),
38400 => regs.baudrate.set(0x009D0000),
57600 => regs.baudrate.set(0x00EB0000),
76800 => regs.baudrate.set(0x013A9000),
115200 => regs.baudrate.set(0x01D60000),
230400 => regs.baudrate.set(0x03B00000),
250000 => regs.baudrate.set(0x04000000),
460800 => regs.baudrate.set(0x07400000),
921600 => regs.baudrate.set(0x0F000000),
1000000 => regs.baudrate.set(0x10000000),
_ => regs.baudrate.set(0x01D60000), //setting default to 115200
}
}
// Enable UART peripheral, this need to disabled for low power applications
fn enable_uart(&self) {
let regs = unsafe { &*self.regs };
regs.enable.write(Uart::ENABLE::ON);
}
#[allow(dead_code)]
fn disable_uart(&self) {
let regs = unsafe { &*self.regs };
regs.enable.write(Uart::ENABLE::OFF);
}
#[allow(dead_code)]
fn enable_rx_interrupts(&self) {
let regs = unsafe { &*self.regs };
regs.intenset.write(Interrupt::ENDRX::SET);
}
fn enable_tx_interrupts(&self) {
let regs = unsafe { &*self.regs };
regs.intenset.write(Interrupt::ENDTX::SET);
}
#[allow(dead_code)]
fn disable_rx_interrupts(&self) {
let regs = unsafe { &*self.regs };
regs.intenclr.write(Interrupt::ENDRX::SET);
}
fn disable_tx_interrupts(&self) {
let regs = unsafe { &*self.regs };
regs.intenclr.write(Interrupt::ENDTX::SET);
}
/// UART interrupt handler that only listens to `tx_end` events
#[inline(never)]
pub fn handle_interrupt(&mut self) {
// disable interrupts
self.disable_tx_interrupts();
let regs = unsafe { &*self.regs };
if self.tx_ready() |
}
/// Transmit one byte at the time and the client is resposible for polling
/// This is used by the panic handler
pub unsafe fn send_byte(&self, byte: u8) {
let regs = &*self.regs;
self.remaining_bytes.set(1);
regs.event_endtx.write(Event::READY::CLEAR);
// precaution: copy value into variable with static lifetime
BYTE = byte;
regs.txd_ptr.set((&BYTE as *const u8) as u32);
regs.txd_maxcnt.write(Counter::COUNTER.val(1));
regs.task_starttx.write(Task::ENABLE::SET);
}
/// Check if the UART tranmission is done
pub fn tx_ready(&self) -> bool {
let regs = unsafe { &*self.regs };
regs.event_endtx.is_set(Event::READY)
}
fn set_dma_pointer_to_buffer(&self) {
let regs = unsafe { &*self.regs };
self.buffer.map(|buffer| {
regs.txd_ptr
.set(buffer[self.offset.get()..].as_ptr() as u32);
});
}
}
impl kernel::hil::uart::UART for Uarte {
fn set_client(&self, client: &'static kernel::hil::uart::Client) {
self.client.set(Some(client));
}
fn init(&self, params: kernel::hil::uart::UARTParams) {
self.enable_uart();
self.set_baud_rate(params.baud_rate);
}
fn transmit(&self, tx_data: &'static mut [u8], tx_len: usize) {
let regs = unsafe { &*self.regs };
if tx_len == 0 {
return;
}
self.remaining_bytes.set(tx_len);
self.offset.set(0);
self.buffer.replace(tx_data);
self.set_dma_pointer_to_buffer();
regs.txd_maxcnt.write(Counter::COUNTER.val(tx_len as u32));
regs.task_stoptx.write(Task::ENABLE::SET);
regs.task_starttx.write(Task::ENABLE::SET);
self.enable_tx_interrupts();
}
#[allow(unused)]
fn receive(&self, rx_buffer: &'static mut [u8], rx_len: usize) {
unimplemented!()
}
}
| {
regs.event_endtx.write(Event::READY::CLEAR);
let tx_bytes = regs.txd_amount.get() as usize;
let rem = self.remaining_bytes.get();
// More bytes transmitted than requested `return silently`
// Cause probably a hardware fault
// FIXME: Progate error to the capsule
if tx_bytes > rem {
debug!("error more bytes than requested\r\n");
return;
}
self.remaining_bytes.set(rem - tx_bytes);
self.offset.set(tx_bytes);
if self.remaining_bytes.get() == 0 {
// Signal client write done
self.client.get().map(|client| {
self.buffer.take().map(|buffer| {
client.transmit_complete(buffer, kernel::hil::uart::Error::CommandComplete);
});
});
}
// Not all bytes have been transmitted then update offset and continue transmitting
else {
self.set_dma_pointer_to_buffer();
regs.task_starttx.write(Task::ENABLE::SET);
self.enable_tx_interrupts();
}
} | conditional_block |
plonk_util.rs | use crate::partition::get_subgroup_shift;
use crate::witness::Witness;
use crate::{ifft_with_precomputation_power_of_2, msm_execute_parallel, AffinePoint, CircuitBuilder, Curve, FftPrecomputation, Field, HaloCurve, MsmPrecomputation, Polynomial, PolynomialCommitment, ProjectivePoint, Target, NUM_ROUTED_WIRES};
use rayon::prelude::*;
/// Evaluate the polynomial which vanishes on any multiplicative subgroup of a given order `n`.
pub(crate) fn eval_zero_poly<F: Field>(n: usize, x: F) -> F {
// Z(x) = x^n - 1
x.exp_usize(n) - F::ONE
}
/// Evaluate the Lagrange basis `L_1` with `L_1(1) = 1`, and `L_1(x) = 0` for other members of an
/// order `n` multiplicative subgroup.
pub(crate) fn eval_l_1<F: Field>(n: usize, x: F) -> F {
if x.is_one() {
// The code below would divide by zero, since we have (x - 1) in both the numerator and
// denominator.
return F::ONE;
}
// L_1(x) = (x^n - 1) / (n * (x - 1))
// = Z(x) / (n * (x - 1))
eval_zero_poly(n, x) / (F::from_canonical_usize(n) * (x - F::ONE))
}
/// Computes a sum of terms weighted by powers of alpha.
pub fn reduce_with_powers<F: Field>(terms: &[F], alpha: F) -> F {
let mut sum = F::ZERO;
for &term in terms.iter().rev() {
sum = sum * alpha + term;
}
sum
}
/// Computes a sum of terms weighted by powers of alpha.
pub(crate) fn reduce_with_powers_recursive<C: HaloCurve>(
builder: &mut CircuitBuilder<C>,
terms: &[Target<C::ScalarField>],
alpha: Target<C::ScalarField>,
) -> Target<C::ScalarField> {
let mut sum = builder.zero_wire();
for &term in terms.iter().rev() {
sum = builder.mul_add(sum, alpha, term);
}
sum
}
/// Compute `n(x)` for a given `x`, where `n` is the injective function related to the Halo
/// endomorphism.
pub fn halo_n<C: HaloCurve>(s_bits: &[bool]) -> C::ScalarField {
// This is based on Algorithm 2 of the Halo paper, except that we start with (a, b) = (0, 0).
debug_assert_eq!(s_bits.len() % 2, 0, "Number of scalar bits must be even");
let zero = C::ScalarField::ZERO;
let mut a = zero;
let mut b = zero;
for s_bits_chunk in s_bits.chunks(2) {
let bit_lo = s_bits_chunk[0];
let bit_hi = s_bits_chunk[1];
let sign = if bit_lo {
C::ScalarField::ONE
} else {
C::ScalarField::NEG_ONE
};
let (c, d) = if bit_hi { (sign, zero) } else { (zero, sign) };
a = a.double() + c;
b = b.double() + d;
}
a * C::ZETA_SCALAR + b
}
/// Compute `[n(s)].P` for a given `s`, where `n` is the injective function related to the Halo
/// endomorphism.
pub fn halo_n_mul<C: HaloCurve>(s_bits: &[bool], p: AffinePoint<C>) -> AffinePoint<C> {
// This is based on Algorithm 1 of the Halo paper, except that we start with Acc = O.
debug_assert_eq!(s_bits.len() % 2, 0, "Number of scalar bits must be even");
let p_p = p.to_projective();
let p_n = -p_p;
let endo_p_p = p.endomorphism().to_projective();
let endo_p_n = -endo_p_p;
let mut acc = ProjectivePoint::<C>::ZERO;
for s_bits_chunk in s_bits.chunks(2) {
let bit_lo = s_bits_chunk[0];
let bit_hi = s_bits_chunk[1];
let s = if bit_hi {
if bit_lo {
endo_p_p
} else {
endo_p_n
}
} else if bit_lo {
p_p
} else {
p_n
};
acc = acc.double() + s;
}
acc.to_affine()
}
pub fn | <F: Field>(coeffs: &[F], x: F) -> F {
let mut ans = F::ZERO;
let mut x_pow = F::ONE;
for &c in coeffs {
ans = ans + (c * x_pow);
x_pow = x_pow * x;
}
ans
}
/// Compute `[x^0, x^1, ..., x^(n - 1)]`.
pub fn powers<F: Field>(x: F, n: usize) -> Vec<F> {
let mut powers = Vec::new();
let mut current = F::ONE;
for i in 0..n {
if i != 0 {
current = current * x;
}
powers.push(current);
}
powers
}
/// Compute `[x^0, x^1, ..., x^(n - 1)]`.
pub(crate) fn powers_recursive<C: HaloCurve>(
builder: &mut CircuitBuilder<C>,
x: Target<C::ScalarField>,
n: usize,
) -> Vec<Target<C::ScalarField>> {
let mut powers = Vec::new();
let mut current = builder.one_wire();
for i in 0..n {
if i != 0 {
current = builder.mul(current, x);
}
powers.push(current);
}
powers
}
/// Returns the evaluation of a list of polynomials at a point.
pub(crate) fn eval_polys<F: Field>(polys: &[Polynomial<F>], powers: &[F]) -> Vec<F> {
polys.iter().map(|p| p.eval_from_power(powers)).collect()
}
/// Zero-pad a list of `n` polynomial coefficients to a length of `8n`, which is the degree at
/// which we do most polynomial arithmetic.
pub(crate) fn pad_to_8n<F: Field>(coeffs: &[F]) -> Vec<F> {
let n = coeffs.len();
let mut result = coeffs.to_vec();
while result.len() < 8 * n {
result.push(F::ZERO);
}
result
}
pub(crate) fn values_to_polynomials<F: Field>(
values_vec: &[Vec<F>],
fft_precomputation: &FftPrecomputation<F>,
) -> Vec<Polynomial<F>> {
values_vec
.par_iter()
.map(|values| Polynomial::from_evaluations(values, fft_precomputation))
.collect()
}
pub(crate) fn polynomials_to_values_padded<F: Field>(
polys_vec: &[Polynomial<F>],
fft_precomputation: &FftPrecomputation<F>,
) -> Vec<Vec<F>> {
polys_vec
.par_iter()
.map(|poly| {
let padded_poly = poly.padded(poly.len() * 8);
padded_poly.eval_domain(fft_precomputation)
})
.collect()
}
/// Like `pedersen_commit`, but with no blinding factor.
pub fn pedersen_hash<C: Curve>(
xs: &[C::ScalarField],
pedersen_g_msm_precomputation: &MsmPrecomputation<C>,
) -> ProjectivePoint<C> {
msm_execute_parallel(pedersen_g_msm_precomputation, xs)
}
#[allow(dead_code)]
fn pedersen_commit<C: Curve>(
xs: &[C::ScalarField],
opening: C::ScalarField,
h: AffinePoint<C>,
pedersen_g_msm_precomputation: &MsmPrecomputation<C>,
) -> ProjectivePoint<C> {
// TODO: Couldn't get this working with *.
let h = h.to_projective();
let mul_precomputation = h.mul_precompute();
let blinding_term = h.mul_with_precomputation(opening, mul_precomputation);
msm_execute_parallel(pedersen_g_msm_precomputation, xs) + blinding_term
}
pub fn commit_polynomials<C: Curve>(
polynomials: &[Polynomial<C::ScalarField>],
msm_precomputation: &MsmPrecomputation<C>,
blinding_point: AffinePoint<C>,
blinding: bool,
) -> Vec<PolynomialCommitment<C>> {
PolynomialCommitment::coeffs_vec_to_commitments(
polynomials
.iter()
.map(|p| p.coeffs())
.collect::<Vec<_>>()
.as_slice(),
msm_precomputation,
blinding_point,
blinding,
)
}
// Generate Z, which is used in Plonk's permutation argument.
pub fn permutation_polynomial<F: Field>(
degree: usize,
subgroup: &[F],
witness: &Witness<F>,
sigma_values: &[Vec<F>],
beta: F,
gamma: F,
) -> Vec<F> {
let mut plonk_z_points = vec![F::ONE];
let k_is = (0..NUM_ROUTED_WIRES)
.map(get_subgroup_shift::<F>)
.collect::<Vec<_>>();
for i in 1..degree {
let x = subgroup[i - 1];
let mut numerator = F::ONE;
let mut denominator = F::ONE;
for j in 0..NUM_ROUTED_WIRES {
let wire_value = witness.get_indices(i - 1, j);
let k_i = k_is[j];
let s_id = k_i * x;
let s_sigma = sigma_values[j][8 * (i - 1)];
numerator = numerator * (wire_value + beta * s_id + gamma);
denominator = denominator * (wire_value + beta * s_sigma + gamma);
}
let last = *plonk_z_points.last().unwrap();
plonk_z_points.push(last * numerator / denominator);
}
plonk_z_points
}
pub fn sigma_polynomials<F: Field>(
sigma: Vec<usize>,
degree: usize,
subgroup_generator: F,
) -> Vec<Vec<F>> {
sigma
.chunks(degree)
.map(|chunk| {
chunk
.par_iter()
.map(|&x| {
get_subgroup_shift::<F>(x / degree) * subgroup_generator.exp_usize(x % degree)
})
.collect::<Vec<_>>()
})
.collect()
}
/// Given polynomials `[p_0,...,p_k]` of degree `degree` and `alpha \in F`, returns `\sum_{i=0}^k alpha^i p_i`.
pub(crate) fn scale_polynomials<F: Field>(
polynomials: Vec<Polynomial<F>>,
alpha: F,
degree: usize,
) -> Polynomial<F> {
let alpha_powers = powers(alpha, polynomials.len());
Polynomial::from(
(0..degree)
.map(|i| {
(0..polynomials.len())
.map(|j| polynomials[j][i] * alpha_powers[j])
.fold(F::ZERO, |acc, x| acc + x)
})
.collect::<Vec<_>>(),
)
}
#[allow(dead_code)]
pub(crate) fn polynomial_degree_plus_1<F: Field>(
points: &[F],
fft_precomputation: &FftPrecomputation<F>,
) -> usize {
let coeffs = ifft_with_precomputation_power_of_2(points, fft_precomputation);
coeffs.iter().rev().skip_while(|c| c.is_zero()).count()
}
// TODO: Maybe a streaming version using an `Iterator` would be faster and wouldn't require as much memory for large circuits.
// TODO: Optimize this.
pub fn halo_s<F: Field>(us: &[F]) -> Vec<F> {
let n = 1 << us.len();
let mut res = vec![F::ONE; n];
let us_inv = F::batch_multiplicative_inverse(us);
for (j, (&u, &u_inv)) in us.iter().rev().zip(us_inv.iter().rev()).enumerate() {
for (i, x) in res.iter_mut().enumerate() {
if i & (1 << j) == 0 {
*x = *x * u_inv;
} else {
*x = *x * u;
}
}
}
res
}
/// Evaluate `g(X, {u_i})` as defined in the Halo paper.
pub fn halo_g<F: Field>(x: F, us: &[F]) -> F {
let mut product = F::ONE;
let mut x_power = x;
for &u_i in us.iter().rev() {
let u_i_inv = u_i.multiplicative_inverse_assuming_nonzero();
let term = u_i * x_power + u_i_inv;
product = product * term;
x_power = x_power.square();
}
product
}
#[cfg(test)]
mod test {
use super::*;
use crate::{CircuitBuilder, Curve, Field, PartialWitness, Tweedledee};
#[test]
fn test_halo_n() {
type C = Tweedledee;
type SF = <Tweedledee as Curve>::ScalarField;
let p = C::convert(SF::rand()) * C::GENERATOR_PROJECTIVE;
let r = SF::rand();
let res = C::convert(halo_n::<C>(&r.to_canonical_bool_vec()[..128])) * p;
let p = p.to_affine();
assert_eq!(
res.to_affine(),
halo_n_mul::<C>(&r.to_canonical_bool_vec()[..128], p)
)
}
#[test]
fn test_permutation_polynomial() {
let mut builder = CircuitBuilder::<Tweedledee>::new(128);
let one = builder.one_wire();
let t = builder.add_virtual_target();
let t_sq = builder.square(t);
let quad = builder.add_many(&[one, t, t_sq]);
let seven =
builder.constant_wire(<Tweedledee as Curve>::ScalarField::from_canonical_usize(7));
let res = builder.sub(quad, seven);
builder.assert_zero(res);
let mut partial_witness = PartialWitness::new();
partial_witness.set_target(t, <Tweedledee as Curve>::ScalarField::TWO);
let circuit = builder.build();
let witness = circuit.generate_witness(partial_witness);
let beta = <Tweedledee as Curve>::ScalarField::rand();
let gamma = <Tweedledee as Curve>::ScalarField::rand();
let plonk_z_points_n = permutation_polynomial(
circuit.degree(),
&circuit.subgroup_n,
&witness,
&circuit.s_sigma_values_8n,
beta,
gamma,
);
// Verify that the permutation polynomial is well-formed.
let k_is = (0..NUM_ROUTED_WIRES)
.map(get_subgroup_shift::<<Tweedledee as Curve>::ScalarField>)
.collect::<Vec<_>>();
let wire_values = &witness.transpose();
for (i, &x) in circuit.subgroup_n.iter().enumerate() {
let (z_x, z_gz) = (
plonk_z_points_n[i],
plonk_z_points_n[(i + 1) % circuit.degree()],
);
let mut f_prime = <Tweedledee as Curve>::ScalarField::ONE;
let mut g_prime = <Tweedledee as Curve>::ScalarField::ONE;
for j in 0..NUM_ROUTED_WIRES {
let wire_value = wire_values[j][i];
let k_i = k_is[j];
let s_id = k_i * x;
let s_sigma = circuit.s_sigma_values_8n[j][8 * i];
f_prime = f_prime * (wire_value + beta * s_id + gamma);
g_prime = g_prime * (wire_value + beta * s_sigma + gamma);
}
let vanishing_v_shift_term = f_prime * z_x - g_prime * z_gz;
assert_eq!(
vanishing_v_shift_term,
<Tweedledee as Curve>::ScalarField::ZERO
);
}
}
#[test]
fn test_s_vector_g_function() {
type F = <Tweedledee as Curve>::ScalarField;
let us = (0..10).map(|_| F::rand()).collect::<Vec<_>>();
let x = F::rand();
assert_eq!(
F::inner_product(&halo_s(&us), &powers(x, 1 << 10)),
halo_g(x, &us)
);
}
}
| eval_poly | identifier_name |
plonk_util.rs | use crate::partition::get_subgroup_shift;
use crate::witness::Witness;
use crate::{ifft_with_precomputation_power_of_2, msm_execute_parallel, AffinePoint, CircuitBuilder, Curve, FftPrecomputation, Field, HaloCurve, MsmPrecomputation, Polynomial, PolynomialCommitment, ProjectivePoint, Target, NUM_ROUTED_WIRES};
use rayon::prelude::*;
/// Evaluate the polynomial which vanishes on any multiplicative subgroup of a given order `n`.
pub(crate) fn eval_zero_poly<F: Field>(n: usize, x: F) -> F {
// Z(x) = x^n - 1
x.exp_usize(n) - F::ONE
}
/// Evaluate the Lagrange basis `L_1` with `L_1(1) = 1`, and `L_1(x) = 0` for other members of an
/// order `n` multiplicative subgroup.
pub(crate) fn eval_l_1<F: Field>(n: usize, x: F) -> F {
if x.is_one() {
// The code below would divide by zero, since we have (x - 1) in both the numerator and
// denominator.
return F::ONE;
}
// L_1(x) = (x^n - 1) / (n * (x - 1))
// = Z(x) / (n * (x - 1))
eval_zero_poly(n, x) / (F::from_canonical_usize(n) * (x - F::ONE))
}
/// Computes a sum of terms weighted by powers of alpha.
pub fn reduce_with_powers<F: Field>(terms: &[F], alpha: F) -> F {
let mut sum = F::ZERO;
for &term in terms.iter().rev() {
sum = sum * alpha + term;
}
sum
}
/// Computes a sum of terms weighted by powers of alpha.
pub(crate) fn reduce_with_powers_recursive<C: HaloCurve>(
builder: &mut CircuitBuilder<C>,
terms: &[Target<C::ScalarField>],
alpha: Target<C::ScalarField>,
) -> Target<C::ScalarField> {
let mut sum = builder.zero_wire();
for &term in terms.iter().rev() {
sum = builder.mul_add(sum, alpha, term);
}
sum
}
/// Compute `n(x)` for a given `x`, where `n` is the injective function related to the Halo
/// endomorphism.
pub fn halo_n<C: HaloCurve>(s_bits: &[bool]) -> C::ScalarField {
// This is based on Algorithm 2 of the Halo paper, except that we start with (a, b) = (0, 0).
debug_assert_eq!(s_bits.len() % 2, 0, "Number of scalar bits must be even");
let zero = C::ScalarField::ZERO;
let mut a = zero;
let mut b = zero;
for s_bits_chunk in s_bits.chunks(2) {
let bit_lo = s_bits_chunk[0];
let bit_hi = s_bits_chunk[1];
let sign = if bit_lo {
C::ScalarField::ONE
} else {
C::ScalarField::NEG_ONE
};
let (c, d) = if bit_hi { (sign, zero) } else { (zero, sign) };
a = a.double() + c;
b = b.double() + d;
}
a * C::ZETA_SCALAR + b
}
/// Compute `[n(s)].P` for a given `s`, where `n` is the injective function related to the Halo
/// endomorphism.
pub fn halo_n_mul<C: HaloCurve>(s_bits: &[bool], p: AffinePoint<C>) -> AffinePoint<C> {
// This is based on Algorithm 1 of the Halo paper, except that we start with Acc = O.
debug_assert_eq!(s_bits.len() % 2, 0, "Number of scalar bits must be even");
let p_p = p.to_projective();
let p_n = -p_p;
let endo_p_p = p.endomorphism().to_projective();
let endo_p_n = -endo_p_p;
let mut acc = ProjectivePoint::<C>::ZERO;
for s_bits_chunk in s_bits.chunks(2) {
let bit_lo = s_bits_chunk[0];
let bit_hi = s_bits_chunk[1];
let s = if bit_hi {
if bit_lo {
endo_p_p
} else {
endo_p_n
}
} else if bit_lo {
p_p
} else {
p_n
};
acc = acc.double() + s;
}
acc.to_affine()
}
pub fn eval_poly<F: Field>(coeffs: &[F], x: F) -> F {
let mut ans = F::ZERO;
let mut x_pow = F::ONE;
for &c in coeffs {
ans = ans + (c * x_pow);
x_pow = x_pow * x;
}
ans
}
/// Compute `[x^0, x^1, ..., x^(n - 1)]`.
pub fn powers<F: Field>(x: F, n: usize) -> Vec<F> {
let mut powers = Vec::new();
let mut current = F::ONE;
for i in 0..n {
if i != 0 {
current = current * x;
}
powers.push(current);
}
powers
}
/// Compute `[x^0, x^1, ..., x^(n - 1)]`.
pub(crate) fn powers_recursive<C: HaloCurve>(
builder: &mut CircuitBuilder<C>,
x: Target<C::ScalarField>,
n: usize,
) -> Vec<Target<C::ScalarField>> {
let mut powers = Vec::new();
let mut current = builder.one_wire();
for i in 0..n {
if i != 0 {
current = builder.mul(current, x);
}
powers.push(current);
}
powers
}
/// Returns the evaluation of a list of polynomials at a point.
pub(crate) fn eval_polys<F: Field>(polys: &[Polynomial<F>], powers: &[F]) -> Vec<F> {
polys.iter().map(|p| p.eval_from_power(powers)).collect()
}
/// Zero-pad a list of `n` polynomial coefficients to a length of `8n`, which is the degree at
/// which we do most polynomial arithmetic.
pub(crate) fn pad_to_8n<F: Field>(coeffs: &[F]) -> Vec<F> {
let n = coeffs.len();
let mut result = coeffs.to_vec();
while result.len() < 8 * n {
result.push(F::ZERO);
}
result
}
pub(crate) fn values_to_polynomials<F: Field>(
values_vec: &[Vec<F>],
fft_precomputation: &FftPrecomputation<F>,
) -> Vec<Polynomial<F>> {
values_vec
.par_iter()
.map(|values| Polynomial::from_evaluations(values, fft_precomputation))
.collect()
}
pub(crate) fn polynomials_to_values_padded<F: Field>(
polys_vec: &[Polynomial<F>],
fft_precomputation: &FftPrecomputation<F>,
) -> Vec<Vec<F>> {
polys_vec
.par_iter()
.map(|poly| {
let padded_poly = poly.padded(poly.len() * 8);
padded_poly.eval_domain(fft_precomputation)
})
.collect()
}
/// Like `pedersen_commit`, but with no blinding factor.
pub fn pedersen_hash<C: Curve>(
xs: &[C::ScalarField],
pedersen_g_msm_precomputation: &MsmPrecomputation<C>,
) -> ProjectivePoint<C> {
msm_execute_parallel(pedersen_g_msm_precomputation, xs)
}
#[allow(dead_code)]
fn pedersen_commit<C: Curve>(
xs: &[C::ScalarField],
opening: C::ScalarField,
h: AffinePoint<C>,
pedersen_g_msm_precomputation: &MsmPrecomputation<C>,
) -> ProjectivePoint<C> {
// TODO: Couldn't get this working with *.
let h = h.to_projective();
let mul_precomputation = h.mul_precompute();
let blinding_term = h.mul_with_precomputation(opening, mul_precomputation);
msm_execute_parallel(pedersen_g_msm_precomputation, xs) + blinding_term
}
pub fn commit_polynomials<C: Curve>(
polynomials: &[Polynomial<C::ScalarField>],
msm_precomputation: &MsmPrecomputation<C>,
blinding_point: AffinePoint<C>,
blinding: bool,
) -> Vec<PolynomialCommitment<C>> {
PolynomialCommitment::coeffs_vec_to_commitments(
polynomials
.iter()
.map(|p| p.coeffs())
.collect::<Vec<_>>()
.as_slice(),
msm_precomputation,
blinding_point,
blinding,
)
}
// Generate Z, which is used in Plonk's permutation argument.
pub fn permutation_polynomial<F: Field>(
degree: usize,
subgroup: &[F],
witness: &Witness<F>,
sigma_values: &[Vec<F>],
beta: F,
gamma: F,
) -> Vec<F> {
let mut plonk_z_points = vec![F::ONE];
let k_is = (0..NUM_ROUTED_WIRES)
.map(get_subgroup_shift::<F>)
.collect::<Vec<_>>();
for i in 1..degree {
let x = subgroup[i - 1];
let mut numerator = F::ONE;
let mut denominator = F::ONE;
for j in 0..NUM_ROUTED_WIRES {
let wire_value = witness.get_indices(i - 1, j);
let k_i = k_is[j];
let s_id = k_i * x;
let s_sigma = sigma_values[j][8 * (i - 1)];
numerator = numerator * (wire_value + beta * s_id + gamma);
denominator = denominator * (wire_value + beta * s_sigma + gamma);
}
let last = *plonk_z_points.last().unwrap();
plonk_z_points.push(last * numerator / denominator);
}
plonk_z_points
}
pub fn sigma_polynomials<F: Field>(
sigma: Vec<usize>,
degree: usize,
subgroup_generator: F,
) -> Vec<Vec<F>> {
sigma
.chunks(degree)
.map(|chunk| {
chunk
.par_iter()
.map(|&x| {
get_subgroup_shift::<F>(x / degree) * subgroup_generator.exp_usize(x % degree)
})
.collect::<Vec<_>>()
})
.collect()
}
/// Given polynomials `[p_0,...,p_k]` of degree `degree` and `alpha \in F`, returns `\sum_{i=0}^k alpha^i p_i`.
pub(crate) fn scale_polynomials<F: Field>(
polynomials: Vec<Polynomial<F>>,
alpha: F,
degree: usize,
) -> Polynomial<F> {
let alpha_powers = powers(alpha, polynomials.len());
Polynomial::from(
(0..degree)
.map(|i| {
(0..polynomials.len())
.map(|j| polynomials[j][i] * alpha_powers[j])
.fold(F::ZERO, |acc, x| acc + x)
})
.collect::<Vec<_>>(),
)
}
#[allow(dead_code)]
pub(crate) fn polynomial_degree_plus_1<F: Field>(
points: &[F],
fft_precomputation: &FftPrecomputation<F>,
) -> usize {
let coeffs = ifft_with_precomputation_power_of_2(points, fft_precomputation);
coeffs.iter().rev().skip_while(|c| c.is_zero()).count()
}
// TODO: Maybe a streaming version using an `Iterator` would be faster and wouldn't require as much memory for large circuits.
// TODO: Optimize this.
pub fn halo_s<F: Field>(us: &[F]) -> Vec<F> {
let n = 1 << us.len();
let mut res = vec![F::ONE; n];
let us_inv = F::batch_multiplicative_inverse(us);
for (j, (&u, &u_inv)) in us.iter().rev().zip(us_inv.iter().rev()).enumerate() {
for (i, x) in res.iter_mut().enumerate() {
if i & (1 << j) == 0 {
*x = *x * u_inv;
} else {
*x = *x * u;
}
}
}
res
}
/// Evaluate `g(X, {u_i})` as defined in the Halo paper.
pub fn halo_g<F: Field>(x: F, us: &[F]) -> F {
let mut product = F::ONE;
let mut x_power = x;
for &u_i in us.iter().rev() {
let u_i_inv = u_i.multiplicative_inverse_assuming_nonzero();
let term = u_i * x_power + u_i_inv;
product = product * term;
x_power = x_power.square();
}
product
}
#[cfg(test)]
mod test {
use super::*;
use crate::{CircuitBuilder, Curve, Field, PartialWitness, Tweedledee};
#[test]
fn test_halo_n() {
type C = Tweedledee;
type SF = <Tweedledee as Curve>::ScalarField;
let p = C::convert(SF::rand()) * C::GENERATOR_PROJECTIVE;
let r = SF::rand();
let res = C::convert(halo_n::<C>(&r.to_canonical_bool_vec()[..128])) * p;
let p = p.to_affine();
assert_eq!(
res.to_affine(),
halo_n_mul::<C>(&r.to_canonical_bool_vec()[..128], p)
)
}
#[test]
fn test_permutation_polynomial() {
let mut builder = CircuitBuilder::<Tweedledee>::new(128);
let one = builder.one_wire();
let t = builder.add_virtual_target();
let t_sq = builder.square(t);
let quad = builder.add_many(&[one, t, t_sq]);
let seven =
builder.constant_wire(<Tweedledee as Curve>::ScalarField::from_canonical_usize(7));
let res = builder.sub(quad, seven);
builder.assert_zero(res);
let mut partial_witness = PartialWitness::new();
partial_witness.set_target(t, <Tweedledee as Curve>::ScalarField::TWO);
let circuit = builder.build();
let witness = circuit.generate_witness(partial_witness);
let beta = <Tweedledee as Curve>::ScalarField::rand();
let gamma = <Tweedledee as Curve>::ScalarField::rand();
let plonk_z_points_n = permutation_polynomial(
circuit.degree(),
&circuit.subgroup_n,
&witness,
&circuit.s_sigma_values_8n,
beta,
gamma,
);
// Verify that the permutation polynomial is well-formed.
let k_is = (0..NUM_ROUTED_WIRES)
.map(get_subgroup_shift::<<Tweedledee as Curve>::ScalarField>)
.collect::<Vec<_>>();
let wire_values = &witness.transpose();
for (i, &x) in circuit.subgroup_n.iter().enumerate() {
let (z_x, z_gz) = (
plonk_z_points_n[i],
plonk_z_points_n[(i + 1) % circuit.degree()],
);
let mut f_prime = <Tweedledee as Curve>::ScalarField::ONE;
let mut g_prime = <Tweedledee as Curve>::ScalarField::ONE;
for j in 0..NUM_ROUTED_WIRES {
let wire_value = wire_values[j][i];
let k_i = k_is[j];
let s_id = k_i * x;
let s_sigma = circuit.s_sigma_values_8n[j][8 * i];
f_prime = f_prime * (wire_value + beta * s_id + gamma);
g_prime = g_prime * (wire_value + beta * s_sigma + gamma);
}
let vanishing_v_shift_term = f_prime * z_x - g_prime * z_gz;
assert_eq!(
vanishing_v_shift_term,
<Tweedledee as Curve>::ScalarField::ZERO
);
}
}
#[test]
fn test_s_vector_g_function() |
}
| {
type F = <Tweedledee as Curve>::ScalarField;
let us = (0..10).map(|_| F::rand()).collect::<Vec<_>>();
let x = F::rand();
assert_eq!(
F::inner_product(&halo_s(&us), &powers(x, 1 << 10)),
halo_g(x, &us)
);
} | identifier_body |
plonk_util.rs | use crate::partition::get_subgroup_shift;
use crate::witness::Witness;
use crate::{ifft_with_precomputation_power_of_2, msm_execute_parallel, AffinePoint, CircuitBuilder, Curve, FftPrecomputation, Field, HaloCurve, MsmPrecomputation, Polynomial, PolynomialCommitment, ProjectivePoint, Target, NUM_ROUTED_WIRES};
use rayon::prelude::*;
/// Evaluate the polynomial which vanishes on any multiplicative subgroup of a given order `n`.
pub(crate) fn eval_zero_poly<F: Field>(n: usize, x: F) -> F {
// Z(x) = x^n - 1
x.exp_usize(n) - F::ONE
}
/// Evaluate the Lagrange basis `L_1` with `L_1(1) = 1`, and `L_1(x) = 0` for other members of an
/// order `n` multiplicative subgroup.
pub(crate) fn eval_l_1<F: Field>(n: usize, x: F) -> F {
if x.is_one() {
// The code below would divide by zero, since we have (x - 1) in both the numerator and
// denominator.
return F::ONE;
}
// L_1(x) = (x^n - 1) / (n * (x - 1))
// = Z(x) / (n * (x - 1))
eval_zero_poly(n, x) / (F::from_canonical_usize(n) * (x - F::ONE))
}
/// Computes a sum of terms weighted by powers of alpha.
pub fn reduce_with_powers<F: Field>(terms: &[F], alpha: F) -> F {
let mut sum = F::ZERO;
for &term in terms.iter().rev() {
sum = sum * alpha + term;
}
sum
}
/// Computes a sum of terms weighted by powers of alpha.
pub(crate) fn reduce_with_powers_recursive<C: HaloCurve>(
builder: &mut CircuitBuilder<C>,
terms: &[Target<C::ScalarField>],
alpha: Target<C::ScalarField>,
) -> Target<C::ScalarField> {
let mut sum = builder.zero_wire();
for &term in terms.iter().rev() {
sum = builder.mul_add(sum, alpha, term);
}
sum
}
/// Compute `n(x)` for a given `x`, where `n` is the injective function related to the Halo
/// endomorphism.
pub fn halo_n<C: HaloCurve>(s_bits: &[bool]) -> C::ScalarField {
// This is based on Algorithm 2 of the Halo paper, except that we start with (a, b) = (0, 0).
debug_assert_eq!(s_bits.len() % 2, 0, "Number of scalar bits must be even");
let zero = C::ScalarField::ZERO;
let mut a = zero;
let mut b = zero;
for s_bits_chunk in s_bits.chunks(2) {
let bit_lo = s_bits_chunk[0];
let bit_hi = s_bits_chunk[1];
let sign = if bit_lo {
C::ScalarField::ONE
} else {
C::ScalarField::NEG_ONE
};
let (c, d) = if bit_hi { (sign, zero) } else { (zero, sign) };
a = a.double() + c;
b = b.double() + d;
}
a * C::ZETA_SCALAR + b
}
/// Compute `[n(s)].P` for a given `s`, where `n` is the injective function related to the Halo
/// endomorphism.
pub fn halo_n_mul<C: HaloCurve>(s_bits: &[bool], p: AffinePoint<C>) -> AffinePoint<C> {
// This is based on Algorithm 1 of the Halo paper, except that we start with Acc = O.
debug_assert_eq!(s_bits.len() % 2, 0, "Number of scalar bits must be even");
let p_p = p.to_projective();
let p_n = -p_p;
let endo_p_p = p.endomorphism().to_projective();
let endo_p_n = -endo_p_p;
let mut acc = ProjectivePoint::<C>::ZERO;
for s_bits_chunk in s_bits.chunks(2) {
let bit_lo = s_bits_chunk[0];
let bit_hi = s_bits_chunk[1];
| if bit_lo {
endo_p_p
} else {
endo_p_n
}
} else if bit_lo {
p_p
} else {
p_n
};
acc = acc.double() + s;
}
acc.to_affine()
}
pub fn eval_poly<F: Field>(coeffs: &[F], x: F) -> F {
let mut ans = F::ZERO;
let mut x_pow = F::ONE;
for &c in coeffs {
ans = ans + (c * x_pow);
x_pow = x_pow * x;
}
ans
}
/// Compute `[x^0, x^1, ..., x^(n - 1)]`.
pub fn powers<F: Field>(x: F, n: usize) -> Vec<F> {
let mut powers = Vec::new();
let mut current = F::ONE;
for i in 0..n {
if i != 0 {
current = current * x;
}
powers.push(current);
}
powers
}
/// Compute `[x^0, x^1, ..., x^(n - 1)]`.
pub(crate) fn powers_recursive<C: HaloCurve>(
builder: &mut CircuitBuilder<C>,
x: Target<C::ScalarField>,
n: usize,
) -> Vec<Target<C::ScalarField>> {
let mut powers = Vec::new();
let mut current = builder.one_wire();
for i in 0..n {
if i != 0 {
current = builder.mul(current, x);
}
powers.push(current);
}
powers
}
/// Returns the evaluation of a list of polynomials at a point.
pub(crate) fn eval_polys<F: Field>(polys: &[Polynomial<F>], powers: &[F]) -> Vec<F> {
polys.iter().map(|p| p.eval_from_power(powers)).collect()
}
/// Zero-pad a list of `n` polynomial coefficients to a length of `8n`, which is the degree at
/// which we do most polynomial arithmetic.
pub(crate) fn pad_to_8n<F: Field>(coeffs: &[F]) -> Vec<F> {
let n = coeffs.len();
let mut result = coeffs.to_vec();
while result.len() < 8 * n {
result.push(F::ZERO);
}
result
}
pub(crate) fn values_to_polynomials<F: Field>(
values_vec: &[Vec<F>],
fft_precomputation: &FftPrecomputation<F>,
) -> Vec<Polynomial<F>> {
values_vec
.par_iter()
.map(|values| Polynomial::from_evaluations(values, fft_precomputation))
.collect()
}
pub(crate) fn polynomials_to_values_padded<F: Field>(
polys_vec: &[Polynomial<F>],
fft_precomputation: &FftPrecomputation<F>,
) -> Vec<Vec<F>> {
polys_vec
.par_iter()
.map(|poly| {
let padded_poly = poly.padded(poly.len() * 8);
padded_poly.eval_domain(fft_precomputation)
})
.collect()
}
/// Like `pedersen_commit`, but with no blinding factor.
pub fn pedersen_hash<C: Curve>(
xs: &[C::ScalarField],
pedersen_g_msm_precomputation: &MsmPrecomputation<C>,
) -> ProjectivePoint<C> {
msm_execute_parallel(pedersen_g_msm_precomputation, xs)
}
#[allow(dead_code)]
fn pedersen_commit<C: Curve>(
xs: &[C::ScalarField],
opening: C::ScalarField,
h: AffinePoint<C>,
pedersen_g_msm_precomputation: &MsmPrecomputation<C>,
) -> ProjectivePoint<C> {
// TODO: Couldn't get this working with *.
let h = h.to_projective();
let mul_precomputation = h.mul_precompute();
let blinding_term = h.mul_with_precomputation(opening, mul_precomputation);
msm_execute_parallel(pedersen_g_msm_precomputation, xs) + blinding_term
}
pub fn commit_polynomials<C: Curve>(
polynomials: &[Polynomial<C::ScalarField>],
msm_precomputation: &MsmPrecomputation<C>,
blinding_point: AffinePoint<C>,
blinding: bool,
) -> Vec<PolynomialCommitment<C>> {
PolynomialCommitment::coeffs_vec_to_commitments(
polynomials
.iter()
.map(|p| p.coeffs())
.collect::<Vec<_>>()
.as_slice(),
msm_precomputation,
blinding_point,
blinding,
)
}
// Generate Z, which is used in Plonk's permutation argument.
pub fn permutation_polynomial<F: Field>(
degree: usize,
subgroup: &[F],
witness: &Witness<F>,
sigma_values: &[Vec<F>],
beta: F,
gamma: F,
) -> Vec<F> {
let mut plonk_z_points = vec![F::ONE];
let k_is = (0..NUM_ROUTED_WIRES)
.map(get_subgroup_shift::<F>)
.collect::<Vec<_>>();
for i in 1..degree {
let x = subgroup[i - 1];
let mut numerator = F::ONE;
let mut denominator = F::ONE;
for j in 0..NUM_ROUTED_WIRES {
let wire_value = witness.get_indices(i - 1, j);
let k_i = k_is[j];
let s_id = k_i * x;
let s_sigma = sigma_values[j][8 * (i - 1)];
numerator = numerator * (wire_value + beta * s_id + gamma);
denominator = denominator * (wire_value + beta * s_sigma + gamma);
}
let last = *plonk_z_points.last().unwrap();
plonk_z_points.push(last * numerator / denominator);
}
plonk_z_points
}
pub fn sigma_polynomials<F: Field>(
sigma: Vec<usize>,
degree: usize,
subgroup_generator: F,
) -> Vec<Vec<F>> {
sigma
.chunks(degree)
.map(|chunk| {
chunk
.par_iter()
.map(|&x| {
get_subgroup_shift::<F>(x / degree) * subgroup_generator.exp_usize(x % degree)
})
.collect::<Vec<_>>()
})
.collect()
}
/// Given polynomials `[p_0,...,p_k]` of degree `degree` and `alpha \in F`, returns `\sum_{i=0}^k alpha^i p_i`.
pub(crate) fn scale_polynomials<F: Field>(
polynomials: Vec<Polynomial<F>>,
alpha: F,
degree: usize,
) -> Polynomial<F> {
let alpha_powers = powers(alpha, polynomials.len());
Polynomial::from(
(0..degree)
.map(|i| {
(0..polynomials.len())
.map(|j| polynomials[j][i] * alpha_powers[j])
.fold(F::ZERO, |acc, x| acc + x)
})
.collect::<Vec<_>>(),
)
}
#[allow(dead_code)]
pub(crate) fn polynomial_degree_plus_1<F: Field>(
points: &[F],
fft_precomputation: &FftPrecomputation<F>,
) -> usize {
let coeffs = ifft_with_precomputation_power_of_2(points, fft_precomputation);
coeffs.iter().rev().skip_while(|c| c.is_zero()).count()
}
// TODO: Maybe a streaming version using an `Iterator` would be faster and wouldn't require as much memory for large circuits.
// TODO: Optimize this.
pub fn halo_s<F: Field>(us: &[F]) -> Vec<F> {
let n = 1 << us.len();
let mut res = vec![F::ONE; n];
let us_inv = F::batch_multiplicative_inverse(us);
for (j, (&u, &u_inv)) in us.iter().rev().zip(us_inv.iter().rev()).enumerate() {
for (i, x) in res.iter_mut().enumerate() {
if i & (1 << j) == 0 {
*x = *x * u_inv;
} else {
*x = *x * u;
}
}
}
res
}
/// Evaluate `g(X, {u_i})` as defined in the Halo paper.
pub fn halo_g<F: Field>(x: F, us: &[F]) -> F {
let mut product = F::ONE;
let mut x_power = x;
for &u_i in us.iter().rev() {
let u_i_inv = u_i.multiplicative_inverse_assuming_nonzero();
let term = u_i * x_power + u_i_inv;
product = product * term;
x_power = x_power.square();
}
product
}
#[cfg(test)]
mod test {
use super::*;
use crate::{CircuitBuilder, Curve, Field, PartialWitness, Tweedledee};
#[test]
fn test_halo_n() {
type C = Tweedledee;
type SF = <Tweedledee as Curve>::ScalarField;
let p = C::convert(SF::rand()) * C::GENERATOR_PROJECTIVE;
let r = SF::rand();
let res = C::convert(halo_n::<C>(&r.to_canonical_bool_vec()[..128])) * p;
let p = p.to_affine();
assert_eq!(
res.to_affine(),
halo_n_mul::<C>(&r.to_canonical_bool_vec()[..128], p)
)
}
#[test]
fn test_permutation_polynomial() {
let mut builder = CircuitBuilder::<Tweedledee>::new(128);
let one = builder.one_wire();
let t = builder.add_virtual_target();
let t_sq = builder.square(t);
let quad = builder.add_many(&[one, t, t_sq]);
let seven =
builder.constant_wire(<Tweedledee as Curve>::ScalarField::from_canonical_usize(7));
let res = builder.sub(quad, seven);
builder.assert_zero(res);
let mut partial_witness = PartialWitness::new();
partial_witness.set_target(t, <Tweedledee as Curve>::ScalarField::TWO);
let circuit = builder.build();
let witness = circuit.generate_witness(partial_witness);
let beta = <Tweedledee as Curve>::ScalarField::rand();
let gamma = <Tweedledee as Curve>::ScalarField::rand();
let plonk_z_points_n = permutation_polynomial(
circuit.degree(),
&circuit.subgroup_n,
&witness,
&circuit.s_sigma_values_8n,
beta,
gamma,
);
// Verify that the permutation polynomial is well-formed.
let k_is = (0..NUM_ROUTED_WIRES)
.map(get_subgroup_shift::<<Tweedledee as Curve>::ScalarField>)
.collect::<Vec<_>>();
let wire_values = &witness.transpose();
for (i, &x) in circuit.subgroup_n.iter().enumerate() {
let (z_x, z_gz) = (
plonk_z_points_n[i],
plonk_z_points_n[(i + 1) % circuit.degree()],
);
let mut f_prime = <Tweedledee as Curve>::ScalarField::ONE;
let mut g_prime = <Tweedledee as Curve>::ScalarField::ONE;
for j in 0..NUM_ROUTED_WIRES {
let wire_value = wire_values[j][i];
let k_i = k_is[j];
let s_id = k_i * x;
let s_sigma = circuit.s_sigma_values_8n[j][8 * i];
f_prime = f_prime * (wire_value + beta * s_id + gamma);
g_prime = g_prime * (wire_value + beta * s_sigma + gamma);
}
let vanishing_v_shift_term = f_prime * z_x - g_prime * z_gz;
assert_eq!(
vanishing_v_shift_term,
<Tweedledee as Curve>::ScalarField::ZERO
);
}
}
#[test]
fn test_s_vector_g_function() {
type F = <Tweedledee as Curve>::ScalarField;
let us = (0..10).map(|_| F::rand()).collect::<Vec<_>>();
let x = F::rand();
assert_eq!(
F::inner_product(&halo_s(&us), &powers(x, 1 << 10)),
halo_g(x, &us)
);
}
} | let s = if bit_hi { | random_line_split |
plonk_util.rs | use crate::partition::get_subgroup_shift;
use crate::witness::Witness;
use crate::{ifft_with_precomputation_power_of_2, msm_execute_parallel, AffinePoint, CircuitBuilder, Curve, FftPrecomputation, Field, HaloCurve, MsmPrecomputation, Polynomial, PolynomialCommitment, ProjectivePoint, Target, NUM_ROUTED_WIRES};
use rayon::prelude::*;
/// Evaluate the polynomial which vanishes on any multiplicative subgroup of a given order `n`.
pub(crate) fn eval_zero_poly<F: Field>(n: usize, x: F) -> F {
// Z(x) = x^n - 1
x.exp_usize(n) - F::ONE
}
/// Evaluate the Lagrange basis `L_1` with `L_1(1) = 1`, and `L_1(x) = 0` for other members of an
/// order `n` multiplicative subgroup.
pub(crate) fn eval_l_1<F: Field>(n: usize, x: F) -> F {
if x.is_one() {
// The code below would divide by zero, since we have (x - 1) in both the numerator and
// denominator.
return F::ONE;
}
// L_1(x) = (x^n - 1) / (n * (x - 1))
// = Z(x) / (n * (x - 1))
eval_zero_poly(n, x) / (F::from_canonical_usize(n) * (x - F::ONE))
}
/// Computes a sum of terms weighted by powers of alpha.
pub fn reduce_with_powers<F: Field>(terms: &[F], alpha: F) -> F {
let mut sum = F::ZERO;
for &term in terms.iter().rev() {
sum = sum * alpha + term;
}
sum
}
/// Computes a sum of terms weighted by powers of alpha.
pub(crate) fn reduce_with_powers_recursive<C: HaloCurve>(
builder: &mut CircuitBuilder<C>,
terms: &[Target<C::ScalarField>],
alpha: Target<C::ScalarField>,
) -> Target<C::ScalarField> {
let mut sum = builder.zero_wire();
for &term in terms.iter().rev() {
sum = builder.mul_add(sum, alpha, term);
}
sum
}
/// Compute `n(x)` for a given `x`, where `n` is the injective function related to the Halo
/// endomorphism.
pub fn halo_n<C: HaloCurve>(s_bits: &[bool]) -> C::ScalarField {
// This is based on Algorithm 2 of the Halo paper, except that we start with (a, b) = (0, 0).
debug_assert_eq!(s_bits.len() % 2, 0, "Number of scalar bits must be even");
let zero = C::ScalarField::ZERO;
let mut a = zero;
let mut b = zero;
for s_bits_chunk in s_bits.chunks(2) {
let bit_lo = s_bits_chunk[0];
let bit_hi = s_bits_chunk[1];
let sign = if bit_lo {
C::ScalarField::ONE
} else | ;
let (c, d) = if bit_hi { (sign, zero) } else { (zero, sign) };
a = a.double() + c;
b = b.double() + d;
}
a * C::ZETA_SCALAR + b
}
/// Compute `[n(s)].P` for a given `s`, where `n` is the injective function related to the Halo
/// endomorphism.
pub fn halo_n_mul<C: HaloCurve>(s_bits: &[bool], p: AffinePoint<C>) -> AffinePoint<C> {
// This is based on Algorithm 1 of the Halo paper, except that we start with Acc = O.
debug_assert_eq!(s_bits.len() % 2, 0, "Number of scalar bits must be even");
let p_p = p.to_projective();
let p_n = -p_p;
let endo_p_p = p.endomorphism().to_projective();
let endo_p_n = -endo_p_p;
let mut acc = ProjectivePoint::<C>::ZERO;
for s_bits_chunk in s_bits.chunks(2) {
let bit_lo = s_bits_chunk[0];
let bit_hi = s_bits_chunk[1];
let s = if bit_hi {
if bit_lo {
endo_p_p
} else {
endo_p_n
}
} else if bit_lo {
p_p
} else {
p_n
};
acc = acc.double() + s;
}
acc.to_affine()
}
pub fn eval_poly<F: Field>(coeffs: &[F], x: F) -> F {
let mut ans = F::ZERO;
let mut x_pow = F::ONE;
for &c in coeffs {
ans = ans + (c * x_pow);
x_pow = x_pow * x;
}
ans
}
/// Compute `[x^0, x^1, ..., x^(n - 1)]`.
pub fn powers<F: Field>(x: F, n: usize) -> Vec<F> {
let mut powers = Vec::new();
let mut current = F::ONE;
for i in 0..n {
if i != 0 {
current = current * x;
}
powers.push(current);
}
powers
}
/// Compute `[x^0, x^1, ..., x^(n - 1)]`.
pub(crate) fn powers_recursive<C: HaloCurve>(
builder: &mut CircuitBuilder<C>,
x: Target<C::ScalarField>,
n: usize,
) -> Vec<Target<C::ScalarField>> {
let mut powers = Vec::new();
let mut current = builder.one_wire();
for i in 0..n {
if i != 0 {
current = builder.mul(current, x);
}
powers.push(current);
}
powers
}
/// Returns the evaluation of a list of polynomials at a point.
pub(crate) fn eval_polys<F: Field>(polys: &[Polynomial<F>], powers: &[F]) -> Vec<F> {
polys.iter().map(|p| p.eval_from_power(powers)).collect()
}
/// Zero-pad a list of `n` polynomial coefficients to a length of `8n`, which is the degree at
/// which we do most polynomial arithmetic.
pub(crate) fn pad_to_8n<F: Field>(coeffs: &[F]) -> Vec<F> {
let n = coeffs.len();
let mut result = coeffs.to_vec();
while result.len() < 8 * n {
result.push(F::ZERO);
}
result
}
pub(crate) fn values_to_polynomials<F: Field>(
values_vec: &[Vec<F>],
fft_precomputation: &FftPrecomputation<F>,
) -> Vec<Polynomial<F>> {
values_vec
.par_iter()
.map(|values| Polynomial::from_evaluations(values, fft_precomputation))
.collect()
}
pub(crate) fn polynomials_to_values_padded<F: Field>(
polys_vec: &[Polynomial<F>],
fft_precomputation: &FftPrecomputation<F>,
) -> Vec<Vec<F>> {
polys_vec
.par_iter()
.map(|poly| {
let padded_poly = poly.padded(poly.len() * 8);
padded_poly.eval_domain(fft_precomputation)
})
.collect()
}
/// Like `pedersen_commit`, but with no blinding factor.
pub fn pedersen_hash<C: Curve>(
xs: &[C::ScalarField],
pedersen_g_msm_precomputation: &MsmPrecomputation<C>,
) -> ProjectivePoint<C> {
msm_execute_parallel(pedersen_g_msm_precomputation, xs)
}
#[allow(dead_code)]
fn pedersen_commit<C: Curve>(
xs: &[C::ScalarField],
opening: C::ScalarField,
h: AffinePoint<C>,
pedersen_g_msm_precomputation: &MsmPrecomputation<C>,
) -> ProjectivePoint<C> {
// TODO: Couldn't get this working with *.
let h = h.to_projective();
let mul_precomputation = h.mul_precompute();
let blinding_term = h.mul_with_precomputation(opening, mul_precomputation);
msm_execute_parallel(pedersen_g_msm_precomputation, xs) + blinding_term
}
pub fn commit_polynomials<C: Curve>(
polynomials: &[Polynomial<C::ScalarField>],
msm_precomputation: &MsmPrecomputation<C>,
blinding_point: AffinePoint<C>,
blinding: bool,
) -> Vec<PolynomialCommitment<C>> {
PolynomialCommitment::coeffs_vec_to_commitments(
polynomials
.iter()
.map(|p| p.coeffs())
.collect::<Vec<_>>()
.as_slice(),
msm_precomputation,
blinding_point,
blinding,
)
}
// Generate Z, which is used in Plonk's permutation argument.
pub fn permutation_polynomial<F: Field>(
degree: usize,
subgroup: &[F],
witness: &Witness<F>,
sigma_values: &[Vec<F>],
beta: F,
gamma: F,
) -> Vec<F> {
let mut plonk_z_points = vec![F::ONE];
let k_is = (0..NUM_ROUTED_WIRES)
.map(get_subgroup_shift::<F>)
.collect::<Vec<_>>();
for i in 1..degree {
let x = subgroup[i - 1];
let mut numerator = F::ONE;
let mut denominator = F::ONE;
for j in 0..NUM_ROUTED_WIRES {
let wire_value = witness.get_indices(i - 1, j);
let k_i = k_is[j];
let s_id = k_i * x;
let s_sigma = sigma_values[j][8 * (i - 1)];
numerator = numerator * (wire_value + beta * s_id + gamma);
denominator = denominator * (wire_value + beta * s_sigma + gamma);
}
let last = *plonk_z_points.last().unwrap();
plonk_z_points.push(last * numerator / denominator);
}
plonk_z_points
}
pub fn sigma_polynomials<F: Field>(
sigma: Vec<usize>,
degree: usize,
subgroup_generator: F,
) -> Vec<Vec<F>> {
sigma
.chunks(degree)
.map(|chunk| {
chunk
.par_iter()
.map(|&x| {
get_subgroup_shift::<F>(x / degree) * subgroup_generator.exp_usize(x % degree)
})
.collect::<Vec<_>>()
})
.collect()
}
/// Given polynomials `[p_0,...,p_k]` of degree `degree` and `alpha \in F`, returns `\sum_{i=0}^k alpha^i p_i`.
pub(crate) fn scale_polynomials<F: Field>(
polynomials: Vec<Polynomial<F>>,
alpha: F,
degree: usize,
) -> Polynomial<F> {
let alpha_powers = powers(alpha, polynomials.len());
Polynomial::from(
(0..degree)
.map(|i| {
(0..polynomials.len())
.map(|j| polynomials[j][i] * alpha_powers[j])
.fold(F::ZERO, |acc, x| acc + x)
})
.collect::<Vec<_>>(),
)
}
#[allow(dead_code)]
pub(crate) fn polynomial_degree_plus_1<F: Field>(
points: &[F],
fft_precomputation: &FftPrecomputation<F>,
) -> usize {
let coeffs = ifft_with_precomputation_power_of_2(points, fft_precomputation);
coeffs.iter().rev().skip_while(|c| c.is_zero()).count()
}
// TODO: Maybe a streaming version using an `Iterator` would be faster and wouldn't require as much memory for large circuits.
// TODO: Optimize this.
pub fn halo_s<F: Field>(us: &[F]) -> Vec<F> {
let n = 1 << us.len();
let mut res = vec![F::ONE; n];
let us_inv = F::batch_multiplicative_inverse(us);
for (j, (&u, &u_inv)) in us.iter().rev().zip(us_inv.iter().rev()).enumerate() {
for (i, x) in res.iter_mut().enumerate() {
if i & (1 << j) == 0 {
*x = *x * u_inv;
} else {
*x = *x * u;
}
}
}
res
}
/// Evaluate `g(X, {u_i})` as defined in the Halo paper.
pub fn halo_g<F: Field>(x: F, us: &[F]) -> F {
let mut product = F::ONE;
let mut x_power = x;
for &u_i in us.iter().rev() {
let u_i_inv = u_i.multiplicative_inverse_assuming_nonzero();
let term = u_i * x_power + u_i_inv;
product = product * term;
x_power = x_power.square();
}
product
}
#[cfg(test)]
mod test {
use super::*;
use crate::{CircuitBuilder, Curve, Field, PartialWitness, Tweedledee};
#[test]
fn test_halo_n() {
type C = Tweedledee;
type SF = <Tweedledee as Curve>::ScalarField;
let p = C::convert(SF::rand()) * C::GENERATOR_PROJECTIVE;
let r = SF::rand();
let res = C::convert(halo_n::<C>(&r.to_canonical_bool_vec()[..128])) * p;
let p = p.to_affine();
assert_eq!(
res.to_affine(),
halo_n_mul::<C>(&r.to_canonical_bool_vec()[..128], p)
)
}
#[test]
fn test_permutation_polynomial() {
let mut builder = CircuitBuilder::<Tweedledee>::new(128);
let one = builder.one_wire();
let t = builder.add_virtual_target();
let t_sq = builder.square(t);
let quad = builder.add_many(&[one, t, t_sq]);
let seven =
builder.constant_wire(<Tweedledee as Curve>::ScalarField::from_canonical_usize(7));
let res = builder.sub(quad, seven);
builder.assert_zero(res);
let mut partial_witness = PartialWitness::new();
partial_witness.set_target(t, <Tweedledee as Curve>::ScalarField::TWO);
let circuit = builder.build();
let witness = circuit.generate_witness(partial_witness);
let beta = <Tweedledee as Curve>::ScalarField::rand();
let gamma = <Tweedledee as Curve>::ScalarField::rand();
let plonk_z_points_n = permutation_polynomial(
circuit.degree(),
&circuit.subgroup_n,
&witness,
&circuit.s_sigma_values_8n,
beta,
gamma,
);
// Verify that the permutation polynomial is well-formed.
let k_is = (0..NUM_ROUTED_WIRES)
.map(get_subgroup_shift::<<Tweedledee as Curve>::ScalarField>)
.collect::<Vec<_>>();
let wire_values = &witness.transpose();
for (i, &x) in circuit.subgroup_n.iter().enumerate() {
let (z_x, z_gz) = (
plonk_z_points_n[i],
plonk_z_points_n[(i + 1) % circuit.degree()],
);
let mut f_prime = <Tweedledee as Curve>::ScalarField::ONE;
let mut g_prime = <Tweedledee as Curve>::ScalarField::ONE;
for j in 0..NUM_ROUTED_WIRES {
let wire_value = wire_values[j][i];
let k_i = k_is[j];
let s_id = k_i * x;
let s_sigma = circuit.s_sigma_values_8n[j][8 * i];
f_prime = f_prime * (wire_value + beta * s_id + gamma);
g_prime = g_prime * (wire_value + beta * s_sigma + gamma);
}
let vanishing_v_shift_term = f_prime * z_x - g_prime * z_gz;
assert_eq!(
vanishing_v_shift_term,
<Tweedledee as Curve>::ScalarField::ZERO
);
}
}
#[test]
fn test_s_vector_g_function() {
type F = <Tweedledee as Curve>::ScalarField;
let us = (0..10).map(|_| F::rand()).collect::<Vec<_>>();
let x = F::rand();
assert_eq!(
F::inner_product(&halo_s(&us), &powers(x, 1 << 10)),
halo_g(x, &us)
);
}
}
| {
C::ScalarField::NEG_ONE
} | conditional_block |
Zhihu__Crawler_JsonVersion.py | #!/usr/bin/python
# --*-- encoding:utf-8 --*--
####################################################
# Zhihu Auto-Aogin
#
#
# Created on : 03/07/17
# Last Modified :
#
# Author : Pengcheng Zhou(Kevin)
#
####################################################
import re
from urllib import parse, request, error
from multiprocessing import Pool
import http.cookiejar as Cookie
import time
import json
from getpass import getpass
import ssl
# Cancel the certification of target site
ssl._create_default_https_context = ssl._create_unverified_context
import os
from User_Getter import User_Getter
import random
cookieFile = 'zhihu_cookie.txt'
class RedirectHandler(request.HTTPRedirectHandler):
def http_error_302(self, req, fp, code, msg, headers):
print ("Cookie过期,重新登录中....")
return
http_error_301 = http_error_302
class Zhihu(object):
def __init__(self) :
'''
Initialize
'''
self.pool = Pool(4)
self.cj = Cookie.MozillaCookieJar(cookieFile)
self.opener = request.build_opener(request.HTTPCookieProcessor(self.cj), RedirectHandler())
self.client_info = 'monsterzpc@gmail.com'
self.passwd = 'Zpc920515'
self.url = 'https://www.zhihu.com/login/email'
self.target_page = ''
self.position = 0
print('''
############################################################
# #
# Zhihu Auto_Login and Crawler by Pengcheng Zhou. #
# #
############################################################
''')
def get_xsrf(self) :
'''
Get a special dynamic string for login
'''
login_target_page = request.urlopen(self.url)
pattern = re.compile('<input type="hidden" name="_xsrf" value="(.*)"/>')
_xsrf = re.findall(pattern, login_target_page.read().decode('utf-8'))[0]
return _xsrf
def get_captcha_url(self):
url = 'https://www.zhihu.com' + '/captcha.gif?r=' + str(int(time.time())) + '&type=login'
f = request.urlopen(url)
with open('./cap.png', 'wb') as image:
image.write(f.read())
image.close()
def login(self):
'''
Execution of login
'''
if (self.client_info == '' or self.passwd == '') :
self.client_info = input('请输入账号:')
self.passwd = getpass('请输入密码:')
self.get_captcha_url()
captcha = input('请输入验证码:')
if (self.client_info.find("@") != -1) :
print('''正在使用邮箱登录...\n用户名:''' + self.client_info+ '\n' + '密码 : ' + len(self.passwd) * '*'+ '\n' )
else :
self.url = ''
print('正在使用手机登录...')
form = {'_xsrf' : self.get_xsrf(),
'password' : self.passwd,
'email' : self.client_info,
'captcha': captcha }
print(form)
try:
req = request.Request(self.url, parse.urlencode(form).encode('utf-8'))
f = self.opener.open(req)
self.cj.save()
print(json.loads(f.read().decode('utf-8'))["msg"] + "!")
print("=" * 100)
except:
print('Error!')
def get_capthca(self) :
'''
Interface for getting the captcha
'''
pass
def get_target_page(self):
'''
Get main target_page content after logged in
'''
try:
self.cj.load()
print('Cookie loaded....')
self.target_page = self.opener.open('https://www.zhihu.com/people/edit')
f = open('zhihu.html', 'wb')
f.write(target_page_content.read())
except:
self.login()
self.get_target_page()
def isLogged(self, user_client):
'''
test if Logged
'''
f = user_client.open('https://www.zhihu.com/settings/profile').geturl()
if (f != 'https://www.zhihu.com/settings/profile'):
return False
return True
def user_getter(sefl):
return User_Getter('https://www.zhihu.com/people/xiao-guai-shou-2/activ | s()
def profile_collector(self, text_path=None) :
'''
main entry for collecting user's profile including id, gender, education, career
'''
count = 0
self.cj.load()
user_list = []
# check the source of the data
if (text_path != None) :
with open(text_path, 'r') as source_list :
for line in source_list :
user_list.append(line.split('\n')[0])
source_list.close()
else :
user_list = []
initial_time = time.time()
while(len(user_list) > 0):
# this try except block is for resuming from the server' shutdown
try :
for item in user_list :
start_time = time.time()
user_id = item
print('=Writing information of [', user_id,']...')
url = 'https://www.zhihu.com/api/v4/members/' + user_id + '?include=locations%2Cemployments%2Cgender%2Ceducations%2Cbusiness%2Cvoteup_count%2Cthanked_Count%2Cfollower_count%2Cfollowing_count%2Ccover_url%2Cfollowing_topic_count%2Cfollowing_question_count%2Cfollowing_favlists_count%2Cfollowing_columns_count%2Canswer_count%2Carticles_count%2Cpins_count%2Cquestion_count%2Ccommercial_question_count%2Cfavorite_count%2Cfavorited_count%2Clogs_count%2Cmarked_answers_count%2Cmarked_answers_text%2Cmessage_thread_token%2Caccount_status%2Cis_active%2Cis_force_renamed%2Cis_bind_sina%2Csina_weibo_url%2Csina_weibo_name%2Cshow_sina_weibo%2Cis_blocking%2Cis_blocked%2Cis_following%2Cis_followed%2Cmutual_followees_count%2Cvote_to_count%2Cvote_from_count%2Cthank_to_count%2Cthank_from_count%2Cthanked_count%2Cdescription%2Chosted_live_count%2Cparticipated_live_count%2Callow_message%2Cindustry_category%2Corg_name%2Corg_homepage%2Cbadge%5B%3F(type%3Dbest_answerer)%5D.topics'
req = request.Request(url)
raw_data = self.opener.open(req).read()
json_data = json.loads(raw_data)
# get key and value
pic_url = json_data["avatar_url"].split('_')[0] + '_xll.jpg'
number_id = json_data["id"]
user_name = json_data["name"]
# education
if ("educations" in json_data) :
if (len(json_data["educations"]) != 0) :
if ("school" in json_data["educations"][0]) :
university = json_data["educations"][0]["school"]["name"]
else :
university = 'None'
if ("major" in json_data["educations"][0]) :
major = json_data["educations"][0]["major"]["name"]
else:
major = 'None'
else :
university = 'None'
major = 'None'
else :
university = 'None'
major = 'None'
# employments
if ("employments" in json_data) :
if (len(json_data["employments"]) != 0) :
if ("company" in json_data["employments"][0]) :
company = json_data["employments"][0]["company"]["name"]
else :
company = 'None'
if ("occupation" in json_data["employments"][0]) :
occupation = json_data["employments"][0]["job"]["name"]
else :
occupation = 'None'
else :
company = 'None'
occupation = 'None'
else :
company = 'None'
occupation = 'None'
# location
if ("locations" in json_data) :
if (len(json_data["locations"]) != 0) :
location = json_data["locations"][0]["name"]
else :
location = 'None'
else :
location = 'None'
# business
if ("business" in json_data ) :
industry = json_data["business"]["name"]
else :
industry = 'None'
intro = json_data["headline"]
autobiography = json_data["description"]
user_type = json_data["type"]
follower_count = json_data["follower_count"]
following_count = json_data["following_count"]
answers_count = json_data["answer_count"]
articles_count = json_data["articles_count"]
if (json_data["gender"] == 1) :
gender = 'male'
else :
gender = 'female'
data = {
'id' : number_id,
'user_id' : user_id,
'name' : user_name,
'gender' : gender,
'university' : university,
'major' : major,
'industry' : industry,
'company' : company,
'occupation' : occupation,
'location' : location,
'intro' : intro,
'autobiography' : autobiography,
'user_type' : str(user_type),
'follower_count' : str(follower_count),
'following_count' : str(following_count),
'answer-count' : str(answers_count),
'articles_count' : str(articles_count)
}
# process folder
if not (os.path.exists(os.path.join('./data/' ,user_name))): # check if the folder exists
os.makedirs(os.path.join('./data/' ,user_name))
path = os.path.join('./data/' ,user_name) + '/'
# generate store path
store_path = path + user_id
# write picture
with open(store_path + '.png', 'wb') as f:
f.write(self.bytes_getter(pic_url))
f.close()
target_page_url = 'https://www.zhihu.com/people/' + user_id + '/activities'
# write target_page
with open(store_path +'.html', 'wb') as f:
f.write(self.bytes_getter(target_page_url))
f.close()
with open(store_path + '.txt', 'w', encoding='utf-8') as f:
for item, value in data.items():
line = json.dumps(item + ":" + value, ensure_ascii=False) + "\n"
f.write(line)
#f.write(json.dumps(data, ensure_ascii=False))
f.close()
count += 1
print('Wrote Successfully! Time consumed :','%.2f'%(time.time() - start_time),"seconds. Crawled ",count, "users till now.")
print('[Total time:', '%.2f'%(time.time() - initial_time),'seconds]')
if (count % 10 == 0) :
cool_start = time.time()
cool_down_time = random.randint(0, 10)
print('#' * 20,'Cooling down for',cool_down_time,' seconds.','#' * 20)
time.sleep(cool_down_time)
time.sleep(1.5)
# record the position before a exception happens
self.position = user_id
except Exception as e:
print('Error! ', e)
# recover from exception, resume crawling from last user
finally :
index = user_list.index(self.position) + 1
user_list = user_list[index:]
time.sleep(10)
print('#'*20,'Resuming from server shutdown','#'*20)
# for retrieving document
def unicode_getter(self, target_url) :
return self.opener.open(target_url).read().decode('utf-8')
# for retrieving bytes such as pics
def bytes_getter(self, target_url) :
return self.opener.open(target_url).read()
# record ruuning time of program
start_time = time.time()
Zhihu = Zhihu()
Zhihu.login()
#Zhihu.profile_collector('./user_list.txt')
end_time = time.time()
print("[Totally elapsed: " , '%.2f'%(end_time - start_time), " seconds.]")
| ities').url | identifier_name |
Zhihu__Crawler_JsonVersion.py | #!/usr/bin/python
# --*-- encoding:utf-8 --*--
####################################################
# Zhihu Auto-Aogin
#
#
# Created on : 03/07/17
# Last Modified :
#
# Author : Pengcheng Zhou(Kevin)
#
####################################################
import re
from urllib import parse, request, error
from multiprocessing import Pool
import http.cookiejar as Cookie
import time
import json
from getpass import getpass
import ssl
# Cancel the certification of target site
ssl._create_default_https_context = ssl._create_unverified_context
import os
from User_Getter import User_Getter
import random
cookieFile = 'zhihu_cookie.txt'
class RedirectHandler(request.HTTPRedirectHandler):
def http_error_302(self, req, fp, code, msg, headers):
print ("Cookie过期,重新登录中....")
return
http_error_301 = http_error_302
class Zhihu(object):
def __init__(self) :
'''
Initialize
'''
self.pool = Pool(4)
self.cj = Cookie.MozillaCookieJar(cookieFile)
self.opener = request.build_opener(request.HTTPCookieProcessor(self.cj), RedirectHandler())
self.client_info = 'monsterzpc@gmail.com'
self.passwd = 'Zpc920515'
self.url = 'https://www.zhihu.com/login/email'
self.target_page = ''
self.position = 0
print('''
############################################################
# #
# Zhihu Auto_Login and Crawler by Pengcheng Zhou. #
# #
############################################################
''')
def get_xsrf(self) :
'''
Get a special dynamic string for login
'''
login_target_page = request.urlopen(self.url)
pattern = re.compile('<input type="hidden" name="_xsrf" value="(.*)"/>')
_xsrf = re.findall(pattern, login_target_page.read().decode('utf-8'))[0]
return _xsrf
def get_captcha_url(self):
url = 'https://www.zhihu.com' + '/captcha.gif?r=' + str(int(time.time())) + '&type=login'
f = request.urlopen(url)
with open('./cap.png', 'wb') as image:
image.write(f.read())
image.close()
def login(self):
'''
Execution of login
'''
if (self.client_info == '' or self.passwd == '') :
self.client_info = input('请输入账号:')
self.passwd = getpass('请输入密码:')
self.get_captcha_url()
captcha = input('请输入验证码:')
if (self.client_info.find("@") != -1) :
print('''正在使用邮箱登录...\n用户名:''' + self.client_info+ '\n' + '密码 : ' + len(self.passwd) * '*'+ '\n' )
else :
self.url = ''
print('正在使用手机登录...')
form = {'_xsrf' : self.get_xsrf(),
'password' : self.passwd,
'email' : self.client_info,
'captcha': captcha }
print(form)
try:
req = request.Request(self.url, parse.urlencode(form).encode('utf-8'))
f = self.opener.open(req)
self.cj.save()
print(json.loads(f.read().decode('utf-8'))["msg"] + "!")
print("=" * 100)
except:
print('Error!')
def get_capthca(self) :
'''
Interface for getting the captcha
'''
pass
def get_target_page(self):
'''
Get main target_page content after logged in
'''
try:
self.cj.load()
print('Cookie loaded....')
self.target_page = self.opener.open('https://www.zhihu.com/people/edit')
f = open('zhihu.html', 'wb')
f.write(target_page_content.read())
except:
self.login()
self.get_target_page()
def isLogged(self, user_client):
'''
test if Logged
'''
f = user_client.open('https://www.zhihu.com/settings/profile').geturl()
if (f != 'https://www.zhihu.com/settings/profile'):
return False
return True |
def user_getter(sefl):
return User_Getter('https://www.zhihu.com/people/xiao-guai-shou-2/activities').urls()
def profile_collector(self, text_path=None) :
'''
main entry for collecting user's profile including id, gender, education, career
'''
count = 0
self.cj.load()
user_list = []
# check the source of the data
if (text_path != None) :
with open(text_path, 'r') as source_list :
for line in source_list :
user_list.append(line.split('\n')[0])
source_list.close()
else :
user_list = []
initial_time = time.time()
while(len(user_list) > 0):
# this try except block is for resuming from the server' shutdown
try :
for item in user_list :
start_time = time.time()
user_id = item
print('=Writing information of [', user_id,']...')
url = 'https://www.zhihu.com/api/v4/members/' + user_id + '?include=locations%2Cemployments%2Cgender%2Ceducations%2Cbusiness%2Cvoteup_count%2Cthanked_Count%2Cfollower_count%2Cfollowing_count%2Ccover_url%2Cfollowing_topic_count%2Cfollowing_question_count%2Cfollowing_favlists_count%2Cfollowing_columns_count%2Canswer_count%2Carticles_count%2Cpins_count%2Cquestion_count%2Ccommercial_question_count%2Cfavorite_count%2Cfavorited_count%2Clogs_count%2Cmarked_answers_count%2Cmarked_answers_text%2Cmessage_thread_token%2Caccount_status%2Cis_active%2Cis_force_renamed%2Cis_bind_sina%2Csina_weibo_url%2Csina_weibo_name%2Cshow_sina_weibo%2Cis_blocking%2Cis_blocked%2Cis_following%2Cis_followed%2Cmutual_followees_count%2Cvote_to_count%2Cvote_from_count%2Cthank_to_count%2Cthank_from_count%2Cthanked_count%2Cdescription%2Chosted_live_count%2Cparticipated_live_count%2Callow_message%2Cindustry_category%2Corg_name%2Corg_homepage%2Cbadge%5B%3F(type%3Dbest_answerer)%5D.topics'
req = request.Request(url)
raw_data = self.opener.open(req).read()
json_data = json.loads(raw_data)
# get key and value
pic_url = json_data["avatar_url"].split('_')[0] + '_xll.jpg'
number_id = json_data["id"]
user_name = json_data["name"]
# education
if ("educations" in json_data) :
if (len(json_data["educations"]) != 0) :
if ("school" in json_data["educations"][0]) :
university = json_data["educations"][0]["school"]["name"]
else :
university = 'None'
if ("major" in json_data["educations"][0]) :
major = json_data["educations"][0]["major"]["name"]
else:
major = 'None'
else :
university = 'None'
major = 'None'
else :
university = 'None'
major = 'None'
# employments
if ("employments" in json_data) :
if (len(json_data["employments"]) != 0) :
if ("company" in json_data["employments"][0]) :
company = json_data["employments"][0]["company"]["name"]
else :
company = 'None'
if ("occupation" in json_data["employments"][0]) :
occupation = json_data["employments"][0]["job"]["name"]
else :
occupation = 'None'
else :
company = 'None'
occupation = 'None'
else :
company = 'None'
occupation = 'None'
# location
if ("locations" in json_data) :
if (len(json_data["locations"]) != 0) :
location = json_data["locations"][0]["name"]
else :
location = 'None'
else :
location = 'None'
# business
if ("business" in json_data ) :
industry = json_data["business"]["name"]
else :
industry = 'None'
intro = json_data["headline"]
autobiography = json_data["description"]
user_type = json_data["type"]
follower_count = json_data["follower_count"]
following_count = json_data["following_count"]
answers_count = json_data["answer_count"]
articles_count = json_data["articles_count"]
if (json_data["gender"] == 1) :
gender = 'male'
else :
gender = 'female'
data = {
'id' : number_id,
'user_id' : user_id,
'name' : user_name,
'gender' : gender,
'university' : university,
'major' : major,
'industry' : industry,
'company' : company,
'occupation' : occupation,
'location' : location,
'intro' : intro,
'autobiography' : autobiography,
'user_type' : str(user_type),
'follower_count' : str(follower_count),
'following_count' : str(following_count),
'answer-count' : str(answers_count),
'articles_count' : str(articles_count)
}
# process folder
if not (os.path.exists(os.path.join('./data/' ,user_name))): # check if the folder exists
os.makedirs(os.path.join('./data/' ,user_name))
path = os.path.join('./data/' ,user_name) + '/'
# generate store path
store_path = path + user_id
# write picture
with open(store_path + '.png', 'wb') as f:
f.write(self.bytes_getter(pic_url))
f.close()
target_page_url = 'https://www.zhihu.com/people/' + user_id + '/activities'
# write target_page
with open(store_path +'.html', 'wb') as f:
f.write(self.bytes_getter(target_page_url))
f.close()
with open(store_path + '.txt', 'w', encoding='utf-8') as f:
for item, value in data.items():
line = json.dumps(item + ":" + value, ensure_ascii=False) + "\n"
f.write(line)
#f.write(json.dumps(data, ensure_ascii=False))
f.close()
count += 1
print('Wrote Successfully! Time consumed :','%.2f'%(time.time() - start_time),"seconds. Crawled ",count, "users till now.")
print('[Total time:', '%.2f'%(time.time() - initial_time),'seconds]')
if (count % 10 == 0) :
cool_start = time.time()
cool_down_time = random.randint(0, 10)
print('#' * 20,'Cooling down for',cool_down_time,' seconds.','#' * 20)
time.sleep(cool_down_time)
time.sleep(1.5)
# record the position before a exception happens
self.position = user_id
except Exception as e:
print('Error! ', e)
# recover from exception, resume crawling from last user
finally :
index = user_list.index(self.position) + 1
user_list = user_list[index:]
time.sleep(10)
print('#'*20,'Resuming from server shutdown','#'*20)
# for retrieving document
def unicode_getter(self, target_url) :
return self.opener.open(target_url).read().decode('utf-8')
# for retrieving bytes such as pics
def bytes_getter(self, target_url) :
return self.opener.open(target_url).read()
# record ruuning time of program
start_time = time.time()
Zhihu = Zhihu()
Zhihu.login()
#Zhihu.profile_collector('./user_list.txt')
end_time = time.time()
print("[Totally elapsed: " , '%.2f'%(end_time - start_time), " seconds.]")
| conditional_block | |
Zhihu__Crawler_JsonVersion.py | #!/usr/bin/python
# --*-- encoding:utf-8 --*--
####################################################
# Zhihu Auto-Aogin
#
#
# Created on : 03/07/17
# Last Modified :
#
# Author : Pengcheng Zhou(Kevin)
#
####################################################
import re
from urllib import parse, request, error
from multiprocessing import Pool
import http.cookiejar as Cookie
import time
import json
from getpass import getpass
import ssl
# Cancel the certification of target site
ssl._create_default_https_context = ssl._create_unverified_context
import os
from User_Getter import User_Getter
import random
cookieFile = 'zhihu_cookie.txt'
class RedirectHandler(request.HTTPRedirectHandler):
def http_error_302(self, req, fp, code, msg, headers):
print ("Cookie过期,重新登录中....")
return
http_error_301 = http_error_302
class Zhihu(object):
def __init__(self) :
'''
Initialize
'''
self.pool = Pool(4)
self.cj = Cookie.MozillaCookieJar(cookieFile)
self.opener = request.build_opener(request.HTTPCookieProcessor(self.cj), RedirectHandler())
self.client_info = 'monsterzpc@gmail.com'
self.passwd = 'Zpc920515'
self.url = 'https://www.zhihu.com/login/email'
self.target_page = ''
self.position = 0
print('''
############################################################
# #
# Zhihu Auto_Login and Crawler by Pengcheng Zhou. #
# #
############################################################
''')
def get_xsrf(self) :
'''
Get a special dynamic string for login
'''
login_target_page = request.urlopen(self.url)
pattern = re.compile('<input type="hidden" name="_xsrf" value="(.*)"/>')
_xsrf = re.findall(pattern, login_target_page.read().decode('utf-8'))[0]
return _xsrf
def get_captcha_url(self):
url = 'https:// | f):
'''
Execution of login
'''
if (self.client_info == '' or self.passwd == '') :
self.client_info = input('请输入账号:')
self.passwd = getpass('请输入密码:')
self.get_captcha_url()
captcha = input('请输入验证码:')
if (self.client_info.find("@") != -1) :
print('''正在使用邮箱登录...\n用户名:''' + self.client_info+ '\n' + '密码 : ' + len(self.passwd) * '*'+ '\n' )
else :
self.url = ''
print('正在使用手机登录...')
form = {'_xsrf' : self.get_xsrf(),
'password' : self.passwd,
'email' : self.client_info,
'captcha': captcha }
print(form)
try:
req = request.Request(self.url, parse.urlencode(form).encode('utf-8'))
f = self.opener.open(req)
self.cj.save()
print(json.loads(f.read().decode('utf-8'))["msg"] + "!")
print("=" * 100)
except:
print('Error!')
def get_capthca(self) :
'''
Interface for getting the captcha
'''
pass
def get_target_page(self):
'''
Get main target_page content after logged in
'''
try:
self.cj.load()
print('Cookie loaded....')
self.target_page = self.opener.open('https://www.zhihu.com/people/edit')
f = open('zhihu.html', 'wb')
f.write(target_page_content.read())
except:
self.login()
self.get_target_page()
def isLogged(self, user_client):
'''
test if Logged
'''
f = user_client.open('https://www.zhihu.com/settings/profile').geturl()
if (f != 'https://www.zhihu.com/settings/profile'):
return False
return True
def user_getter(sefl):
return User_Getter('https://www.zhihu.com/people/xiao-guai-shou-2/activities').urls()
def profile_collector(self, text_path=None) :
'''
main entry for collecting user's profile including id, gender, education, career
'''
count = 0
self.cj.load()
user_list = []
# check the source of the data
if (text_path != None) :
with open(text_path, 'r') as source_list :
for line in source_list :
user_list.append(line.split('\n')[0])
source_list.close()
else :
user_list = []
initial_time = time.time()
while(len(user_list) > 0):
# this try except block is for resuming from the server' shutdown
try :
for item in user_list :
start_time = time.time()
user_id = item
print('=Writing information of [', user_id,']...')
url = 'https://www.zhihu.com/api/v4/members/' + user_id + '?include=locations%2Cemployments%2Cgender%2Ceducations%2Cbusiness%2Cvoteup_count%2Cthanked_Count%2Cfollower_count%2Cfollowing_count%2Ccover_url%2Cfollowing_topic_count%2Cfollowing_question_count%2Cfollowing_favlists_count%2Cfollowing_columns_count%2Canswer_count%2Carticles_count%2Cpins_count%2Cquestion_count%2Ccommercial_question_count%2Cfavorite_count%2Cfavorited_count%2Clogs_count%2Cmarked_answers_count%2Cmarked_answers_text%2Cmessage_thread_token%2Caccount_status%2Cis_active%2Cis_force_renamed%2Cis_bind_sina%2Csina_weibo_url%2Csina_weibo_name%2Cshow_sina_weibo%2Cis_blocking%2Cis_blocked%2Cis_following%2Cis_followed%2Cmutual_followees_count%2Cvote_to_count%2Cvote_from_count%2Cthank_to_count%2Cthank_from_count%2Cthanked_count%2Cdescription%2Chosted_live_count%2Cparticipated_live_count%2Callow_message%2Cindustry_category%2Corg_name%2Corg_homepage%2Cbadge%5B%3F(type%3Dbest_answerer)%5D.topics'
req = request.Request(url)
raw_data = self.opener.open(req).read()
json_data = json.loads(raw_data)
# get key and value
pic_url = json_data["avatar_url"].split('_')[0] + '_xll.jpg'
number_id = json_data["id"]
user_name = json_data["name"]
# education
if ("educations" in json_data) :
if (len(json_data["educations"]) != 0) :
if ("school" in json_data["educations"][0]) :
university = json_data["educations"][0]["school"]["name"]
else :
university = 'None'
if ("major" in json_data["educations"][0]) :
major = json_data["educations"][0]["major"]["name"]
else:
major = 'None'
else :
university = 'None'
major = 'None'
else :
university = 'None'
major = 'None'
# employments
if ("employments" in json_data) :
if (len(json_data["employments"]) != 0) :
if ("company" in json_data["employments"][0]) :
company = json_data["employments"][0]["company"]["name"]
else :
company = 'None'
if ("occupation" in json_data["employments"][0]) :
occupation = json_data["employments"][0]["job"]["name"]
else :
occupation = 'None'
else :
company = 'None'
occupation = 'None'
else :
company = 'None'
occupation = 'None'
# location
if ("locations" in json_data) :
if (len(json_data["locations"]) != 0) :
location = json_data["locations"][0]["name"]
else :
location = 'None'
else :
location = 'None'
# business
if ("business" in json_data ) :
industry = json_data["business"]["name"]
else :
industry = 'None'
intro = json_data["headline"]
autobiography = json_data["description"]
user_type = json_data["type"]
follower_count = json_data["follower_count"]
following_count = json_data["following_count"]
answers_count = json_data["answer_count"]
articles_count = json_data["articles_count"]
if (json_data["gender"] == 1) :
gender = 'male'
else :
gender = 'female'
data = {
'id' : number_id,
'user_id' : user_id,
'name' : user_name,
'gender' : gender,
'university' : university,
'major' : major,
'industry' : industry,
'company' : company,
'occupation' : occupation,
'location' : location,
'intro' : intro,
'autobiography' : autobiography,
'user_type' : str(user_type),
'follower_count' : str(follower_count),
'following_count' : str(following_count),
'answer-count' : str(answers_count),
'articles_count' : str(articles_count)
}
# process folder
if not (os.path.exists(os.path.join('./data/' ,user_name))): # check if the folder exists
os.makedirs(os.path.join('./data/' ,user_name))
path = os.path.join('./data/' ,user_name) + '/'
# generate store path
store_path = path + user_id
# write picture
with open(store_path + '.png', 'wb') as f:
f.write(self.bytes_getter(pic_url))
f.close()
target_page_url = 'https://www.zhihu.com/people/' + user_id + '/activities'
# write target_page
with open(store_path +'.html', 'wb') as f:
f.write(self.bytes_getter(target_page_url))
f.close()
with open(store_path + '.txt', 'w', encoding='utf-8') as f:
for item, value in data.items():
line = json.dumps(item + ":" + value, ensure_ascii=False) + "\n"
f.write(line)
#f.write(json.dumps(data, ensure_ascii=False))
f.close()
count += 1
print('Wrote Successfully! Time consumed :','%.2f'%(time.time() - start_time),"seconds. Crawled ",count, "users till now.")
print('[Total time:', '%.2f'%(time.time() - initial_time),'seconds]')
if (count % 10 == 0) :
cool_start = time.time()
cool_down_time = random.randint(0, 10)
print('#' * 20,'Cooling down for',cool_down_time,' seconds.','#' * 20)
time.sleep(cool_down_time)
time.sleep(1.5)
# record the position before a exception happens
self.position = user_id
except Exception as e:
print('Error! ', e)
# recover from exception, resume crawling from last user
finally :
index = user_list.index(self.position) + 1
user_list = user_list[index:]
time.sleep(10)
print('#'*20,'Resuming from server shutdown','#'*20)
# for retrieving document
def unicode_getter(self, target_url) :
return self.opener.open(target_url).read().decode('utf-8')
# for retrieving bytes such as pics
def bytes_getter(self, target_url) :
return self.opener.open(target_url).read()
# record ruuning time of program
start_time = time.time()
Zhihu = Zhihu()
Zhihu.login()
#Zhihu.profile_collector('./user_list.txt')
end_time = time.time()
print("[Totally elapsed: " , '%.2f'%(end_time - start_time), " seconds.]")
| www.zhihu.com' + '/captcha.gif?r=' + str(int(time.time())) + '&type=login'
f = request.urlopen(url)
with open('./cap.png', 'wb') as image:
image.write(f.read())
image.close()
def login(sel | identifier_body |
Zhihu__Crawler_JsonVersion.py | #
#
# Created on : 03/07/17
# Last Modified :
#
# Author : Pengcheng Zhou(Kevin)
#
####################################################
import re
from urllib import parse, request, error
from multiprocessing import Pool
import http.cookiejar as Cookie
import time
import json
from getpass import getpass
import ssl
# Cancel the certification of target site
ssl._create_default_https_context = ssl._create_unverified_context
import os
from User_Getter import User_Getter
import random
cookieFile = 'zhihu_cookie.txt'
class RedirectHandler(request.HTTPRedirectHandler):
def http_error_302(self, req, fp, code, msg, headers):
print ("Cookie过期,重新登录中....")
return
http_error_301 = http_error_302
class Zhihu(object):
def __init__(self) :
'''
Initialize
'''
self.pool = Pool(4)
self.cj = Cookie.MozillaCookieJar(cookieFile)
self.opener = request.build_opener(request.HTTPCookieProcessor(self.cj), RedirectHandler())
self.client_info = 'monsterzpc@gmail.com'
self.passwd = 'Zpc920515'
self.url = 'https://www.zhihu.com/login/email'
self.target_page = ''
self.position = 0
print('''
############################################################
# #
# Zhihu Auto_Login and Crawler by Pengcheng Zhou. #
# #
############################################################
''')
def get_xsrf(self) :
'''
Get a special dynamic string for login
'''
login_target_page = request.urlopen(self.url)
pattern = re.compile('<input type="hidden" name="_xsrf" value="(.*)"/>')
_xsrf = re.findall(pattern, login_target_page.read().decode('utf-8'))[0]
return _xsrf
def get_captcha_url(self):
url = 'https://www.zhihu.com' + '/captcha.gif?r=' + str(int(time.time())) + '&type=login'
f = request.urlopen(url)
with open('./cap.png', 'wb') as image:
image.write(f.read())
image.close()
def login(self):
'''
Execution of login
'''
if (self.client_info == '' or self.passwd == '') :
self.client_info = input('请输入账号:')
self.passwd = getpass('请输入密码:')
self.get_captcha_url()
captcha = input('请输入验证码:')
if (self.client_info.find("@") != -1) :
print('''正在使用邮箱登录...\n用户名:''' + self.client_info+ '\n' + '密码 : ' + len(self.passwd) * '*'+ '\n' )
else :
self.url = ''
print('正在使用手机登录...')
form = {'_xsrf' : self.get_xsrf(),
'password' : self.passwd,
'email' : self.client_info,
'captcha': captcha }
print(form)
try:
req = request.Request(self.url, parse.urlencode(form).encode('utf-8'))
f = self.opener.open(req)
self.cj.save()
print(json.loads(f.read().decode('utf-8'))["msg"] + "!")
print("=" * 100)
except:
print('Error!')
def get_capthca(self) :
'''
Interface for getting the captcha
'''
pass
def get_target_page(self):
'''
Get main target_page content after logged in
'''
try:
self.cj.load()
print('Cookie loaded....')
self.target_page = self.opener.open('https://www.zhihu.com/people/edit')
f = open('zhihu.html', 'wb')
f.write(target_page_content.read())
except:
self.login()
self.get_target_page()
def isLogged(self, user_client):
'''
test if Logged
'''
f = user_client.open('https://www.zhihu.com/settings/profile').geturl()
if (f != 'https://www.zhihu.com/settings/profile'):
return False
return True
def user_getter(sefl):
return User_Getter('https://www.zhihu.com/people/xiao-guai-shou-2/activities').urls()
def profile_collector(self, text_path=None) :
'''
main entry for collecting user's profile including id, gender, education, career
'''
count = 0
self.cj.load()
user_list = []
# check the source of the data
if (text_path != None) :
with open(text_path, 'r') as source_list :
for line in source_list :
user_list.append(line.split('\n')[0])
source_list.close()
else :
user_list = []
initial_time = time.time()
while(len(user_list) > 0):
# this try except block is for resuming from the server' shutdown
try :
for item in user_list :
start_time = time.time()
user_id = item
print('=Writing information of [', user_id,']...')
url = 'https://www.zhihu.com/api/v4/members/' + user_id + '?include=locations%2Cemployments%2Cgender%2Ceducations%2Cbusiness%2Cvoteup_count%2Cthanked_Count%2Cfollower_count%2Cfollowing_count%2Ccover_url%2Cfollowing_topic_count%2Cfollowing_question_count%2Cfollowing_favlists_count%2Cfollowing_columns_count%2Canswer_count%2Carticles_count%2Cpins_count%2Cquestion_count%2Ccommercial_question_count%2Cfavorite_count%2Cfavorited_count%2Clogs_count%2Cmarked_answers_count%2Cmarked_answers_text%2Cmessage_thread_token%2Caccount_status%2Cis_active%2Cis_force_renamed%2Cis_bind_sina%2Csina_weibo_url%2Csina_weibo_name%2Cshow_sina_weibo%2Cis_blocking%2Cis_blocked%2Cis_following%2Cis_followed%2Cmutual_followees_count%2Cvote_to_count%2Cvote_from_count%2Cthank_to_count%2Cthank_from_count%2Cthanked_count%2Cdescription%2Chosted_live_count%2Cparticipated_live_count%2Callow_message%2Cindustry_category%2Corg_name%2Corg_homepage%2Cbadge%5B%3F(type%3Dbest_answerer)%5D.topics'
req = request.Request(url)
raw_data = self.opener.open(req).read()
json_data = json.loads(raw_data)
# get key and value
pic_url = json_data["avatar_url"].split('_')[0] + '_xll.jpg'
number_id = json_data["id"]
user_name = json_data["name"]
# education
if ("educations" in json_data) :
if (len(json_data["educations"]) != 0) :
if ("school" in json_data["educations"][0]) :
university = json_data["educations"][0]["school"]["name"]
else :
university = 'None'
if ("major" in json_data["educations"][0]) :
major = json_data["educations"][0]["major"]["name"]
else:
major = 'None'
else :
university = 'None'
major = 'None'
else :
university = 'None'
major = 'None'
# employments
if ("employments" in json_data) :
if (len(json_data["employments"]) != 0) :
if ("company" in json_data["employments"][0]) :
company = json_data["employments"][0]["company"]["name"]
else :
company = 'None'
if ("occupation" in json_data["employments"][0]) :
occupation = json_data["employments"][0]["job"]["name"]
else :
occupation = 'None'
else :
company = 'None'
occupation = 'None'
else :
company = 'None'
occupation = 'None'
# location
if ("locations" in json_data) :
if (len(json_data["locations"]) != 0) :
location = json_data["locations"][0]["name"]
else :
location = 'None'
else :
location = 'None'
# business
if ("business" in json_data ) :
industry = json_data["business"]["name"]
else :
industry = 'None'
intro = json_data["headline"]
autobiography = json_data["description"]
user_type = json_data["type"]
follower_count = json_data["follower_count"]
following_count = json_data["following_count"]
answers_count = json_data["answer_count"]
articles_count = json_data["articles_count"]
if (json_data["gender"] == 1) :
gender = 'male'
else :
gender = 'female'
data = {
'id' : number_id,
'user_id' : user_id,
'name' : user_name,
'gender' : gender,
'university' : university,
'major' : major,
'industry' : industry,
'company' : company,
'occupation' : occupation,
'location' : location,
'intro' : intro,
'autobiography' : autobiography,
'user_type' : str(user_type),
'follower_count' : str(follower_count),
'following_count' : str(following_count),
'answer-count' : str(answers_count),
'articles_count' : str(articles_count)
}
# process folder
if not (os.path.exists(os.path.join('./data/' ,user_name))): # check if the folder exists
os.makedirs(os.path.join('./data/' ,user_name))
path = os.path.join('./data/' ,user_name) + '/'
# generate store path
store_path = path + user_id
# write picture
with open(store_path + '.png', 'wb') as f:
f.write(self.bytes_getter(pic_url))
f.close()
target_page_url = 'https://www.zhihu.com/people/' + user_id + '/activities'
# write target_page
with open(store_path +'.html', 'wb') as f:
f.write(self.bytes_getter(target_page_url))
f.close()
with open(store_path + '.txt', 'w', encoding='utf-8') as f:
for item, value in data.items():
line = json.dumps(item + ":" + value, ensure_ascii=False) + "\n"
f.write(line)
#f.write(json.dumps(data, ensure_ascii=False))
f.close()
count += 1
print('Wrote Successfully! Time consumed :','%.2f'%(time.time() - start_time),"seconds. Crawled ",count, "users till now.")
print('[Total time:', '%.2f'%(time.time() - initial_time),'seconds]')
if (count % 10 == 0) :
cool_start = time.time()
cool_down_time = random.randint(0, 10)
print('#' * 20,'Cooling down for',cool_down_time,' seconds.','#' * 20)
time.sleep(cool_down_time)
time.sleep(1.5)
# record the position before a exception happens
self.position = user_id
except Exception as e:
print('Error! ', e)
# recover from exception, resume crawling from last user
finally :
index = user_list.index(self.position) + 1
user_list = user_list[index:]
time.sleep(10)
print('#'*20,'Resuming from server shutdown','#'*20)
# for retrieving document
def unicode_getter(self, target_url) :
return self.opener.open(target_url).read().decode('utf-8')
# for retrieving bytes such as pics
def bytes_getter(self, target_url) :
return self.opener.open(target_url).read()
# record ruuning time of program
start_time = time.time()
Zhihu = Zhihu()
Zhihu.login()
#Zhihu.profile_collector('./user_list.txt')
end_time = time.time()
print("[Totally elapsed: " , '%.2f'%(end_time - start_time), " seconds.]") | #!/usr/bin/python
# --*-- encoding:utf-8 --*--
####################################################
# Zhihu Auto-Aogin | random_line_split | |
main.rs | #![no_std]
#![no_main]
use core::cell::RefCell;
use cortex_m::{asm::wfi, interrupt::Mutex};
use cortex_m_rt::entry;
use embedded_hal::spi::MODE_1;
use stm32f4xx_hal as hal;
use hal::{
adc::config::{Align, Clock, Continuous, Resolution, Scan},
gpio::{gpioa, Output, PushPull, gpioc::{PC10, PC11, PC12}, Alternate},
pac,
pac::{ADC1, ADC_COMMON, interrupt, Interrupt, TIM1, TIM2},
prelude::*,
pwm,
signature::VDDA_CALIB,
time::KiloHertz,
timer::{Event, Timer},
};
use odrive_rs::spi::Spi;
extern crate drv8301;
use drv8301::drv8301::Drv8301;
use odrive_rs::as5048a::AS5048A;
use odrive_rs::motor::Motor;
use odrive_rs::rcc::{Enable, Reset};
use cortex_m_semihosting::{hprint, hprintln};
use panic_halt as _;
//type TypeLed = gpioa::PA2<Output<PushPull>>;
//static G_LED: Mutex<RefCell<Option<TypeLed>>> = Mutex::new(RefCell::new(None));
type TypeSpi3 = Spi<pac::SPI3, (PC10<Alternate<stm32f4xx_hal::gpio::AF6>>, PC11<Alternate<stm32f4xx_hal::gpio::AF6>>, PC12<Alternate<stm32f4xx_hal::gpio::AF6>>)>;
static G_SPI3: Mutex<RefCell<Option<TypeSpi3>>> = Mutex::new(RefCell::new(None));
type TypeEncoder<'a> = AS5048A<'a, TypeSpi3, gpioa::PA3<Output<PushPull>>>;
//static G_AS5048A: Mutex<RefCell<Option<TypeEncoder>>> = Mutex::new(RefCell::new(None));
type TypeMotor = Motor;
static G_MOTOR: Mutex<RefCell<Option<TypeMotor>>> = Mutex::new(RefCell::new(None));
static G_TIM: Mutex<RefCell<Option<Timer<TIM2>>>> = Mutex::new(RefCell::new(None));
static mut CURRENT_A: f32 = 0.0;
static mut CURRENT_B: f32 = 0.0;
static mut CURRENT_C: f32 = 0.0;
static mut MOT_ANGLE: u16 = 0;
static mut MOT_ANGLE_OLD: u16 = 0;
static mut MOT_VELOCITY: f32 = 0.0;
static mut MOT_VELOCITY_OLD: f32 = 0.0;
static mut ERR_VELOCITY: f32 = 0.0;
static mut ERR_VELOCITY_INT: f32 = 0.0;
static mut REF_CURR_D: f32 = 0.0;
static mut REF_CURR_Q: f32 = 0.0;
// System
const TIM2_FREQ_KHZ: u32 = 10;
// Motor
const MOT_POLE_PAIRS: u16 = 12;
// Encoder
const ENC_RESOLUTION: u16 = 16384;
#[interrupt]
fn ADC() {
// current sensing
unsafe {
let max_sample:u32 = (1 << 12) - 1;
let device = pac::Peripherals::steal();
device.ADC1.sr.modify(|_, w| w.jeoc().clear_bit());
let jdr1_data = device.ADC1.jdr1.read().jdata().bits();
let jdr1_offset = 48u32;
let so1 = ( ( (u32::from(jdr1_data) + jdr1_offset) * VDDA_CALIB ) / max_sample) as u16;
let jdr2_data = device.ADC1.jdr2.read().jdata().bits();
let jdr2_offset = 118u32;
let so2 = ( ( (u32::from(jdr2_data) + jdr2_offset) * VDDA_CALIB ) / max_sample) as u16;
CURRENT_B = (so1 as f32 - 1650.0) / 200.0;
CURRENT_C = (so2 as f32 - 1650.0) / 200.0;
CURRENT_A = - CURRENT_B - CURRENT_C;
}
/*
// LED Debug
cortex_m::interrupt::free(|cs| {
if let Some(ref mut led) = G_LED.borrow(cs).borrow_mut().as_mut() {
led.toggle().unwrap();
}
});
*/
}
#[interrupt]
fn TIM2() {
cortex_m::interrupt::free(|cs| {
if let Some(ref mut tim) = G_TIM.borrow(cs).borrow_mut().as_mut() |
});
static mut SPI3: Option<TypeSpi3> = None;
static mut MOTOR: Option<TypeMotor> = None;
unsafe{
let mut spi3 = SPI3.get_or_insert_with(|| {
cortex_m::interrupt::free(|cs| {
G_SPI3.borrow(cs).replace(None).unwrap()
})
});
let device = pac::Peripherals::steal();
let gpioa = device.GPIOA.split();
let ncs = gpioa.pa3.into_push_pull_output();
let mut as5048: TypeEncoder = AS5048A::new(&mut spi3, ncs);
// AS5048A
let measured_angle = as5048.angle().unwrap();
let angle_offset = 650u16;
MOT_ANGLE = (measured_angle - angle_offset) % ENC_RESOLUTION;
let electric_angle = MOT_ANGLE % (ENC_RESOLUTION/MOT_POLE_PAIRS);
let motor = MOTOR.get_or_insert_with(|| {
cortex_m::interrupt::free(|cs| {
G_MOTOR.borrow(cs).replace(None).unwrap()
})
});
// Velocity control
const REF_VELOCITY: f32 = - 100.0;
const VELLOCITY_PGAIN: f32 = 0.1;
const VELOCITY_IGAIN: f32 = 0.00001;
let res_velocity =
if (ENC_RESOLUTION-1000) < MOT_ANGLE_OLD && MOT_ANGLE < 1000 {
( MOT_ANGLE as f32 - MOT_ANGLE_OLD as f32 + ENC_RESOLUTION as f32 ) * TIM2_FREQ_KHZ as f32
} else if MOT_ANGLE_OLD < 1000 && MOT_ANGLE > (ENC_RESOLUTION-1000) {
( MOT_ANGLE as f32 - MOT_ANGLE_OLD as f32 - ENC_RESOLUTION as f32 ) * TIM2_FREQ_KHZ as f32
} else {
( MOT_ANGLE as f32 - MOT_ANGLE_OLD as f32 ) * TIM2_FREQ_KHZ as f32
};
let alpha = 0.1;
MOT_VELOCITY = alpha * res_velocity + (1.0 - alpha) * MOT_VELOCITY_OLD;
ERR_VELOCITY = MOT_VELOCITY - REF_VELOCITY;
ERR_VELOCITY_INT += ERR_VELOCITY;
REF_CURR_D = 0.0;
REF_CURR_Q = VELLOCITY_PGAIN * ERR_VELOCITY + VELOCITY_IGAIN * ERR_VELOCITY_INT;
REF_CURR_Q = -1.0 * REF_CURR_Q;
MOT_ANGLE_OLD = MOT_ANGLE;
MOT_VELOCITY_OLD = MOT_VELOCITY;
// select control mode
//motor.drive_profile().unwrap();
//motor.drive_sixstep().unwrap();
//motor.drive_anglebased_sixstep(electric_angle).unwrap();
motor.drive_foc(electric_angle, CURRENT_A, CURRENT_B, CURRENT_C, REF_CURR_D, REF_CURR_Q).unwrap();
}
/*
// LED Debug
cortex_m::interrupt::free(|cs| {
if let Some(ref mut led) = G_LED.borrow(cs).borrow_mut().as_mut() {
led.toggle().unwrap();
}
});
*/
}
#[entry]
fn main() -> ! {
let dp = pac::Peripherals::take().unwrap();
let cp = cortex_m::peripheral::Peripherals::take().unwrap();
let rcc = dp.RCC.constrain();
let clocks = rcc
.cfgr
.use_hse(8.mhz())
.sysclk(168.mhz())
.hclk(168.mhz())
.pclk1(42.mhz())
.pclk2(84.mhz())
.require_pll48clk()
.freeze();
let mut delay = hal::delay::Delay::new(cp.SYST, clocks);
let gpioa = dp.GPIOA.split();
let gpiob = dp.GPIOB.split();
let gpioc = dp.GPIOC.split();
// SPI3
let sck = gpioc.pc10.into_alternate_af6();
let miso = gpioc.pc11.into_alternate_af6();
let mosi = gpioc.pc12.into_alternate_af6();
let mut spi = Spi::spi3(
dp.SPI3,
(sck, miso, mosi),
MODE_1,
KiloHertz(2000).into(),
clocks,
);
// DRV8301
let ncs = gpioc.pc13.into_push_pull_output();
let en_gate = gpiob.pb12.into_push_pull_output();
let mut drv8301 = Drv8301::new(&mut spi, ncs, en_gate);
drv8301.init().unwrap();
// Move the pin into our global storage
cortex_m::interrupt::free(|cs| *G_SPI3.borrow(cs).borrow_mut() = Some(spi));
// PWM
let channels = (gpioa.pa8.into_alternate_af1(), gpioa.pa9.into_alternate_af1(), gpioa.pa10.into_alternate_af1(), gpioa.pa11.into_alternate_af1());
let pwm = pwm::tim1(dp.TIM1, channels, clocks, 16u32.khz());
let (ch1, ch2, ch3, ch4) = pwm;
let mut ch4 = ch4;
{
// Set complementary oututs mode as AF1
gpiob.pb13.into_alternate_af1();
gpiob.pb14.into_alternate_af1();
gpiob.pb15.into_alternate_af1();
unsafe {
let tim1_regb = &(*(TIM1::ptr()));
// Enable complementary outputs
tim1_regb.ccer.modify(|_, w| w.cc1ne().set_bit());
tim1_regb.ccer.modify(|_, w| w.cc2ne().set_bit());
tim1_regb.ccer.modify(|_, w| w.cc3ne().set_bit());
// Set dead time
tim1_regb.bdtr.modify(|_, w| w.dtg().bits(10));
// Center aligned
tim1_regb.cr1.modify(|_, w| w.cms().center_aligned1());
// OC4REF signal is used as trigger output
tim1_regb.cr2.modify(|_, w| w.mms().compare_oc4());
}
ch4.enable();
ch4.set_duty( (ch4.get_max_duty() as f32 * 0.99)as u16 );
}
delay.delay_ms(1u32);
// Motor
let mut motor = Motor::new(ch1, ch2, ch3, MOT_POLE_PAIRS, ENC_RESOLUTION);
motor.set_duty(0,0,0).unwrap();
motor.enable().unwrap();
delay.delay_ms(1u32);
/*
// for current sensing test
unsafe{
motor.set_hiz_c();
motor.set_duty((motor.max_duty as f32 * 0.6) as u16, (motor.max_duty as f32 * 0.4) as u16, 0u16).unwrap();
}
*/
cortex_m::interrupt::free(|cs| *G_MOTOR.borrow(cs).borrow_mut() = Some(motor));
// ADC1
gpioc.pc0.into_analog();
gpioc.pc1.into_analog();
unsafe {
// All ADCs share the same reset interface.
// NOTE(unsafe) this reference will only be used for atomic writes with no side effects.
let rcc = &(*pac::RCC::ptr());
// Enable the clock
pac::ADC1::enable(rcc);
pac::ADC1::reset(rcc);
let adcc_regb = &(*(ADC_COMMON::ptr()));
let adc1_regb = &(*(ADC1::ptr()));
// Probably unnecessary to disable the ADC in most cases but it shouldn't do any harm either
adc1_regb.cr2.modify(|_, w| w.adon().clear_bit());
// Config common
adcc_regb.ccr.modify(|_, w| w.adcpre().bits(Clock::Pclk2_div_2.into()));
// Config regular conversion
adc1_regb.cr1.modify(|_, w| w.res().bits(Resolution::Twelve.into()));
adc1_regb.cr1.modify(|_, w| w.scan().bit(Scan::Enabled.into()));
adc1_regb.cr2.modify(|_, w| w.align().bit(Align::Right.into()));
adc1_regb.cr2.modify(|_, w| w.cont().bit(Continuous::Single.into()));
// config injected conversion
adc1_regb.cr1.modify(|_, w | w.jeocie().enabled());
adc1_regb.cr2.modify(|_, w| w.jexten().rising_edge());
adc1_regb.cr2.modify(|_, w| w.jextsel().tim1cc4());
adc1_regb.jsqr.modify(|_, w| w.jl().bits(0b01));
adc1_regb.jsqr.modify(|_, w| w.jsq3().bits(10u8));
adc1_regb.jsqr.modify(|_, w| w.jsq4().bits(11u8));
adc1_regb.smpr1.modify(|_, w| w.smp10().cycles3());
adc1_regb.smpr1.modify(|_, w| w.smp11().cycles3());
// enable ADC
adc1_regb.cr2.modify(|_, w| w.adon().set_bit());
delay.delay_ms(1u32);
// enable interrupt
cortex_m::peripheral::NVIC::unmask(Interrupt::ADC);
}
/*
// Debug LED
let mut led = gpioa.pa2.into_push_pull_output();
let _ = led.set_high();
cortex_m::interrupt::free(|cs| *G_LED.borrow(cs).borrow_mut() = Some(led));
*/
// TIM2 Interrupt
let mut timer = Timer::tim2(dp.TIM2, TIM2_FREQ_KHZ.khz(), clocks);
timer.listen(Event::TimeOut);
cortex_m::interrupt::free(|cs| *G_TIM.borrow(cs).borrow_mut() = Some(timer));
//enable TIM2 interrupt
unsafe {
cortex_m::peripheral::NVIC::unmask(Interrupt::TIM2);
}
loop {
wfi();
/*
unsafe{
//hprintln!("CURRENT_A: {}A, CURRENT_B: {}A, CURRENT_C: {}A", CURRENT_A, CURRENT_B, CURRENT_C);
}
delay.delay_ms(1u32);
*/
}
}
| {
let _ = tim.wait();
} | conditional_block |
main.rs | #![no_std]
#![no_main]
use core::cell::RefCell;
use cortex_m::{asm::wfi, interrupt::Mutex};
use cortex_m_rt::entry;
use embedded_hal::spi::MODE_1;
use stm32f4xx_hal as hal;
use hal::{
adc::config::{Align, Clock, Continuous, Resolution, Scan},
gpio::{gpioa, Output, PushPull, gpioc::{PC10, PC11, PC12}, Alternate},
pac,
pac::{ADC1, ADC_COMMON, interrupt, Interrupt, TIM1, TIM2},
prelude::*,
pwm,
signature::VDDA_CALIB,
time::KiloHertz,
timer::{Event, Timer},
};
use odrive_rs::spi::Spi;
extern crate drv8301;
use drv8301::drv8301::Drv8301;
use odrive_rs::as5048a::AS5048A;
use odrive_rs::motor::Motor;
use odrive_rs::rcc::{Enable, Reset};
use cortex_m_semihosting::{hprint, hprintln};
use panic_halt as _;
//type TypeLed = gpioa::PA2<Output<PushPull>>;
//static G_LED: Mutex<RefCell<Option<TypeLed>>> = Mutex::new(RefCell::new(None));
type TypeSpi3 = Spi<pac::SPI3, (PC10<Alternate<stm32f4xx_hal::gpio::AF6>>, PC11<Alternate<stm32f4xx_hal::gpio::AF6>>, PC12<Alternate<stm32f4xx_hal::gpio::AF6>>)>;
static G_SPI3: Mutex<RefCell<Option<TypeSpi3>>> = Mutex::new(RefCell::new(None));
type TypeEncoder<'a> = AS5048A<'a, TypeSpi3, gpioa::PA3<Output<PushPull>>>;
//static G_AS5048A: Mutex<RefCell<Option<TypeEncoder>>> = Mutex::new(RefCell::new(None));
type TypeMotor = Motor;
static G_MOTOR: Mutex<RefCell<Option<TypeMotor>>> = Mutex::new(RefCell::new(None));
static G_TIM: Mutex<RefCell<Option<Timer<TIM2>>>> = Mutex::new(RefCell::new(None));
static mut CURRENT_A: f32 = 0.0;
static mut CURRENT_B: f32 = 0.0;
static mut CURRENT_C: f32 = 0.0;
static mut MOT_ANGLE: u16 = 0;
static mut MOT_ANGLE_OLD: u16 = 0;
static mut MOT_VELOCITY: f32 = 0.0;
static mut MOT_VELOCITY_OLD: f32 = 0.0;
static mut ERR_VELOCITY: f32 = 0.0;
static mut ERR_VELOCITY_INT: f32 = 0.0;
static mut REF_CURR_D: f32 = 0.0;
static mut REF_CURR_Q: f32 = 0.0;
// System
const TIM2_FREQ_KHZ: u32 = 10;
// Motor
const MOT_POLE_PAIRS: u16 = 12;
// Encoder
const ENC_RESOLUTION: u16 = 16384;
#[interrupt]
fn ADC() |
#[interrupt]
fn TIM2() {
cortex_m::interrupt::free(|cs| {
if let Some(ref mut tim) = G_TIM.borrow(cs).borrow_mut().as_mut() {
let _ = tim.wait();
}
});
static mut SPI3: Option<TypeSpi3> = None;
static mut MOTOR: Option<TypeMotor> = None;
unsafe{
let mut spi3 = SPI3.get_or_insert_with(|| {
cortex_m::interrupt::free(|cs| {
G_SPI3.borrow(cs).replace(None).unwrap()
})
});
let device = pac::Peripherals::steal();
let gpioa = device.GPIOA.split();
let ncs = gpioa.pa3.into_push_pull_output();
let mut as5048: TypeEncoder = AS5048A::new(&mut spi3, ncs);
// AS5048A
let measured_angle = as5048.angle().unwrap();
let angle_offset = 650u16;
MOT_ANGLE = (measured_angle - angle_offset) % ENC_RESOLUTION;
let electric_angle = MOT_ANGLE % (ENC_RESOLUTION/MOT_POLE_PAIRS);
let motor = MOTOR.get_or_insert_with(|| {
cortex_m::interrupt::free(|cs| {
G_MOTOR.borrow(cs).replace(None).unwrap()
})
});
// Velocity control
const REF_VELOCITY: f32 = - 100.0;
const VELLOCITY_PGAIN: f32 = 0.1;
const VELOCITY_IGAIN: f32 = 0.00001;
let res_velocity =
if (ENC_RESOLUTION-1000) < MOT_ANGLE_OLD && MOT_ANGLE < 1000 {
( MOT_ANGLE as f32 - MOT_ANGLE_OLD as f32 + ENC_RESOLUTION as f32 ) * TIM2_FREQ_KHZ as f32
} else if MOT_ANGLE_OLD < 1000 && MOT_ANGLE > (ENC_RESOLUTION-1000) {
( MOT_ANGLE as f32 - MOT_ANGLE_OLD as f32 - ENC_RESOLUTION as f32 ) * TIM2_FREQ_KHZ as f32
} else {
( MOT_ANGLE as f32 - MOT_ANGLE_OLD as f32 ) * TIM2_FREQ_KHZ as f32
};
let alpha = 0.1;
MOT_VELOCITY = alpha * res_velocity + (1.0 - alpha) * MOT_VELOCITY_OLD;
ERR_VELOCITY = MOT_VELOCITY - REF_VELOCITY;
ERR_VELOCITY_INT += ERR_VELOCITY;
REF_CURR_D = 0.0;
REF_CURR_Q = VELLOCITY_PGAIN * ERR_VELOCITY + VELOCITY_IGAIN * ERR_VELOCITY_INT;
REF_CURR_Q = -1.0 * REF_CURR_Q;
MOT_ANGLE_OLD = MOT_ANGLE;
MOT_VELOCITY_OLD = MOT_VELOCITY;
// select control mode
//motor.drive_profile().unwrap();
//motor.drive_sixstep().unwrap();
//motor.drive_anglebased_sixstep(electric_angle).unwrap();
motor.drive_foc(electric_angle, CURRENT_A, CURRENT_B, CURRENT_C, REF_CURR_D, REF_CURR_Q).unwrap();
}
/*
// LED Debug
cortex_m::interrupt::free(|cs| {
if let Some(ref mut led) = G_LED.borrow(cs).borrow_mut().as_mut() {
led.toggle().unwrap();
}
});
*/
}
#[entry]
fn main() -> ! {
let dp = pac::Peripherals::take().unwrap();
let cp = cortex_m::peripheral::Peripherals::take().unwrap();
let rcc = dp.RCC.constrain();
let clocks = rcc
.cfgr
.use_hse(8.mhz())
.sysclk(168.mhz())
.hclk(168.mhz())
.pclk1(42.mhz())
.pclk2(84.mhz())
.require_pll48clk()
.freeze();
let mut delay = hal::delay::Delay::new(cp.SYST, clocks);
let gpioa = dp.GPIOA.split();
let gpiob = dp.GPIOB.split();
let gpioc = dp.GPIOC.split();
// SPI3
let sck = gpioc.pc10.into_alternate_af6();
let miso = gpioc.pc11.into_alternate_af6();
let mosi = gpioc.pc12.into_alternate_af6();
let mut spi = Spi::spi3(
dp.SPI3,
(sck, miso, mosi),
MODE_1,
KiloHertz(2000).into(),
clocks,
);
// DRV8301
let ncs = gpioc.pc13.into_push_pull_output();
let en_gate = gpiob.pb12.into_push_pull_output();
let mut drv8301 = Drv8301::new(&mut spi, ncs, en_gate);
drv8301.init().unwrap();
// Move the pin into our global storage
cortex_m::interrupt::free(|cs| *G_SPI3.borrow(cs).borrow_mut() = Some(spi));
// PWM
let channels = (gpioa.pa8.into_alternate_af1(), gpioa.pa9.into_alternate_af1(), gpioa.pa10.into_alternate_af1(), gpioa.pa11.into_alternate_af1());
let pwm = pwm::tim1(dp.TIM1, channels, clocks, 16u32.khz());
let (ch1, ch2, ch3, ch4) = pwm;
let mut ch4 = ch4;
{
// Set complementary oututs mode as AF1
gpiob.pb13.into_alternate_af1();
gpiob.pb14.into_alternate_af1();
gpiob.pb15.into_alternate_af1();
unsafe {
let tim1_regb = &(*(TIM1::ptr()));
// Enable complementary outputs
tim1_regb.ccer.modify(|_, w| w.cc1ne().set_bit());
tim1_regb.ccer.modify(|_, w| w.cc2ne().set_bit());
tim1_regb.ccer.modify(|_, w| w.cc3ne().set_bit());
// Set dead time
tim1_regb.bdtr.modify(|_, w| w.dtg().bits(10));
// Center aligned
tim1_regb.cr1.modify(|_, w| w.cms().center_aligned1());
// OC4REF signal is used as trigger output
tim1_regb.cr2.modify(|_, w| w.mms().compare_oc4());
}
ch4.enable();
ch4.set_duty( (ch4.get_max_duty() as f32 * 0.99)as u16 );
}
delay.delay_ms(1u32);
// Motor
let mut motor = Motor::new(ch1, ch2, ch3, MOT_POLE_PAIRS, ENC_RESOLUTION);
motor.set_duty(0,0,0).unwrap();
motor.enable().unwrap();
delay.delay_ms(1u32);
/*
// for current sensing test
unsafe{
motor.set_hiz_c();
motor.set_duty((motor.max_duty as f32 * 0.6) as u16, (motor.max_duty as f32 * 0.4) as u16, 0u16).unwrap();
}
*/
cortex_m::interrupt::free(|cs| *G_MOTOR.borrow(cs).borrow_mut() = Some(motor));
// ADC1
gpioc.pc0.into_analog();
gpioc.pc1.into_analog();
unsafe {
// All ADCs share the same reset interface.
// NOTE(unsafe) this reference will only be used for atomic writes with no side effects.
let rcc = &(*pac::RCC::ptr());
// Enable the clock
pac::ADC1::enable(rcc);
pac::ADC1::reset(rcc);
let adcc_regb = &(*(ADC_COMMON::ptr()));
let adc1_regb = &(*(ADC1::ptr()));
// Probably unnecessary to disable the ADC in most cases but it shouldn't do any harm either
adc1_regb.cr2.modify(|_, w| w.adon().clear_bit());
// Config common
adcc_regb.ccr.modify(|_, w| w.adcpre().bits(Clock::Pclk2_div_2.into()));
// Config regular conversion
adc1_regb.cr1.modify(|_, w| w.res().bits(Resolution::Twelve.into()));
adc1_regb.cr1.modify(|_, w| w.scan().bit(Scan::Enabled.into()));
adc1_regb.cr2.modify(|_, w| w.align().bit(Align::Right.into()));
adc1_regb.cr2.modify(|_, w| w.cont().bit(Continuous::Single.into()));
// config injected conversion
adc1_regb.cr1.modify(|_, w | w.jeocie().enabled());
adc1_regb.cr2.modify(|_, w| w.jexten().rising_edge());
adc1_regb.cr2.modify(|_, w| w.jextsel().tim1cc4());
adc1_regb.jsqr.modify(|_, w| w.jl().bits(0b01));
adc1_regb.jsqr.modify(|_, w| w.jsq3().bits(10u8));
adc1_regb.jsqr.modify(|_, w| w.jsq4().bits(11u8));
adc1_regb.smpr1.modify(|_, w| w.smp10().cycles3());
adc1_regb.smpr1.modify(|_, w| w.smp11().cycles3());
// enable ADC
adc1_regb.cr2.modify(|_, w| w.adon().set_bit());
delay.delay_ms(1u32);
// enable interrupt
cortex_m::peripheral::NVIC::unmask(Interrupt::ADC);
}
/*
// Debug LED
let mut led = gpioa.pa2.into_push_pull_output();
let _ = led.set_high();
cortex_m::interrupt::free(|cs| *G_LED.borrow(cs).borrow_mut() = Some(led));
*/
// TIM2 Interrupt
let mut timer = Timer::tim2(dp.TIM2, TIM2_FREQ_KHZ.khz(), clocks);
timer.listen(Event::TimeOut);
cortex_m::interrupt::free(|cs| *G_TIM.borrow(cs).borrow_mut() = Some(timer));
//enable TIM2 interrupt
unsafe {
cortex_m::peripheral::NVIC::unmask(Interrupt::TIM2);
}
loop {
wfi();
/*
unsafe{
//hprintln!("CURRENT_A: {}A, CURRENT_B: {}A, CURRENT_C: {}A", CURRENT_A, CURRENT_B, CURRENT_C);
}
delay.delay_ms(1u32);
*/
}
}
| {
// current sensing
unsafe {
let max_sample:u32 = (1 << 12) - 1;
let device = pac::Peripherals::steal();
device.ADC1.sr.modify(|_, w| w.jeoc().clear_bit());
let jdr1_data = device.ADC1.jdr1.read().jdata().bits();
let jdr1_offset = 48u32;
let so1 = ( ( (u32::from(jdr1_data) + jdr1_offset) * VDDA_CALIB ) / max_sample) as u16;
let jdr2_data = device.ADC1.jdr2.read().jdata().bits();
let jdr2_offset = 118u32;
let so2 = ( ( (u32::from(jdr2_data) + jdr2_offset) * VDDA_CALIB ) / max_sample) as u16;
CURRENT_B = (so1 as f32 - 1650.0) / 200.0;
CURRENT_C = (so2 as f32 - 1650.0) / 200.0;
CURRENT_A = - CURRENT_B - CURRENT_C;
}
/*
// LED Debug
cortex_m::interrupt::free(|cs| {
if let Some(ref mut led) = G_LED.borrow(cs).borrow_mut().as_mut() {
led.toggle().unwrap();
}
});
*/
} | identifier_body |
main.rs | #![no_std]
#![no_main]
use core::cell::RefCell;
use cortex_m::{asm::wfi, interrupt::Mutex};
use cortex_m_rt::entry;
use embedded_hal::spi::MODE_1;
use stm32f4xx_hal as hal;
use hal::{
adc::config::{Align, Clock, Continuous, Resolution, Scan},
gpio::{gpioa, Output, PushPull, gpioc::{PC10, PC11, PC12}, Alternate},
pac,
pac::{ADC1, ADC_COMMON, interrupt, Interrupt, TIM1, TIM2},
prelude::*,
pwm,
signature::VDDA_CALIB,
time::KiloHertz,
timer::{Event, Timer},
};
use odrive_rs::spi::Spi;
extern crate drv8301;
use drv8301::drv8301::Drv8301;
use odrive_rs::as5048a::AS5048A;
use odrive_rs::motor::Motor;
use odrive_rs::rcc::{Enable, Reset};
use cortex_m_semihosting::{hprint, hprintln};
use panic_halt as _;
//type TypeLed = gpioa::PA2<Output<PushPull>>;
//static G_LED: Mutex<RefCell<Option<TypeLed>>> = Mutex::new(RefCell::new(None));
type TypeSpi3 = Spi<pac::SPI3, (PC10<Alternate<stm32f4xx_hal::gpio::AF6>>, PC11<Alternate<stm32f4xx_hal::gpio::AF6>>, PC12<Alternate<stm32f4xx_hal::gpio::AF6>>)>;
static G_SPI3: Mutex<RefCell<Option<TypeSpi3>>> = Mutex::new(RefCell::new(None));
type TypeEncoder<'a> = AS5048A<'a, TypeSpi3, gpioa::PA3<Output<PushPull>>>;
//static G_AS5048A: Mutex<RefCell<Option<TypeEncoder>>> = Mutex::new(RefCell::new(None));
type TypeMotor = Motor;
static G_MOTOR: Mutex<RefCell<Option<TypeMotor>>> = Mutex::new(RefCell::new(None));
static G_TIM: Mutex<RefCell<Option<Timer<TIM2>>>> = Mutex::new(RefCell::new(None));
static mut CURRENT_A: f32 = 0.0;
static mut CURRENT_B: f32 = 0.0;
static mut CURRENT_C: f32 = 0.0;
static mut MOT_ANGLE: u16 = 0;
static mut MOT_ANGLE_OLD: u16 = 0;
static mut MOT_VELOCITY: f32 = 0.0;
static mut MOT_VELOCITY_OLD: f32 = 0.0;
static mut ERR_VELOCITY: f32 = 0.0;
static mut ERR_VELOCITY_INT: f32 = 0.0;
static mut REF_CURR_D: f32 = 0.0;
static mut REF_CURR_Q: f32 = 0.0;
// System
const TIM2_FREQ_KHZ: u32 = 10;
// Motor
const MOT_POLE_PAIRS: u16 = 12;
// Encoder
const ENC_RESOLUTION: u16 = 16384;
#[interrupt]
fn | () {
// current sensing
unsafe {
let max_sample:u32 = (1 << 12) - 1;
let device = pac::Peripherals::steal();
device.ADC1.sr.modify(|_, w| w.jeoc().clear_bit());
let jdr1_data = device.ADC1.jdr1.read().jdata().bits();
let jdr1_offset = 48u32;
let so1 = ( ( (u32::from(jdr1_data) + jdr1_offset) * VDDA_CALIB ) / max_sample) as u16;
let jdr2_data = device.ADC1.jdr2.read().jdata().bits();
let jdr2_offset = 118u32;
let so2 = ( ( (u32::from(jdr2_data) + jdr2_offset) * VDDA_CALIB ) / max_sample) as u16;
CURRENT_B = (so1 as f32 - 1650.0) / 200.0;
CURRENT_C = (so2 as f32 - 1650.0) / 200.0;
CURRENT_A = - CURRENT_B - CURRENT_C;
}
/*
// LED Debug
cortex_m::interrupt::free(|cs| {
if let Some(ref mut led) = G_LED.borrow(cs).borrow_mut().as_mut() {
led.toggle().unwrap();
}
});
*/
}
#[interrupt]
fn TIM2() {
cortex_m::interrupt::free(|cs| {
if let Some(ref mut tim) = G_TIM.borrow(cs).borrow_mut().as_mut() {
let _ = tim.wait();
}
});
static mut SPI3: Option<TypeSpi3> = None;
static mut MOTOR: Option<TypeMotor> = None;
unsafe{
let mut spi3 = SPI3.get_or_insert_with(|| {
cortex_m::interrupt::free(|cs| {
G_SPI3.borrow(cs).replace(None).unwrap()
})
});
let device = pac::Peripherals::steal();
let gpioa = device.GPIOA.split();
let ncs = gpioa.pa3.into_push_pull_output();
let mut as5048: TypeEncoder = AS5048A::new(&mut spi3, ncs);
// AS5048A
let measured_angle = as5048.angle().unwrap();
let angle_offset = 650u16;
MOT_ANGLE = (measured_angle - angle_offset) % ENC_RESOLUTION;
let electric_angle = MOT_ANGLE % (ENC_RESOLUTION/MOT_POLE_PAIRS);
let motor = MOTOR.get_or_insert_with(|| {
cortex_m::interrupt::free(|cs| {
G_MOTOR.borrow(cs).replace(None).unwrap()
})
});
// Velocity control
const REF_VELOCITY: f32 = - 100.0;
const VELLOCITY_PGAIN: f32 = 0.1;
const VELOCITY_IGAIN: f32 = 0.00001;
let res_velocity =
if (ENC_RESOLUTION-1000) < MOT_ANGLE_OLD && MOT_ANGLE < 1000 {
( MOT_ANGLE as f32 - MOT_ANGLE_OLD as f32 + ENC_RESOLUTION as f32 ) * TIM2_FREQ_KHZ as f32
} else if MOT_ANGLE_OLD < 1000 && MOT_ANGLE > (ENC_RESOLUTION-1000) {
( MOT_ANGLE as f32 - MOT_ANGLE_OLD as f32 - ENC_RESOLUTION as f32 ) * TIM2_FREQ_KHZ as f32
} else {
( MOT_ANGLE as f32 - MOT_ANGLE_OLD as f32 ) * TIM2_FREQ_KHZ as f32
};
let alpha = 0.1;
MOT_VELOCITY = alpha * res_velocity + (1.0 - alpha) * MOT_VELOCITY_OLD;
ERR_VELOCITY = MOT_VELOCITY - REF_VELOCITY;
ERR_VELOCITY_INT += ERR_VELOCITY;
REF_CURR_D = 0.0;
REF_CURR_Q = VELLOCITY_PGAIN * ERR_VELOCITY + VELOCITY_IGAIN * ERR_VELOCITY_INT;
REF_CURR_Q = -1.0 * REF_CURR_Q;
MOT_ANGLE_OLD = MOT_ANGLE;
MOT_VELOCITY_OLD = MOT_VELOCITY;
// select control mode
//motor.drive_profile().unwrap();
//motor.drive_sixstep().unwrap();
//motor.drive_anglebased_sixstep(electric_angle).unwrap();
motor.drive_foc(electric_angle, CURRENT_A, CURRENT_B, CURRENT_C, REF_CURR_D, REF_CURR_Q).unwrap();
}
/*
// LED Debug
cortex_m::interrupt::free(|cs| {
if let Some(ref mut led) = G_LED.borrow(cs).borrow_mut().as_mut() {
led.toggle().unwrap();
}
});
*/
}
#[entry]
fn main() -> ! {
let dp = pac::Peripherals::take().unwrap();
let cp = cortex_m::peripheral::Peripherals::take().unwrap();
let rcc = dp.RCC.constrain();
let clocks = rcc
.cfgr
.use_hse(8.mhz())
.sysclk(168.mhz())
.hclk(168.mhz())
.pclk1(42.mhz())
.pclk2(84.mhz())
.require_pll48clk()
.freeze();
let mut delay = hal::delay::Delay::new(cp.SYST, clocks);
let gpioa = dp.GPIOA.split();
let gpiob = dp.GPIOB.split();
let gpioc = dp.GPIOC.split();
// SPI3
let sck = gpioc.pc10.into_alternate_af6();
let miso = gpioc.pc11.into_alternate_af6();
let mosi = gpioc.pc12.into_alternate_af6();
let mut spi = Spi::spi3(
dp.SPI3,
(sck, miso, mosi),
MODE_1,
KiloHertz(2000).into(),
clocks,
);
// DRV8301
let ncs = gpioc.pc13.into_push_pull_output();
let en_gate = gpiob.pb12.into_push_pull_output();
let mut drv8301 = Drv8301::new(&mut spi, ncs, en_gate);
drv8301.init().unwrap();
// Move the pin into our global storage
cortex_m::interrupt::free(|cs| *G_SPI3.borrow(cs).borrow_mut() = Some(spi));
// PWM
let channels = (gpioa.pa8.into_alternate_af1(), gpioa.pa9.into_alternate_af1(), gpioa.pa10.into_alternate_af1(), gpioa.pa11.into_alternate_af1());
let pwm = pwm::tim1(dp.TIM1, channels, clocks, 16u32.khz());
let (ch1, ch2, ch3, ch4) = pwm;
let mut ch4 = ch4;
{
// Set complementary oututs mode as AF1
gpiob.pb13.into_alternate_af1();
gpiob.pb14.into_alternate_af1();
gpiob.pb15.into_alternate_af1();
unsafe {
let tim1_regb = &(*(TIM1::ptr()));
// Enable complementary outputs
tim1_regb.ccer.modify(|_, w| w.cc1ne().set_bit());
tim1_regb.ccer.modify(|_, w| w.cc2ne().set_bit());
tim1_regb.ccer.modify(|_, w| w.cc3ne().set_bit());
// Set dead time
tim1_regb.bdtr.modify(|_, w| w.dtg().bits(10));
// Center aligned
tim1_regb.cr1.modify(|_, w| w.cms().center_aligned1());
// OC4REF signal is used as trigger output
tim1_regb.cr2.modify(|_, w| w.mms().compare_oc4());
}
ch4.enable();
ch4.set_duty( (ch4.get_max_duty() as f32 * 0.99)as u16 );
}
delay.delay_ms(1u32);
// Motor
let mut motor = Motor::new(ch1, ch2, ch3, MOT_POLE_PAIRS, ENC_RESOLUTION);
motor.set_duty(0,0,0).unwrap();
motor.enable().unwrap();
delay.delay_ms(1u32);
/*
// for current sensing test
unsafe{
motor.set_hiz_c();
motor.set_duty((motor.max_duty as f32 * 0.6) as u16, (motor.max_duty as f32 * 0.4) as u16, 0u16).unwrap();
}
*/
cortex_m::interrupt::free(|cs| *G_MOTOR.borrow(cs).borrow_mut() = Some(motor));
// ADC1
gpioc.pc0.into_analog();
gpioc.pc1.into_analog();
unsafe {
// All ADCs share the same reset interface.
// NOTE(unsafe) this reference will only be used for atomic writes with no side effects.
let rcc = &(*pac::RCC::ptr());
// Enable the clock
pac::ADC1::enable(rcc);
pac::ADC1::reset(rcc);
let adcc_regb = &(*(ADC_COMMON::ptr()));
let adc1_regb = &(*(ADC1::ptr()));
// Probably unnecessary to disable the ADC in most cases but it shouldn't do any harm either
adc1_regb.cr2.modify(|_, w| w.adon().clear_bit());
// Config common
adcc_regb.ccr.modify(|_, w| w.adcpre().bits(Clock::Pclk2_div_2.into()));
// Config regular conversion
adc1_regb.cr1.modify(|_, w| w.res().bits(Resolution::Twelve.into()));
adc1_regb.cr1.modify(|_, w| w.scan().bit(Scan::Enabled.into()));
adc1_regb.cr2.modify(|_, w| w.align().bit(Align::Right.into()));
adc1_regb.cr2.modify(|_, w| w.cont().bit(Continuous::Single.into()));
// config injected conversion
adc1_regb.cr1.modify(|_, w | w.jeocie().enabled());
adc1_regb.cr2.modify(|_, w| w.jexten().rising_edge());
adc1_regb.cr2.modify(|_, w| w.jextsel().tim1cc4());
adc1_regb.jsqr.modify(|_, w| w.jl().bits(0b01));
adc1_regb.jsqr.modify(|_, w| w.jsq3().bits(10u8));
adc1_regb.jsqr.modify(|_, w| w.jsq4().bits(11u8));
adc1_regb.smpr1.modify(|_, w| w.smp10().cycles3());
adc1_regb.smpr1.modify(|_, w| w.smp11().cycles3());
// enable ADC
adc1_regb.cr2.modify(|_, w| w.adon().set_bit());
delay.delay_ms(1u32);
// enable interrupt
cortex_m::peripheral::NVIC::unmask(Interrupt::ADC);
}
/*
// Debug LED
let mut led = gpioa.pa2.into_push_pull_output();
let _ = led.set_high();
cortex_m::interrupt::free(|cs| *G_LED.borrow(cs).borrow_mut() = Some(led));
*/
// TIM2 Interrupt
let mut timer = Timer::tim2(dp.TIM2, TIM2_FREQ_KHZ.khz(), clocks);
timer.listen(Event::TimeOut);
cortex_m::interrupt::free(|cs| *G_TIM.borrow(cs).borrow_mut() = Some(timer));
//enable TIM2 interrupt
unsafe {
cortex_m::peripheral::NVIC::unmask(Interrupt::TIM2);
}
loop {
wfi();
/*
unsafe{
//hprintln!("CURRENT_A: {}A, CURRENT_B: {}A, CURRENT_C: {}A", CURRENT_A, CURRENT_B, CURRENT_C);
}
delay.delay_ms(1u32);
*/
}
}
| ADC | identifier_name |
main.rs | #![no_std]
#![no_main]
use core::cell::RefCell;
use cortex_m::{asm::wfi, interrupt::Mutex};
use cortex_m_rt::entry;
use embedded_hal::spi::MODE_1;
use stm32f4xx_hal as hal;
use hal::{
adc::config::{Align, Clock, Continuous, Resolution, Scan},
gpio::{gpioa, Output, PushPull, gpioc::{PC10, PC11, PC12}, Alternate},
pac,
pac::{ADC1, ADC_COMMON, interrupt, Interrupt, TIM1, TIM2},
prelude::*,
pwm,
signature::VDDA_CALIB,
time::KiloHertz,
timer::{Event, Timer},
};
use odrive_rs::spi::Spi;
extern crate drv8301;
use drv8301::drv8301::Drv8301;
use odrive_rs::as5048a::AS5048A;
use odrive_rs::motor::Motor;
use odrive_rs::rcc::{Enable, Reset};
use cortex_m_semihosting::{hprint, hprintln};
use panic_halt as _;
//type TypeLed = gpioa::PA2<Output<PushPull>>;
//static G_LED: Mutex<RefCell<Option<TypeLed>>> = Mutex::new(RefCell::new(None));
type TypeSpi3 = Spi<pac::SPI3, (PC10<Alternate<stm32f4xx_hal::gpio::AF6>>, PC11<Alternate<stm32f4xx_hal::gpio::AF6>>, PC12<Alternate<stm32f4xx_hal::gpio::AF6>>)>;
static G_SPI3: Mutex<RefCell<Option<TypeSpi3>>> = Mutex::new(RefCell::new(None));
| static G_MOTOR: Mutex<RefCell<Option<TypeMotor>>> = Mutex::new(RefCell::new(None));
static G_TIM: Mutex<RefCell<Option<Timer<TIM2>>>> = Mutex::new(RefCell::new(None));
static mut CURRENT_A: f32 = 0.0;
static mut CURRENT_B: f32 = 0.0;
static mut CURRENT_C: f32 = 0.0;
static mut MOT_ANGLE: u16 = 0;
static mut MOT_ANGLE_OLD: u16 = 0;
static mut MOT_VELOCITY: f32 = 0.0;
static mut MOT_VELOCITY_OLD: f32 = 0.0;
static mut ERR_VELOCITY: f32 = 0.0;
static mut ERR_VELOCITY_INT: f32 = 0.0;
static mut REF_CURR_D: f32 = 0.0;
static mut REF_CURR_Q: f32 = 0.0;
// System
const TIM2_FREQ_KHZ: u32 = 10;
// Motor
const MOT_POLE_PAIRS: u16 = 12;
// Encoder
const ENC_RESOLUTION: u16 = 16384;
#[interrupt]
fn ADC() {
// current sensing
unsafe {
let max_sample:u32 = (1 << 12) - 1;
let device = pac::Peripherals::steal();
device.ADC1.sr.modify(|_, w| w.jeoc().clear_bit());
let jdr1_data = device.ADC1.jdr1.read().jdata().bits();
let jdr1_offset = 48u32;
let so1 = ( ( (u32::from(jdr1_data) + jdr1_offset) * VDDA_CALIB ) / max_sample) as u16;
let jdr2_data = device.ADC1.jdr2.read().jdata().bits();
let jdr2_offset = 118u32;
let so2 = ( ( (u32::from(jdr2_data) + jdr2_offset) * VDDA_CALIB ) / max_sample) as u16;
CURRENT_B = (so1 as f32 - 1650.0) / 200.0;
CURRENT_C = (so2 as f32 - 1650.0) / 200.0;
CURRENT_A = - CURRENT_B - CURRENT_C;
}
/*
// LED Debug
cortex_m::interrupt::free(|cs| {
if let Some(ref mut led) = G_LED.borrow(cs).borrow_mut().as_mut() {
led.toggle().unwrap();
}
});
*/
}
#[interrupt]
fn TIM2() {
cortex_m::interrupt::free(|cs| {
if let Some(ref mut tim) = G_TIM.borrow(cs).borrow_mut().as_mut() {
let _ = tim.wait();
}
});
static mut SPI3: Option<TypeSpi3> = None;
static mut MOTOR: Option<TypeMotor> = None;
unsafe{
let mut spi3 = SPI3.get_or_insert_with(|| {
cortex_m::interrupt::free(|cs| {
G_SPI3.borrow(cs).replace(None).unwrap()
})
});
let device = pac::Peripherals::steal();
let gpioa = device.GPIOA.split();
let ncs = gpioa.pa3.into_push_pull_output();
let mut as5048: TypeEncoder = AS5048A::new(&mut spi3, ncs);
// AS5048A
let measured_angle = as5048.angle().unwrap();
let angle_offset = 650u16;
MOT_ANGLE = (measured_angle - angle_offset) % ENC_RESOLUTION;
let electric_angle = MOT_ANGLE % (ENC_RESOLUTION/MOT_POLE_PAIRS);
let motor = MOTOR.get_or_insert_with(|| {
cortex_m::interrupt::free(|cs| {
G_MOTOR.borrow(cs).replace(None).unwrap()
})
});
// Velocity control
const REF_VELOCITY: f32 = - 100.0;
const VELLOCITY_PGAIN: f32 = 0.1;
const VELOCITY_IGAIN: f32 = 0.00001;
let res_velocity =
if (ENC_RESOLUTION-1000) < MOT_ANGLE_OLD && MOT_ANGLE < 1000 {
( MOT_ANGLE as f32 - MOT_ANGLE_OLD as f32 + ENC_RESOLUTION as f32 ) * TIM2_FREQ_KHZ as f32
} else if MOT_ANGLE_OLD < 1000 && MOT_ANGLE > (ENC_RESOLUTION-1000) {
( MOT_ANGLE as f32 - MOT_ANGLE_OLD as f32 - ENC_RESOLUTION as f32 ) * TIM2_FREQ_KHZ as f32
} else {
( MOT_ANGLE as f32 - MOT_ANGLE_OLD as f32 ) * TIM2_FREQ_KHZ as f32
};
let alpha = 0.1;
MOT_VELOCITY = alpha * res_velocity + (1.0 - alpha) * MOT_VELOCITY_OLD;
ERR_VELOCITY = MOT_VELOCITY - REF_VELOCITY;
ERR_VELOCITY_INT += ERR_VELOCITY;
REF_CURR_D = 0.0;
REF_CURR_Q = VELLOCITY_PGAIN * ERR_VELOCITY + VELOCITY_IGAIN * ERR_VELOCITY_INT;
REF_CURR_Q = -1.0 * REF_CURR_Q;
MOT_ANGLE_OLD = MOT_ANGLE;
MOT_VELOCITY_OLD = MOT_VELOCITY;
// select control mode
//motor.drive_profile().unwrap();
//motor.drive_sixstep().unwrap();
//motor.drive_anglebased_sixstep(electric_angle).unwrap();
motor.drive_foc(electric_angle, CURRENT_A, CURRENT_B, CURRENT_C, REF_CURR_D, REF_CURR_Q).unwrap();
}
/*
// LED Debug
cortex_m::interrupt::free(|cs| {
if let Some(ref mut led) = G_LED.borrow(cs).borrow_mut().as_mut() {
led.toggle().unwrap();
}
});
*/
}
#[entry]
fn main() -> ! {
let dp = pac::Peripherals::take().unwrap();
let cp = cortex_m::peripheral::Peripherals::take().unwrap();
let rcc = dp.RCC.constrain();
let clocks = rcc
.cfgr
.use_hse(8.mhz())
.sysclk(168.mhz())
.hclk(168.mhz())
.pclk1(42.mhz())
.pclk2(84.mhz())
.require_pll48clk()
.freeze();
let mut delay = hal::delay::Delay::new(cp.SYST, clocks);
let gpioa = dp.GPIOA.split();
let gpiob = dp.GPIOB.split();
let gpioc = dp.GPIOC.split();
// SPI3
let sck = gpioc.pc10.into_alternate_af6();
let miso = gpioc.pc11.into_alternate_af6();
let mosi = gpioc.pc12.into_alternate_af6();
let mut spi = Spi::spi3(
dp.SPI3,
(sck, miso, mosi),
MODE_1,
KiloHertz(2000).into(),
clocks,
);
// DRV8301
let ncs = gpioc.pc13.into_push_pull_output();
let en_gate = gpiob.pb12.into_push_pull_output();
let mut drv8301 = Drv8301::new(&mut spi, ncs, en_gate);
drv8301.init().unwrap();
// Move the pin into our global storage
cortex_m::interrupt::free(|cs| *G_SPI3.borrow(cs).borrow_mut() = Some(spi));
// PWM
let channels = (gpioa.pa8.into_alternate_af1(), gpioa.pa9.into_alternate_af1(), gpioa.pa10.into_alternate_af1(), gpioa.pa11.into_alternate_af1());
let pwm = pwm::tim1(dp.TIM1, channels, clocks, 16u32.khz());
let (ch1, ch2, ch3, ch4) = pwm;
let mut ch4 = ch4;
{
// Set complementary oututs mode as AF1
gpiob.pb13.into_alternate_af1();
gpiob.pb14.into_alternate_af1();
gpiob.pb15.into_alternate_af1();
unsafe {
let tim1_regb = &(*(TIM1::ptr()));
// Enable complementary outputs
tim1_regb.ccer.modify(|_, w| w.cc1ne().set_bit());
tim1_regb.ccer.modify(|_, w| w.cc2ne().set_bit());
tim1_regb.ccer.modify(|_, w| w.cc3ne().set_bit());
// Set dead time
tim1_regb.bdtr.modify(|_, w| w.dtg().bits(10));
// Center aligned
tim1_regb.cr1.modify(|_, w| w.cms().center_aligned1());
// OC4REF signal is used as trigger output
tim1_regb.cr2.modify(|_, w| w.mms().compare_oc4());
}
ch4.enable();
ch4.set_duty( (ch4.get_max_duty() as f32 * 0.99)as u16 );
}
delay.delay_ms(1u32);
// Motor
let mut motor = Motor::new(ch1, ch2, ch3, MOT_POLE_PAIRS, ENC_RESOLUTION);
motor.set_duty(0,0,0).unwrap();
motor.enable().unwrap();
delay.delay_ms(1u32);
/*
// for current sensing test
unsafe{
motor.set_hiz_c();
motor.set_duty((motor.max_duty as f32 * 0.6) as u16, (motor.max_duty as f32 * 0.4) as u16, 0u16).unwrap();
}
*/
cortex_m::interrupt::free(|cs| *G_MOTOR.borrow(cs).borrow_mut() = Some(motor));
// ADC1
gpioc.pc0.into_analog();
gpioc.pc1.into_analog();
unsafe {
// All ADCs share the same reset interface.
// NOTE(unsafe) this reference will only be used for atomic writes with no side effects.
let rcc = &(*pac::RCC::ptr());
// Enable the clock
pac::ADC1::enable(rcc);
pac::ADC1::reset(rcc);
let adcc_regb = &(*(ADC_COMMON::ptr()));
let adc1_regb = &(*(ADC1::ptr()));
// Probably unnecessary to disable the ADC in most cases but it shouldn't do any harm either
adc1_regb.cr2.modify(|_, w| w.adon().clear_bit());
// Config common
adcc_regb.ccr.modify(|_, w| w.adcpre().bits(Clock::Pclk2_div_2.into()));
// Config regular conversion
adc1_regb.cr1.modify(|_, w| w.res().bits(Resolution::Twelve.into()));
adc1_regb.cr1.modify(|_, w| w.scan().bit(Scan::Enabled.into()));
adc1_regb.cr2.modify(|_, w| w.align().bit(Align::Right.into()));
adc1_regb.cr2.modify(|_, w| w.cont().bit(Continuous::Single.into()));
// config injected conversion
adc1_regb.cr1.modify(|_, w | w.jeocie().enabled());
adc1_regb.cr2.modify(|_, w| w.jexten().rising_edge());
adc1_regb.cr2.modify(|_, w| w.jextsel().tim1cc4());
adc1_regb.jsqr.modify(|_, w| w.jl().bits(0b01));
adc1_regb.jsqr.modify(|_, w| w.jsq3().bits(10u8));
adc1_regb.jsqr.modify(|_, w| w.jsq4().bits(11u8));
adc1_regb.smpr1.modify(|_, w| w.smp10().cycles3());
adc1_regb.smpr1.modify(|_, w| w.smp11().cycles3());
// enable ADC
adc1_regb.cr2.modify(|_, w| w.adon().set_bit());
delay.delay_ms(1u32);
// enable interrupt
cortex_m::peripheral::NVIC::unmask(Interrupt::ADC);
}
/*
// Debug LED
let mut led = gpioa.pa2.into_push_pull_output();
let _ = led.set_high();
cortex_m::interrupt::free(|cs| *G_LED.borrow(cs).borrow_mut() = Some(led));
*/
// TIM2 Interrupt
let mut timer = Timer::tim2(dp.TIM2, TIM2_FREQ_KHZ.khz(), clocks);
timer.listen(Event::TimeOut);
cortex_m::interrupt::free(|cs| *G_TIM.borrow(cs).borrow_mut() = Some(timer));
//enable TIM2 interrupt
unsafe {
cortex_m::peripheral::NVIC::unmask(Interrupt::TIM2);
}
loop {
wfi();
/*
unsafe{
//hprintln!("CURRENT_A: {}A, CURRENT_B: {}A, CURRENT_C: {}A", CURRENT_A, CURRENT_B, CURRENT_C);
}
delay.delay_ms(1u32);
*/
}
} | type TypeEncoder<'a> = AS5048A<'a, TypeSpi3, gpioa::PA3<Output<PushPull>>>;
//static G_AS5048A: Mutex<RefCell<Option<TypeEncoder>>> = Mutex::new(RefCell::new(None));
type TypeMotor = Motor; | random_line_split |
MouvementBrownien.py | #!/usr/bin/env python
# coding: utf-8
## TIPS: to compile, use python -m py_compile MouvementBrownien.py
import random # pour les nombres aleatoires
import math # pour les arrondis
import time # pour ralentir la simu avec sleep ou avec after
import Tkinter as tkinter # for GUI (version for Python2)
random.seed(42); # For debugging/reproducible experiments
# required to prevent launching of multiple instances
mainSimuID=0;
### A few constants
GUI_ELT_WIDTH=10;
GUI_ELT_HEIGHT=1;
FONT=('Times', '8', 'bold italic');
#
GRID_WIDTH = 50; # Taille de la grille
GRID_HEIGHT = 50; # Taille de la grille
GRID_STEP = 10; # Espace entre les points de la grille
GRID_COLOR = "grey"; # Couleur des traits de la grille
GRID_X_MIN = -math.floor(GRID_WIDTH/2); # Coordonnees dans referentiel enonce
GRID_X_MAX = math.floor(GRID_WIDTH/2); # Coordonnees dans referentiel enonce
GRID_Y_MIN = -math.floor(GRID_HEIGHT/2); # Coordonnees dans referentiel enonce
GRID_Y_MAX = math.floor(GRID_HEIGHT/2); # Coordonnees dans referentiel enonce
#
CANVAS_WIDTH = GRID_WIDTH*GRID_STEP; # Taille resultante du canvas (zone dessin)
CANVAS_HEIGHT = GRID_HEIGHT*GRID_STEP; # Taille resultante du canvas (zone dessin)
CANVAS_BG_COLOR = "white"; # Couleur de fond du canvas (zone dessin)
#
NB_PARTICLES = 500; # Nombre de particles a simuler
PARTICLE_SIZE = GRID_STEP*2/3; # Taille des cercles pour representer chq particule
PARTICLE_COLOR = "red"; # Couleur des particules
#
NB_SIMU_STEPS = 100; # Nombre de pas de temps dans la simulation
SIMU_INVSPEED = 100; # Nombre de millisecondes entre chaque pas de temps
## The only shared var
paused=False;
#### Fonction des particules
# Initialise les positions des particules
def initParticles(N):
particles = [];
for p in xrange(N):
initialPos = { 'x': 0, 'y': 0 };
particles.append(initialPos);
return particles;
# Deplace aleatoirement une unique particule, en respectant la "condition de bordure"
def moveSingleParticle(oldPos):
aleat = random.randint(0, 3);
if (aleat==0):
newPos = { 'x':oldPos['x']+1, 'y': oldPos['y']};
elif (aleat==1):
newPos = { 'x': oldPos['x']-1, 'y': oldPos['y']};
elif (aleat==2):
newPos = { 'x': oldPos['x'], 'y': oldPos['y']+1};
elif (aleat==3):
newPos = { 'x': oldPos['x'], 'y': oldPos['y']-1};
else:
print("Should not have reached here! "+str(aleat)+" is not an accepted output");
## "Lorsqu'une particule rencontre une paroi, elle ne bouge pas si le mouvement
## determine aleatoirement la fait traverser cette paroi."
if (newPos['x']<GRID_X_MIN+1):
newPos['x'] = GRID_X_MIN+1;
elif (newPos['x']>GRID_X_MAX-1):
newPos['x'] = GRID_X_MAX-1;
elif (newPos['y']<GRID_Y_MIN+1):
newPos['y'] = GRID_Y_MIN+1;
elif (newPos['y']>GRID_Y_MAX-1):
newPos['y'] = GRID_Y_MAX-1;
return newPos;
# Deplace toutes les particules d'un pas aleatoire
def moveParticles(particles):
newParticles = [];
for p in xrange(len(particles)):
newParticles.append(moveSingleParticle(particles[p]));
return(newParticles);
def applyGravity(particles):
newParticles = [];
for p in xrange(len(particles)):
part = particles[p];
if (part['y']<GRID_Y_MAX-1): # Only change value if it particle does not exit screen
part['y'] += 1; # Make particle go down
newParticles.append(part);
return(newParticles);
#### Problem/Model to GUI/View functions
# Convertit la liste des positions des particules en un tableau
# (proche de la "grille graphique")
def convertToMatrix(particles):
matrix = [[0 for x in range(GRID_WIDTH)] for y in range(GRID_WIDTH)];
for p in xrange(len(particles)):
currentParticle = particles[p];
tx = int(math.floor(currentParticle['x']-GRID_X_MIN));
ty = int(math.floor(currentParticle['y']-GRID_Y_MIN));
##print("----------- Particle position: ("+str(x)+","+str(y)+")->("+str(tx)+","+str(ty)+")"); ## Debug
##print("----------- Grid: ["+str(GRID_X_MIN)+"->"+str(GRID_X_MAX)+" ; "+str(GRID_Y_MIN)+"->"+str(GRID_Y_MAX)+"] / ("+str(GRID_WIDTH)+", "+str(GRID_HEIGHT)+")");
matrix[tx][ty] += 1;
##print(matrix); # for debugging
return matrix;
#### Fonctions graphiques
# Un/Pauses the simulation
def pause(pauseButton):
global paused; ## required to set global var
paused = not paused;
if (paused):
pauseButton.config(text="Unpause");
else:
pauseButton.config(text="Pause");
# Creation & Placement des elements graphiques
def initGUI(rootWindow):
##global canvas; ## for debugging purpose
canvas = tkinter.Canvas(rootWindow, width=CANVAS_WIDTH, height=CANVAS_HEIGHT);
canvas.config(background=CANVAS_BG_COLOR);
canvas.pack();
slider = tkinter.Scale(rootWindow, from_=0, to=10, orient=tkinter.HORIZONTAL);
slider.set(0);
#slider.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
slider.pack();
label = tkinter.Label(rootWindow, text='t=0');
labelfont = ('times', 20, 'bold');
label.config(bg='black', fg='yellow');
label.config(font=labelfont);
label.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
label.pack(); # expand=YES, fill=BOTH
startButton = tkinter.Button(rootWindow, text="Démarrer",
command= lambda: startSimulationLoop(canvas,label,slider));
startButton.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
startButton.pack();
pauseButton = tkinter.Button(rootWindow, text="Pause",
command=lambda: pause(pauseButton));
pauseButton.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
pauseButton.pack();
exitButton = tkinter.Button(rootWindow, text="Sortir",
command=rootWindow.destroy);
exitButton.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
exitButton.pack();
def drawGrid(canvas):
for x in xrange(0, CANVAS_WIDTH, GRID_STEP):
l1 = canvas.create_line(x, 0, x, CANVAS_HEIGHT, fill=GRID_COLOR);
canvas.itemconfig(l1, tags=("vl")); ## Useless
for y in xrange(0, CANVAS_HEIGHT, GRID_STEP):
l2 = canvas.create_line(0, y, CANVAS_WIDTH, y, fill=GRID_COLOR);
canvas.itemconfig(l2, tags=("hl")); ## Useless
ctr = canvas.create_oval(-GRID_X_MIN*GRID_STEP-PARTICLE_SIZE/2,
-GRID_Y_MIN*GRID_STEP-PARTICLE_SIZE/2,
-GRID_X_MIN*GRID_STEP+PARTICLE_SIZE/2,
-GRID_Y_MIN*GRID_STEP+PARTICLE_SIZE/2,
outline="grey", fill="grey");
# def drawParticlesFromPositions(canvas, particles):
# for p in xrange(len(particles)):
# currentParticle = particles[p];
# tx = currentParticle['x']-GRID_X_MIN;
# ty = currentParticle['y']-GRID_Y_MIN;
# txg = tx*GRID_STEP;
# tyg = ty*GRID_STEP;
# o = canvas.create_oval(txg-PARTICLE_SIZE, tyg-PARTICLE_SIZE,
# txg+PARTICLE_SIZE, tyg+PARTICLE_SIZE,
# outline=PARTICLE_COLOR, fill=PARTICLE_COLOR);
# canvas.itemconfig(o, tags=("part"+str(p))); ## Useless
# canvas.update_idletasks(); # THIS IS A DIRTY HACK!!!
def drawParticlesFromGrid(canvas, matrix):
for x in xrange(GRID_WIDTH):
for y in xrange(GRID_HEIGHT):
if (matrix[x][y]>0):
xg = x*GRID_STEP;
yg = y*GRID_STEP;
o = canvas.create_oval(xg-PARTICLE_SIZE, yg-PARTICLE_SIZE,
xg+PARTICLE_SIZE, yg+PARTICLE_SIZE,
outline=PARTICLE_COLOR, fill=PARTICLE_COLOR);
canvas.itemconfig(o, tags=("part("+str(x)+"/"+str(y)+")")); ## Useless
##print("----------- Particle position: "+str(xg)+"/"+str(yg)); ## Debug
t = canvas.create_text((xg, yg), text=str(matrix[x][y]), font=FONT);
canvas.itemconfig(t, tags=("partCount("+str(x)+"/"+str(y)+")")); ## Useless
canvas.update_idletasks(); # THIS IS A DIRTY HACK!!!
def drawTime(label, t):
l |
# Lance la simulation with loop
## Problem1: we do no give a chance to the GUI elements to update => we need to force them
## Problem2: since the event loop is broken, the "Sortir" button does not work.
# def startSimulationLoop(canvas, label):
# particles = initParticles(NB_PARTICLES);
# for step in xrange(NB_SIMU_STEPS):
# ## print("*** DRAWING STEP#"+str(step)); ## Debug
# # for the fun of seeing things move
# canvas.delete("all"); # optimization:
# drawGrid(canvas); # remove only particles
# ### drawParticlesFromPositions(canvas, particles);
# drawParticlesFromGrid(canvas, convertToMatrix(particles));
# drawTime(label, step);
# # actions reelles du pas de temps de la simu
# ## print("*** MOVING PARTICLES"); ## Debug
# particles = moveParticles(particles);
# time.sleep(SIMU_INVSPEED);
# Execute un pas de simulation (si on n'est pas en pause) et se
# rappelle elle-même au bout un certain delai
def oneSimulationStep(simuID, step, canvas, label, particles, gravity):
global paused; ## required to get global var
if (not paused):
## print("*** DRAWING STEP#"+str(step)); ## Debug
# for the fun of seeing things move
canvas.delete("all"); # optimization:
drawGrid(canvas); # remove only particles
### drawParticlesFromPositions(canvas, particles);
matrix = convertToMatrix(particles);
drawParticlesFromGrid(canvas, matrix);
drawTime(label, step);
# actions reelles du pas de temps de la simu
## print("*** MOVING PARTICLES"); ## Debug
## print(particles); ## Debug
## print(matrix); ## Debug
particles = moveParticles(particles);
if (gravity!=0 and step%gravity==0):
## print("*** Applying gravity: step="+str(step)); ## Debug
particles = applyGravity(particles);
step=step+1;
# Whatever the status of pause, we recall ourselves
# NOTE: nope, otherwise multiple instances run in parallel !!!
##print("my simuID="+str(simuID)+" / "+"mainSimuID="+str(mainSimuID)); ## debug
if (simuID==mainSimuID):
canvas.after(SIMU_INVSPEED, oneSimulationStep, simuID, step, canvas, label, particles, gravity);
# Lance la simulation (via un timer)
def startSimulationLoop(canvas, label, slider):
global mainSimuID;
mainSimuID = math.floor(150000*random.random());
particles = initParticles(NB_PARTICLES);
gravity = slider.get();
##print("*** Starting simulation with gravity="+str(gravity));
oneSimulationStep(mainSimuID, 1, canvas, label, particles, gravity);
##### Lancement automatique du programme
def main():
# Les elements graphiques
rootWindow = tkinter.Tk(); # une fenetre graphique TK
rootWindow.title("Ma Super Simulation du Mouvement Brownien");
initGUI(rootWindow);
rootWindow.mainloop();
main();
| abel.configure(text="t="+str(t));
label.update_idletasks(); # THIS IS A DIRTY HACK!!!
| identifier_body |
MouvementBrownien.py | #!/usr/bin/env python
# coding: utf-8
## TIPS: to compile, use python -m py_compile MouvementBrownien.py
import random # pour les nombres aleatoires
import math # pour les arrondis
import time # pour ralentir la simu avec sleep ou avec after
import Tkinter as tkinter # for GUI (version for Python2)
random.seed(42); # For debugging/reproducible experiments
# required to prevent launching of multiple instances
mainSimuID=0;
### A few constants
GUI_ELT_WIDTH=10;
GUI_ELT_HEIGHT=1;
FONT=('Times', '8', 'bold italic');
#
GRID_WIDTH = 50; # Taille de la grille
GRID_HEIGHT = 50; # Taille de la grille
GRID_STEP = 10; # Espace entre les points de la grille
GRID_COLOR = "grey"; # Couleur des traits de la grille
GRID_X_MIN = -math.floor(GRID_WIDTH/2); # Coordonnees dans referentiel enonce
GRID_X_MAX = math.floor(GRID_WIDTH/2); # Coordonnees dans referentiel enonce
GRID_Y_MIN = -math.floor(GRID_HEIGHT/2); # Coordonnees dans referentiel enonce
GRID_Y_MAX = math.floor(GRID_HEIGHT/2); # Coordonnees dans referentiel enonce
#
CANVAS_WIDTH = GRID_WIDTH*GRID_STEP; # Taille resultante du canvas (zone dessin)
CANVAS_HEIGHT = GRID_HEIGHT*GRID_STEP; # Taille resultante du canvas (zone dessin)
CANVAS_BG_COLOR = "white"; # Couleur de fond du canvas (zone dessin)
#
NB_PARTICLES = 500; # Nombre de particles a simuler
PARTICLE_SIZE = GRID_STEP*2/3; # Taille des cercles pour representer chq particule
PARTICLE_COLOR = "red"; # Couleur des particules
#
NB_SIMU_STEPS = 100; # Nombre de pas de temps dans la simulation
SIMU_INVSPEED = 100; # Nombre de millisecondes entre chaque pas de temps
## The only shared var
paused=False;
#### Fonction des particules
# Initialise les positions des particules
def initParticles(N):
particles = [];
for p in xrange(N):
initialPos = { 'x': 0, 'y': 0 };
particles.append(initialPos);
return particles;
# Deplace aleatoirement une unique particule, en respectant la "condition de bordure"
def moveSingleParticle(oldPos):
aleat = random.randint(0, 3);
if (aleat==0):
newPos = { 'x':oldPos['x']+1, 'y': oldPos['y']};
elif (aleat==1):
newPos = { 'x': oldPos['x']-1, 'y': oldPos['y']};
elif (aleat==2):
newPos = { 'x': oldPos['x'], 'y': oldPos['y']+1};
elif (aleat==3):
newPos = { 'x': oldPos['x'], 'y': oldPos['y']-1};
else:
print("Should not have reached here! "+str(aleat)+" is not an accepted output");
## "Lorsqu'une particule rencontre une paroi, elle ne bouge pas si le mouvement
## determine aleatoirement la fait traverser cette paroi."
if (newPos['x']<GRID_X_MIN+1):
newPos['x'] = GRID_X_MIN+1;
elif (newPos['x']>GRID_X_MAX-1):
newPos['x'] = GRID_X_MAX-1;
elif (newPos['y']<GRID_Y_MIN+1):
newPos['y'] = GRID_Y_MIN+1;
elif (newPos['y']>GRID_Y_MAX-1):
newPos['y'] = GRID_Y_MAX-1;
return newPos;
# Deplace toutes les particules d'un pas aleatoire
def moveParticles(particles):
newParticles = [];
for p in xrange(len(particles)):
newParticles.append(moveSingleParticle(particles[p]));
return(newParticles);
def applyGravity(particles):
newParticles = [];
for p in xrange(len(particles)):
part = particles[p];
if (part['y']<GRID_Y_MAX-1): # Only change value if it particle does not exit screen
part['y'] += 1; # Make particle go down
newParticles.append(part);
return(newParticles);
#### Problem/Model to GUI/View functions
# Convertit la liste des positions des particules en un tableau
# (proche de la "grille graphique")
def convertToMatrix(particles):
matrix = [[0 for x in range(GRID_WIDTH)] for y in range(GRID_WIDTH)];
for p in xrange(len(particles)):
currentParticle = particles[p];
tx = int(math.floor(currentParticle['x']-GRID_X_MIN));
ty = int(math.floor(currentParticle['y']-GRID_Y_MIN));
##print("----------- Particle position: ("+str(x)+","+str(y)+")->("+str(tx)+","+str(ty)+")"); ## Debug
##print("----------- Grid: ["+str(GRID_X_MIN)+"->"+str(GRID_X_MAX)+" ; "+str(GRID_Y_MIN)+"->"+str(GRID_Y_MAX)+"] / ("+str(GRID_WIDTH)+", "+str(GRID_HEIGHT)+")");
matrix[tx][ty] += 1;
##print(matrix); # for debugging
return matrix;
#### Fonctions graphiques
# Un/Pauses the simulation
def pause(pauseButton):
global paused; ## required to set global var
paused = not paused;
if (paused):
pauseButton.config(text="Unpause");
else:
pauseButton.config(text="Pause");
# Creation & Placement des elements graphiques
def initGUI(rootWindow):
##global canvas; ## for debugging purpose
canvas = tkinter.Canvas(rootWindow, width=CANVAS_WIDTH, height=CANVAS_HEIGHT);
canvas.config(background=CANVAS_BG_COLOR);
canvas.pack();
slider = tkinter.Scale(rootWindow, from_=0, to=10, orient=tkinter.HORIZONTAL);
slider.set(0);
#slider.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
slider.pack();
label = tkinter.Label(rootWindow, text='t=0');
labelfont = ('times', 20, 'bold');
label.config(bg='black', fg='yellow');
label.config(font=labelfont);
label.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
label.pack(); # expand=YES, fill=BOTH
startButton = tkinter.Button(rootWindow, text="Démarrer",
command= lambda: startSimulationLoop(canvas,label,slider));
startButton.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
startButton.pack();
pauseButton = tkinter.Button(rootWindow, text="Pause",
command=lambda: pause(pauseButton));
pauseButton.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
pauseButton.pack();
exitButton = tkinter.Button(rootWindow, text="Sortir",
command=rootWindow.destroy);
exitButton.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
exitButton.pack();
def drawGrid(canvas):
for x in xrange(0, CANVAS_WIDTH, GRID_STEP):
l1 = canvas.create_line(x, 0, x, CANVAS_HEIGHT, fill=GRID_COLOR);
canvas.itemconfig(l1, tags=("vl")); ## Useless
for y in xrange(0, CANVAS_HEIGHT, GRID_STEP):
l2 = canvas.create_line(0, y, CANVAS_WIDTH, y, fill=GRID_COLOR);
canvas.itemconfig(l2, tags=("hl")); ## Useless
ctr = canvas.create_oval(-GRID_X_MIN*GRID_STEP-PARTICLE_SIZE/2,
-GRID_Y_MIN*GRID_STEP-PARTICLE_SIZE/2,
-GRID_X_MIN*GRID_STEP+PARTICLE_SIZE/2,
-GRID_Y_MIN*GRID_STEP+PARTICLE_SIZE/2,
outline="grey", fill="grey");
# def drawParticlesFromPositions(canvas, particles):
# for p in xrange(len(particles)):
# currentParticle = particles[p];
# tx = currentParticle['x']-GRID_X_MIN; | # ty = currentParticle['y']-GRID_Y_MIN;
# txg = tx*GRID_STEP;
# tyg = ty*GRID_STEP;
# o = canvas.create_oval(txg-PARTICLE_SIZE, tyg-PARTICLE_SIZE,
# txg+PARTICLE_SIZE, tyg+PARTICLE_SIZE,
# outline=PARTICLE_COLOR, fill=PARTICLE_COLOR);
# canvas.itemconfig(o, tags=("part"+str(p))); ## Useless
# canvas.update_idletasks(); # THIS IS A DIRTY HACK!!!
def drawParticlesFromGrid(canvas, matrix):
for x in xrange(GRID_WIDTH):
for y in xrange(GRID_HEIGHT):
if (matrix[x][y]>0):
xg = x*GRID_STEP;
yg = y*GRID_STEP;
o = canvas.create_oval(xg-PARTICLE_SIZE, yg-PARTICLE_SIZE,
xg+PARTICLE_SIZE, yg+PARTICLE_SIZE,
outline=PARTICLE_COLOR, fill=PARTICLE_COLOR);
canvas.itemconfig(o, tags=("part("+str(x)+"/"+str(y)+")")); ## Useless
##print("----------- Particle position: "+str(xg)+"/"+str(yg)); ## Debug
t = canvas.create_text((xg, yg), text=str(matrix[x][y]), font=FONT);
canvas.itemconfig(t, tags=("partCount("+str(x)+"/"+str(y)+")")); ## Useless
canvas.update_idletasks(); # THIS IS A DIRTY HACK!!!
def drawTime(label, t):
label.configure(text="t="+str(t));
label.update_idletasks(); # THIS IS A DIRTY HACK!!!
# Lance la simulation with loop
## Problem1: we do no give a chance to the GUI elements to update => we need to force them
## Problem2: since the event loop is broken, the "Sortir" button does not work.
# def startSimulationLoop(canvas, label):
# particles = initParticles(NB_PARTICLES);
# for step in xrange(NB_SIMU_STEPS):
# ## print("*** DRAWING STEP#"+str(step)); ## Debug
# # for the fun of seeing things move
# canvas.delete("all"); # optimization:
# drawGrid(canvas); # remove only particles
# ### drawParticlesFromPositions(canvas, particles);
# drawParticlesFromGrid(canvas, convertToMatrix(particles));
# drawTime(label, step);
# # actions reelles du pas de temps de la simu
# ## print("*** MOVING PARTICLES"); ## Debug
# particles = moveParticles(particles);
# time.sleep(SIMU_INVSPEED);
# Execute un pas de simulation (si on n'est pas en pause) et se
# rappelle elle-même au bout un certain delai
def oneSimulationStep(simuID, step, canvas, label, particles, gravity):
global paused; ## required to get global var
if (not paused):
## print("*** DRAWING STEP#"+str(step)); ## Debug
# for the fun of seeing things move
canvas.delete("all"); # optimization:
drawGrid(canvas); # remove only particles
### drawParticlesFromPositions(canvas, particles);
matrix = convertToMatrix(particles);
drawParticlesFromGrid(canvas, matrix);
drawTime(label, step);
# actions reelles du pas de temps de la simu
## print("*** MOVING PARTICLES"); ## Debug
## print(particles); ## Debug
## print(matrix); ## Debug
particles = moveParticles(particles);
if (gravity!=0 and step%gravity==0):
## print("*** Applying gravity: step="+str(step)); ## Debug
particles = applyGravity(particles);
step=step+1;
# Whatever the status of pause, we recall ourselves
# NOTE: nope, otherwise multiple instances run in parallel !!!
##print("my simuID="+str(simuID)+" / "+"mainSimuID="+str(mainSimuID)); ## debug
if (simuID==mainSimuID):
canvas.after(SIMU_INVSPEED, oneSimulationStep, simuID, step, canvas, label, particles, gravity);
# Lance la simulation (via un timer)
def startSimulationLoop(canvas, label, slider):
global mainSimuID;
mainSimuID = math.floor(150000*random.random());
particles = initParticles(NB_PARTICLES);
gravity = slider.get();
##print("*** Starting simulation with gravity="+str(gravity));
oneSimulationStep(mainSimuID, 1, canvas, label, particles, gravity);
##### Lancement automatique du programme
def main():
# Les elements graphiques
rootWindow = tkinter.Tk(); # une fenetre graphique TK
rootWindow.title("Ma Super Simulation du Mouvement Brownien");
initGUI(rootWindow);
rootWindow.mainloop();
main(); | random_line_split | |
MouvementBrownien.py | #!/usr/bin/env python
# coding: utf-8
## TIPS: to compile, use python -m py_compile MouvementBrownien.py
import random # pour les nombres aleatoires
import math # pour les arrondis
import time # pour ralentir la simu avec sleep ou avec after
import Tkinter as tkinter # for GUI (version for Python2)
random.seed(42); # For debugging/reproducible experiments
# required to prevent launching of multiple instances
mainSimuID=0;
### A few constants
GUI_ELT_WIDTH=10;
GUI_ELT_HEIGHT=1;
FONT=('Times', '8', 'bold italic');
#
GRID_WIDTH = 50; # Taille de la grille
GRID_HEIGHT = 50; # Taille de la grille
GRID_STEP = 10; # Espace entre les points de la grille
GRID_COLOR = "grey"; # Couleur des traits de la grille
GRID_X_MIN = -math.floor(GRID_WIDTH/2); # Coordonnees dans referentiel enonce
GRID_X_MAX = math.floor(GRID_WIDTH/2); # Coordonnees dans referentiel enonce
GRID_Y_MIN = -math.floor(GRID_HEIGHT/2); # Coordonnees dans referentiel enonce
GRID_Y_MAX = math.floor(GRID_HEIGHT/2); # Coordonnees dans referentiel enonce
#
CANVAS_WIDTH = GRID_WIDTH*GRID_STEP; # Taille resultante du canvas (zone dessin)
CANVAS_HEIGHT = GRID_HEIGHT*GRID_STEP; # Taille resultante du canvas (zone dessin)
CANVAS_BG_COLOR = "white"; # Couleur de fond du canvas (zone dessin)
#
NB_PARTICLES = 500; # Nombre de particles a simuler
PARTICLE_SIZE = GRID_STEP*2/3; # Taille des cercles pour representer chq particule
PARTICLE_COLOR = "red"; # Couleur des particules
#
NB_SIMU_STEPS = 100; # Nombre de pas de temps dans la simulation
SIMU_INVSPEED = 100; # Nombre de millisecondes entre chaque pas de temps
## The only shared var
paused=False;
#### Fonction des particules
# Initialise les positions des particules
def initParticles(N):
particles = [];
for p in xrange(N):
initialPos = { 'x': 0, 'y': 0 };
particles.append(initialPos);
return particles;
# Deplace aleatoirement une unique particule, en respectant la "condition de bordure"
def moveSingleParticle(oldPos):
aleat = random.randint(0, 3);
if (aleat==0):
newPos = { 'x':oldPos['x']+1, 'y': oldPos['y']};
elif (aleat==1):
newPos = { 'x': oldPos['x']-1, 'y': oldPos['y']};
elif (aleat==2):
newPos = { 'x': oldPos['x'], 'y': oldPos['y']+1};
elif (aleat==3):
newPos = { 'x': oldPos['x'], 'y': oldPos['y']-1};
else:
print("Should not have reached here! "+str(aleat)+" is not an accepted output");
## "Lorsqu'une particule rencontre une paroi, elle ne bouge pas si le mouvement
## determine aleatoirement la fait traverser cette paroi."
if (newPos['x']<GRID_X_MIN+1):
newPos['x'] = GRID_X_MIN+1;
elif (newPos['x']>GRID_X_MAX-1):
newPos['x'] = GRID_X_MAX-1;
elif (newPos['y']<GRID_Y_MIN+1):
newPos['y'] = GRID_Y_MIN+1;
elif (newPos['y']>GRID_Y_MAX-1):
newPos['y'] = GRID_Y_MAX-1;
return newPos;
# Deplace toutes les particules d'un pas aleatoire
def moveParticles(particles):
newParticles = [];
for p in xrange(len(particles)):
newParticles.append(moveSingleParticle(particles[p]));
return(newParticles);
def applyGravity(particles):
newParticles = [];
for p in xrange(len(particles)):
part = particles[p];
if (part['y']<GRID_Y_MAX-1): # Only change value if it particle does not exit screen
part['y'] += 1; # Make particle go down
newParticles.append(part);
return(newParticles);
#### Problem/Model to GUI/View functions
# Convertit la liste des positions des particules en un tableau
# (proche de la "grille graphique")
def convertToMatrix(particles):
matrix = [[0 for x in range(GRID_WIDTH)] for y in range(GRID_WIDTH)];
for p in xrange(len(particles)):
currentParticle = particles[p];
tx = int(math.floor(currentParticle['x']-GRID_X_MIN));
ty = int(math.floor(currentParticle['y']-GRID_Y_MIN));
##print("----------- Particle position: ("+str(x)+","+str(y)+")->("+str(tx)+","+str(ty)+")"); ## Debug
##print("----------- Grid: ["+str(GRID_X_MIN)+"->"+str(GRID_X_MAX)+" ; "+str(GRID_Y_MIN)+"->"+str(GRID_Y_MAX)+"] / ("+str(GRID_WIDTH)+", "+str(GRID_HEIGHT)+")");
matrix[tx][ty] += 1;
##print(matrix); # for debugging
return matrix;
#### Fonctions graphiques
# Un/Pauses the simulation
def pause(pauseButton):
global paused; ## required to set global var
paused = not paused;
if (paused):
pauseButton.config(text="Unpause");
else:
|
# Creation & Placement des elements graphiques
def initGUI(rootWindow):
##global canvas; ## for debugging purpose
canvas = tkinter.Canvas(rootWindow, width=CANVAS_WIDTH, height=CANVAS_HEIGHT);
canvas.config(background=CANVAS_BG_COLOR);
canvas.pack();
slider = tkinter.Scale(rootWindow, from_=0, to=10, orient=tkinter.HORIZONTAL);
slider.set(0);
#slider.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
slider.pack();
label = tkinter.Label(rootWindow, text='t=0');
labelfont = ('times', 20, 'bold');
label.config(bg='black', fg='yellow');
label.config(font=labelfont);
label.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
label.pack(); # expand=YES, fill=BOTH
startButton = tkinter.Button(rootWindow, text="Démarrer",
command= lambda: startSimulationLoop(canvas,label,slider));
startButton.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
startButton.pack();
pauseButton = tkinter.Button(rootWindow, text="Pause",
command=lambda: pause(pauseButton));
pauseButton.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
pauseButton.pack();
exitButton = tkinter.Button(rootWindow, text="Sortir",
command=rootWindow.destroy);
exitButton.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
exitButton.pack();
def drawGrid(canvas):
for x in xrange(0, CANVAS_WIDTH, GRID_STEP):
l1 = canvas.create_line(x, 0, x, CANVAS_HEIGHT, fill=GRID_COLOR);
canvas.itemconfig(l1, tags=("vl")); ## Useless
for y in xrange(0, CANVAS_HEIGHT, GRID_STEP):
l2 = canvas.create_line(0, y, CANVAS_WIDTH, y, fill=GRID_COLOR);
canvas.itemconfig(l2, tags=("hl")); ## Useless
ctr = canvas.create_oval(-GRID_X_MIN*GRID_STEP-PARTICLE_SIZE/2,
-GRID_Y_MIN*GRID_STEP-PARTICLE_SIZE/2,
-GRID_X_MIN*GRID_STEP+PARTICLE_SIZE/2,
-GRID_Y_MIN*GRID_STEP+PARTICLE_SIZE/2,
outline="grey", fill="grey");
# def drawParticlesFromPositions(canvas, particles):
# for p in xrange(len(particles)):
# currentParticle = particles[p];
# tx = currentParticle['x']-GRID_X_MIN;
# ty = currentParticle['y']-GRID_Y_MIN;
# txg = tx*GRID_STEP;
# tyg = ty*GRID_STEP;
# o = canvas.create_oval(txg-PARTICLE_SIZE, tyg-PARTICLE_SIZE,
# txg+PARTICLE_SIZE, tyg+PARTICLE_SIZE,
# outline=PARTICLE_COLOR, fill=PARTICLE_COLOR);
# canvas.itemconfig(o, tags=("part"+str(p))); ## Useless
# canvas.update_idletasks(); # THIS IS A DIRTY HACK!!!
def drawParticlesFromGrid(canvas, matrix):
for x in xrange(GRID_WIDTH):
for y in xrange(GRID_HEIGHT):
if (matrix[x][y]>0):
xg = x*GRID_STEP;
yg = y*GRID_STEP;
o = canvas.create_oval(xg-PARTICLE_SIZE, yg-PARTICLE_SIZE,
xg+PARTICLE_SIZE, yg+PARTICLE_SIZE,
outline=PARTICLE_COLOR, fill=PARTICLE_COLOR);
canvas.itemconfig(o, tags=("part("+str(x)+"/"+str(y)+")")); ## Useless
##print("----------- Particle position: "+str(xg)+"/"+str(yg)); ## Debug
t = canvas.create_text((xg, yg), text=str(matrix[x][y]), font=FONT);
canvas.itemconfig(t, tags=("partCount("+str(x)+"/"+str(y)+")")); ## Useless
canvas.update_idletasks(); # THIS IS A DIRTY HACK!!!
def drawTime(label, t):
label.configure(text="t="+str(t));
label.update_idletasks(); # THIS IS A DIRTY HACK!!!
# Lance la simulation with loop
## Problem1: we do no give a chance to the GUI elements to update => we need to force them
## Problem2: since the event loop is broken, the "Sortir" button does not work.
# def startSimulationLoop(canvas, label):
# particles = initParticles(NB_PARTICLES);
# for step in xrange(NB_SIMU_STEPS):
# ## print("*** DRAWING STEP#"+str(step)); ## Debug
# # for the fun of seeing things move
# canvas.delete("all"); # optimization:
# drawGrid(canvas); # remove only particles
# ### drawParticlesFromPositions(canvas, particles);
# drawParticlesFromGrid(canvas, convertToMatrix(particles));
# drawTime(label, step);
# # actions reelles du pas de temps de la simu
# ## print("*** MOVING PARTICLES"); ## Debug
# particles = moveParticles(particles);
# time.sleep(SIMU_INVSPEED);
# Execute un pas de simulation (si on n'est pas en pause) et se
# rappelle elle-même au bout un certain delai
def oneSimulationStep(simuID, step, canvas, label, particles, gravity):
global paused; ## required to get global var
if (not paused):
## print("*** DRAWING STEP#"+str(step)); ## Debug
# for the fun of seeing things move
canvas.delete("all"); # optimization:
drawGrid(canvas); # remove only particles
### drawParticlesFromPositions(canvas, particles);
matrix = convertToMatrix(particles);
drawParticlesFromGrid(canvas, matrix);
drawTime(label, step);
# actions reelles du pas de temps de la simu
## print("*** MOVING PARTICLES"); ## Debug
## print(particles); ## Debug
## print(matrix); ## Debug
particles = moveParticles(particles);
if (gravity!=0 and step%gravity==0):
## print("*** Applying gravity: step="+str(step)); ## Debug
particles = applyGravity(particles);
step=step+1;
# Whatever the status of pause, we recall ourselves
# NOTE: nope, otherwise multiple instances run in parallel !!!
##print("my simuID="+str(simuID)+" / "+"mainSimuID="+str(mainSimuID)); ## debug
if (simuID==mainSimuID):
canvas.after(SIMU_INVSPEED, oneSimulationStep, simuID, step, canvas, label, particles, gravity);
# Lance la simulation (via un timer)
def startSimulationLoop(canvas, label, slider):
global mainSimuID;
mainSimuID = math.floor(150000*random.random());
particles = initParticles(NB_PARTICLES);
gravity = slider.get();
##print("*** Starting simulation with gravity="+str(gravity));
oneSimulationStep(mainSimuID, 1, canvas, label, particles, gravity);
##### Lancement automatique du programme
def main():
# Les elements graphiques
rootWindow = tkinter.Tk(); # une fenetre graphique TK
rootWindow.title("Ma Super Simulation du Mouvement Brownien");
initGUI(rootWindow);
rootWindow.mainloop();
main();
| pauseButton.config(text="Pause"); | conditional_block |
MouvementBrownien.py | #!/usr/bin/env python
# coding: utf-8
## TIPS: to compile, use python -m py_compile MouvementBrownien.py
import random # pour les nombres aleatoires
import math # pour les arrondis
import time # pour ralentir la simu avec sleep ou avec after
import Tkinter as tkinter # for GUI (version for Python2)
random.seed(42); # For debugging/reproducible experiments
# required to prevent launching of multiple instances
mainSimuID=0;
### A few constants
GUI_ELT_WIDTH=10;
GUI_ELT_HEIGHT=1;
FONT=('Times', '8', 'bold italic');
#
GRID_WIDTH = 50; # Taille de la grille
GRID_HEIGHT = 50; # Taille de la grille
GRID_STEP = 10; # Espace entre les points de la grille
GRID_COLOR = "grey"; # Couleur des traits de la grille
GRID_X_MIN = -math.floor(GRID_WIDTH/2); # Coordonnees dans referentiel enonce
GRID_X_MAX = math.floor(GRID_WIDTH/2); # Coordonnees dans referentiel enonce
GRID_Y_MIN = -math.floor(GRID_HEIGHT/2); # Coordonnees dans referentiel enonce
GRID_Y_MAX = math.floor(GRID_HEIGHT/2); # Coordonnees dans referentiel enonce
#
CANVAS_WIDTH = GRID_WIDTH*GRID_STEP; # Taille resultante du canvas (zone dessin)
CANVAS_HEIGHT = GRID_HEIGHT*GRID_STEP; # Taille resultante du canvas (zone dessin)
CANVAS_BG_COLOR = "white"; # Couleur de fond du canvas (zone dessin)
#
NB_PARTICLES = 500; # Nombre de particles a simuler
PARTICLE_SIZE = GRID_STEP*2/3; # Taille des cercles pour representer chq particule
PARTICLE_COLOR = "red"; # Couleur des particules
#
NB_SIMU_STEPS = 100; # Nombre de pas de temps dans la simulation
SIMU_INVSPEED = 100; # Nombre de millisecondes entre chaque pas de temps
## The only shared var
paused=False;
#### Fonction des particules
# Initialise les positions des particules
def initParticles(N):
particles = [];
for p in xrange(N):
initialPos = { 'x': 0, 'y': 0 };
particles.append(initialPos);
return particles;
# Deplace aleatoirement une unique particule, en respectant la "condition de bordure"
def moveSingleParticle(oldPos):
aleat = random.randint(0, 3);
if (aleat==0):
newPos = { 'x':oldPos['x']+1, 'y': oldPos['y']};
elif (aleat==1):
newPos = { 'x': oldPos['x']-1, 'y': oldPos['y']};
elif (aleat==2):
newPos = { 'x': oldPos['x'], 'y': oldPos['y']+1};
elif (aleat==3):
newPos = { 'x': oldPos['x'], 'y': oldPos['y']-1};
else:
print("Should not have reached here! "+str(aleat)+" is not an accepted output");
## "Lorsqu'une particule rencontre une paroi, elle ne bouge pas si le mouvement
## determine aleatoirement la fait traverser cette paroi."
if (newPos['x']<GRID_X_MIN+1):
newPos['x'] = GRID_X_MIN+1;
elif (newPos['x']>GRID_X_MAX-1):
newPos['x'] = GRID_X_MAX-1;
elif (newPos['y']<GRID_Y_MIN+1):
newPos['y'] = GRID_Y_MIN+1;
elif (newPos['y']>GRID_Y_MAX-1):
newPos['y'] = GRID_Y_MAX-1;
return newPos;
# Deplace toutes les particules d'un pas aleatoire
def moveParticles(particles):
newParticles = [];
for p in xrange(len(particles)):
newParticles.append(moveSingleParticle(particles[p]));
return(newParticles);
def applyGravity(particles):
newParticles = [];
for p in xrange(len(particles)):
part = particles[p];
if (part['y']<GRID_Y_MAX-1): # Only change value if it particle does not exit screen
part['y'] += 1; # Make particle go down
newParticles.append(part);
return(newParticles);
#### Problem/Model to GUI/View functions
# Convertit la liste des positions des particules en un tableau
# (proche de la "grille graphique")
def convertToMatrix(particles):
matrix = [[0 for x in range(GRID_WIDTH)] for y in range(GRID_WIDTH)];
for p in xrange(len(particles)):
currentParticle = particles[p];
tx = int(math.floor(currentParticle['x']-GRID_X_MIN));
ty = int(math.floor(currentParticle['y']-GRID_Y_MIN));
##print("----------- Particle position: ("+str(x)+","+str(y)+")->("+str(tx)+","+str(ty)+")"); ## Debug
##print("----------- Grid: ["+str(GRID_X_MIN)+"->"+str(GRID_X_MAX)+" ; "+str(GRID_Y_MIN)+"->"+str(GRID_Y_MAX)+"] / ("+str(GRID_WIDTH)+", "+str(GRID_HEIGHT)+")");
matrix[tx][ty] += 1;
##print(matrix); # for debugging
return matrix;
#### Fonctions graphiques
# Un/Pauses the simulation
def pause(pauseButton):
global paused; ## required to set global var
paused = not paused;
if (paused):
pauseButton.config(text="Unpause");
else:
pauseButton.config(text="Pause");
# Creation & Placement des elements graphiques
def initGUI(rootWindow):
##global canvas; ## for debugging purpose
canvas = tkinter.Canvas(rootWindow, width=CANVAS_WIDTH, height=CANVAS_HEIGHT);
canvas.config(background=CANVAS_BG_COLOR);
canvas.pack();
slider = tkinter.Scale(rootWindow, from_=0, to=10, orient=tkinter.HORIZONTAL);
slider.set(0);
#slider.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
slider.pack();
label = tkinter.Label(rootWindow, text='t=0');
labelfont = ('times', 20, 'bold');
label.config(bg='black', fg='yellow');
label.config(font=labelfont);
label.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
label.pack(); # expand=YES, fill=BOTH
startButton = tkinter.Button(rootWindow, text="Démarrer",
command= lambda: startSimulationLoop(canvas,label,slider));
startButton.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
startButton.pack();
pauseButton = tkinter.Button(rootWindow, text="Pause",
command=lambda: pause(pauseButton));
pauseButton.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
pauseButton.pack();
exitButton = tkinter.Button(rootWindow, text="Sortir",
command=rootWindow.destroy);
exitButton.config(height=GUI_ELT_HEIGHT, width=GUI_ELT_WIDTH);
exitButton.pack();
def d | canvas):
for x in xrange(0, CANVAS_WIDTH, GRID_STEP):
l1 = canvas.create_line(x, 0, x, CANVAS_HEIGHT, fill=GRID_COLOR);
canvas.itemconfig(l1, tags=("vl")); ## Useless
for y in xrange(0, CANVAS_HEIGHT, GRID_STEP):
l2 = canvas.create_line(0, y, CANVAS_WIDTH, y, fill=GRID_COLOR);
canvas.itemconfig(l2, tags=("hl")); ## Useless
ctr = canvas.create_oval(-GRID_X_MIN*GRID_STEP-PARTICLE_SIZE/2,
-GRID_Y_MIN*GRID_STEP-PARTICLE_SIZE/2,
-GRID_X_MIN*GRID_STEP+PARTICLE_SIZE/2,
-GRID_Y_MIN*GRID_STEP+PARTICLE_SIZE/2,
outline="grey", fill="grey");
# def drawParticlesFromPositions(canvas, particles):
# for p in xrange(len(particles)):
# currentParticle = particles[p];
# tx = currentParticle['x']-GRID_X_MIN;
# ty = currentParticle['y']-GRID_Y_MIN;
# txg = tx*GRID_STEP;
# tyg = ty*GRID_STEP;
# o = canvas.create_oval(txg-PARTICLE_SIZE, tyg-PARTICLE_SIZE,
# txg+PARTICLE_SIZE, tyg+PARTICLE_SIZE,
# outline=PARTICLE_COLOR, fill=PARTICLE_COLOR);
# canvas.itemconfig(o, tags=("part"+str(p))); ## Useless
# canvas.update_idletasks(); # THIS IS A DIRTY HACK!!!
def drawParticlesFromGrid(canvas, matrix):
for x in xrange(GRID_WIDTH):
for y in xrange(GRID_HEIGHT):
if (matrix[x][y]>0):
xg = x*GRID_STEP;
yg = y*GRID_STEP;
o = canvas.create_oval(xg-PARTICLE_SIZE, yg-PARTICLE_SIZE,
xg+PARTICLE_SIZE, yg+PARTICLE_SIZE,
outline=PARTICLE_COLOR, fill=PARTICLE_COLOR);
canvas.itemconfig(o, tags=("part("+str(x)+"/"+str(y)+")")); ## Useless
##print("----------- Particle position: "+str(xg)+"/"+str(yg)); ## Debug
t = canvas.create_text((xg, yg), text=str(matrix[x][y]), font=FONT);
canvas.itemconfig(t, tags=("partCount("+str(x)+"/"+str(y)+")")); ## Useless
canvas.update_idletasks(); # THIS IS A DIRTY HACK!!!
def drawTime(label, t):
label.configure(text="t="+str(t));
label.update_idletasks(); # THIS IS A DIRTY HACK!!!
# Lance la simulation with loop
## Problem1: we do no give a chance to the GUI elements to update => we need to force them
## Problem2: since the event loop is broken, the "Sortir" button does not work.
# def startSimulationLoop(canvas, label):
# particles = initParticles(NB_PARTICLES);
# for step in xrange(NB_SIMU_STEPS):
# ## print("*** DRAWING STEP#"+str(step)); ## Debug
# # for the fun of seeing things move
# canvas.delete("all"); # optimization:
# drawGrid(canvas); # remove only particles
# ### drawParticlesFromPositions(canvas, particles);
# drawParticlesFromGrid(canvas, convertToMatrix(particles));
# drawTime(label, step);
# # actions reelles du pas de temps de la simu
# ## print("*** MOVING PARTICLES"); ## Debug
# particles = moveParticles(particles);
# time.sleep(SIMU_INVSPEED);
# Execute un pas de simulation (si on n'est pas en pause) et se
# rappelle elle-même au bout un certain delai
def oneSimulationStep(simuID, step, canvas, label, particles, gravity):
global paused; ## required to get global var
if (not paused):
## print("*** DRAWING STEP#"+str(step)); ## Debug
# for the fun of seeing things move
canvas.delete("all"); # optimization:
drawGrid(canvas); # remove only particles
### drawParticlesFromPositions(canvas, particles);
matrix = convertToMatrix(particles);
drawParticlesFromGrid(canvas, matrix);
drawTime(label, step);
# actions reelles du pas de temps de la simu
## print("*** MOVING PARTICLES"); ## Debug
## print(particles); ## Debug
## print(matrix); ## Debug
particles = moveParticles(particles);
if (gravity!=0 and step%gravity==0):
## print("*** Applying gravity: step="+str(step)); ## Debug
particles = applyGravity(particles);
step=step+1;
# Whatever the status of pause, we recall ourselves
# NOTE: nope, otherwise multiple instances run in parallel !!!
##print("my simuID="+str(simuID)+" / "+"mainSimuID="+str(mainSimuID)); ## debug
if (simuID==mainSimuID):
canvas.after(SIMU_INVSPEED, oneSimulationStep, simuID, step, canvas, label, particles, gravity);
# Lance la simulation (via un timer)
def startSimulationLoop(canvas, label, slider):
global mainSimuID;
mainSimuID = math.floor(150000*random.random());
particles = initParticles(NB_PARTICLES);
gravity = slider.get();
##print("*** Starting simulation with gravity="+str(gravity));
oneSimulationStep(mainSimuID, 1, canvas, label, particles, gravity);
##### Lancement automatique du programme
def main():
# Les elements graphiques
rootWindow = tkinter.Tk(); # une fenetre graphique TK
rootWindow.title("Ma Super Simulation du Mouvement Brownien");
initGUI(rootWindow);
rootWindow.mainloop();
main();
| rawGrid( | identifier_name |
roster.go | package gui
import (
"fmt"
"html"
"log"
"sort"
"time"
"github.com/gotk3/gotk3/gdk"
"github.com/gotk3/gotk3/gtk"
rosters "github.com/twstrike/coyim/roster"
"github.com/twstrike/coyim/ui"
)
type roster struct {
widget *gtk.ScrolledWindow
model *gtk.TreeStore
view *gtk.TreeView
checkEncrypted func(to string) bool
sendMessage func(to, message string)
isCollapsed map[string]bool
toCollapse []*gtk.TreePath
ui *gtkUI
}
const (
indexJid = 0
indexDisplayName = 1
indexAccountID = 2
indexColor = 3
indexBackgroundColor = 4
indexWeight = 5
indexParentJid = 0
indexParentDisplayName = 1
indexTooltip = 6
indexStatusIcon = 7
indexRowType = 8
)
func (u *gtkUI) newRoster() *roster {
builder := builderForDefinition("Roster")
r := &roster{
isCollapsed: make(map[string]bool),
ui: u,
}
builder.ConnectSignals(map[string]interface{}{
"on_activate_buddy": r.onActivateBuddy,
})
obj, _ := builder.GetObject("roster")
r.widget = obj.(*gtk.ScrolledWindow)
obj, _ = builder.GetObject("roster-view")
r.view = obj.(*gtk.TreeView)
obj, _ = builder.GetObject("roster-model")
r.model = obj.(*gtk.TreeStore)
u.displaySettings.update()
r.view.Connect("button-press-event", r.onButtonPress)
return r
}
func (r *roster) getAccount(id string) (*account, bool) {
return r.ui.accountManager.getAccountByID(id)
}
func | (m *gtk.TreeStore, iter *gtk.TreeIter, index int) string {
val, _ := m.GetValue(iter, index)
v, _ := val.GetString()
return v
}
func (r *roster) getAccountAndJidFromEvent(bt *gdk.EventButton) (jid string, account *account, rowType string, ok bool) {
x := bt.X()
y := bt.Y()
path := new(gtk.TreePath)
found := r.view.GetPathAtPos(int(x), int(y), path, nil, nil, nil)
if !found {
return "", nil, "", false
}
iter, err := r.model.GetIter(path)
if err != nil {
return "", nil, "", false
}
jid = getFromModelIter(r.model, iter, indexJid)
accountID := getFromModelIter(r.model, iter, indexAccountID)
rowType = getFromModelIter(r.model, iter, indexRowType)
account, ok = r.getAccount(accountID)
return jid, account, rowType, ok
}
func (r *roster) createAccountPeerPopup(jid string, account *account, bt *gdk.EventButton) {
builder := builderForDefinition("ContactPopupMenu")
obj, _ := builder.GetObject("contactMenu")
mn := obj.(*gtk.Menu)
builder.ConnectSignals(map[string]interface{}{
"on_remove_contact": func() {
account.session.RemoveContact(jid)
r.ui.removePeer(account, jid)
r.redraw()
},
"on_allow_contact_to_see_status": func() {
account.session.ApprovePresenceSubscription(jid, "" /* generate id */)
},
"on_forbid_contact_to_see_status": func() {
account.session.DenyPresenceSubscription(jid, "" /* generate id */)
},
"on_ask_contact_to_see_status": func() {
account.session.RequestPresenceSubscription(jid)
},
"on_dump_info": func() {
r.debugPrintRosterFor(account.session.GetConfig().Account)
},
})
mn.ShowAll()
mn.PopupAtMouseCursor(nil, nil, int(bt.Button()), bt.Time())
}
func (r *roster) createAccountPopup(jid string, account *account, bt *gdk.EventButton) {
builder := builderForDefinition("AccountPopupMenu")
obj, _ := builder.GetObject("accountMenu")
mn := obj.(*gtk.Menu)
builder.ConnectSignals(map[string]interface{}{
"on_connect": func() {
account.connect()
},
"on_disconnect": func() {
account.disconnect()
},
"on_dump_info": func() {
r.debugPrintRosterFor(account.session.GetConfig().Account)
},
})
connx, _ := builder.GetObject("connectMenuItem")
connect := connx.(*gtk.MenuItem)
dconnx, _ := builder.GetObject("disconnectMenuItem")
disconnect := dconnx.(*gtk.MenuItem)
connect.SetSensitive(account.session.IsDisconnected())
disconnect.SetSensitive(account.session.IsConnected())
mn.ShowAll()
mn.PopupAtMouseCursor(nil, nil, int(bt.Button()), bt.Time())
}
func (r *roster) onButtonPress(view *gtk.TreeView, ev *gdk.Event) bool {
bt := &gdk.EventButton{ev}
if bt.Button() == 0x03 {
jid, account, rowType, ok := r.getAccountAndJidFromEvent(bt)
if ok {
switch rowType {
case "peer":
r.createAccountPeerPopup(jid, account, bt)
case "account":
r.createAccountPopup(jid, account, bt)
}
}
}
return false
}
func (r *roster) onActivateBuddy(v *gtk.TreeView, path *gtk.TreePath) {
selection, _ := v.GetSelection()
defer selection.UnselectPath(path)
iter, err := r.model.GetIter(path)
if err != nil {
return
}
jid := getFromModelIter(r.model, iter, indexJid)
accountID := getFromModelIter(r.model, iter, indexAccountID)
rowType := getFromModelIter(r.model, iter, indexRowType)
if rowType != "peer" {
r.isCollapsed[jid] = !r.isCollapsed[jid]
r.redraw()
return
}
account, ok := r.getAccount(accountID)
if !ok {
return
}
r.openConversationWindow(account, jid)
}
func (r *roster) openConversationWindow(account *account, to string) (*conversationWindow, error) {
c, ok := account.getConversationWith(to)
if !ok {
textBuffer := r.ui.getTags().createTextBuffer()
c = account.createConversationWindow(to, r.ui.displaySettings, textBuffer)
r.ui.connectShortcutsChildWindow(c.win)
r.ui.connectShortcutsConversationWindow(c)
c.parentWin = r.ui.window
}
c.Show()
return c, nil
}
func (r *roster) displayNameFor(account *account, from string) string {
p, ok := r.ui.getPeer(account, from)
if !ok {
return from
}
return p.NameForPresentation()
}
func (r *roster) presenceUpdated(account *account, from, show, showStatus string, gone bool) {
c, ok := account.getConversationWith(from)
if !ok {
return
}
doInUIThread(func() {
c.appendStatus(r.displayNameFor(account, from), time.Now(), show, showStatus, gone)
})
}
func (r *roster) messageReceived(account *account, from string, timestamp time.Time, encrypted bool, message []byte) {
doInUIThread(func() {
conv, err := r.openConversationWindow(account, from)
if err != nil {
return
}
conv.appendMessage(r.displayNameFor(account, from), timestamp, encrypted, ui.StripHTML(message), false)
})
}
func (r *roster) update(account *account, entries *rosters.List) {
r.ui.accountManager.Lock()
defer r.ui.accountManager.Unlock()
r.ui.accountManager.setContacts(account, entries)
}
func (r *roster) debugPrintRosterFor(nm string) {
r.ui.accountManager.RLock()
defer r.ui.accountManager.RUnlock()
for account, rs := range r.ui.accountManager.getAllContacts() {
if account.session.GetConfig().Is(nm) {
rs.Iter(func(_ int, item *rosters.Peer) {
fmt.Printf("-> %s\n", item.Dump())
})
}
}
fmt.Printf(" ************************************** \n")
fmt.Println()
}
func isNominallyVisible(p *rosters.Peer) bool {
return (p.Subscription != "none" && p.Subscription != "") || p.PendingSubscribeID != ""
}
func shouldDisplay(p *rosters.Peer, showOffline bool) bool {
return isNominallyVisible(p) && (showOffline || p.Online)
}
func isAway(p *rosters.Peer) bool {
switch p.Status {
case "dnd", "xa", "away":
return true
}
return false
}
func isOnline(p *rosters.Peer) bool {
return p.PendingSubscribeID == "" && p.Online
}
func decideStatusFor(p *rosters.Peer) string {
if p.PendingSubscribeID != "" {
return "unknown"
}
if !p.Online {
return "offline"
}
switch p.Status {
case "dnd":
return "busy"
case "xa":
return "extended-away"
case "away":
return "away"
}
return "available"
}
func decideColorFor(p *rosters.Peer) string {
if !p.Online {
return "#aaaaaa"
}
return "#000000"
}
func createGroupDisplayName(parentName string, counter *counter, isExpanded bool) string {
name := parentName
if !isExpanded {
name = fmt.Sprintf("[%s]", name)
}
return fmt.Sprintf("%s (%d/%d)", name, counter.online, counter.total)
}
func createTooltipFor(item *rosters.Peer) string {
pname := html.EscapeString(item.NameForPresentation())
jid := html.EscapeString(item.Jid)
if pname != jid {
return fmt.Sprintf("%s (%s)", pname, jid)
}
return jid
}
func (r *roster) addItem(item *rosters.Peer, parentIter *gtk.TreeIter, indent string) {
iter := r.model.Append(parentIter)
setAll(r.model, iter,
item.Jid,
fmt.Sprintf("%s %s", indent, item.NameForPresentation()),
item.BelongsTo,
decideColorFor(item),
"#ffffff",
nil,
createTooltipFor(item),
)
r.model.SetValue(iter, indexRowType, "peer")
r.model.SetValue(iter, indexStatusIcon, statusIcons[decideStatusFor(item)].getPixbuf())
}
// redrawMerged renders the contacts of every account into one merged
// top-level group. It holds the account manager read lock for the
// whole redraw.
func (r *roster) redrawMerged() {
showOffline := !r.ui.config.Display.ShowOnlyOnline
r.ui.accountManager.RLock()
defer r.ui.accountManager.RUnlock()
// displayGroup records the paths of collapsed rows in r.toCollapse;
// reset it, expand everything, then collapse the recorded rows.
r.toCollapse = nil
grp := rosters.TopLevelGroup()
for account, contacts := range r.ui.accountManager.getAllContacts() {
contacts.AddTo(grp, account.session.GroupDelimiter)
}
accountCounter := &counter{}
r.displayGroup(grp, nil, accountCounter, showOffline, "")
r.view.ExpandAll()
for _, path := range r.toCollapse {
r.view.CollapseRow(path)
}
}
// counter tallies how many peers exist in a group or account and how
// many of them are online; it feeds the "(online/total)" part of the
// group header label.
type counter struct {
	total  int
	online int
}

// inc bumps each tally whose corresponding flag is set; the two
// checks are independent of each other.
func (c *counter) inc(total, online bool) {
	if online {
		c.online++
	}
	if total {
		c.total++
	}
}
// displayGroup renders group g and, recursively, its subgroups under
// parentIter. accountCounter accumulates across the whole recursion;
// a fresh groupCounter counts only this group's direct peers. The
// top-level group has an empty GroupName and gets no row of its own.
func (r *roster) displayGroup(g *rosters.Group, parentIter *gtk.TreeIter, accountCounter *counter, showOffline bool, accountName string) {
pi := parentIter
groupCounter := &counter{}
// groupID keys the collapsed-state map; the account-name prefix keeps
// equally named groups of different accounts distinct.
groupID := accountName + "//" + g.FullGroupName()
if g.GroupName != "" {
pi = r.model.Append(parentIter)
r.model.SetValue(pi, indexParentJid, groupID)
r.model.SetValue(pi, indexRowType, "group")
r.model.SetValue(pi, indexWeight, 500)
r.model.SetValue(pi, indexBackgroundColor, "#e9e7f3")
}
for _, item := range g.Peers() {
vs := isNominallyVisible(item)
o := isOnline(item)
// Counters include every nominally visible peer, even ones hidden
// by the show-offline setting below.
accountCounter.inc(vs, vs && o)
groupCounter.inc(vs, vs && o)
if shouldDisplay(item, showOffline) {
r.addItem(item, pi, "")
}
}
for _, gr := range g.Groups() {
r.displayGroup(gr, pi, accountCounter, showOffline, accountName)
}
if g.GroupName != "" {
// Decide collapsed state, remember the path for CollapseRow, and
// only then write the final header label.
parentPath, _ := r.model.GetPath(pi)
shouldCollapse, ok := r.isCollapsed[groupID]
isExpanded := true
if ok && shouldCollapse {
isExpanded = false
r.toCollapse = append(r.toCollapse, parentPath)
}
r.model.SetValue(pi, indexParentDisplayName, createGroupDisplayName(g.FullGroupName(), groupCounter, isExpanded))
}
}
// redrawSeparateAccount renders one account as its own top-level row
// with its grouped contacts underneath, then fills in the account
// row's columns, colors, collapsed state, status icon and label.
func (r *roster) redrawSeparateAccount(account *account, contacts *rosters.List, showOffline bool) {
parentIter := r.model.Append(nil)
accountCounter := &counter{}
grp := contacts.Grouped(account.session.GroupDelimiter)
parentName := account.session.GetConfig().Account
r.displayGroup(grp, parentIter, accountCounter, showOffline, parentName)
r.model.SetValue(parentIter, indexParentJid, parentName)
r.model.SetValue(parentIter, indexAccountID, account.session.GetConfig().ID())
r.model.SetValue(parentIter, indexRowType, "account")
r.model.SetValue(parentIter, indexWeight, 700)
// Disconnected accounts get a lighter background.
bgcolor := "#918caa"
if account.session.IsDisconnected() {
bgcolor = "#d5d3de"
}
r.model.SetValue(parentIter, indexBackgroundColor, bgcolor)
parentPath, _ := r.model.GetPath(parentIter)
shouldCollapse, ok := r.isCollapsed[parentName]
isExpanded := true
if ok && shouldCollapse {
isExpanded = false
r.toCollapse = append(r.toCollapse, parentPath)
}
// Map session state to a status icon: offline / available / connecting.
var stat string
if account.session.IsDisconnected() {
stat = "offline"
} else if account.session.IsConnected() {
stat = "available"
} else {
stat = "connecting"
}
r.model.SetValue(parentIter, indexStatusIcon, statusIcons[stat].getPixbuf())
r.model.SetValue(parentIter, indexParentDisplayName, createGroupDisplayName(parentName, accountCounter, isExpanded))
}
// sortedAccounts returns every account known to the account manager,
// ordered alphabetically by account name.
func (r *roster) sortedAccounts() []*account {
var as []*account
for account := range r.ui.accountManager.getAllContacts() {
if account == nil {
// NOTE(review): a nil account is logged but still appended below —
// confirm callers (and the sort comparator) tolerate nil entries.
log.Printf("adding an account that is nil...\n")
}
as = append(as, account)
}
sort.Sort(byAccountNameAlphabetic(as))
return as
}
// redrawSeparate renders each account as its own top-level subtree,
// in alphabetical order, under the account manager read lock.
func (r *roster) redrawSeparate() {
showOffline := !r.ui.config.Display.ShowOnlyOnline
r.ui.accountManager.RLock()
defer r.ui.accountManager.RUnlock()
// Collapsed paths are collected during rendering, then applied after
// a full ExpandAll.
r.toCollapse = nil
for _, account := range r.sortedAccounts() {
r.redrawSeparateAccount(account, r.ui.accountManager.getContacts(account), showOffline)
}
r.view.ExpandAll()
for _, path := range r.toCollapse {
r.view.CollapseRow(path)
}
}
// Page indices for the notebook that switches between the
// disconnected view, the connecting spinner and the roster itself.
const disconnectedPageIndex = 0
const spinnerPageIndex = 1
const rosterPageIndex = 2
// redraw clears the tree model and rebuilds it, either with one
// subtree per account or as a single merged list, depending on the
// UI setting.
func (r *roster) redraw() {
//TODO: this should be behind a mutex
r.model.Clear()
if r.ui.shouldViewAccounts() {
r.redrawSeparate()
} else {
r.redrawMerged()
}
}
// setAll writes the given values into consecutive columns starting at
// column 0; a nil entry leaves that column untouched.
func setAll(v *gtk.TreeStore, iter *gtk.TreeIter, values ...interface{}) {
	for col := 0; col < len(values); col++ {
		if values[col] == nil {
			continue
		}
		v.SetValue(iter, col, values[col])
	}
}
| getFromModelIter | identifier_name |
roster.go | package gui
import (
"fmt"
"html"
"log"
"sort"
"time"
"github.com/gotk3/gotk3/gdk"
"github.com/gotk3/gotk3/gtk"
rosters "github.com/twstrike/coyim/roster"
"github.com/twstrike/coyim/ui"
)
// roster is the contact-list pane: a tree of accounts, groups and
// peers backed by a GTK TreeStore.
type roster struct {
widget *gtk.ScrolledWindow
model *gtk.TreeStore
view *gtk.TreeView
checkEncrypted func(to string) bool
sendMessage func(to, message string)
// isCollapsed remembers which group/account rows the user collapsed,
// keyed by the identifier stored in indexParentJid.
isCollapsed map[string]bool
// toCollapse collects paths to re-collapse after ExpandAll during a
// redraw.
toCollapse []*gtk.TreePath
ui *gtkUI
}
// Column indices of the roster TreeStore. Note that "parent" rows
// (groups and accounts) reuse columns 0 and 1 for their own id and
// display label.
const (
indexJid = 0
indexDisplayName = 1
indexAccountID = 2
indexColor = 3
indexBackgroundColor = 4
indexWeight = 5
indexParentJid = 0
indexParentDisplayName = 1
indexTooltip = 6
indexStatusIcon = 7
indexRowType = 8
)
// newRoster builds the roster pane from the "Roster" UI definition,
// wiring up the activation and button-press handlers.
func (u *gtkUI) newRoster() *roster {
builder := builderForDefinition("Roster")
r := &roster{
isCollapsed: make(map[string]bool),
ui: u,
}
builder.ConnectSignals(map[string]interface{}{
"on_activate_buddy": r.onActivateBuddy,
})
// Fetch the widgets declared in the builder definition.
obj, _ := builder.GetObject("roster")
r.widget = obj.(*gtk.ScrolledWindow)
obj, _ = builder.GetObject("roster-view")
r.view = obj.(*gtk.TreeView)
obj, _ = builder.GetObject("roster-model")
r.model = obj.(*gtk.TreeStore)
u.displaySettings.update()
// Right-clicks are handled separately to open context menus.
r.view.Connect("button-press-event", r.onButtonPress)
return r
}
// getAccount resolves an account id (as stored in the indexAccountID
// column) back to the account object; ok is false when it is unknown.
func (r *roster) getAccount(id string) (*account, bool) {
	acc, found := r.ui.accountManager.getAccountByID(id)
	return acc, found
}
// getFromModelIter reads the column at index for the given iter as a
// string; lookup and conversion errors are ignored, yielding "".
func getFromModelIter(m *gtk.TreeStore, iter *gtk.TreeIter, index int) string {
	value, _ := m.GetValue(iter, index)
	text, _ := value.GetString()
	return text
}
// getAccountAndJidFromEvent maps a mouse-button event to the roster
// row under the pointer, returning the row's jid, owning account and
// row type (as stored in indexRowType). ok is false when no row is at
// the event position, the iter cannot be resolved, or the account id
// is unknown.
func (r *roster) getAccountAndJidFromEvent(bt *gdk.EventButton) (jid string, account *account, rowType string, ok bool) {
x := bt.X()
y := bt.Y()
path := new(gtk.TreePath)
found := r.view.GetPathAtPos(int(x), int(y), path, nil, nil, nil)
if !found {
return "", nil, "", false
}
iter, err := r.model.GetIter(path)
if err != nil {
return "", nil, "", false
}
jid = getFromModelIter(r.model, iter, indexJid)
accountID := getFromModelIter(r.model, iter, indexAccountID)
rowType = getFromModelIter(r.model, iter, indexRowType)
account, ok = r.getAccount(accountID)
return jid, account, rowType, ok
}
// createAccountPeerPopup opens the right-click context menu for a
// peer row, wiring each menu action to the corresponding session
// operation for this jid.
func (r *roster) createAccountPeerPopup(jid string, account *account, bt *gdk.EventButton) {
builder := builderForDefinition("ContactPopupMenu")
obj, _ := builder.GetObject("contactMenu")
mn := obj.(*gtk.Menu)
builder.ConnectSignals(map[string]interface{}{
"on_remove_contact": func() {
// Remove server-side, then locally, then refresh the view.
account.session.RemoveContact(jid)
r.ui.removePeer(account, jid)
r.redraw()
},
"on_allow_contact_to_see_status": func() {
account.session.ApprovePresenceSubscription(jid, "" /* generate id */)
},
"on_forbid_contact_to_see_status": func() {
account.session.DenyPresenceSubscription(jid, "" /* generate id */)
},
"on_ask_contact_to_see_status": func() {
account.session.RequestPresenceSubscription(jid)
},
"on_dump_info": func() {
r.debugPrintRosterFor(account.session.GetConfig().Account)
},
})
mn.ShowAll()
mn.PopupAtMouseCursor(nil, nil, int(bt.Button()), bt.Time())
}
// createAccountPopup opens the right-click context menu for an
// account row, enabling connect/disconnect according to the current
// session state.
func (r *roster) createAccountPopup(jid string, account *account, bt *gdk.EventButton) {
builder := builderForDefinition("AccountPopupMenu")
obj, _ := builder.GetObject("accountMenu")
mn := obj.(*gtk.Menu)
builder.ConnectSignals(map[string]interface{}{
"on_connect": func() {
account.connect()
},
"on_disconnect": func() {
account.disconnect()
},
"on_dump_info": func() {
r.debugPrintRosterFor(account.session.GetConfig().Account)
},
})
// Only offer the action that makes sense for the current state.
connx, _ := builder.GetObject("connectMenuItem")
connect := connx.(*gtk.MenuItem)
dconnx, _ := builder.GetObject("disconnectMenuItem")
disconnect := dconnx.(*gtk.MenuItem)
connect.SetSensitive(account.session.IsDisconnected())
disconnect.SetSensitive(account.session.IsConnected())
mn.ShowAll()
mn.PopupAtMouseCursor(nil, nil, int(bt.Button()), bt.Time())
}
// onButtonPress handles clicks on the roster view and opens a context
// menu on right-click, depending on the kind of row hit. It always
// returns false so GTK continues normal event propagation.
func (r *roster) onButtonPress(view *gtk.TreeView, ev *gdk.Event) bool {
	// GDK numbers mouse buttons from 1; button 3 is the secondary
	// (right) button. Named to replace the magic literal 0x03.
	const rightMouseButton = 0x03
	bt := &gdk.EventButton{ev}
	if bt.Button() != rightMouseButton {
		return false
	}
	jid, account, rowType, ok := r.getAccountAndJidFromEvent(bt)
	if !ok {
		return false
	}
	switch rowType {
	case "peer":
		r.createAccountPeerPopup(jid, account, bt)
	case "account":
		r.createAccountPopup(jid, account, bt)
	}
	return false
}
// onActivateBuddy handles row activation: peer rows open a
// conversation window; group/account rows toggle their collapsed
// state (keyed by the value in the jid column) and redraw.
func (r *roster) onActivateBuddy(v *gtk.TreeView, path *gtk.TreePath) {
selection, _ := v.GetSelection()
defer selection.UnselectPath(path)
iter, err := r.model.GetIter(path)
if err != nil {
return
}
jid := getFromModelIter(r.model, iter, indexJid)
accountID := getFromModelIter(r.model, iter, indexAccountID)
rowType := getFromModelIter(r.model, iter, indexRowType)
if rowType != "peer" {
// Non-peer rows (groups/accounts) toggle collapse instead.
r.isCollapsed[jid] = !r.isCollapsed[jid]
r.redraw()
return
}
account, ok := r.getAccount(accountID)
if !ok {
return
}
r.openConversationWindow(account, jid)
}
// openConversationWindow returns the conversation window for `to`,
// creating and wiring one up on first use, and shows it. The error
// result is currently always nil.
func (r *roster) openConversationWindow(account *account, to string) (*conversationWindow, error) {
c, ok := account.getConversationWith(to)
if !ok {
textBuffer := r.ui.getTags().createTextBuffer()
c = account.createConversationWindow(to, r.ui.displaySettings, textBuffer)
r.ui.connectShortcutsChildWindow(c.win)
r.ui.connectShortcutsConversationWindow(c)
c.parentWin = r.ui.window
}
c.Show()
return c, nil
}
// displayNameFor resolves a bare jid to the peer's presentation name,
// falling back to the jid itself when the peer is unknown.
func (r *roster) displayNameFor(account *account, from string) string {
	if peer, known := r.ui.getPeer(account, from); known {
		return peer.NameForPresentation()
	}
	return from
}
// presenceUpdated appends a status-change line to the open
// conversation with `from`, if any; it is a no-op otherwise. The UI
// mutation is marshalled onto the UI thread.
func (r *roster) presenceUpdated(account *account, from, show, showStatus string, gone bool) {
c, ok := account.getConversationWith(from)
if !ok {
return
}
doInUIThread(func() {
c.appendStatus(r.displayNameFor(account, from), time.Now(), show, showStatus, gone)
})
}
// messageReceived opens (or reuses) the conversation window for the
// sender on the UI thread and appends the incoming message with its
// HTML stripped.
func (r *roster) messageReceived(account *account, from string, timestamp time.Time, encrypted bool, message []byte) {
doInUIThread(func() {
conv, err := r.openConversationWindow(account, from)
if err != nil {
return
}
conv.appendMessage(r.displayNameFor(account, from), timestamp, encrypted, ui.StripHTML(message), false)
})
}
// update replaces the stored contact list for the account, under the
// account manager write lock.
func (r *roster) update(account *account, entries *rosters.List) {
r.ui.accountManager.Lock()
defer r.ui.accountManager.Unlock()
r.ui.accountManager.setContacts(account, entries)
}
// debugPrintRosterFor dumps the roster of the account matching nm to
// stdout, for the "dump info" debug menu entries.
func (r *roster) debugPrintRosterFor(nm string) {
r.ui.accountManager.RLock()
defer r.ui.accountManager.RUnlock()
for account, rs := range r.ui.accountManager.getAllContacts() {
if account.session.GetConfig().Is(nm) {
rs.Iter(func(_ int, item *rosters.Peer) {
fmt.Printf("-> %s\n", item.Dump())
})
}
}
fmt.Printf(" ************************************** \n")
fmt.Println()
}
func isNominallyVisible(p *rosters.Peer) bool {
return (p.Subscription != "none" && p.Subscription != "") || p.PendingSubscribeID != ""
}
func shouldDisplay(p *rosters.Peer, showOffline bool) bool {
return isNominallyVisible(p) && (showOffline || p.Online)
}
func isAway(p *rosters.Peer) bool {
switch p.Status {
case "dnd", "xa", "away":
return true
}
return false
}
func isOnline(p *rosters.Peer) bool {
return p.PendingSubscribeID == "" && p.Online
}
func decideStatusFor(p *rosters.Peer) string {
if p.PendingSubscribeID != "" {
return "unknown"
}
if !p.Online {
return "offline"
}
switch p.Status {
case "dnd":
return "busy"
case "xa":
return "extended-away"
case "away":
return "away"
}
return "available"
}
func decideColorFor(p *rosters.Peer) string {
if !p.Online {
return "#aaaaaa"
}
return "#000000"
}
func createGroupDisplayName(parentName string, counter *counter, isExpanded bool) string {
name := parentName
if !isExpanded {
name = fmt.Sprintf("[%s]", name)
}
return fmt.Sprintf("%s (%d/%d)", name, counter.online, counter.total)
}
func createTooltipFor(item *rosters.Peer) string {
pname := html.EscapeString(item.NameForPresentation())
jid := html.EscapeString(item.Jid)
if pname != jid {
return fmt.Sprintf("%s (%s)", pname, jid)
}
return jid
}
func (r *roster) addItem(item *rosters.Peer, parentIter *gtk.TreeIter, indent string) {
iter := r.model.Append(parentIter)
setAll(r.model, iter,
item.Jid,
fmt.Sprintf("%s %s", indent, item.NameForPresentation()),
item.BelongsTo,
decideColorFor(item),
"#ffffff",
nil,
createTooltipFor(item),
)
r.model.SetValue(iter, indexRowType, "peer")
r.model.SetValue(iter, indexStatusIcon, statusIcons[decideStatusFor(item)].getPixbuf())
}
func (r *roster) redrawMerged() {
showOffline := !r.ui.config.Display.ShowOnlyOnline
r.ui.accountManager.RLock()
defer r.ui.accountManager.RUnlock()
r.toCollapse = nil
grp := rosters.TopLevelGroup()
for account, contacts := range r.ui.accountManager.getAllContacts() {
contacts.AddTo(grp, account.session.GroupDelimiter)
}
accountCounter := &counter{}
r.displayGroup(grp, nil, accountCounter, showOffline, "")
r.view.ExpandAll()
for _, path := range r.toCollapse {
r.view.CollapseRow(path)
}
}
type counter struct {
total int
online int
}
func (c *counter) inc(total, online bool) {
if total {
c.total++
}
if online {
c.online++
}
}
func (r *roster) displayGroup(g *rosters.Group, parentIter *gtk.TreeIter, accountCounter *counter, showOffline bool, accountName string) {
pi := parentIter
groupCounter := &counter{}
groupID := accountName + "//" + g.FullGroupName()
if g.GroupName != "" {
pi = r.model.Append(parentIter)
r.model.SetValue(pi, indexParentJid, groupID)
r.model.SetValue(pi, indexRowType, "group")
r.model.SetValue(pi, indexWeight, 500)
r.model.SetValue(pi, indexBackgroundColor, "#e9e7f3")
}
for _, item := range g.Peers() {
vs := isNominallyVisible(item)
o := isOnline(item)
accountCounter.inc(vs, vs && o)
groupCounter.inc(vs, vs && o)
if shouldDisplay(item, showOffline) {
r.addItem(item, pi, "")
}
}
for _, gr := range g.Groups() {
r.displayGroup(gr, pi, accountCounter, showOffline, accountName)
}
if g.GroupName != "" {
parentPath, _ := r.model.GetPath(pi)
shouldCollapse, ok := r.isCollapsed[groupID]
isExpanded := true
if ok && shouldCollapse {
isExpanded = false
r.toCollapse = append(r.toCollapse, parentPath)
}
r.model.SetValue(pi, indexParentDisplayName, createGroupDisplayName(g.FullGroupName(), groupCounter, isExpanded))
}
}
func (r *roster) redrawSeparateAccount(account *account, contacts *rosters.List, showOffline bool) {
parentIter := r.model.Append(nil)
accountCounter := &counter{}
grp := contacts.Grouped(account.session.GroupDelimiter)
parentName := account.session.GetConfig().Account
r.displayGroup(grp, parentIter, accountCounter, showOffline, parentName)
r.model.SetValue(parentIter, indexParentJid, parentName)
r.model.SetValue(parentIter, indexAccountID, account.session.GetConfig().ID()) | r.model.SetValue(parentIter, indexWeight, 700)
bgcolor := "#918caa"
if account.session.IsDisconnected() {
bgcolor = "#d5d3de"
}
r.model.SetValue(parentIter, indexBackgroundColor, bgcolor)
parentPath, _ := r.model.GetPath(parentIter)
shouldCollapse, ok := r.isCollapsed[parentName]
isExpanded := true
if ok && shouldCollapse {
isExpanded = false
r.toCollapse = append(r.toCollapse, parentPath)
}
var stat string
if account.session.IsDisconnected() {
stat = "offline"
} else if account.session.IsConnected() {
stat = "available"
} else {
stat = "connecting"
}
r.model.SetValue(parentIter, indexStatusIcon, statusIcons[stat].getPixbuf())
r.model.SetValue(parentIter, indexParentDisplayName, createGroupDisplayName(parentName, accountCounter, isExpanded))
}
func (r *roster) sortedAccounts() []*account {
var as []*account
for account := range r.ui.accountManager.getAllContacts() {
if account == nil {
log.Printf("adding an account that is nil...\n")
}
as = append(as, account)
}
sort.Sort(byAccountNameAlphabetic(as))
return as
}
func (r *roster) redrawSeparate() {
showOffline := !r.ui.config.Display.ShowOnlyOnline
r.ui.accountManager.RLock()
defer r.ui.accountManager.RUnlock()
r.toCollapse = nil
for _, account := range r.sortedAccounts() {
r.redrawSeparateAccount(account, r.ui.accountManager.getContacts(account), showOffline)
}
r.view.ExpandAll()
for _, path := range r.toCollapse {
r.view.CollapseRow(path)
}
}
const disconnectedPageIndex = 0
const spinnerPageIndex = 1
const rosterPageIndex = 2
func (r *roster) redraw() {
//TODO: this should be behind a mutex
r.model.Clear()
if r.ui.shouldViewAccounts() {
r.redrawSeparate()
} else {
r.redrawMerged()
}
}
func setAll(v *gtk.TreeStore, iter *gtk.TreeIter, values ...interface{}) {
for i, val := range values {
if val != nil {
v.SetValue(iter, i, val)
}
}
} | r.model.SetValue(parentIter, indexRowType, "account") | random_line_split |
roster.go | package gui
import (
"fmt"
"html"
"log"
"sort"
"time"
"github.com/gotk3/gotk3/gdk"
"github.com/gotk3/gotk3/gtk"
rosters "github.com/twstrike/coyim/roster"
"github.com/twstrike/coyim/ui"
)
type roster struct {
widget *gtk.ScrolledWindow
model *gtk.TreeStore
view *gtk.TreeView
checkEncrypted func(to string) bool
sendMessage func(to, message string)
isCollapsed map[string]bool
toCollapse []*gtk.TreePath
ui *gtkUI
}
const (
indexJid = 0
indexDisplayName = 1
indexAccountID = 2
indexColor = 3
indexBackgroundColor = 4
indexWeight = 5
indexParentJid = 0
indexParentDisplayName = 1
indexTooltip = 6
indexStatusIcon = 7
indexRowType = 8
)
func (u *gtkUI) newRoster() *roster {
builder := builderForDefinition("Roster")
r := &roster{
isCollapsed: make(map[string]bool),
ui: u,
}
builder.ConnectSignals(map[string]interface{}{
"on_activate_buddy": r.onActivateBuddy,
})
obj, _ := builder.GetObject("roster")
r.widget = obj.(*gtk.ScrolledWindow)
obj, _ = builder.GetObject("roster-view")
r.view = obj.(*gtk.TreeView)
obj, _ = builder.GetObject("roster-model")
r.model = obj.(*gtk.TreeStore)
u.displaySettings.update()
r.view.Connect("button-press-event", r.onButtonPress)
return r
}
func (r *roster) getAccount(id string) (*account, bool) {
return r.ui.accountManager.getAccountByID(id)
}
func getFromModelIter(m *gtk.TreeStore, iter *gtk.TreeIter, index int) string {
val, _ := m.GetValue(iter, index)
v, _ := val.GetString()
return v
}
func (r *roster) getAccountAndJidFromEvent(bt *gdk.EventButton) (jid string, account *account, rowType string, ok bool) {
x := bt.X()
y := bt.Y()
path := new(gtk.TreePath)
found := r.view.GetPathAtPos(int(x), int(y), path, nil, nil, nil)
if !found {
return "", nil, "", false
}
iter, err := r.model.GetIter(path)
if err != nil {
return "", nil, "", false
}
jid = getFromModelIter(r.model, iter, indexJid)
accountID := getFromModelIter(r.model, iter, indexAccountID)
rowType = getFromModelIter(r.model, iter, indexRowType)
account, ok = r.getAccount(accountID)
return jid, account, rowType, ok
}
func (r *roster) createAccountPeerPopup(jid string, account *account, bt *gdk.EventButton) {
builder := builderForDefinition("ContactPopupMenu")
obj, _ := builder.GetObject("contactMenu")
mn := obj.(*gtk.Menu)
builder.ConnectSignals(map[string]interface{}{
"on_remove_contact": func() {
account.session.RemoveContact(jid)
r.ui.removePeer(account, jid)
r.redraw()
},
"on_allow_contact_to_see_status": func() {
account.session.ApprovePresenceSubscription(jid, "" /* generate id */)
},
"on_forbid_contact_to_see_status": func() {
account.session.DenyPresenceSubscription(jid, "" /* generate id */)
},
"on_ask_contact_to_see_status": func() {
account.session.RequestPresenceSubscription(jid)
},
"on_dump_info": func() {
r.debugPrintRosterFor(account.session.GetConfig().Account)
},
})
mn.ShowAll()
mn.PopupAtMouseCursor(nil, nil, int(bt.Button()), bt.Time())
}
func (r *roster) createAccountPopup(jid string, account *account, bt *gdk.EventButton) {
builder := builderForDefinition("AccountPopupMenu")
obj, _ := builder.GetObject("accountMenu")
mn := obj.(*gtk.Menu)
builder.ConnectSignals(map[string]interface{}{
"on_connect": func() {
account.connect()
},
"on_disconnect": func() {
account.disconnect()
},
"on_dump_info": func() {
r.debugPrintRosterFor(account.session.GetConfig().Account)
},
})
connx, _ := builder.GetObject("connectMenuItem")
connect := connx.(*gtk.MenuItem)
dconnx, _ := builder.GetObject("disconnectMenuItem")
disconnect := dconnx.(*gtk.MenuItem)
connect.SetSensitive(account.session.IsDisconnected())
disconnect.SetSensitive(account.session.IsConnected())
mn.ShowAll()
mn.PopupAtMouseCursor(nil, nil, int(bt.Button()), bt.Time())
}
func (r *roster) onButtonPress(view *gtk.TreeView, ev *gdk.Event) bool |
func (r *roster) onActivateBuddy(v *gtk.TreeView, path *gtk.TreePath) {
selection, _ := v.GetSelection()
defer selection.UnselectPath(path)
iter, err := r.model.GetIter(path)
if err != nil {
return
}
jid := getFromModelIter(r.model, iter, indexJid)
accountID := getFromModelIter(r.model, iter, indexAccountID)
rowType := getFromModelIter(r.model, iter, indexRowType)
if rowType != "peer" {
r.isCollapsed[jid] = !r.isCollapsed[jid]
r.redraw()
return
}
account, ok := r.getAccount(accountID)
if !ok {
return
}
r.openConversationWindow(account, jid)
}
func (r *roster) openConversationWindow(account *account, to string) (*conversationWindow, error) {
c, ok := account.getConversationWith(to)
if !ok {
textBuffer := r.ui.getTags().createTextBuffer()
c = account.createConversationWindow(to, r.ui.displaySettings, textBuffer)
r.ui.connectShortcutsChildWindow(c.win)
r.ui.connectShortcutsConversationWindow(c)
c.parentWin = r.ui.window
}
c.Show()
return c, nil
}
func (r *roster) displayNameFor(account *account, from string) string {
p, ok := r.ui.getPeer(account, from)
if !ok {
return from
}
return p.NameForPresentation()
}
func (r *roster) presenceUpdated(account *account, from, show, showStatus string, gone bool) {
c, ok := account.getConversationWith(from)
if !ok {
return
}
doInUIThread(func() {
c.appendStatus(r.displayNameFor(account, from), time.Now(), show, showStatus, gone)
})
}
func (r *roster) messageReceived(account *account, from string, timestamp time.Time, encrypted bool, message []byte) {
doInUIThread(func() {
conv, err := r.openConversationWindow(account, from)
if err != nil {
return
}
conv.appendMessage(r.displayNameFor(account, from), timestamp, encrypted, ui.StripHTML(message), false)
})
}
func (r *roster) update(account *account, entries *rosters.List) {
r.ui.accountManager.Lock()
defer r.ui.accountManager.Unlock()
r.ui.accountManager.setContacts(account, entries)
}
func (r *roster) debugPrintRosterFor(nm string) {
r.ui.accountManager.RLock()
defer r.ui.accountManager.RUnlock()
for account, rs := range r.ui.accountManager.getAllContacts() {
if account.session.GetConfig().Is(nm) {
rs.Iter(func(_ int, item *rosters.Peer) {
fmt.Printf("-> %s\n", item.Dump())
})
}
}
fmt.Printf(" ************************************** \n")
fmt.Println()
}
func isNominallyVisible(p *rosters.Peer) bool {
return (p.Subscription != "none" && p.Subscription != "") || p.PendingSubscribeID != ""
}
func shouldDisplay(p *rosters.Peer, showOffline bool) bool {
return isNominallyVisible(p) && (showOffline || p.Online)
}
func isAway(p *rosters.Peer) bool {
switch p.Status {
case "dnd", "xa", "away":
return true
}
return false
}
func isOnline(p *rosters.Peer) bool {
return p.PendingSubscribeID == "" && p.Online
}
func decideStatusFor(p *rosters.Peer) string {
if p.PendingSubscribeID != "" {
return "unknown"
}
if !p.Online {
return "offline"
}
switch p.Status {
case "dnd":
return "busy"
case "xa":
return "extended-away"
case "away":
return "away"
}
return "available"
}
func decideColorFor(p *rosters.Peer) string {
if !p.Online {
return "#aaaaaa"
}
return "#000000"
}
func createGroupDisplayName(parentName string, counter *counter, isExpanded bool) string {
name := parentName
if !isExpanded {
name = fmt.Sprintf("[%s]", name)
}
return fmt.Sprintf("%s (%d/%d)", name, counter.online, counter.total)
}
func createTooltipFor(item *rosters.Peer) string {
pname := html.EscapeString(item.NameForPresentation())
jid := html.EscapeString(item.Jid)
if pname != jid {
return fmt.Sprintf("%s (%s)", pname, jid)
}
return jid
}
func (r *roster) addItem(item *rosters.Peer, parentIter *gtk.TreeIter, indent string) {
iter := r.model.Append(parentIter)
setAll(r.model, iter,
item.Jid,
fmt.Sprintf("%s %s", indent, item.NameForPresentation()),
item.BelongsTo,
decideColorFor(item),
"#ffffff",
nil,
createTooltipFor(item),
)
r.model.SetValue(iter, indexRowType, "peer")
r.model.SetValue(iter, indexStatusIcon, statusIcons[decideStatusFor(item)].getPixbuf())
}
func (r *roster) redrawMerged() {
showOffline := !r.ui.config.Display.ShowOnlyOnline
r.ui.accountManager.RLock()
defer r.ui.accountManager.RUnlock()
r.toCollapse = nil
grp := rosters.TopLevelGroup()
for account, contacts := range r.ui.accountManager.getAllContacts() {
contacts.AddTo(grp, account.session.GroupDelimiter)
}
accountCounter := &counter{}
r.displayGroup(grp, nil, accountCounter, showOffline, "")
r.view.ExpandAll()
for _, path := range r.toCollapse {
r.view.CollapseRow(path)
}
}
type counter struct {
total int
online int
}
func (c *counter) inc(total, online bool) {
if total {
c.total++
}
if online {
c.online++
}
}
func (r *roster) displayGroup(g *rosters.Group, parentIter *gtk.TreeIter, accountCounter *counter, showOffline bool, accountName string) {
pi := parentIter
groupCounter := &counter{}
groupID := accountName + "//" + g.FullGroupName()
if g.GroupName != "" {
pi = r.model.Append(parentIter)
r.model.SetValue(pi, indexParentJid, groupID)
r.model.SetValue(pi, indexRowType, "group")
r.model.SetValue(pi, indexWeight, 500)
r.model.SetValue(pi, indexBackgroundColor, "#e9e7f3")
}
for _, item := range g.Peers() {
vs := isNominallyVisible(item)
o := isOnline(item)
accountCounter.inc(vs, vs && o)
groupCounter.inc(vs, vs && o)
if shouldDisplay(item, showOffline) {
r.addItem(item, pi, "")
}
}
for _, gr := range g.Groups() {
r.displayGroup(gr, pi, accountCounter, showOffline, accountName)
}
if g.GroupName != "" {
parentPath, _ := r.model.GetPath(pi)
shouldCollapse, ok := r.isCollapsed[groupID]
isExpanded := true
if ok && shouldCollapse {
isExpanded = false
r.toCollapse = append(r.toCollapse, parentPath)
}
r.model.SetValue(pi, indexParentDisplayName, createGroupDisplayName(g.FullGroupName(), groupCounter, isExpanded))
}
}
func (r *roster) redrawSeparateAccount(account *account, contacts *rosters.List, showOffline bool) {
parentIter := r.model.Append(nil)
accountCounter := &counter{}
grp := contacts.Grouped(account.session.GroupDelimiter)
parentName := account.session.GetConfig().Account
r.displayGroup(grp, parentIter, accountCounter, showOffline, parentName)
r.model.SetValue(parentIter, indexParentJid, parentName)
r.model.SetValue(parentIter, indexAccountID, account.session.GetConfig().ID())
r.model.SetValue(parentIter, indexRowType, "account")
r.model.SetValue(parentIter, indexWeight, 700)
bgcolor := "#918caa"
if account.session.IsDisconnected() {
bgcolor = "#d5d3de"
}
r.model.SetValue(parentIter, indexBackgroundColor, bgcolor)
parentPath, _ := r.model.GetPath(parentIter)
shouldCollapse, ok := r.isCollapsed[parentName]
isExpanded := true
if ok && shouldCollapse {
isExpanded = false
r.toCollapse = append(r.toCollapse, parentPath)
}
var stat string
if account.session.IsDisconnected() {
stat = "offline"
} else if account.session.IsConnected() {
stat = "available"
} else {
stat = "connecting"
}
r.model.SetValue(parentIter, indexStatusIcon, statusIcons[stat].getPixbuf())
r.model.SetValue(parentIter, indexParentDisplayName, createGroupDisplayName(parentName, accountCounter, isExpanded))
}
func (r *roster) sortedAccounts() []*account {
var as []*account
for account := range r.ui.accountManager.getAllContacts() {
if account == nil {
log.Printf("adding an account that is nil...\n")
}
as = append(as, account)
}
sort.Sort(byAccountNameAlphabetic(as))
return as
}
func (r *roster) redrawSeparate() {
showOffline := !r.ui.config.Display.ShowOnlyOnline
r.ui.accountManager.RLock()
defer r.ui.accountManager.RUnlock()
r.toCollapse = nil
for _, account := range r.sortedAccounts() {
r.redrawSeparateAccount(account, r.ui.accountManager.getContacts(account), showOffline)
}
r.view.ExpandAll()
for _, path := range r.toCollapse {
r.view.CollapseRow(path)
}
}
const disconnectedPageIndex = 0
const spinnerPageIndex = 1
const rosterPageIndex = 2
func (r *roster) redraw() {
//TODO: this should be behind a mutex
r.model.Clear()
if r.ui.shouldViewAccounts() {
r.redrawSeparate()
} else {
r.redrawMerged()
}
}
func setAll(v *gtk.TreeStore, iter *gtk.TreeIter, values ...interface{}) {
for i, val := range values {
if val != nil {
v.SetValue(iter, i, val)
}
}
}
| {
bt := &gdk.EventButton{ev}
if bt.Button() == 0x03 {
jid, account, rowType, ok := r.getAccountAndJidFromEvent(bt)
if ok {
switch rowType {
case "peer":
r.createAccountPeerPopup(jid, account, bt)
case "account":
r.createAccountPopup(jid, account, bt)
}
}
}
return false
} | identifier_body |
roster.go | package gui
import (
"fmt"
"html"
"log"
"sort"
"time"
"github.com/gotk3/gotk3/gdk"
"github.com/gotk3/gotk3/gtk"
rosters "github.com/twstrike/coyim/roster"
"github.com/twstrike/coyim/ui"
)
type roster struct {
widget *gtk.ScrolledWindow
model *gtk.TreeStore
view *gtk.TreeView
checkEncrypted func(to string) bool
sendMessage func(to, message string)
isCollapsed map[string]bool
toCollapse []*gtk.TreePath
ui *gtkUI
}
const (
indexJid = 0
indexDisplayName = 1
indexAccountID = 2
indexColor = 3
indexBackgroundColor = 4
indexWeight = 5
indexParentJid = 0
indexParentDisplayName = 1
indexTooltip = 6
indexStatusIcon = 7
indexRowType = 8
)
func (u *gtkUI) newRoster() *roster {
builder := builderForDefinition("Roster")
r := &roster{
isCollapsed: make(map[string]bool),
ui: u,
}
builder.ConnectSignals(map[string]interface{}{
"on_activate_buddy": r.onActivateBuddy,
})
obj, _ := builder.GetObject("roster")
r.widget = obj.(*gtk.ScrolledWindow)
obj, _ = builder.GetObject("roster-view")
r.view = obj.(*gtk.TreeView)
obj, _ = builder.GetObject("roster-model")
r.model = obj.(*gtk.TreeStore)
u.displaySettings.update()
r.view.Connect("button-press-event", r.onButtonPress)
return r
}
func (r *roster) getAccount(id string) (*account, bool) {
return r.ui.accountManager.getAccountByID(id)
}
func getFromModelIter(m *gtk.TreeStore, iter *gtk.TreeIter, index int) string {
val, _ := m.GetValue(iter, index)
v, _ := val.GetString()
return v
}
func (r *roster) getAccountAndJidFromEvent(bt *gdk.EventButton) (jid string, account *account, rowType string, ok bool) {
x := bt.X()
y := bt.Y()
path := new(gtk.TreePath)
found := r.view.GetPathAtPos(int(x), int(y), path, nil, nil, nil)
if !found {
return "", nil, "", false
}
iter, err := r.model.GetIter(path)
if err != nil {
return "", nil, "", false
}
jid = getFromModelIter(r.model, iter, indexJid)
accountID := getFromModelIter(r.model, iter, indexAccountID)
rowType = getFromModelIter(r.model, iter, indexRowType)
account, ok = r.getAccount(accountID)
return jid, account, rowType, ok
}
func (r *roster) createAccountPeerPopup(jid string, account *account, bt *gdk.EventButton) {
builder := builderForDefinition("ContactPopupMenu")
obj, _ := builder.GetObject("contactMenu")
mn := obj.(*gtk.Menu)
builder.ConnectSignals(map[string]interface{}{
"on_remove_contact": func() {
account.session.RemoveContact(jid)
r.ui.removePeer(account, jid)
r.redraw()
},
"on_allow_contact_to_see_status": func() {
account.session.ApprovePresenceSubscription(jid, "" /* generate id */)
},
"on_forbid_contact_to_see_status": func() {
account.session.DenyPresenceSubscription(jid, "" /* generate id */)
},
"on_ask_contact_to_see_status": func() {
account.session.RequestPresenceSubscription(jid)
},
"on_dump_info": func() {
r.debugPrintRosterFor(account.session.GetConfig().Account)
},
})
mn.ShowAll()
mn.PopupAtMouseCursor(nil, nil, int(bt.Button()), bt.Time())
}
func (r *roster) createAccountPopup(jid string, account *account, bt *gdk.EventButton) {
builder := builderForDefinition("AccountPopupMenu")
obj, _ := builder.GetObject("accountMenu")
mn := obj.(*gtk.Menu)
builder.ConnectSignals(map[string]interface{}{
"on_connect": func() {
account.connect()
},
"on_disconnect": func() {
account.disconnect()
},
"on_dump_info": func() {
r.debugPrintRosterFor(account.session.GetConfig().Account)
},
})
connx, _ := builder.GetObject("connectMenuItem")
connect := connx.(*gtk.MenuItem)
dconnx, _ := builder.GetObject("disconnectMenuItem")
disconnect := dconnx.(*gtk.MenuItem)
connect.SetSensitive(account.session.IsDisconnected())
disconnect.SetSensitive(account.session.IsConnected())
mn.ShowAll()
mn.PopupAtMouseCursor(nil, nil, int(bt.Button()), bt.Time())
}
func (r *roster) onButtonPress(view *gtk.TreeView, ev *gdk.Event) bool {
bt := &gdk.EventButton{ev}
if bt.Button() == 0x03 {
jid, account, rowType, ok := r.getAccountAndJidFromEvent(bt)
if ok {
switch rowType {
case "peer":
r.createAccountPeerPopup(jid, account, bt)
case "account":
r.createAccountPopup(jid, account, bt)
}
}
}
return false
}
func (r *roster) onActivateBuddy(v *gtk.TreeView, path *gtk.TreePath) {
selection, _ := v.GetSelection()
defer selection.UnselectPath(path)
iter, err := r.model.GetIter(path)
if err != nil {
return
}
jid := getFromModelIter(r.model, iter, indexJid)
accountID := getFromModelIter(r.model, iter, indexAccountID)
rowType := getFromModelIter(r.model, iter, indexRowType)
if rowType != "peer" {
r.isCollapsed[jid] = !r.isCollapsed[jid]
r.redraw()
return
}
account, ok := r.getAccount(accountID)
if !ok {
return
}
r.openConversationWindow(account, jid)
}
func (r *roster) openConversationWindow(account *account, to string) (*conversationWindow, error) {
c, ok := account.getConversationWith(to)
if !ok {
textBuffer := r.ui.getTags().createTextBuffer()
c = account.createConversationWindow(to, r.ui.displaySettings, textBuffer)
r.ui.connectShortcutsChildWindow(c.win)
r.ui.connectShortcutsConversationWindow(c)
c.parentWin = r.ui.window
}
c.Show()
return c, nil
}
func (r *roster) displayNameFor(account *account, from string) string {
p, ok := r.ui.getPeer(account, from)
if !ok {
return from
}
return p.NameForPresentation()
}
func (r *roster) presenceUpdated(account *account, from, show, showStatus string, gone bool) {
c, ok := account.getConversationWith(from)
if !ok {
return
}
doInUIThread(func() {
c.appendStatus(r.displayNameFor(account, from), time.Now(), show, showStatus, gone)
})
}
func (r *roster) messageReceived(account *account, from string, timestamp time.Time, encrypted bool, message []byte) {
doInUIThread(func() {
conv, err := r.openConversationWindow(account, from)
if err != nil {
return
}
conv.appendMessage(r.displayNameFor(account, from), timestamp, encrypted, ui.StripHTML(message), false)
})
}
func (r *roster) update(account *account, entries *rosters.List) {
r.ui.accountManager.Lock()
defer r.ui.accountManager.Unlock()
r.ui.accountManager.setContacts(account, entries)
}
func (r *roster) debugPrintRosterFor(nm string) {
r.ui.accountManager.RLock()
defer r.ui.accountManager.RUnlock()
for account, rs := range r.ui.accountManager.getAllContacts() {
if account.session.GetConfig().Is(nm) {
rs.Iter(func(_ int, item *rosters.Peer) {
fmt.Printf("-> %s\n", item.Dump())
})
}
}
fmt.Printf(" ************************************** \n")
fmt.Println()
}
func isNominallyVisible(p *rosters.Peer) bool {
return (p.Subscription != "none" && p.Subscription != "") || p.PendingSubscribeID != ""
}
func shouldDisplay(p *rosters.Peer, showOffline bool) bool {
return isNominallyVisible(p) && (showOffline || p.Online)
}
func isAway(p *rosters.Peer) bool {
switch p.Status {
case "dnd", "xa", "away":
return true
}
return false
}
func isOnline(p *rosters.Peer) bool {
return p.PendingSubscribeID == "" && p.Online
}
func decideStatusFor(p *rosters.Peer) string {
if p.PendingSubscribeID != "" |
if !p.Online {
return "offline"
}
switch p.Status {
case "dnd":
return "busy"
case "xa":
return "extended-away"
case "away":
return "away"
}
return "available"
}
func decideColorFor(p *rosters.Peer) string {
if !p.Online {
return "#aaaaaa"
}
return "#000000"
}
func createGroupDisplayName(parentName string, counter *counter, isExpanded bool) string {
name := parentName
if !isExpanded {
name = fmt.Sprintf("[%s]", name)
}
return fmt.Sprintf("%s (%d/%d)", name, counter.online, counter.total)
}
func createTooltipFor(item *rosters.Peer) string {
pname := html.EscapeString(item.NameForPresentation())
jid := html.EscapeString(item.Jid)
if pname != jid {
return fmt.Sprintf("%s (%s)", pname, jid)
}
return jid
}
func (r *roster) addItem(item *rosters.Peer, parentIter *gtk.TreeIter, indent string) {
iter := r.model.Append(parentIter)
setAll(r.model, iter,
item.Jid,
fmt.Sprintf("%s %s", indent, item.NameForPresentation()),
item.BelongsTo,
decideColorFor(item),
"#ffffff",
nil,
createTooltipFor(item),
)
r.model.SetValue(iter, indexRowType, "peer")
r.model.SetValue(iter, indexStatusIcon, statusIcons[decideStatusFor(item)].getPixbuf())
}
func (r *roster) redrawMerged() {
showOffline := !r.ui.config.Display.ShowOnlyOnline
r.ui.accountManager.RLock()
defer r.ui.accountManager.RUnlock()
r.toCollapse = nil
grp := rosters.TopLevelGroup()
for account, contacts := range r.ui.accountManager.getAllContacts() {
contacts.AddTo(grp, account.session.GroupDelimiter)
}
accountCounter := &counter{}
r.displayGroup(grp, nil, accountCounter, showOffline, "")
r.view.ExpandAll()
for _, path := range r.toCollapse {
r.view.CollapseRow(path)
}
}
type counter struct {
total int
online int
}
func (c *counter) inc(total, online bool) {
if total {
c.total++
}
if online {
c.online++
}
}
func (r *roster) displayGroup(g *rosters.Group, parentIter *gtk.TreeIter, accountCounter *counter, showOffline bool, accountName string) {
pi := parentIter
groupCounter := &counter{}
groupID := accountName + "//" + g.FullGroupName()
if g.GroupName != "" {
pi = r.model.Append(parentIter)
r.model.SetValue(pi, indexParentJid, groupID)
r.model.SetValue(pi, indexRowType, "group")
r.model.SetValue(pi, indexWeight, 500)
r.model.SetValue(pi, indexBackgroundColor, "#e9e7f3")
}
for _, item := range g.Peers() {
vs := isNominallyVisible(item)
o := isOnline(item)
accountCounter.inc(vs, vs && o)
groupCounter.inc(vs, vs && o)
if shouldDisplay(item, showOffline) {
r.addItem(item, pi, "")
}
}
for _, gr := range g.Groups() {
r.displayGroup(gr, pi, accountCounter, showOffline, accountName)
}
if g.GroupName != "" {
parentPath, _ := r.model.GetPath(pi)
shouldCollapse, ok := r.isCollapsed[groupID]
isExpanded := true
if ok && shouldCollapse {
isExpanded = false
r.toCollapse = append(r.toCollapse, parentPath)
}
r.model.SetValue(pi, indexParentDisplayName, createGroupDisplayName(g.FullGroupName(), groupCounter, isExpanded))
}
}
func (r *roster) redrawSeparateAccount(account *account, contacts *rosters.List, showOffline bool) {
parentIter := r.model.Append(nil)
accountCounter := &counter{}
grp := contacts.Grouped(account.session.GroupDelimiter)
parentName := account.session.GetConfig().Account
r.displayGroup(grp, parentIter, accountCounter, showOffline, parentName)
r.model.SetValue(parentIter, indexParentJid, parentName)
r.model.SetValue(parentIter, indexAccountID, account.session.GetConfig().ID())
r.model.SetValue(parentIter, indexRowType, "account")
r.model.SetValue(parentIter, indexWeight, 700)
bgcolor := "#918caa"
if account.session.IsDisconnected() {
bgcolor = "#d5d3de"
}
r.model.SetValue(parentIter, indexBackgroundColor, bgcolor)
parentPath, _ := r.model.GetPath(parentIter)
shouldCollapse, ok := r.isCollapsed[parentName]
isExpanded := true
if ok && shouldCollapse {
isExpanded = false
r.toCollapse = append(r.toCollapse, parentPath)
}
var stat string
if account.session.IsDisconnected() {
stat = "offline"
} else if account.session.IsConnected() {
stat = "available"
} else {
stat = "connecting"
}
r.model.SetValue(parentIter, indexStatusIcon, statusIcons[stat].getPixbuf())
r.model.SetValue(parentIter, indexParentDisplayName, createGroupDisplayName(parentName, accountCounter, isExpanded))
}
func (r *roster) sortedAccounts() []*account {
var as []*account
for account := range r.ui.accountManager.getAllContacts() {
if account == nil {
log.Printf("adding an account that is nil...\n")
}
as = append(as, account)
}
sort.Sort(byAccountNameAlphabetic(as))
return as
}
func (r *roster) redrawSeparate() {
showOffline := !r.ui.config.Display.ShowOnlyOnline
r.ui.accountManager.RLock()
defer r.ui.accountManager.RUnlock()
r.toCollapse = nil
for _, account := range r.sortedAccounts() {
r.redrawSeparateAccount(account, r.ui.accountManager.getContacts(account), showOffline)
}
r.view.ExpandAll()
for _, path := range r.toCollapse {
r.view.CollapseRow(path)
}
}
const disconnectedPageIndex = 0
const spinnerPageIndex = 1
const rosterPageIndex = 2
func (r *roster) redraw() {
//TODO: this should be behind a mutex
r.model.Clear()
if r.ui.shouldViewAccounts() {
r.redrawSeparate()
} else {
r.redrawMerged()
}
}
func setAll(v *gtk.TreeStore, iter *gtk.TreeIter, values ...interface{}) {
for i, val := range values {
if val != nil {
v.SetValue(iter, i, val)
}
}
}
| {
return "unknown"
} | conditional_block |
models.py | import json
from enum import Enum
from os import environ
from typing import Any, Dict, Iterable, List, Optional, Tuple, Type, cast
from mysql.connector import connect
from app.enums import Entities, Operators
from app.schemas import EventSchema, ISchema, SelectionSchema, SportSchema
DB_SETTINGS = {
'host': environ.get('DB_HOST', 'localhost'),
'port': environ.get('DB_PORT', '3306'),
'user': 'root',
'password': 'root',
'database': 'eightapp',
}
## TODO: CREATE DATABASE FROM DOCKERFILE OR MAKE FILE. :)
def create_database(db_settings: Dict[str, Any], database_name: str) -> None:
with connect(**db_settings) as connection:
cursor = connection.cursor()
cursor.execute(f'CREATE DATABASE IF NOT EXISTS {database_name}')
def remove_database(db_settings: Dict[str, Any], database_name: str) -> None:
with connect(**db_settings) as connection:
cursor = connection.cursor()
cursor.execute(f'DROP DATABASE IF EXISTS {database_name}')
class Keywords(str, Enum):
ID = 'ID'
Properties = 'properties'
And = 'AND'
From = 'FROM'
InsertInto = 'INSERT INTO'
Set = 'SET'
Select = 'SELECT'
Update = 'UPDATE'
Values = 'VALUES'
Where = 'WHERE'
class EmptyQuery(Exception):
"""Raised when `.execute()` is called without prior select/filter."""
class SchemaNotFound(Exception):
"""Raised when the requested Schema is not found."""
class BaseModel:
db_settings: Dict[str, Any] = DB_SETTINGS
table_name: str
schema: Type[ISchema]
_table_created: Dict[str, bool] = {}
BLANK_QUERY: str = ''
def _create_table_if_not_exists(self) -> None:
"""Automatically create the provided schema table if it does not exist.
For example::
{
"title":"SportSchema",
"type":"object",
"properties":{
"Name":{
"title":"Name",
"type":"string"
},
"Slug":{
"title":"Slug",
"type":"string"
},
"Active":{
"title":"Active",
"type":"boolean" | "Active"
]
}
Would result in the following create table query::
CREATE TABLE IF NOT EXISTS sports (ID INTEGER PRIMARY KEY AUTO_INCREMENT, Name VARCHAR(255), Slug VARCHAR(255), Active BOOLEAN)
"""
COLUMN_DEFINITIONS = 'definitions'
COLUMN_TYPE = 'type'
KEY_REF = '$ref'
TYPE_LOOKUP = {
'string': 'VARCHAR(255)',
'integer': 'INTEGER',
'boolean': 'BOOLEAN',
'number': 'INTEGER',
}
def ref_lookup(
property: Dict[str, Any], fields: Dict[str, Any]
) -> Dict[str, Any]:
ref = property[KEY_REF]
property_lookup_name = ref[ref.rfind('/') + 1 :]
return fields[COLUMN_DEFINITIONS][property_lookup_name]
field_queries = []
fields = json.loads(self.schema.schema_json())
del fields[Keywords.Properties.value][
Keywords.ID.value
] # Remove primary key field. It is handled with auto increment below.
for property_name, property in fields[Keywords.Properties.value].items():
if KEY_REF in property:
property = ref_lookup(property, fields)
field_queries.append(
f'{property_name} {TYPE_LOOKUP[property[COLUMN_TYPE]]}'
)
table_columns = ', '.join(field_queries)
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(
f'CREATE TABLE IF NOT EXISTS {self.table_name} (ID INTEGER PRIMARY KEY AUTO_INCREMENT, {table_columns})'
)
self._table_created[self.table_name] = True
def __init__(self) -> None:
if not self._table_created.get(self.table_name):
self._create_table_if_not_exists()
self._query: str = BaseModel.BLANK_QUERY
self._last_method_called: Optional[function] = None
def _clean_selected_fields(self, field_names: Tuple[str, ...]) -> Tuple[str, ...]:
"""Remove duplicates, e.g. 'ID' field requested twice.
Maintains order. Using a set doesn't maintain order.
"""
list_field_names = [Keywords.ID.value]
for field in field_names:
if field in list_field_names:
continue
list_field_names.append(field)
return tuple(list_field_names)
def _append_to_query(self, statement: str) -> None:
if self._query == BaseModel.BLANK_QUERY:
fields = json.loads(self.schema.schema_json())
field_names = ', '.join(fields[Keywords.Properties.value].keys())
self._query = f'{Keywords.Select.value} {field_names} {Keywords.From.value} {self.table_name}'
self._query += f' {statement}'
def _map_results_to_schema(
self, field_names: Iterable[str], results: List[Tuple[Any, ...]]
) -> List[ISchema]:
schema_objects: List[ISchema] = []
for result in results:
row_data_mapped_to_fields = dict(zip(field_names, result))
schema_objects.append(self.schema.construct(**row_data_mapped_to_fields))
return schema_objects
def _fields_from_schema(self, schema: ISchema) -> List[str]:
return cast(List[str], schema.dict().keys()) # KeysView[str]
def _values_from_schema(self, schema: ISchema) -> List[Any]:
return cast(List[Any], schema.dict().values()) # KeysView[Any]
def select_fields(self, *field_names) -> List[ISchema]:
field_names = self._clean_selected_fields(field_names)
fields_formatted = ', '.join(field_names)
query = f'{Keywords.Select.value} {fields_formatted} {Keywords.From.value} {self.table_name}'
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(query)
results = cursor.fetchall()
return self._map_results_to_schema(field_names, results)
def insert(self, schema: ISchema) -> ISchema:
fields = self._fields_from_schema(schema)
field_names = ', '.join(self._fields_from_schema(schema))
fields_placeholder = ('%s, ' * len(fields))[:-2] # Remove trailing , .
values = tuple(self._values_from_schema(schema))
query = f'{Keywords.InsertInto.value} {self.table_name} ({field_names}) {Keywords.Values.value} ({fields_placeholder})'
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(query, values)
connection.commit()
schema.set_id(cursor.lastrowid)
return schema
def update(self, schema: ISchema) -> ISchema:
REMOVE_ID_FIELD_WITH_INDEX = 1
field_names = list(self._fields_from_schema(schema))[
REMOVE_ID_FIELD_WITH_INDEX:
]
values = list(self._values_from_schema(schema))[REMOVE_ID_FIELD_WITH_INDEX:]
fields_placeholder = ', '.join(
[f'{field_name} = %s' for field_name in field_names]
)
query = f"{Keywords.Update.value} {self.table_name} {Keywords.Set.value} {fields_placeholder} {Keywords.Where.value} {Keywords.ID.value} = '{schema.get_id()}'"
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(query, values)
connection.commit()
return schema
def select(self, *field_names) -> 'BaseModel':
field_names = self._clean_selected_fields(field_names)
fields_formatted = ', '.join(field_names)
self._query = f'{Keywords.Select.value} {fields_formatted} {Keywords.From.value} {self.table_name}'
self._last_method_called = self.select
return self
def filter(self, field_name: str, operator: Operators, value: Any) -> 'BaseModel':
expression = (
Keywords.And.value
if self._last_method_called == self.filter
else Keywords.Where.value
)
query = f"{expression} {field_name} {operator.value} '{value}'"
self._append_to_query(query)
self._last_method_called = self.filter
return self
def execute(self) -> List[ISchema]:
if self._query == BaseModel.BLANK_QUERY:
raise EmptyQuery()
field_names = list(
map(
str.strip,
self._query[
self._query.find(Keywords.Select.value)
+ len(Keywords.Select.value) : self._query.find(Keywords.From.value)
]
.strip()
.split(','),
)
)
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(self._query)
results = cursor.fetchall()
self._query = BaseModel.BLANK_QUERY
self._last_method_called = None
return self._map_results_to_schema(field_names, results)
def find(self, id: int) -> ISchema:
self.filter(Keywords.ID.value, Operators.Equals, id)
result = self.execute()
if result:
return result[0]
raise SchemaNotFound(f'Not found, ID: {id}.')
def get_query(self) -> str:
return self._query
class SportModel(BaseModel):
schema = SportSchema
table_name = 'sports'
class EventModel(BaseModel):
schema = EventSchema
table_name = 'events'
def update(self, schema: ISchema) -> ISchema:
"""When all the events of a sport are inactive,
the sport becomes inactive
"""
schema = super().update(schema)
schema = cast(EventSchema, schema)
if not schema.Active and schema.Sport > 0:
self.select('ID', 'Sport', 'Active').filter(
'Sport', Operators.Equals, schema.Sport,
).filter('Active', Operators.Equals, 1)
result = self.execute()
if not result:
sm = SportModel()
sport = sm.find(schema.Sport)
sport = cast(SportSchema, sport)
sport.Active = False
sm.update(sport)
return schema
class SelectionModel(BaseModel):
schema = SelectionSchema
table_name = 'selections'
def update(self, schema: ISchema) -> ISchema:
"""When all the selections of a particular event are inactive,
the event becomes inactive
"""
schema = super().update(schema)
schema = cast(SelectionSchema, schema)
if not schema.Active and schema.Event > 0:
self.select('ID', 'Event', 'Active').filter(
'Event', Operators.Equals, schema.Event,
).filter('Active', Operators.Equals, 1)
result = self.execute()
if not result:
em = EventModel()
event = em.find(schema.Event)
event = cast(EventSchema, event)
event.Active = False
em.update(event)
return schema
class ModelFactory:
_models: Dict[str, Type[BaseModel]] = {
Entities.Sport.value: SportModel,
Entities.Event.value: EventModel,
Entities.Selection.value: SelectionModel,
}
@classmethod
def create(cls, model: str) -> BaseModel:
if model in cls._models:
return cls._models[model]()
raise KeyError(model) | }
},
"required":[
"Name",
"Slug", | random_line_split |
models.py | import json
from enum import Enum
from os import environ
from typing import Any, Dict, Iterable, List, Optional, Tuple, Type, cast
from mysql.connector import connect
from app.enums import Entities, Operators
from app.schemas import EventSchema, ISchema, SelectionSchema, SportSchema
DB_SETTINGS = {
'host': environ.get('DB_HOST', 'localhost'),
'port': environ.get('DB_PORT', '3306'),
'user': 'root',
'password': 'root',
'database': 'eightapp',
}
## TODO: CREATE DATABASE FROM DOCKERFILE OR MAKE FILE. :)
def create_database(db_settings: Dict[str, Any], database_name: str) -> None:
with connect(**db_settings) as connection:
cursor = connection.cursor()
cursor.execute(f'CREATE DATABASE IF NOT EXISTS {database_name}')
def remove_database(db_settings: Dict[str, Any], database_name: str) -> None:
with connect(**db_settings) as connection:
cursor = connection.cursor()
cursor.execute(f'DROP DATABASE IF EXISTS {database_name}')
class Keywords(str, Enum):
ID = 'ID'
Properties = 'properties'
And = 'AND'
From = 'FROM'
InsertInto = 'INSERT INTO'
Set = 'SET'
Select = 'SELECT'
Update = 'UPDATE'
Values = 'VALUES'
Where = 'WHERE'
class EmptyQuery(Exception):
"""Raised when `.execute()` is called without prior select/filter."""
class SchemaNotFound(Exception):
"""Raised when the requested Schema is not found."""
class BaseModel:
db_settings: Dict[str, Any] = DB_SETTINGS
table_name: str
schema: Type[ISchema]
_table_created: Dict[str, bool] = {}
BLANK_QUERY: str = ''
def _create_table_if_not_exists(self) -> None:
"""Automatically create the provided schema table if it does not exist.
For example::
{
"title":"SportSchema",
"type":"object",
"properties":{
"Name":{
"title":"Name",
"type":"string"
},
"Slug":{
"title":"Slug",
"type":"string"
},
"Active":{
"title":"Active",
"type":"boolean"
}
},
"required":[
"Name",
"Slug",
"Active"
]
}
Would result in the following create table query::
CREATE TABLE IF NOT EXISTS sports (ID INTEGER PRIMARY KEY AUTO_INCREMENT, Name VARCHAR(255), Slug VARCHAR(255), Active BOOLEAN)
"""
COLUMN_DEFINITIONS = 'definitions'
COLUMN_TYPE = 'type'
KEY_REF = '$ref'
TYPE_LOOKUP = {
'string': 'VARCHAR(255)',
'integer': 'INTEGER',
'boolean': 'BOOLEAN',
'number': 'INTEGER',
}
def ref_lookup(
property: Dict[str, Any], fields: Dict[str, Any]
) -> Dict[str, Any]:
ref = property[KEY_REF]
property_lookup_name = ref[ref.rfind('/') + 1 :]
return fields[COLUMN_DEFINITIONS][property_lookup_name]
field_queries = []
fields = json.loads(self.schema.schema_json())
del fields[Keywords.Properties.value][
Keywords.ID.value
] # Remove primary key field. It is handled with auto increment below.
for property_name, property in fields[Keywords.Properties.value].items():
if KEY_REF in property:
property = ref_lookup(property, fields)
field_queries.append(
f'{property_name} {TYPE_LOOKUP[property[COLUMN_TYPE]]}'
)
table_columns = ', '.join(field_queries)
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(
f'CREATE TABLE IF NOT EXISTS {self.table_name} (ID INTEGER PRIMARY KEY AUTO_INCREMENT, {table_columns})'
)
self._table_created[self.table_name] = True
def __init__(self) -> None:
if not self._table_created.get(self.table_name):
self._create_table_if_not_exists()
self._query: str = BaseModel.BLANK_QUERY
self._last_method_called: Optional[function] = None
def _clean_selected_fields(self, field_names: Tuple[str, ...]) -> Tuple[str, ...]:
"""Remove duplicates, e.g. 'ID' field requested twice.
Maintains order. Using a set doesn't maintain order.
"""
list_field_names = [Keywords.ID.value]
for field in field_names:
if field in list_field_names:
continue
list_field_names.append(field)
return tuple(list_field_names)
def _append_to_query(self, statement: str) -> None:
if self._query == BaseModel.BLANK_QUERY:
fields = json.loads(self.schema.schema_json())
field_names = ', '.join(fields[Keywords.Properties.value].keys())
self._query = f'{Keywords.Select.value} {field_names} {Keywords.From.value} {self.table_name}'
self._query += f' {statement}'
def _map_results_to_schema(
self, field_names: Iterable[str], results: List[Tuple[Any, ...]]
) -> List[ISchema]:
|
def _fields_from_schema(self, schema: ISchema) -> List[str]:
return cast(List[str], schema.dict().keys()) # KeysView[str]
def _values_from_schema(self, schema: ISchema) -> List[Any]:
return cast(List[Any], schema.dict().values()) # KeysView[Any]
def select_fields(self, *field_names) -> List[ISchema]:
field_names = self._clean_selected_fields(field_names)
fields_formatted = ', '.join(field_names)
query = f'{Keywords.Select.value} {fields_formatted} {Keywords.From.value} {self.table_name}'
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(query)
results = cursor.fetchall()
return self._map_results_to_schema(field_names, results)
def insert(self, schema: ISchema) -> ISchema:
fields = self._fields_from_schema(schema)
field_names = ', '.join(self._fields_from_schema(schema))
fields_placeholder = ('%s, ' * len(fields))[:-2] # Remove trailing , .
values = tuple(self._values_from_schema(schema))
query = f'{Keywords.InsertInto.value} {self.table_name} ({field_names}) {Keywords.Values.value} ({fields_placeholder})'
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(query, values)
connection.commit()
schema.set_id(cursor.lastrowid)
return schema
def update(self, schema: ISchema) -> ISchema:
REMOVE_ID_FIELD_WITH_INDEX = 1
field_names = list(self._fields_from_schema(schema))[
REMOVE_ID_FIELD_WITH_INDEX:
]
values = list(self._values_from_schema(schema))[REMOVE_ID_FIELD_WITH_INDEX:]
fields_placeholder = ', '.join(
[f'{field_name} = %s' for field_name in field_names]
)
query = f"{Keywords.Update.value} {self.table_name} {Keywords.Set.value} {fields_placeholder} {Keywords.Where.value} {Keywords.ID.value} = '{schema.get_id()}'"
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(query, values)
connection.commit()
return schema
def select(self, *field_names) -> 'BaseModel':
field_names = self._clean_selected_fields(field_names)
fields_formatted = ', '.join(field_names)
self._query = f'{Keywords.Select.value} {fields_formatted} {Keywords.From.value} {self.table_name}'
self._last_method_called = self.select
return self
def filter(self, field_name: str, operator: Operators, value: Any) -> 'BaseModel':
expression = (
Keywords.And.value
if self._last_method_called == self.filter
else Keywords.Where.value
)
query = f"{expression} {field_name} {operator.value} '{value}'"
self._append_to_query(query)
self._last_method_called = self.filter
return self
def execute(self) -> List[ISchema]:
if self._query == BaseModel.BLANK_QUERY:
raise EmptyQuery()
field_names = list(
map(
str.strip,
self._query[
self._query.find(Keywords.Select.value)
+ len(Keywords.Select.value) : self._query.find(Keywords.From.value)
]
.strip()
.split(','),
)
)
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(self._query)
results = cursor.fetchall()
self._query = BaseModel.BLANK_QUERY
self._last_method_called = None
return self._map_results_to_schema(field_names, results)
def find(self, id: int) -> ISchema:
self.filter(Keywords.ID.value, Operators.Equals, id)
result = self.execute()
if result:
return result[0]
raise SchemaNotFound(f'Not found, ID: {id}.')
def get_query(self) -> str:
return self._query
class SportModel(BaseModel):
schema = SportSchema
table_name = 'sports'
class EventModel(BaseModel):
schema = EventSchema
table_name = 'events'
def update(self, schema: ISchema) -> ISchema:
"""When all the events of a sport are inactive,
the sport becomes inactive
"""
schema = super().update(schema)
schema = cast(EventSchema, schema)
if not schema.Active and schema.Sport > 0:
self.select('ID', 'Sport', 'Active').filter(
'Sport', Operators.Equals, schema.Sport,
).filter('Active', Operators.Equals, 1)
result = self.execute()
if not result:
sm = SportModel()
sport = sm.find(schema.Sport)
sport = cast(SportSchema, sport)
sport.Active = False
sm.update(sport)
return schema
class SelectionModel(BaseModel):
schema = SelectionSchema
table_name = 'selections'
def update(self, schema: ISchema) -> ISchema:
"""When all the selections of a particular event are inactive,
the event becomes inactive
"""
schema = super().update(schema)
schema = cast(SelectionSchema, schema)
if not schema.Active and schema.Event > 0:
self.select('ID', 'Event', 'Active').filter(
'Event', Operators.Equals, schema.Event,
).filter('Active', Operators.Equals, 1)
result = self.execute()
if not result:
em = EventModel()
event = em.find(schema.Event)
event = cast(EventSchema, event)
event.Active = False
em.update(event)
return schema
class ModelFactory:
_models: Dict[str, Type[BaseModel]] = {
Entities.Sport.value: SportModel,
Entities.Event.value: EventModel,
Entities.Selection.value: SelectionModel,
}
@classmethod
def create(cls, model: str) -> BaseModel:
if model in cls._models:
return cls._models[model]()
raise KeyError(model)
| schema_objects: List[ISchema] = []
for result in results:
row_data_mapped_to_fields = dict(zip(field_names, result))
schema_objects.append(self.schema.construct(**row_data_mapped_to_fields))
return schema_objects | identifier_body |
models.py | import json
from enum import Enum
from os import environ
from typing import Any, Dict, Iterable, List, Optional, Tuple, Type, cast
from mysql.connector import connect
from app.enums import Entities, Operators
from app.schemas import EventSchema, ISchema, SelectionSchema, SportSchema
DB_SETTINGS = {
'host': environ.get('DB_HOST', 'localhost'),
'port': environ.get('DB_PORT', '3306'),
'user': 'root',
'password': 'root',
'database': 'eightapp',
}
## TODO: CREATE DATABASE FROM DOCKERFILE OR MAKE FILE. :)
def create_database(db_settings: Dict[str, Any], database_name: str) -> None:
with connect(**db_settings) as connection:
cursor = connection.cursor()
cursor.execute(f'CREATE DATABASE IF NOT EXISTS {database_name}')
def remove_database(db_settings: Dict[str, Any], database_name: str) -> None:
with connect(**db_settings) as connection:
cursor = connection.cursor()
cursor.execute(f'DROP DATABASE IF EXISTS {database_name}')
class Keywords(str, Enum):
ID = 'ID'
Properties = 'properties'
And = 'AND'
From = 'FROM'
InsertInto = 'INSERT INTO'
Set = 'SET'
Select = 'SELECT'
Update = 'UPDATE'
Values = 'VALUES'
Where = 'WHERE'
class EmptyQuery(Exception):
"""Raised when `.execute()` is called without prior select/filter."""
class SchemaNotFound(Exception):
"""Raised when the requested Schema is not found."""
class BaseModel:
db_settings: Dict[str, Any] = DB_SETTINGS
table_name: str
schema: Type[ISchema]
_table_created: Dict[str, bool] = {}
BLANK_QUERY: str = ''
def _create_table_if_not_exists(self) -> None:
"""Automatically create the provided schema table if it does not exist.
For example::
{
"title":"SportSchema",
"type":"object",
"properties":{
"Name":{
"title":"Name",
"type":"string"
},
"Slug":{
"title":"Slug",
"type":"string"
},
"Active":{
"title":"Active",
"type":"boolean"
}
},
"required":[
"Name",
"Slug",
"Active"
]
}
Would result in the following create table query::
CREATE TABLE IF NOT EXISTS sports (ID INTEGER PRIMARY KEY AUTO_INCREMENT, Name VARCHAR(255), Slug VARCHAR(255), Active BOOLEAN)
"""
COLUMN_DEFINITIONS = 'definitions'
COLUMN_TYPE = 'type'
KEY_REF = '$ref'
TYPE_LOOKUP = {
'string': 'VARCHAR(255)',
'integer': 'INTEGER',
'boolean': 'BOOLEAN',
'number': 'INTEGER',
}
def ref_lookup(
property: Dict[str, Any], fields: Dict[str, Any]
) -> Dict[str, Any]:
ref = property[KEY_REF]
property_lookup_name = ref[ref.rfind('/') + 1 :]
return fields[COLUMN_DEFINITIONS][property_lookup_name]
field_queries = []
fields = json.loads(self.schema.schema_json())
del fields[Keywords.Properties.value][
Keywords.ID.value
] # Remove primary key field. It is handled with auto increment below.
for property_name, property in fields[Keywords.Properties.value].items():
|
table_columns = ', '.join(field_queries)
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(
f'CREATE TABLE IF NOT EXISTS {self.table_name} (ID INTEGER PRIMARY KEY AUTO_INCREMENT, {table_columns})'
)
self._table_created[self.table_name] = True
def __init__(self) -> None:
if not self._table_created.get(self.table_name):
self._create_table_if_not_exists()
self._query: str = BaseModel.BLANK_QUERY
self._last_method_called: Optional[function] = None
def _clean_selected_fields(self, field_names: Tuple[str, ...]) -> Tuple[str, ...]:
"""Remove duplicates, e.g. 'ID' field requested twice.
Maintains order. Using a set doesn't maintain order.
"""
list_field_names = [Keywords.ID.value]
for field in field_names:
if field in list_field_names:
continue
list_field_names.append(field)
return tuple(list_field_names)
def _append_to_query(self, statement: str) -> None:
if self._query == BaseModel.BLANK_QUERY:
fields = json.loads(self.schema.schema_json())
field_names = ', '.join(fields[Keywords.Properties.value].keys())
self._query = f'{Keywords.Select.value} {field_names} {Keywords.From.value} {self.table_name}'
self._query += f' {statement}'
def _map_results_to_schema(
self, field_names: Iterable[str], results: List[Tuple[Any, ...]]
) -> List[ISchema]:
schema_objects: List[ISchema] = []
for result in results:
row_data_mapped_to_fields = dict(zip(field_names, result))
schema_objects.append(self.schema.construct(**row_data_mapped_to_fields))
return schema_objects
def _fields_from_schema(self, schema: ISchema) -> List[str]:
return cast(List[str], schema.dict().keys()) # KeysView[str]
def _values_from_schema(self, schema: ISchema) -> List[Any]:
return cast(List[Any], schema.dict().values()) # KeysView[Any]
def select_fields(self, *field_names) -> List[ISchema]:
field_names = self._clean_selected_fields(field_names)
fields_formatted = ', '.join(field_names)
query = f'{Keywords.Select.value} {fields_formatted} {Keywords.From.value} {self.table_name}'
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(query)
results = cursor.fetchall()
return self._map_results_to_schema(field_names, results)
def insert(self, schema: ISchema) -> ISchema:
fields = self._fields_from_schema(schema)
field_names = ', '.join(self._fields_from_schema(schema))
fields_placeholder = ('%s, ' * len(fields))[:-2] # Remove trailing , .
values = tuple(self._values_from_schema(schema))
query = f'{Keywords.InsertInto.value} {self.table_name} ({field_names}) {Keywords.Values.value} ({fields_placeholder})'
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(query, values)
connection.commit()
schema.set_id(cursor.lastrowid)
return schema
def update(self, schema: ISchema) -> ISchema:
REMOVE_ID_FIELD_WITH_INDEX = 1
field_names = list(self._fields_from_schema(schema))[
REMOVE_ID_FIELD_WITH_INDEX:
]
values = list(self._values_from_schema(schema))[REMOVE_ID_FIELD_WITH_INDEX:]
fields_placeholder = ', '.join(
[f'{field_name} = %s' for field_name in field_names]
)
query = f"{Keywords.Update.value} {self.table_name} {Keywords.Set.value} {fields_placeholder} {Keywords.Where.value} {Keywords.ID.value} = '{schema.get_id()}'"
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(query, values)
connection.commit()
return schema
def select(self, *field_names) -> 'BaseModel':
field_names = self._clean_selected_fields(field_names)
fields_formatted = ', '.join(field_names)
self._query = f'{Keywords.Select.value} {fields_formatted} {Keywords.From.value} {self.table_name}'
self._last_method_called = self.select
return self
def filter(self, field_name: str, operator: Operators, value: Any) -> 'BaseModel':
expression = (
Keywords.And.value
if self._last_method_called == self.filter
else Keywords.Where.value
)
query = f"{expression} {field_name} {operator.value} '{value}'"
self._append_to_query(query)
self._last_method_called = self.filter
return self
def execute(self) -> List[ISchema]:
if self._query == BaseModel.BLANK_QUERY:
raise EmptyQuery()
field_names = list(
map(
str.strip,
self._query[
self._query.find(Keywords.Select.value)
+ len(Keywords.Select.value) : self._query.find(Keywords.From.value)
]
.strip()
.split(','),
)
)
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(self._query)
results = cursor.fetchall()
self._query = BaseModel.BLANK_QUERY
self._last_method_called = None
return self._map_results_to_schema(field_names, results)
def find(self, id: int) -> ISchema:
self.filter(Keywords.ID.value, Operators.Equals, id)
result = self.execute()
if result:
return result[0]
raise SchemaNotFound(f'Not found, ID: {id}.')
def get_query(self) -> str:
return self._query
class SportModel(BaseModel):
schema = SportSchema
table_name = 'sports'
class EventModel(BaseModel):
schema = EventSchema
table_name = 'events'
def update(self, schema: ISchema) -> ISchema:
"""When all the events of a sport are inactive,
the sport becomes inactive
"""
schema = super().update(schema)
schema = cast(EventSchema, schema)
if not schema.Active and schema.Sport > 0:
self.select('ID', 'Sport', 'Active').filter(
'Sport', Operators.Equals, schema.Sport,
).filter('Active', Operators.Equals, 1)
result = self.execute()
if not result:
sm = SportModel()
sport = sm.find(schema.Sport)
sport = cast(SportSchema, sport)
sport.Active = False
sm.update(sport)
return schema
class SelectionModel(BaseModel):
schema = SelectionSchema
table_name = 'selections'
def update(self, schema: ISchema) -> ISchema:
"""When all the selections of a particular event are inactive,
the event becomes inactive
"""
schema = super().update(schema)
schema = cast(SelectionSchema, schema)
if not schema.Active and schema.Event > 0:
self.select('ID', 'Event', 'Active').filter(
'Event', Operators.Equals, schema.Event,
).filter('Active', Operators.Equals, 1)
result = self.execute()
if not result:
em = EventModel()
event = em.find(schema.Event)
event = cast(EventSchema, event)
event.Active = False
em.update(event)
return schema
class ModelFactory:
_models: Dict[str, Type[BaseModel]] = {
Entities.Sport.value: SportModel,
Entities.Event.value: EventModel,
Entities.Selection.value: SelectionModel,
}
@classmethod
def create(cls, model: str) -> BaseModel:
if model in cls._models:
return cls._models[model]()
raise KeyError(model)
| if KEY_REF in property:
property = ref_lookup(property, fields)
field_queries.append(
f'{property_name} {TYPE_LOOKUP[property[COLUMN_TYPE]]}'
) | conditional_block |
models.py | import json
from enum import Enum
from os import environ
from typing import Any, Dict, Iterable, List, Optional, Tuple, Type, cast
from mysql.connector import connect
from app.enums import Entities, Operators
from app.schemas import EventSchema, ISchema, SelectionSchema, SportSchema
DB_SETTINGS = {
'host': environ.get('DB_HOST', 'localhost'),
'port': environ.get('DB_PORT', '3306'),
'user': 'root',
'password': 'root',
'database': 'eightapp',
}
## TODO: CREATE DATABASE FROM DOCKERFILE OR MAKE FILE. :)
def create_database(db_settings: Dict[str, Any], database_name: str) -> None:
with connect(**db_settings) as connection:
cursor = connection.cursor()
cursor.execute(f'CREATE DATABASE IF NOT EXISTS {database_name}')
def remove_database(db_settings: Dict[str, Any], database_name: str) -> None:
with connect(**db_settings) as connection:
cursor = connection.cursor()
cursor.execute(f'DROP DATABASE IF EXISTS {database_name}')
class Keywords(str, Enum):
ID = 'ID'
Properties = 'properties'
And = 'AND'
From = 'FROM'
InsertInto = 'INSERT INTO'
Set = 'SET'
Select = 'SELECT'
Update = 'UPDATE'
Values = 'VALUES'
Where = 'WHERE'
class EmptyQuery(Exception):
"""Raised when `.execute()` is called without prior select/filter."""
class SchemaNotFound(Exception):
"""Raised when the requested Schema is not found."""
class BaseModel:
db_settings: Dict[str, Any] = DB_SETTINGS
table_name: str
schema: Type[ISchema]
_table_created: Dict[str, bool] = {}
BLANK_QUERY: str = ''
def _create_table_if_not_exists(self) -> None:
"""Automatically create the provided schema table if it does not exist.
For example::
{
"title":"SportSchema",
"type":"object",
"properties":{
"Name":{
"title":"Name",
"type":"string"
},
"Slug":{
"title":"Slug",
"type":"string"
},
"Active":{
"title":"Active",
"type":"boolean"
}
},
"required":[
"Name",
"Slug",
"Active"
]
}
Would result in the following create table query::
CREATE TABLE IF NOT EXISTS sports (ID INTEGER PRIMARY KEY AUTO_INCREMENT, Name VARCHAR(255), Slug VARCHAR(255), Active BOOLEAN)
"""
COLUMN_DEFINITIONS = 'definitions'
COLUMN_TYPE = 'type'
KEY_REF = '$ref'
TYPE_LOOKUP = {
'string': 'VARCHAR(255)',
'integer': 'INTEGER',
'boolean': 'BOOLEAN',
'number': 'INTEGER',
}
def ref_lookup(
property: Dict[str, Any], fields: Dict[str, Any]
) -> Dict[str, Any]:
ref = property[KEY_REF]
property_lookup_name = ref[ref.rfind('/') + 1 :]
return fields[COLUMN_DEFINITIONS][property_lookup_name]
field_queries = []
fields = json.loads(self.schema.schema_json())
del fields[Keywords.Properties.value][
Keywords.ID.value
] # Remove primary key field. It is handled with auto increment below.
for property_name, property in fields[Keywords.Properties.value].items():
if KEY_REF in property:
property = ref_lookup(property, fields)
field_queries.append(
f'{property_name} {TYPE_LOOKUP[property[COLUMN_TYPE]]}'
)
table_columns = ', '.join(field_queries)
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(
f'CREATE TABLE IF NOT EXISTS {self.table_name} (ID INTEGER PRIMARY KEY AUTO_INCREMENT, {table_columns})'
)
self._table_created[self.table_name] = True
def __init__(self) -> None:
if not self._table_created.get(self.table_name):
self._create_table_if_not_exists()
self._query: str = BaseModel.BLANK_QUERY
self._last_method_called: Optional[function] = None
def _clean_selected_fields(self, field_names: Tuple[str, ...]) -> Tuple[str, ...]:
"""Remove duplicates, e.g. 'ID' field requested twice.
Maintains order. Using a set doesn't maintain order.
"""
list_field_names = [Keywords.ID.value]
for field in field_names:
if field in list_field_names:
continue
list_field_names.append(field)
return tuple(list_field_names)
def _append_to_query(self, statement: str) -> None:
if self._query == BaseModel.BLANK_QUERY:
fields = json.loads(self.schema.schema_json())
field_names = ', '.join(fields[Keywords.Properties.value].keys())
self._query = f'{Keywords.Select.value} {field_names} {Keywords.From.value} {self.table_name}'
self._query += f' {statement}'
def _map_results_to_schema(
self, field_names: Iterable[str], results: List[Tuple[Any, ...]]
) -> List[ISchema]:
schema_objects: List[ISchema] = []
for result in results:
row_data_mapped_to_fields = dict(zip(field_names, result))
schema_objects.append(self.schema.construct(**row_data_mapped_to_fields))
return schema_objects
def _fields_from_schema(self, schema: ISchema) -> List[str]:
return cast(List[str], schema.dict().keys()) # KeysView[str]
def | (self, schema: ISchema) -> List[Any]:
return cast(List[Any], schema.dict().values()) # KeysView[Any]
def select_fields(self, *field_names) -> List[ISchema]:
field_names = self._clean_selected_fields(field_names)
fields_formatted = ', '.join(field_names)
query = f'{Keywords.Select.value} {fields_formatted} {Keywords.From.value} {self.table_name}'
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(query)
results = cursor.fetchall()
return self._map_results_to_schema(field_names, results)
def insert(self, schema: ISchema) -> ISchema:
fields = self._fields_from_schema(schema)
field_names = ', '.join(self._fields_from_schema(schema))
fields_placeholder = ('%s, ' * len(fields))[:-2] # Remove trailing , .
values = tuple(self._values_from_schema(schema))
query = f'{Keywords.InsertInto.value} {self.table_name} ({field_names}) {Keywords.Values.value} ({fields_placeholder})'
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(query, values)
connection.commit()
schema.set_id(cursor.lastrowid)
return schema
def update(self, schema: ISchema) -> ISchema:
REMOVE_ID_FIELD_WITH_INDEX = 1
field_names = list(self._fields_from_schema(schema))[
REMOVE_ID_FIELD_WITH_INDEX:
]
values = list(self._values_from_schema(schema))[REMOVE_ID_FIELD_WITH_INDEX:]
fields_placeholder = ', '.join(
[f'{field_name} = %s' for field_name in field_names]
)
query = f"{Keywords.Update.value} {self.table_name} {Keywords.Set.value} {fields_placeholder} {Keywords.Where.value} {Keywords.ID.value} = '{schema.get_id()}'"
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(query, values)
connection.commit()
return schema
def select(self, *field_names) -> 'BaseModel':
field_names = self._clean_selected_fields(field_names)
fields_formatted = ', '.join(field_names)
self._query = f'{Keywords.Select.value} {fields_formatted} {Keywords.From.value} {self.table_name}'
self._last_method_called = self.select
return self
def filter(self, field_name: str, operator: Operators, value: Any) -> 'BaseModel':
expression = (
Keywords.And.value
if self._last_method_called == self.filter
else Keywords.Where.value
)
query = f"{expression} {field_name} {operator.value} '{value}'"
self._append_to_query(query)
self._last_method_called = self.filter
return self
def execute(self) -> List[ISchema]:
if self._query == BaseModel.BLANK_QUERY:
raise EmptyQuery()
field_names = list(
map(
str.strip,
self._query[
self._query.find(Keywords.Select.value)
+ len(Keywords.Select.value) : self._query.find(Keywords.From.value)
]
.strip()
.split(','),
)
)
with connect(**BaseModel.db_settings) as connection:
cursor = connection.cursor()
cursor.execute(self._query)
results = cursor.fetchall()
self._query = BaseModel.BLANK_QUERY
self._last_method_called = None
return self._map_results_to_schema(field_names, results)
def find(self, id: int) -> ISchema:
self.filter(Keywords.ID.value, Operators.Equals, id)
result = self.execute()
if result:
return result[0]
raise SchemaNotFound(f'Not found, ID: {id}.')
def get_query(self) -> str:
return self._query
class SportModel(BaseModel):
schema = SportSchema
table_name = 'sports'
class EventModel(BaseModel):
schema = EventSchema
table_name = 'events'
def update(self, schema: ISchema) -> ISchema:
"""When all the events of a sport are inactive,
the sport becomes inactive
"""
schema = super().update(schema)
schema = cast(EventSchema, schema)
if not schema.Active and schema.Sport > 0:
self.select('ID', 'Sport', 'Active').filter(
'Sport', Operators.Equals, schema.Sport,
).filter('Active', Operators.Equals, 1)
result = self.execute()
if not result:
sm = SportModel()
sport = sm.find(schema.Sport)
sport = cast(SportSchema, sport)
sport.Active = False
sm.update(sport)
return schema
class SelectionModel(BaseModel):
schema = SelectionSchema
table_name = 'selections'
def update(self, schema: ISchema) -> ISchema:
"""When all the selections of a particular event are inactive,
the event becomes inactive
"""
schema = super().update(schema)
schema = cast(SelectionSchema, schema)
if not schema.Active and schema.Event > 0:
self.select('ID', 'Event', 'Active').filter(
'Event', Operators.Equals, schema.Event,
).filter('Active', Operators.Equals, 1)
result = self.execute()
if not result:
em = EventModel()
event = em.find(schema.Event)
event = cast(EventSchema, event)
event.Active = False
em.update(event)
return schema
class ModelFactory:
_models: Dict[str, Type[BaseModel]] = {
Entities.Sport.value: SportModel,
Entities.Event.value: EventModel,
Entities.Selection.value: SelectionModel,
}
@classmethod
def create(cls, model: str) -> BaseModel:
if model in cls._models:
return cls._models[model]()
raise KeyError(model)
| _values_from_schema | identifier_name |
wgl.rs | use crate::{conv, device::Device, native, Backend, GlContainer, PhysicalDevice, QueueFamily};
use std::{
ffi::{CString, OsStr},
iter,
mem,
os::{raw::c_void, windows::ffi::OsStrExt},
ptr,
};
use glow::Context as _;
use hal::{adapter::Adapter, format as f, image, window};
use arrayvec::ArrayVec;
use lazy_static::lazy_static;
use winapi::shared::minwindef::*;
use winapi::shared::windef::*;
use winapi::um::libloaderapi::*;
use winapi::um::wingdi::*;
use winapi::um::winuser::*;
pub mod wgl_sys {
include!(concat!(env!("OUT_DIR"), "/wgl_sys.rs"));
}
pub mod wgl_ext_sys {
include!(concat!(env!("OUT_DIR"), "/wgl_ext_sys.rs"));
}
#[link(name = "opengl32")]
extern "C" {}
#[cfg(feature = "winit")]
use winit;
pub(crate) struct Entry {
hwnd: HWND,
pub(crate) hdc: HDC,
pub(crate) wgl: wgl_ext_sys::Wgl,
lib: HMODULE,
}
unsafe impl Send for Entry {}
unsafe impl Sync for Entry {}
impl Entry {
pub fn new() -> Self {
unsafe {
let mut class: WNDCLASSEXW = mem::zeroed();
let instance = GetModuleHandleW(ptr::null());
let class_name = OsStr::new("gfx-rs wgl")
.encode_wide()
.chain(Some(0).into_iter())
.collect::<Vec<_>>();
class.cbSize = mem::size_of::<WNDCLASSEXW>() as UINT;
class.lpszClassName = class_name.as_ptr();
class.lpfnWndProc = Some(DefWindowProcW);
RegisterClassExW(&class);
let hwnd = CreateWindowExW(
0,
class_name.as_ptr(),
std::ptr::null(),
0,
CW_USEDEFAULT,
CW_USEDEFAULT,
CW_USEDEFAULT,
CW_USEDEFAULT,
std::ptr::null_mut(),
std::ptr::null_mut(),
instance,
std::ptr::null_mut(),
);
let hdc = GetDC(hwnd);
let desc = PIXELFORMATDESCRIPTOR {
nSize: std::mem::size_of::<PIXELFORMATDESCRIPTOR>() as u16,
nVersion: 1,
dwFlags: PFD_SUPPORT_OPENGL,
iPixelType: PFD_TYPE_RGBA,
cColorBits: 8,
cRedBits: 0,
cRedShift: 0,
cGreenBits: 0,
cGreenShift: 0,
cBlueBits: 0,
cBlueShift: 0,
cAlphaBits: 8,
cAlphaShift: 0,
cAccumBits: 0,
cAccumRedBits: 0,
cAccumGreenBits: 0,
cAccumBlueBits: 0,
cAccumAlphaBits: 0,
cDepthBits: 0,
cStencilBits: 0,
cAuxBuffers: 0,
iLayerType: PFD_MAIN_PLANE,
bReserved: 0,
dwLayerMask: 0,
dwVisibleMask: 0,
dwDamageMask: 0,
};
let format_id = ChoosePixelFormat(hdc, &desc);
SetPixelFormat(hdc, format_id, &desc);
let hglrc = wglCreateContext(hdc);
println!("{:?}", (hwnd, hdc, format_id, hglrc));
wglMakeCurrent(hdc, hglrc);
let name = OsStr::new("opengl32.dll")
.encode_wide()
.chain(Some(0).into_iter())
.collect::<Vec<_>>();
let lib = LoadLibraryW(name.as_ptr());
let wgl = wgl_ext_sys::Wgl::load_with(|sym| {
let sym = CString::new(sym.as_bytes()).unwrap();
let addr = wgl_sys::GetProcAddress(sym.as_ptr()) as *const ();
if !addr.is_null() {
addr as *const _
} else {
GetProcAddress(lib, sym.as_ptr()) as *const _
}
});
Entry {
hwnd,
hdc: hdc as _,
wgl,
lib,
}
}
}
}
impl Drop for Entry {
fn drop(&mut self) {
unsafe {
DestroyWindow(self.hwnd);
}
}
}
lazy_static! {
// Entry function pointers
pub(crate) static ref WGL_ENTRY: Entry = Entry::new();
}
pub struct Instance {
pub(crate) ctxt: DeviceContext,
}
impl Instance {
pub fn create(_name: &str, version: u32) -> Result<Self, hal::UnsupportedBackend> {
unsafe {
let glrc = WGL_ENTRY.wgl.CreateContextAttribsARB(
WGL_ENTRY.hdc as *const _,
ptr::null(),
ptr::null(),
) as HGLRC;
wglMakeCurrent(WGL_ENTRY.hdc as *mut _, glrc);
Ok(Instance {
ctxt: DeviceContext {
ctxt: Context { glrc },
hdc: WGL_ENTRY.hdc,
},
})
}
}
#[cfg(windows)]
pub fn create_surface_from_hwnd(&self, hwnd: *mut c_void) -> Surface {
Surface {
hwnd: hwnd as *mut _,
swapchain: None,
renderbuffer: None,
}
}
#[cfg(feature = "winit")]
pub fn create_surface(&self, window: &winit::window::Window) -> Surface {
use winit::platform::windows::WindowExtWindows;
let hwnd = window.hwnd();
self.create_surface_from_hwnd(hwnd as *mut _)
}
}
impl hal::Instance for Instance {
type Backend = Backend;
fn enumerate_adapters(&self) -> Vec<Adapter<Backend>> {
let gl_container = GlContainer::from_fn_proc(|s| unsafe {
let sym = CString::new(s.as_bytes()).unwrap();
let addr = wgl_sys::GetProcAddress(sym.as_ptr()) as *const ();
if !addr.is_null() {
addr as *const _
} else {
GetProcAddress(WGL_ENTRY.lib, sym.as_ptr()) as *const _
}
});
let adapter = PhysicalDevice::new_adapter(self.ctxt, gl_container);
vec![adapter]
}
}
#[derive(Debug)]
pub struct Surface {
pub(crate) hwnd: HWND,
pub(crate) swapchain: Option<Swapchain>,
renderbuffer: Option<native::Renderbuffer>,
}
// TODO: high -msiglreith
unsafe impl Send for Surface {}
unsafe impl Sync for Surface {}
impl window::Surface<Backend> for Surface {
fn compatibility(
&self,
physical_device: &PhysicalDevice,
) -> (
window::SurfaceCapabilities,
Option<Vec<f::Format>>,
Vec<window::PresentMode>,
) {
let extent = unsafe {
let mut rect: RECT = mem::zeroed();
GetClientRect(self.hwnd, &mut rect);
window::Extent2D {
width: (rect.right - rect.left) as _,
height: (rect.bottom - rect.top) as _,
}
};
let caps = window::SurfaceCapabilities {
image_count: 2 ..= 2,
current_extent: Some(extent),
extents: extent ..= extent,
max_image_layers: 1,
usage: image::Usage::COLOR_ATTACHMENT | image::Usage::TRANSFER_SRC,
composite_alpha: window::CompositeAlpha::OPAQUE, //TODO
};
let present_modes = vec![
window::PresentMode::Fifo, //TODO
];
(
caps,
Some(vec![f::Format::Rgba8Srgb, f::Format::Bgra8Srgb]),
present_modes,
)
}
fn supports_queue_family(&self, _queue_family: &QueueFamily) -> bool {
true
}
}
impl window::PresentationSurface<Backend> for Surface {
type SwapchainImage = native::ImageView;
unsafe fn configure_swapchain(
&mut self,
device: &Device,
config: window::SwapchainConfig,
) -> Result<(), window::CreationError> {
let gl = &device.share.context;
let context = match self.swapchain.take() {
Some(old) => {
for fbo in old.fbos {
gl.delete_framebuffer(fbo);
}
old.context
}
None => PresentContext::new(self, &device.share.instance_context),
};
context.make_current();
if self.renderbuffer.is_none() {
self.renderbuffer = Some(gl.create_renderbuffer().unwrap());
}
let desc = conv::describe_format(config.format).unwrap();
gl.bind_renderbuffer(glow::RENDERBUFFER, self.renderbuffer);
gl.renderbuffer_storage(
glow::RENDERBUFFER,
desc.tex_internal,
config.extent.width as i32,
config.extent.height as i32,
);
let fbo = gl.create_framebuffer().unwrap();
gl.bind_framebuffer(glow::READ_FRAMEBUFFER, Some(fbo));
gl.framebuffer_renderbuffer(
glow::READ_FRAMEBUFFER,
glow::COLOR_ATTACHMENT0,
glow::RENDERBUFFER,
self.renderbuffer, | extent: config.extent,
fbos: iter::once(fbo).collect(),
});
Ok(())
}
unsafe fn unconfigure_swapchain(&mut self, device: &Device) {
let gl = &device.share.context;
if let Some(old) = self.swapchain.take() {
for fbo in old.fbos {
gl.delete_framebuffer(fbo);
}
}
if let Some(rbo) = self.renderbuffer.take() {
gl.delete_renderbuffer(rbo);
}
}
unsafe fn acquire_image(
&mut self,
_timeout_ns: u64,
) -> Result<(Self::SwapchainImage, Option<window::Suboptimal>), window::AcquireError> {
let image = native::ImageView::Renderbuffer(self.renderbuffer.unwrap());
Ok((image, None))
}
}
#[derive(Debug)]
pub struct Swapchain {
pub(crate) fbos: ArrayVec<[native::RawFrameBuffer; 3]>,
pub(crate) context: PresentContext,
pub(crate) extent: window::Extent2D,
}
impl Swapchain {
pub(crate) fn make_current(&self) {
self.context.make_current();
}
pub(crate) fn swap_buffers(&self) {
self.context.swap_buffers();
}
}
impl window::Swapchain<Backend> for Swapchain {
unsafe fn acquire_image(
&mut self,
_timeout_ns: u64,
_semaphore: Option<&native::Semaphore>,
_fence: Option<&native::Fence>,
) -> Result<(window::SwapImageIndex, Option<window::Suboptimal>), window::AcquireError> {
Ok((0, None)) // TODO
}
}
/// Basic abstraction for wgl context handles.
#[derive(Debug, Copy, Clone)]
struct Context {
glrc: HGLRC,
}
impl Context {
unsafe fn make_current(&self, hdc: HDC) {
wglMakeCurrent(hdc, self.glrc);
}
}
/// Owned context for devices and instances.
#[derive(Debug, Copy, Clone)]
pub(crate) struct DeviceContext {
/// Owned wgl context.
ctxt: Context,
/// Device context owned by the corresponding instance.
///
/// This refers to either a pbuffer or dummy window. Therefore not used for actual presentation.
hdc: HDC,
}
// TODO
unsafe impl Send for DeviceContext {}
unsafe impl Sync for DeviceContext {}
impl DeviceContext {
pub(crate) fn make_current(&self) {
unsafe {
self.ctxt.make_current(self.hdc);
}
}
}
/// Owned context for swapchains which soley is required for presentation.
#[derive(Debug)]
pub(crate) struct PresentContext {
/// Owned wgl context.
ctxt: Context,
/// Device context of the corresponding presentation surface.
hdc: HDC,
}
// TODO
unsafe impl Send for PresentContext {}
unsafe impl Sync for PresentContext {}
impl PresentContext {
pub(crate) fn new(surface: &Surface, device_ctxt: &DeviceContext) -> Self {
// TODO: configuration options
unsafe {
let hdc = GetDC(surface.hwnd);
let desc = PIXELFORMATDESCRIPTOR {
nSize: std::mem::size_of::<PIXELFORMATDESCRIPTOR>() as u16,
nVersion: 1,
dwFlags: PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER,
iPixelType: PFD_TYPE_RGBA,
cColorBits: 32,
cRedBits: 0,
cRedShift: 0,
cGreenBits: 0,
cGreenShift: 0,
cBlueBits: 0,
cBlueShift: 0,
cAlphaBits: 8,
cAlphaShift: 0,
cAccumBits: 0,
cAccumRedBits: 0,
cAccumGreenBits: 0,
cAccumBlueBits: 0,
cAccumAlphaBits: 0,
cDepthBits: 0,
cStencilBits: 0,
cAuxBuffers: 0,
iLayerType: PFD_MAIN_PLANE,
bReserved: 0,
dwLayerMask: 0,
dwVisibleMask: 0,
dwDamageMask: 0,
};
let format_id = ChoosePixelFormat(hdc, &desc);
SetPixelFormat(hdc, format_id, &desc);
let glrc = WGL_ENTRY.wgl.CreateContextAttribsARB(
hdc as *const _,
device_ctxt.ctxt.glrc as _,
ptr::null(),
) as HGLRC;
wglMakeCurrent(hdc, glrc);
PresentContext {
ctxt: Context { glrc },
hdc,
}
}
}
pub(crate) fn make_current(&self) {
unsafe {
self.ctxt.make_current(self.hdc);
}
}
fn swap_buffers(&self) {
unsafe {
SwapBuffers(self.hdc);
}
}
} | );
self.swapchain = Some(Swapchain {
context, | random_line_split |
wgl.rs | use crate::{conv, device::Device, native, Backend, GlContainer, PhysicalDevice, QueueFamily};
use std::{
ffi::{CString, OsStr},
iter,
mem,
os::{raw::c_void, windows::ffi::OsStrExt},
ptr,
};
use glow::Context as _;
use hal::{adapter::Adapter, format as f, image, window};
use arrayvec::ArrayVec;
use lazy_static::lazy_static;
use winapi::shared::minwindef::*;
use winapi::shared::windef::*;
use winapi::um::libloaderapi::*;
use winapi::um::wingdi::*;
use winapi::um::winuser::*;
pub mod wgl_sys {
include!(concat!(env!("OUT_DIR"), "/wgl_sys.rs"));
}
pub mod wgl_ext_sys {
include!(concat!(env!("OUT_DIR"), "/wgl_ext_sys.rs"));
}
#[link(name = "opengl32")]
extern "C" {}
#[cfg(feature = "winit")]
use winit;
pub(crate) struct Entry {
hwnd: HWND,
pub(crate) hdc: HDC,
pub(crate) wgl: wgl_ext_sys::Wgl,
lib: HMODULE,
}
unsafe impl Send for Entry {}
unsafe impl Sync for Entry {}
impl Entry {
pub fn new() -> Self {
unsafe {
let mut class: WNDCLASSEXW = mem::zeroed();
let instance = GetModuleHandleW(ptr::null());
let class_name = OsStr::new("gfx-rs wgl")
.encode_wide()
.chain(Some(0).into_iter())
.collect::<Vec<_>>();
class.cbSize = mem::size_of::<WNDCLASSEXW>() as UINT;
class.lpszClassName = class_name.as_ptr();
class.lpfnWndProc = Some(DefWindowProcW);
RegisterClassExW(&class);
let hwnd = CreateWindowExW(
0,
class_name.as_ptr(),
std::ptr::null(),
0,
CW_USEDEFAULT,
CW_USEDEFAULT,
CW_USEDEFAULT,
CW_USEDEFAULT,
std::ptr::null_mut(),
std::ptr::null_mut(),
instance,
std::ptr::null_mut(),
);
let hdc = GetDC(hwnd);
let desc = PIXELFORMATDESCRIPTOR {
nSize: std::mem::size_of::<PIXELFORMATDESCRIPTOR>() as u16,
nVersion: 1,
dwFlags: PFD_SUPPORT_OPENGL,
iPixelType: PFD_TYPE_RGBA,
cColorBits: 8,
cRedBits: 0,
cRedShift: 0,
cGreenBits: 0,
cGreenShift: 0,
cBlueBits: 0,
cBlueShift: 0,
cAlphaBits: 8,
cAlphaShift: 0,
cAccumBits: 0,
cAccumRedBits: 0,
cAccumGreenBits: 0,
cAccumBlueBits: 0,
cAccumAlphaBits: 0,
cDepthBits: 0,
cStencilBits: 0,
cAuxBuffers: 0,
iLayerType: PFD_MAIN_PLANE,
bReserved: 0,
dwLayerMask: 0,
dwVisibleMask: 0,
dwDamageMask: 0,
};
let format_id = ChoosePixelFormat(hdc, &desc);
SetPixelFormat(hdc, format_id, &desc);
let hglrc = wglCreateContext(hdc);
println!("{:?}", (hwnd, hdc, format_id, hglrc));
wglMakeCurrent(hdc, hglrc);
let name = OsStr::new("opengl32.dll")
.encode_wide()
.chain(Some(0).into_iter())
.collect::<Vec<_>>();
let lib = LoadLibraryW(name.as_ptr());
let wgl = wgl_ext_sys::Wgl::load_with(|sym| {
let sym = CString::new(sym.as_bytes()).unwrap();
let addr = wgl_sys::GetProcAddress(sym.as_ptr()) as *const ();
if !addr.is_null() | else {
GetProcAddress(lib, sym.as_ptr()) as *const _
}
});
Entry {
hwnd,
hdc: hdc as _,
wgl,
lib,
}
}
}
}
impl Drop for Entry {
fn drop(&mut self) {
unsafe {
DestroyWindow(self.hwnd);
}
}
}
lazy_static! {
// Entry function pointers
pub(crate) static ref WGL_ENTRY: Entry = Entry::new();
}
pub struct Instance {
pub(crate) ctxt: DeviceContext,
}
impl Instance {
pub fn create(_name: &str, version: u32) -> Result<Self, hal::UnsupportedBackend> {
unsafe {
let glrc = WGL_ENTRY.wgl.CreateContextAttribsARB(
WGL_ENTRY.hdc as *const _,
ptr::null(),
ptr::null(),
) as HGLRC;
wglMakeCurrent(WGL_ENTRY.hdc as *mut _, glrc);
Ok(Instance {
ctxt: DeviceContext {
ctxt: Context { glrc },
hdc: WGL_ENTRY.hdc,
},
})
}
}
#[cfg(windows)]
pub fn create_surface_from_hwnd(&self, hwnd: *mut c_void) -> Surface {
Surface {
hwnd: hwnd as *mut _,
swapchain: None,
renderbuffer: None,
}
}
#[cfg(feature = "winit")]
pub fn create_surface(&self, window: &winit::window::Window) -> Surface {
use winit::platform::windows::WindowExtWindows;
let hwnd = window.hwnd();
self.create_surface_from_hwnd(hwnd as *mut _)
}
}
impl hal::Instance for Instance {
type Backend = Backend;
fn enumerate_adapters(&self) -> Vec<Adapter<Backend>> {
let gl_container = GlContainer::from_fn_proc(|s| unsafe {
let sym = CString::new(s.as_bytes()).unwrap();
let addr = wgl_sys::GetProcAddress(sym.as_ptr()) as *const ();
if !addr.is_null() {
addr as *const _
} else {
GetProcAddress(WGL_ENTRY.lib, sym.as_ptr()) as *const _
}
});
let adapter = PhysicalDevice::new_adapter(self.ctxt, gl_container);
vec![adapter]
}
}
#[derive(Debug)]
pub struct Surface {
pub(crate) hwnd: HWND,
pub(crate) swapchain: Option<Swapchain>,
renderbuffer: Option<native::Renderbuffer>,
}
// TODO: high -msiglreith
unsafe impl Send for Surface {}
unsafe impl Sync for Surface {}
impl window::Surface<Backend> for Surface {
fn compatibility(
&self,
physical_device: &PhysicalDevice,
) -> (
window::SurfaceCapabilities,
Option<Vec<f::Format>>,
Vec<window::PresentMode>,
) {
let extent = unsafe {
let mut rect: RECT = mem::zeroed();
GetClientRect(self.hwnd, &mut rect);
window::Extent2D {
width: (rect.right - rect.left) as _,
height: (rect.bottom - rect.top) as _,
}
};
let caps = window::SurfaceCapabilities {
image_count: 2 ..= 2,
current_extent: Some(extent),
extents: extent ..= extent,
max_image_layers: 1,
usage: image::Usage::COLOR_ATTACHMENT | image::Usage::TRANSFER_SRC,
composite_alpha: window::CompositeAlpha::OPAQUE, //TODO
};
let present_modes = vec![
window::PresentMode::Fifo, //TODO
];
(
caps,
Some(vec![f::Format::Rgba8Srgb, f::Format::Bgra8Srgb]),
present_modes,
)
}
fn supports_queue_family(&self, _queue_family: &QueueFamily) -> bool {
true
}
}
impl window::PresentationSurface<Backend> for Surface {
type SwapchainImage = native::ImageView;
unsafe fn configure_swapchain(
&mut self,
device: &Device,
config: window::SwapchainConfig,
) -> Result<(), window::CreationError> {
let gl = &device.share.context;
let context = match self.swapchain.take() {
Some(old) => {
for fbo in old.fbos {
gl.delete_framebuffer(fbo);
}
old.context
}
None => PresentContext::new(self, &device.share.instance_context),
};
context.make_current();
if self.renderbuffer.is_none() {
self.renderbuffer = Some(gl.create_renderbuffer().unwrap());
}
let desc = conv::describe_format(config.format).unwrap();
gl.bind_renderbuffer(glow::RENDERBUFFER, self.renderbuffer);
gl.renderbuffer_storage(
glow::RENDERBUFFER,
desc.tex_internal,
config.extent.width as i32,
config.extent.height as i32,
);
let fbo = gl.create_framebuffer().unwrap();
gl.bind_framebuffer(glow::READ_FRAMEBUFFER, Some(fbo));
gl.framebuffer_renderbuffer(
glow::READ_FRAMEBUFFER,
glow::COLOR_ATTACHMENT0,
glow::RENDERBUFFER,
self.renderbuffer,
);
self.swapchain = Some(Swapchain {
context,
extent: config.extent,
fbos: iter::once(fbo).collect(),
});
Ok(())
}
unsafe fn unconfigure_swapchain(&mut self, device: &Device) {
let gl = &device.share.context;
if let Some(old) = self.swapchain.take() {
for fbo in old.fbos {
gl.delete_framebuffer(fbo);
}
}
if let Some(rbo) = self.renderbuffer.take() {
gl.delete_renderbuffer(rbo);
}
}
unsafe fn acquire_image(
&mut self,
_timeout_ns: u64,
) -> Result<(Self::SwapchainImage, Option<window::Suboptimal>), window::AcquireError> {
let image = native::ImageView::Renderbuffer(self.renderbuffer.unwrap());
Ok((image, None))
}
}
#[derive(Debug)]
pub struct Swapchain {
pub(crate) fbos: ArrayVec<[native::RawFrameBuffer; 3]>,
pub(crate) context: PresentContext,
pub(crate) extent: window::Extent2D,
}
impl Swapchain {
pub(crate) fn make_current(&self) {
self.context.make_current();
}
pub(crate) fn swap_buffers(&self) {
self.context.swap_buffers();
}
}
impl window::Swapchain<Backend> for Swapchain {
unsafe fn acquire_image(
&mut self,
_timeout_ns: u64,
_semaphore: Option<&native::Semaphore>,
_fence: Option<&native::Fence>,
) -> Result<(window::SwapImageIndex, Option<window::Suboptimal>), window::AcquireError> {
Ok((0, None)) // TODO
}
}
/// Basic abstraction for wgl context handles.
#[derive(Debug, Copy, Clone)]
struct Context {
glrc: HGLRC,
}
impl Context {
unsafe fn make_current(&self, hdc: HDC) {
wglMakeCurrent(hdc, self.glrc);
}
}
/// Owned context for devices and instances.
#[derive(Debug, Copy, Clone)]
pub(crate) struct DeviceContext {
/// Owned wgl context.
ctxt: Context,
/// Device context owned by the corresponding instance.
///
/// This refers to either a pbuffer or dummy window. Therefore not used for actual presentation.
hdc: HDC,
}
// TODO
unsafe impl Send for DeviceContext {}
unsafe impl Sync for DeviceContext {}
impl DeviceContext {
pub(crate) fn make_current(&self) {
unsafe {
self.ctxt.make_current(self.hdc);
}
}
}
/// Owned context for swapchains which soley is required for presentation.
#[derive(Debug)]
pub(crate) struct PresentContext {
/// Owned wgl context.
ctxt: Context,
/// Device context of the corresponding presentation surface.
hdc: HDC,
}
// TODO
unsafe impl Send for PresentContext {}
unsafe impl Sync for PresentContext {}
impl PresentContext {
pub(crate) fn new(surface: &Surface, device_ctxt: &DeviceContext) -> Self {
// TODO: configuration options
unsafe {
let hdc = GetDC(surface.hwnd);
let desc = PIXELFORMATDESCRIPTOR {
nSize: std::mem::size_of::<PIXELFORMATDESCRIPTOR>() as u16,
nVersion: 1,
dwFlags: PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER,
iPixelType: PFD_TYPE_RGBA,
cColorBits: 32,
cRedBits: 0,
cRedShift: 0,
cGreenBits: 0,
cGreenShift: 0,
cBlueBits: 0,
cBlueShift: 0,
cAlphaBits: 8,
cAlphaShift: 0,
cAccumBits: 0,
cAccumRedBits: 0,
cAccumGreenBits: 0,
cAccumBlueBits: 0,
cAccumAlphaBits: 0,
cDepthBits: 0,
cStencilBits: 0,
cAuxBuffers: 0,
iLayerType: PFD_MAIN_PLANE,
bReserved: 0,
dwLayerMask: 0,
dwVisibleMask: 0,
dwDamageMask: 0,
};
let format_id = ChoosePixelFormat(hdc, &desc);
SetPixelFormat(hdc, format_id, &desc);
let glrc = WGL_ENTRY.wgl.CreateContextAttribsARB(
hdc as *const _,
device_ctxt.ctxt.glrc as _,
ptr::null(),
) as HGLRC;
wglMakeCurrent(hdc, glrc);
PresentContext {
ctxt: Context { glrc },
hdc,
}
}
}
pub(crate) fn make_current(&self) {
unsafe {
self.ctxt.make_current(self.hdc);
}
}
fn swap_buffers(&self) {
unsafe {
SwapBuffers(self.hdc);
}
}
}
| {
addr as *const _
} | conditional_block |
wgl.rs | use crate::{conv, device::Device, native, Backend, GlContainer, PhysicalDevice, QueueFamily};
use std::{
ffi::{CString, OsStr},
iter,
mem,
os::{raw::c_void, windows::ffi::OsStrExt},
ptr,
};
use glow::Context as _;
use hal::{adapter::Adapter, format as f, image, window};
use arrayvec::ArrayVec;
use lazy_static::lazy_static;
use winapi::shared::minwindef::*;
use winapi::shared::windef::*;
use winapi::um::libloaderapi::*;
use winapi::um::wingdi::*;
use winapi::um::winuser::*;
pub mod wgl_sys {
include!(concat!(env!("OUT_DIR"), "/wgl_sys.rs"));
}
pub mod wgl_ext_sys {
include!(concat!(env!("OUT_DIR"), "/wgl_ext_sys.rs"));
}
#[link(name = "opengl32")]
extern "C" {}
#[cfg(feature = "winit")]
use winit;
pub(crate) struct Entry {
hwnd: HWND,
pub(crate) hdc: HDC,
pub(crate) wgl: wgl_ext_sys::Wgl,
lib: HMODULE,
}
unsafe impl Send for Entry {}
unsafe impl Sync for Entry {}
impl Entry {
pub fn new() -> Self {
unsafe {
let mut class: WNDCLASSEXW = mem::zeroed();
let instance = GetModuleHandleW(ptr::null());
let class_name = OsStr::new("gfx-rs wgl")
.encode_wide()
.chain(Some(0).into_iter())
.collect::<Vec<_>>();
class.cbSize = mem::size_of::<WNDCLASSEXW>() as UINT;
class.lpszClassName = class_name.as_ptr();
class.lpfnWndProc = Some(DefWindowProcW);
RegisterClassExW(&class);
let hwnd = CreateWindowExW(
0,
class_name.as_ptr(),
std::ptr::null(),
0,
CW_USEDEFAULT,
CW_USEDEFAULT,
CW_USEDEFAULT,
CW_USEDEFAULT,
std::ptr::null_mut(),
std::ptr::null_mut(),
instance,
std::ptr::null_mut(),
);
let hdc = GetDC(hwnd);
let desc = PIXELFORMATDESCRIPTOR {
nSize: std::mem::size_of::<PIXELFORMATDESCRIPTOR>() as u16,
nVersion: 1,
dwFlags: PFD_SUPPORT_OPENGL,
iPixelType: PFD_TYPE_RGBA,
cColorBits: 8,
cRedBits: 0,
cRedShift: 0,
cGreenBits: 0,
cGreenShift: 0,
cBlueBits: 0,
cBlueShift: 0,
cAlphaBits: 8,
cAlphaShift: 0,
cAccumBits: 0,
cAccumRedBits: 0,
cAccumGreenBits: 0,
cAccumBlueBits: 0,
cAccumAlphaBits: 0,
cDepthBits: 0,
cStencilBits: 0,
cAuxBuffers: 0,
iLayerType: PFD_MAIN_PLANE,
bReserved: 0,
dwLayerMask: 0,
dwVisibleMask: 0,
dwDamageMask: 0,
};
let format_id = ChoosePixelFormat(hdc, &desc);
SetPixelFormat(hdc, format_id, &desc);
let hglrc = wglCreateContext(hdc);
println!("{:?}", (hwnd, hdc, format_id, hglrc));
wglMakeCurrent(hdc, hglrc);
let name = OsStr::new("opengl32.dll")
.encode_wide()
.chain(Some(0).into_iter())
.collect::<Vec<_>>();
let lib = LoadLibraryW(name.as_ptr());
let wgl = wgl_ext_sys::Wgl::load_with(|sym| {
let sym = CString::new(sym.as_bytes()).unwrap();
let addr = wgl_sys::GetProcAddress(sym.as_ptr()) as *const ();
if !addr.is_null() {
addr as *const _
} else {
GetProcAddress(lib, sym.as_ptr()) as *const _
}
});
Entry {
hwnd,
hdc: hdc as _,
wgl,
lib,
}
}
}
}
impl Drop for Entry {
fn drop(&mut self) {
unsafe {
DestroyWindow(self.hwnd);
}
}
}
lazy_static! {
// Entry function pointers
pub(crate) static ref WGL_ENTRY: Entry = Entry::new();
}
pub struct Instance {
pub(crate) ctxt: DeviceContext,
}
impl Instance {
pub fn create(_name: &str, version: u32) -> Result<Self, hal::UnsupportedBackend> {
unsafe {
let glrc = WGL_ENTRY.wgl.CreateContextAttribsARB(
WGL_ENTRY.hdc as *const _,
ptr::null(),
ptr::null(),
) as HGLRC;
wglMakeCurrent(WGL_ENTRY.hdc as *mut _, glrc);
Ok(Instance {
ctxt: DeviceContext {
ctxt: Context { glrc },
hdc: WGL_ENTRY.hdc,
},
})
}
}
#[cfg(windows)]
pub fn create_surface_from_hwnd(&self, hwnd: *mut c_void) -> Surface {
Surface {
hwnd: hwnd as *mut _,
swapchain: None,
renderbuffer: None,
}
}
#[cfg(feature = "winit")]
pub fn create_surface(&self, window: &winit::window::Window) -> Surface {
use winit::platform::windows::WindowExtWindows;
let hwnd = window.hwnd();
self.create_surface_from_hwnd(hwnd as *mut _)
}
}
impl hal::Instance for Instance {
type Backend = Backend;
fn enumerate_adapters(&self) -> Vec<Adapter<Backend>> {
let gl_container = GlContainer::from_fn_proc(|s| unsafe {
let sym = CString::new(s.as_bytes()).unwrap();
let addr = wgl_sys::GetProcAddress(sym.as_ptr()) as *const ();
if !addr.is_null() {
addr as *const _
} else {
GetProcAddress(WGL_ENTRY.lib, sym.as_ptr()) as *const _
}
});
let adapter = PhysicalDevice::new_adapter(self.ctxt, gl_container);
vec![adapter]
}
}
#[derive(Debug)]
pub struct Surface {
pub(crate) hwnd: HWND,
pub(crate) swapchain: Option<Swapchain>,
renderbuffer: Option<native::Renderbuffer>,
}
// TODO: high -msiglreith
unsafe impl Send for Surface {}
unsafe impl Sync for Surface {}
impl window::Surface<Backend> for Surface {
fn compatibility(
&self,
physical_device: &PhysicalDevice,
) -> (
window::SurfaceCapabilities,
Option<Vec<f::Format>>,
Vec<window::PresentMode>,
) {
let extent = unsafe {
let mut rect: RECT = mem::zeroed();
GetClientRect(self.hwnd, &mut rect);
window::Extent2D {
width: (rect.right - rect.left) as _,
height: (rect.bottom - rect.top) as _,
}
};
let caps = window::SurfaceCapabilities {
image_count: 2 ..= 2,
current_extent: Some(extent),
extents: extent ..= extent,
max_image_layers: 1,
usage: image::Usage::COLOR_ATTACHMENT | image::Usage::TRANSFER_SRC,
composite_alpha: window::CompositeAlpha::OPAQUE, //TODO
};
let present_modes = vec![
window::PresentMode::Fifo, //TODO
];
(
caps,
Some(vec![f::Format::Rgba8Srgb, f::Format::Bgra8Srgb]),
present_modes,
)
}
fn supports_queue_family(&self, _queue_family: &QueueFamily) -> bool |
}
impl window::PresentationSurface<Backend> for Surface {
type SwapchainImage = native::ImageView;
unsafe fn configure_swapchain(
&mut self,
device: &Device,
config: window::SwapchainConfig,
) -> Result<(), window::CreationError> {
let gl = &device.share.context;
let context = match self.swapchain.take() {
Some(old) => {
for fbo in old.fbos {
gl.delete_framebuffer(fbo);
}
old.context
}
None => PresentContext::new(self, &device.share.instance_context),
};
context.make_current();
if self.renderbuffer.is_none() {
self.renderbuffer = Some(gl.create_renderbuffer().unwrap());
}
let desc = conv::describe_format(config.format).unwrap();
gl.bind_renderbuffer(glow::RENDERBUFFER, self.renderbuffer);
gl.renderbuffer_storage(
glow::RENDERBUFFER,
desc.tex_internal,
config.extent.width as i32,
config.extent.height as i32,
);
let fbo = gl.create_framebuffer().unwrap();
gl.bind_framebuffer(glow::READ_FRAMEBUFFER, Some(fbo));
gl.framebuffer_renderbuffer(
glow::READ_FRAMEBUFFER,
glow::COLOR_ATTACHMENT0,
glow::RENDERBUFFER,
self.renderbuffer,
);
self.swapchain = Some(Swapchain {
context,
extent: config.extent,
fbos: iter::once(fbo).collect(),
});
Ok(())
}
unsafe fn unconfigure_swapchain(&mut self, device: &Device) {
let gl = &device.share.context;
if let Some(old) = self.swapchain.take() {
for fbo in old.fbos {
gl.delete_framebuffer(fbo);
}
}
if let Some(rbo) = self.renderbuffer.take() {
gl.delete_renderbuffer(rbo);
}
}
unsafe fn acquire_image(
&mut self,
_timeout_ns: u64,
) -> Result<(Self::SwapchainImage, Option<window::Suboptimal>), window::AcquireError> {
let image = native::ImageView::Renderbuffer(self.renderbuffer.unwrap());
Ok((image, None))
}
}
#[derive(Debug)]
pub struct Swapchain {
pub(crate) fbos: ArrayVec<[native::RawFrameBuffer; 3]>,
pub(crate) context: PresentContext,
pub(crate) extent: window::Extent2D,
}
impl Swapchain {
pub(crate) fn make_current(&self) {
self.context.make_current();
}
pub(crate) fn swap_buffers(&self) {
self.context.swap_buffers();
}
}
impl window::Swapchain<Backend> for Swapchain {
unsafe fn acquire_image(
&mut self,
_timeout_ns: u64,
_semaphore: Option<&native::Semaphore>,
_fence: Option<&native::Fence>,
) -> Result<(window::SwapImageIndex, Option<window::Suboptimal>), window::AcquireError> {
Ok((0, None)) // TODO
}
}
/// Basic abstraction for wgl context handles.
#[derive(Debug, Copy, Clone)]
struct Context {
glrc: HGLRC,
}
impl Context {
unsafe fn make_current(&self, hdc: HDC) {
wglMakeCurrent(hdc, self.glrc);
}
}
/// Owned context for devices and instances.
#[derive(Debug, Copy, Clone)]
pub(crate) struct DeviceContext {
/// Owned wgl context.
ctxt: Context,
/// Device context owned by the corresponding instance.
///
/// This refers to either a pbuffer or dummy window. Therefore not used for actual presentation.
hdc: HDC,
}
// TODO
unsafe impl Send for DeviceContext {}
unsafe impl Sync for DeviceContext {}
impl DeviceContext {
pub(crate) fn make_current(&self) {
unsafe {
self.ctxt.make_current(self.hdc);
}
}
}
/// Owned context for swapchains which soley is required for presentation.
#[derive(Debug)]
pub(crate) struct PresentContext {
/// Owned wgl context.
ctxt: Context,
/// Device context of the corresponding presentation surface.
hdc: HDC,
}
// TODO
unsafe impl Send for PresentContext {}
unsafe impl Sync for PresentContext {}
impl PresentContext {
pub(crate) fn new(surface: &Surface, device_ctxt: &DeviceContext) -> Self {
// TODO: configuration options
unsafe {
let hdc = GetDC(surface.hwnd);
let desc = PIXELFORMATDESCRIPTOR {
nSize: std::mem::size_of::<PIXELFORMATDESCRIPTOR>() as u16,
nVersion: 1,
dwFlags: PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER,
iPixelType: PFD_TYPE_RGBA,
cColorBits: 32,
cRedBits: 0,
cRedShift: 0,
cGreenBits: 0,
cGreenShift: 0,
cBlueBits: 0,
cBlueShift: 0,
cAlphaBits: 8,
cAlphaShift: 0,
cAccumBits: 0,
cAccumRedBits: 0,
cAccumGreenBits: 0,
cAccumBlueBits: 0,
cAccumAlphaBits: 0,
cDepthBits: 0,
cStencilBits: 0,
cAuxBuffers: 0,
iLayerType: PFD_MAIN_PLANE,
bReserved: 0,
dwLayerMask: 0,
dwVisibleMask: 0,
dwDamageMask: 0,
};
let format_id = ChoosePixelFormat(hdc, &desc);
SetPixelFormat(hdc, format_id, &desc);
let glrc = WGL_ENTRY.wgl.CreateContextAttribsARB(
hdc as *const _,
device_ctxt.ctxt.glrc as _,
ptr::null(),
) as HGLRC;
wglMakeCurrent(hdc, glrc);
PresentContext {
ctxt: Context { glrc },
hdc,
}
}
}
pub(crate) fn make_current(&self) {
unsafe {
self.ctxt.make_current(self.hdc);
}
}
fn swap_buffers(&self) {
unsafe {
SwapBuffers(self.hdc);
}
}
}
| {
true
} | identifier_body |
wgl.rs | use crate::{conv, device::Device, native, Backend, GlContainer, PhysicalDevice, QueueFamily};
use std::{
ffi::{CString, OsStr},
iter,
mem,
os::{raw::c_void, windows::ffi::OsStrExt},
ptr,
};
use glow::Context as _;
use hal::{adapter::Adapter, format as f, image, window};
use arrayvec::ArrayVec;
use lazy_static::lazy_static;
use winapi::shared::minwindef::*;
use winapi::shared::windef::*;
use winapi::um::libloaderapi::*;
use winapi::um::wingdi::*;
use winapi::um::winuser::*;
pub mod wgl_sys {
include!(concat!(env!("OUT_DIR"), "/wgl_sys.rs"));
}
pub mod wgl_ext_sys {
include!(concat!(env!("OUT_DIR"), "/wgl_ext_sys.rs"));
}
#[link(name = "opengl32")]
extern "C" {}
#[cfg(feature = "winit")]
use winit;
pub(crate) struct Entry {
hwnd: HWND,
pub(crate) hdc: HDC,
pub(crate) wgl: wgl_ext_sys::Wgl,
lib: HMODULE,
}
unsafe impl Send for Entry {}
unsafe impl Sync for Entry {}
impl Entry {
pub fn new() -> Self {
unsafe {
let mut class: WNDCLASSEXW = mem::zeroed();
let instance = GetModuleHandleW(ptr::null());
let class_name = OsStr::new("gfx-rs wgl")
.encode_wide()
.chain(Some(0).into_iter())
.collect::<Vec<_>>();
class.cbSize = mem::size_of::<WNDCLASSEXW>() as UINT;
class.lpszClassName = class_name.as_ptr();
class.lpfnWndProc = Some(DefWindowProcW);
RegisterClassExW(&class);
let hwnd = CreateWindowExW(
0,
class_name.as_ptr(),
std::ptr::null(),
0,
CW_USEDEFAULT,
CW_USEDEFAULT,
CW_USEDEFAULT,
CW_USEDEFAULT,
std::ptr::null_mut(),
std::ptr::null_mut(),
instance,
std::ptr::null_mut(),
);
let hdc = GetDC(hwnd);
let desc = PIXELFORMATDESCRIPTOR {
nSize: std::mem::size_of::<PIXELFORMATDESCRIPTOR>() as u16,
nVersion: 1,
dwFlags: PFD_SUPPORT_OPENGL,
iPixelType: PFD_TYPE_RGBA,
cColorBits: 8,
cRedBits: 0,
cRedShift: 0,
cGreenBits: 0,
cGreenShift: 0,
cBlueBits: 0,
cBlueShift: 0,
cAlphaBits: 8,
cAlphaShift: 0,
cAccumBits: 0,
cAccumRedBits: 0,
cAccumGreenBits: 0,
cAccumBlueBits: 0,
cAccumAlphaBits: 0,
cDepthBits: 0,
cStencilBits: 0,
cAuxBuffers: 0,
iLayerType: PFD_MAIN_PLANE,
bReserved: 0,
dwLayerMask: 0,
dwVisibleMask: 0,
dwDamageMask: 0,
};
let format_id = ChoosePixelFormat(hdc, &desc);
SetPixelFormat(hdc, format_id, &desc);
let hglrc = wglCreateContext(hdc);
println!("{:?}", (hwnd, hdc, format_id, hglrc));
wglMakeCurrent(hdc, hglrc);
let name = OsStr::new("opengl32.dll")
.encode_wide()
.chain(Some(0).into_iter())
.collect::<Vec<_>>();
let lib = LoadLibraryW(name.as_ptr());
let wgl = wgl_ext_sys::Wgl::load_with(|sym| {
let sym = CString::new(sym.as_bytes()).unwrap();
let addr = wgl_sys::GetProcAddress(sym.as_ptr()) as *const ();
if !addr.is_null() {
addr as *const _
} else {
GetProcAddress(lib, sym.as_ptr()) as *const _
}
});
Entry {
hwnd,
hdc: hdc as _,
wgl,
lib,
}
}
}
}
impl Drop for Entry {
fn drop(&mut self) {
unsafe {
DestroyWindow(self.hwnd);
}
}
}
lazy_static! {
// Entry function pointers
pub(crate) static ref WGL_ENTRY: Entry = Entry::new();
}
pub struct Instance {
pub(crate) ctxt: DeviceContext,
}
impl Instance {
pub fn create(_name: &str, version: u32) -> Result<Self, hal::UnsupportedBackend> {
unsafe {
let glrc = WGL_ENTRY.wgl.CreateContextAttribsARB(
WGL_ENTRY.hdc as *const _,
ptr::null(),
ptr::null(),
) as HGLRC;
wglMakeCurrent(WGL_ENTRY.hdc as *mut _, glrc);
Ok(Instance {
ctxt: DeviceContext {
ctxt: Context { glrc },
hdc: WGL_ENTRY.hdc,
},
})
}
}
#[cfg(windows)]
pub fn create_surface_from_hwnd(&self, hwnd: *mut c_void) -> Surface {
Surface {
hwnd: hwnd as *mut _,
swapchain: None,
renderbuffer: None,
}
}
#[cfg(feature = "winit")]
pub fn create_surface(&self, window: &winit::window::Window) -> Surface {
use winit::platform::windows::WindowExtWindows;
let hwnd = window.hwnd();
self.create_surface_from_hwnd(hwnd as *mut _)
}
}
impl hal::Instance for Instance {
type Backend = Backend;
fn enumerate_adapters(&self) -> Vec<Adapter<Backend>> {
let gl_container = GlContainer::from_fn_proc(|s| unsafe {
let sym = CString::new(s.as_bytes()).unwrap();
let addr = wgl_sys::GetProcAddress(sym.as_ptr()) as *const ();
if !addr.is_null() {
addr as *const _
} else {
GetProcAddress(WGL_ENTRY.lib, sym.as_ptr()) as *const _
}
});
let adapter = PhysicalDevice::new_adapter(self.ctxt, gl_container);
vec![adapter]
}
}
#[derive(Debug)]
pub struct Surface {
pub(crate) hwnd: HWND,
pub(crate) swapchain: Option<Swapchain>,
renderbuffer: Option<native::Renderbuffer>,
}
// TODO: high -msiglreith
unsafe impl Send for Surface {}
unsafe impl Sync for Surface {}
impl window::Surface<Backend> for Surface {
fn compatibility(
&self,
physical_device: &PhysicalDevice,
) -> (
window::SurfaceCapabilities,
Option<Vec<f::Format>>,
Vec<window::PresentMode>,
) {
let extent = unsafe {
let mut rect: RECT = mem::zeroed();
GetClientRect(self.hwnd, &mut rect);
window::Extent2D {
width: (rect.right - rect.left) as _,
height: (rect.bottom - rect.top) as _,
}
};
let caps = window::SurfaceCapabilities {
image_count: 2 ..= 2,
current_extent: Some(extent),
extents: extent ..= extent,
max_image_layers: 1,
usage: image::Usage::COLOR_ATTACHMENT | image::Usage::TRANSFER_SRC,
composite_alpha: window::CompositeAlpha::OPAQUE, //TODO
};
let present_modes = vec![
window::PresentMode::Fifo, //TODO
];
(
caps,
Some(vec![f::Format::Rgba8Srgb, f::Format::Bgra8Srgb]),
present_modes,
)
}
fn supports_queue_family(&self, _queue_family: &QueueFamily) -> bool {
true
}
}
impl window::PresentationSurface<Backend> for Surface {
type SwapchainImage = native::ImageView;
unsafe fn configure_swapchain(
&mut self,
device: &Device,
config: window::SwapchainConfig,
) -> Result<(), window::CreationError> {
let gl = &device.share.context;
let context = match self.swapchain.take() {
Some(old) => {
for fbo in old.fbos {
gl.delete_framebuffer(fbo);
}
old.context
}
None => PresentContext::new(self, &device.share.instance_context),
};
context.make_current();
if self.renderbuffer.is_none() {
self.renderbuffer = Some(gl.create_renderbuffer().unwrap());
}
let desc = conv::describe_format(config.format).unwrap();
gl.bind_renderbuffer(glow::RENDERBUFFER, self.renderbuffer);
gl.renderbuffer_storage(
glow::RENDERBUFFER,
desc.tex_internal,
config.extent.width as i32,
config.extent.height as i32,
);
let fbo = gl.create_framebuffer().unwrap();
gl.bind_framebuffer(glow::READ_FRAMEBUFFER, Some(fbo));
gl.framebuffer_renderbuffer(
glow::READ_FRAMEBUFFER,
glow::COLOR_ATTACHMENT0,
glow::RENDERBUFFER,
self.renderbuffer,
);
self.swapchain = Some(Swapchain {
context,
extent: config.extent,
fbos: iter::once(fbo).collect(),
});
Ok(())
}
unsafe fn unconfigure_swapchain(&mut self, device: &Device) {
let gl = &device.share.context;
if let Some(old) = self.swapchain.take() {
for fbo in old.fbos {
gl.delete_framebuffer(fbo);
}
}
if let Some(rbo) = self.renderbuffer.take() {
gl.delete_renderbuffer(rbo);
}
}
unsafe fn acquire_image(
&mut self,
_timeout_ns: u64,
) -> Result<(Self::SwapchainImage, Option<window::Suboptimal>), window::AcquireError> {
let image = native::ImageView::Renderbuffer(self.renderbuffer.unwrap());
Ok((image, None))
}
}
#[derive(Debug)]
pub struct Swapchain {
pub(crate) fbos: ArrayVec<[native::RawFrameBuffer; 3]>,
pub(crate) context: PresentContext,
pub(crate) extent: window::Extent2D,
}
impl Swapchain {
pub(crate) fn make_current(&self) {
self.context.make_current();
}
pub(crate) fn swap_buffers(&self) {
self.context.swap_buffers();
}
}
impl window::Swapchain<Backend> for Swapchain {
unsafe fn acquire_image(
&mut self,
_timeout_ns: u64,
_semaphore: Option<&native::Semaphore>,
_fence: Option<&native::Fence>,
) -> Result<(window::SwapImageIndex, Option<window::Suboptimal>), window::AcquireError> {
Ok((0, None)) // TODO
}
}
/// Basic abstraction for wgl context handles.
#[derive(Debug, Copy, Clone)]
struct Context {
glrc: HGLRC,
}
impl Context {
unsafe fn make_current(&self, hdc: HDC) {
wglMakeCurrent(hdc, self.glrc);
}
}
/// Owned context for devices and instances.
#[derive(Debug, Copy, Clone)]
pub(crate) struct DeviceContext {
/// Owned wgl context.
ctxt: Context,
/// Device context owned by the corresponding instance.
///
/// This refers to either a pbuffer or dummy window. Therefore not used for actual presentation.
hdc: HDC,
}
// TODO
unsafe impl Send for DeviceContext {}
unsafe impl Sync for DeviceContext {}
impl DeviceContext {
pub(crate) fn | (&self) {
unsafe {
self.ctxt.make_current(self.hdc);
}
}
}
/// Owned context for swapchains which soley is required for presentation.
#[derive(Debug)]
pub(crate) struct PresentContext {
/// Owned wgl context.
ctxt: Context,
/// Device context of the corresponding presentation surface.
hdc: HDC,
}
// TODO
unsafe impl Send for PresentContext {}
unsafe impl Sync for PresentContext {}
impl PresentContext {
pub(crate) fn new(surface: &Surface, device_ctxt: &DeviceContext) -> Self {
// TODO: configuration options
unsafe {
let hdc = GetDC(surface.hwnd);
let desc = PIXELFORMATDESCRIPTOR {
nSize: std::mem::size_of::<PIXELFORMATDESCRIPTOR>() as u16,
nVersion: 1,
dwFlags: PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER,
iPixelType: PFD_TYPE_RGBA,
cColorBits: 32,
cRedBits: 0,
cRedShift: 0,
cGreenBits: 0,
cGreenShift: 0,
cBlueBits: 0,
cBlueShift: 0,
cAlphaBits: 8,
cAlphaShift: 0,
cAccumBits: 0,
cAccumRedBits: 0,
cAccumGreenBits: 0,
cAccumBlueBits: 0,
cAccumAlphaBits: 0,
cDepthBits: 0,
cStencilBits: 0,
cAuxBuffers: 0,
iLayerType: PFD_MAIN_PLANE,
bReserved: 0,
dwLayerMask: 0,
dwVisibleMask: 0,
dwDamageMask: 0,
};
let format_id = ChoosePixelFormat(hdc, &desc);
SetPixelFormat(hdc, format_id, &desc);
let glrc = WGL_ENTRY.wgl.CreateContextAttribsARB(
hdc as *const _,
device_ctxt.ctxt.glrc as _,
ptr::null(),
) as HGLRC;
wglMakeCurrent(hdc, glrc);
PresentContext {
ctxt: Context { glrc },
hdc,
}
}
}
pub(crate) fn make_current(&self) {
unsafe {
self.ctxt.make_current(self.hdc);
}
}
fn swap_buffers(&self) {
unsafe {
SwapBuffers(self.hdc);
}
}
}
| make_current | identifier_name |
RunExperiment.py | #!/usr/bin/env python3
#%%
# NOTE: v2.1.1. 3 different Tones (3kHz, 6kHz, 12kHz) are played based on animal's position on the virtual track.
# ramping volume depend on a parameter named "peak_volume" describing how steep the ramping function
# should be (default 13). Taking care the max peakVolume or OnVolume should not exceed -90dB and 90dB.
# Features:
# sound logic that is controlled only by linear_pos
# pump logic controlled by PumpOn and PumpOffTime, so each time the pump is triggered, it must reset after 100ms regardless of animal's pos
# peak_volume is constant number regardless of different tone frequencies
# max_reward_times controls the max number of reward it can get within one single lap
#
# See SoundStimulus.py - need to run `jackd -R -P50 -v -d alsa -p64 -n2 -P hw:1,0 -r48000` (use aplay -l/-L to figure out which hw device)
#
import time
import datetime
import os
import shutil
import argparse
import yaml
import csv
import zmq
import numpy as np
import warnings
from contextlib import ExitStack
NamedVersion = '1.2'
Profiling = False
### Maybe should add argcomplete for this program?
# Command-line arguments: computer settings
# Command-line arguments: computer settings
parser = argparse.ArgumentParser(description='Run simple linear track experiment.')
parser.add_argument('-P', '--serial-port', default='/dev/ttyACM0',
help='TTY device for USB-serial interface (e.g., /dev/ttyUSB0 or COM10)')
parser.add_argument('-C','--param-file', default='defaults.yaml',
help='YAML file containing task parameters')
parser.add_argument('-R','--random-seed', default=None,
help='Random seed. If specified, this also overrides the YAML configuration file.')
parser.add_argument('--output-dir', default=None,
help='Directory to write output file (defaults to cwd)')
parser.add_argument('--no-check-space', default=None,
help='Exits if less than 10 GB of space is available.')
args = parser.parse_args()
print(args)
if args.param_file == 'defaults.yaml':
warnings.warn('Using default configuration file. That is almost certainly not what you want to do!')
# YAML parameters: task settings
with open(args.param_file, 'r') as f:
Config = yaml.safe_load(f)
# ------------------- Validate config file-------------------------------------------------------------
if 'AuditoryStimuli' in Config:
from treadmillio.soundstimulus import validate_sound_config
validate_sound_config(Config['AuditoryStimuli'])
# ------------------- Setup logging. ------------------------------------------------------------------
DoLogCommands = Config['Preferences'].get('LogCommands', True)
if DoLogCommands:
auto_log_directory = Config['Preferences'].get('AutoLogDirectory', True) if 'Preferences' in Config else True
log_directory = Config['Preferences'].get('LogDirectory', None) if 'Preferences' in Config else None
if log_directory is not None and args.output_dir is not None:
warnings.warn('The configuration file specifies {} for logging, '
'but command line has {}. Using command line!\n'.format(log_directory, args.output_dir))
log_directory = args.output_dir
elif args.output_dir is not None:
log_directory = args.output_dir
elif auto_log_directory:
now = datetime.datetime.now()
log_root = Config['Preferences'].get('LogDirectoryRoot', '') if 'Preferences' in Config else ''
log_directory = os.path.join(log_root, '{}{}'.format('ExperimentLog', now.strftime("%Y-%m-%d_%H%M")))
else:
raise(ValueError('You did not specify a directory for experiment logs, and AutoLogDirectory is False.'))
if not os.path.isabs(log_directory):
log_directory = os.path.join(os.getcwd(), log_directory)
orig_log_directory = log_directory
k=1
while os.path.exists(log_directory):
k = k + 1
log_directory = orig_log_directory + '_' + str(k)
if log_directory != orig_log_directory:
warnings.warn('Specified experiment logs directory {} exists, using {}'.format(orig_log_directory, log_directory))
print('Creating log directory: {}\n'.format(log_directory))
os.makedirs(log_directory)
# Check for available space!
if not args.no_check_space:
disk_total, disk_used, disk_free = shutil.disk_usage(log_directory)
if disk_free < 10*1024.0**3: # if less than 10 GB is available, exit
print("\n!!!! Only {} MB available, exiting. Use the '--no-check-space' "
"command line option to override. !!!!".format(disk_free/(1024.0**2)))
os.removedirs(log_directory)
exit(0)
else:
print('#'*80, '\n')
print('Warning!!! Not logging!!!!')
print('#'*80, '\n')
log_directory = None
EnableSound = Config['Preferences'].get('EnableSound', False)
# Check for random seed on command line or in preferences
if args.random_seed is not None:
np.random.seed(int(args.random_seed))
print(f'Setting random seed to {args.random_seed}.')
if 'RandomSeed' in Config['Preferences']:
Config['Preferences']['RandomSeed'] = int(args.random_seed)
print('Overwriting random seed in preferences file (true value will be logged).')
elif 'RandomSeed' in Config['Preferences']:
np.random.seed(Config['Preferences']['RandomSeed'])
print(f"Setting random seed to {Config['Preferences']['RandomSeed']}.")
with ExitStack() as stack:
# -------------- Initialize Serial IO - Won't actually do anything until we call connect()! --------------------------
from treadmillio.serialinterface import SerialInterface
gpio_config = Config.get('GPIO', None)
if not gpio_config:
warnings.warn("No GPIOs specified in config file. All IOs will be inputs.", RuntimeWarning)
maze_config = Config.get('Maze', None)
if 'Preferences' in Config:
zmq_streaming = Config['Preferences'].get('DataStreamingPort', None)
Interface = stack.enter_context(SerialInterface(SerialPort=args.serial_port, gpio_config=gpio_config,
maze_config=maze_config, zmq_streaming=zmq_streaming))
#----------------------- Sound stimuli --------------
if 'AuditoryStimuli' in Config and EnableSound:
from treadmillio.soundstimulus import SoundStimulusController
SoundController = stack.enter_context(SoundStimulusController(Config['AuditoryStimuli'], Interface.virtual_track_length,
Interface.maze_topology, log_directory))
else:
SoundController = None
if 'AuditoryStimuli' in Config:
warnings.warn("Config file specified AuditoryStimuli, but EnableSound is False.", RuntimeWarning)
# ------------------- Read in State Machine States. ------------------------------------------------------------------
if 'StateMachine' in Config:
from treadmillio.taskstatemachine import TaskStateMachine
# BUG: Should check to make sure states are all connected properly?
StateMachine = stack.enter_context(TaskStateMachine(Config['StateMachine'], Interface, SoundController))
else:
StateMachine = None
# ------------------- Read in VR Reward Zones. ------------------------------------------------------------------
if 'RewardZones' in Config:
from treadmillio.rewardzone import RewardZoneController
RewardZones = RewardZoneController(Config['RewardZones'], Interface, SoundController)
else:
RewardZones = None
if DoLogCommands:
# -------------------------- Set up all the different log files -------------------------------------
# Log git diffs for provenance
import git # gitpython
repo = git.Repo(search_parent_directories=True)
GitCommit = repo.head.object.hexsha
GitChangedFiles = [fn.a_path for fn in repo.index.diff(None)]
GitPatch = [fn.diff for fn in repo.index.diff(None, create_patch=True)]
with open(os.path.join(log_directory, 'ExperimentCodeDiffs.txt'), 'w') as git_file:
print(f' Git Commit: {GitCommit}',file=git_file)
if GitChangedFiles:
print(f' ChangedFiles: {GitChangedFiles}',file=git_file)
print(f'Patch:\n{GitPatch}',file=git_file)
# Log config file used
with open(os.path.join(log_directory, 'ParsedConfig.yaml'), 'w') as yaml_file:
yaml.dump(Config, yaml_file, indent=4)
# Create data log file and write header
log_file = stack.enter_context(open(os.path.join(log_directory, 'DataLog.csv'), 'w', newline=''))
print(f'Experiment Data File.\n Version {NamedVersion}',file=log_file)
log_writer = csv.writer(log_file) # logging is actually CSV format
if StateMachine and DoLogCommands:
# Create state machine log file and write header
state_machine_log = stack.enter_context(open(os.path.join(log_directory, 'StatemachineLog.csv'), 'w', newline=''))
print(f'State Machine Log File.\n Version {NamedVersion}',file=state_machine_log)
state_log_writer = csv.writer(state_machine_log)
if RewardZones and DoLogCommands:
# Create state machine log file and write header
reward_zone_log = stack.enter_context(open(os.path.join(log_directory, 'RewardzoneLog.csv'), 'w', newline='', buffering=1))
print(f'Reward Zone Log File.\n Version {NamedVersion}',file=reward_zone_log)
reward_zone_writer = csv.writer(reward_zone_log)
if Profiling:
execution_log = stack.enter_context(open(os.path.join(log_directory, 'execution.csv'), 'w', newline=''))
execution_writer = csv.writer(execution_log)
# ------------------- Webcam Video Recording. ------------------------------------------------------------------ | else:
for cameraname, camera in Config['Cameras'].items():
if camera['RecordVideo']:
print('Over-riding camera configuration to not record video or timestamps!!!')
camera['RecordVideo'] = False
for cameraname, camera in Config['Cameras'].items():
shared_termination_flag = RunCameraInterface(camera) # this starts a bunch of processes
# ------------------- Webcam Video Recording. ------------------------------------------------------------------
if 'GigE-Cameras' in Config:
from treadmillio.camera.gigecam import RunCameraInterface
if DoLogCommands:
for cameraname, camera in Config['GigE-Cameras'].items():
camera['LogDirectory'] = log_directory
else:
for cameraname, camera in Config['GigE-Cameras'].items():
if camera['RecordVideo']:
print('Over-riding camera configuration to not record video or timestamps!!!')
camera['RecordVideo'] = False
for cameraname, camera in Config['GigE-Cameras'].items():
shared_termination_flag = RunCameraInterface(camera) # this starts a bunch of processes
# TODO: Figure out how to handle errors below. The shared termination flag should work, but it doesn't
# ----------------- Initialization
##### Actually connect to IO device. We wait until here so that data doesn't get lost/confused in serial buffer
Interface.connect()
FlagChar, StructSize, MasterTime, InitialEncoder, InitialUnwrappedEncoder, InitialGPIO, AuxGPIO = Interface.read_data() # This will initialize encoder
if SoundController:
SoundController.start_capture() # TODO: This doesn't currently do anything
if StateMachine:
StateMachine.start(MasterTime)
first_sample = True
while(True):
## every 2 ms happens:
FlagChar, StructSize, MasterTime, Encoder, UnwrappedEncoder, GPIO, AuxGPIO = Interface.read_data()
last_ts = time.monotonic() # to match with miniscope timestamps (which is written in msec, here is sec)
# since read_data() is blocking, this is a farther bound (i.e., ts AFTER) data
if DoLogCommands:
if not first_sample:
log_writer.writerow([MasterTime, GPIO, Encoder, UnwrappedEncoder, last_ts, Interface.pos, Interface.velocity]) # Log data from serial interface
else: # for ths first sample, to synchronize to a meaningful clock, we the CLOCK_REALTIME time, in the first row
sys_ts = time.time()
log_writer.writerow([0, InitialGPIO, InitialEncoder, UnwrappedEncoder, sys_ts, 0, 0])
log_writer.writerow([MasterTime, GPIO, Encoder, UnwrappedEncoder, last_ts, Interface.pos, Interface.velocity])
first_sample = False
# -------------------- Updates --------------------
Interface.update_pulses() # lower any outstanding GPIO pulses
if SoundController:
SoundController.update_beeps(MasterTime) # stop any outstanding beeps
if StateMachine:
if DoLogCommands:
StateMachine.update_statemachine(state_log_writer.writerow) # update the state machine
else:
StateMachine.update_statemachine(None) # update the state machine
# unwrapped_pos = (UnwrappedEncoder - initialUnwrappedencoder) / encoder_gain *d *np.pi
# pos = unwrapped_pos % virtual_track_length
if "Maze" in Config:
if (MasterTime % Config['Preferences']['HeartBeat']) == 0:
print(f'Heartbeat {MasterTime} - 0x{GPIO:012b}. Pos - {Interface.pos}. Lap: {Interface.unwrapped_pos // Interface.virtual_track_length}. Speed: {Interface.velocity}')
if StateMachine:
print(StateMachine.CurrentState.label)
if SoundController:
SoundController.update_localized(Interface.pos, Interface.unwrapped_pos) # update VR-position-dependent sounds
if RewardZones:
if DoLogCommands:
RewardZones.update_reward_zones(MasterTime, Interface.pos, GPIO, reward_zone_writer.writerow) # update any VR-position rewards
else:
RewardZones.update_reward_zones(MasterTime, Interface.pos, GPIO) # update any VR-position rewards
if Profiling and DoLogCommands:
exec_time = time.monotonic() - last_ts
execution_writer.writerow([exec_time])
# %% | if 'Cameras' in Config:
from treadmillio.uvccam.uvccam import RunCameraInterface
if DoLogCommands:
for cameraname, camera in Config['Cameras'].items():
camera['LogDirectory'] = log_directory | random_line_split |
RunExperiment.py | #!/usr/bin/env python3
#%%
# NOTE: v2.1.1. 3 different Tones (3kHz, 6kHz, 12kHz) are played based on animal's position on the virtual track.
# ramping volume depend on a parameter named "peak_volume" describing how steep the ramping function
# should be (default 13). Taking care the max peakVolume or OnVolume should not exceed -90dB and 90dB.
# Features:
# sound logic that is controlled only by linear_pos
# pump logic controlled by PumpOn and PumpOffTime, so each time the pump is triggered, it must reset after 100ms regardless of animal's pos
# peak_volume is constant number regardless of different tone frequencies
# max_reward_times controls the max number of reward it can get within one single lap
#
# See SoundStimulus.py - need to run `jackd -R -P50 -v -d alsa -p64 -n2 -P hw:1,0 -r48000` (use aplay -l/-L to figure out which hw device)
#
import time
import datetime
import os
import shutil
import argparse
import yaml
import csv
import zmq
import numpy as np
import warnings
from contextlib import ExitStack
NamedVersion = '1.2'
Profiling = False
### Maybe should add argcomplete for this program?
# Command-line arguments: computer settings
# Command-line arguments: computer settings
parser = argparse.ArgumentParser(description='Run simple linear track experiment.')
parser.add_argument('-P', '--serial-port', default='/dev/ttyACM0',
help='TTY device for USB-serial interface (e.g., /dev/ttyUSB0 or COM10)')
parser.add_argument('-C','--param-file', default='defaults.yaml',
help='YAML file containing task parameters')
parser.add_argument('-R','--random-seed', default=None,
help='Random seed. If specified, this also overrides the YAML configuration file.')
parser.add_argument('--output-dir', default=None,
help='Directory to write output file (defaults to cwd)')
parser.add_argument('--no-check-space', default=None,
help='Exits if less than 10 GB of space is available.')
args = parser.parse_args()
print(args)
if args.param_file == 'defaults.yaml':
warnings.warn('Using default configuration file. That is almost certainly not what you want to do!')
# YAML parameters: task settings
with open(args.param_file, 'r') as f:
Config = yaml.safe_load(f)
# ------------------- Validate config file-------------------------------------------------------------
if 'AuditoryStimuli' in Config:
from treadmillio.soundstimulus import validate_sound_config
validate_sound_config(Config['AuditoryStimuli'])
# ------------------- Setup logging. ------------------------------------------------------------------
DoLogCommands = Config['Preferences'].get('LogCommands', True)
if DoLogCommands:
auto_log_directory = Config['Preferences'].get('AutoLogDirectory', True) if 'Preferences' in Config else True
log_directory = Config['Preferences'].get('LogDirectory', None) if 'Preferences' in Config else None
if log_directory is not None and args.output_dir is not None:
warnings.warn('The configuration file specifies {} for logging, '
'but command line has {}. Using command line!\n'.format(log_directory, args.output_dir))
log_directory = args.output_dir
elif args.output_dir is not None:
log_directory = args.output_dir
elif auto_log_directory:
now = datetime.datetime.now()
log_root = Config['Preferences'].get('LogDirectoryRoot', '') if 'Preferences' in Config else ''
log_directory = os.path.join(log_root, '{}{}'.format('ExperimentLog', now.strftime("%Y-%m-%d_%H%M")))
else:
raise(ValueError('You did not specify a directory for experiment logs, and AutoLogDirectory is False.'))
if not os.path.isabs(log_directory):
log_directory = os.path.join(os.getcwd(), log_directory)
orig_log_directory = log_directory
k=1
while os.path.exists(log_directory):
k = k + 1
log_directory = orig_log_directory + '_' + str(k)
if log_directory != orig_log_directory:
warnings.warn('Specified experiment logs directory {} exists, using {}'.format(orig_log_directory, log_directory))
print('Creating log directory: {}\n'.format(log_directory))
os.makedirs(log_directory)
# Check for available space!
if not args.no_check_space:
disk_total, disk_used, disk_free = shutil.disk_usage(log_directory)
if disk_free < 10*1024.0**3: # if less than 10 GB is available, exit
print("\n!!!! Only {} MB available, exiting. Use the '--no-check-space' "
"command line option to override. !!!!".format(disk_free/(1024.0**2)))
os.removedirs(log_directory)
exit(0)
else:
print('#'*80, '\n')
print('Warning!!! Not logging!!!!')
print('#'*80, '\n')
log_directory = None
EnableSound = Config['Preferences'].get('EnableSound', False)
# Check for random seed on command line or in preferences
if args.random_seed is not None:
np.random.seed(int(args.random_seed))
print(f'Setting random seed to {args.random_seed}.')
if 'RandomSeed' in Config['Preferences']:
Config['Preferences']['RandomSeed'] = int(args.random_seed)
print('Overwriting random seed in preferences file (true value will be logged).')
elif 'RandomSeed' in Config['Preferences']:
np.random.seed(Config['Preferences']['RandomSeed'])
print(f"Setting random seed to {Config['Preferences']['RandomSeed']}.")
with ExitStack() as stack:
# -------------- Initialize Serial IO - Won't actually do anything until we call connect()! --------------------------
from treadmillio.serialinterface import SerialInterface
gpio_config = Config.get('GPIO', None)
if not gpio_config:
warnings.warn("No GPIOs specified in config file. All IOs will be inputs.", RuntimeWarning)
maze_config = Config.get('Maze', None)
if 'Preferences' in Config:
zmq_streaming = Config['Preferences'].get('DataStreamingPort', None)
Interface = stack.enter_context(SerialInterface(SerialPort=args.serial_port, gpio_config=gpio_config,
maze_config=maze_config, zmq_streaming=zmq_streaming))
#----------------------- Sound stimuli --------------
if 'AuditoryStimuli' in Config and EnableSound:
from treadmillio.soundstimulus import SoundStimulusController
SoundController = stack.enter_context(SoundStimulusController(Config['AuditoryStimuli'], Interface.virtual_track_length,
Interface.maze_topology, log_directory))
else:
SoundController = None
if 'AuditoryStimuli' in Config:
warnings.warn("Config file specified AuditoryStimuli, but EnableSound is False.", RuntimeWarning)
# ------------------- Read in State Machine States. ------------------------------------------------------------------
if 'StateMachine' in Config:
from treadmillio.taskstatemachine import TaskStateMachine
# BUG: Should check to make sure states are all connected properly?
StateMachine = stack.enter_context(TaskStateMachine(Config['StateMachine'], Interface, SoundController))
else:
StateMachine = None
# ------------------- Read in VR Reward Zones. ------------------------------------------------------------------
if 'RewardZones' in Config:
from treadmillio.rewardzone import RewardZoneController
RewardZones = RewardZoneController(Config['RewardZones'], Interface, SoundController)
else:
RewardZones = None
if DoLogCommands:
# -------------------------- Set up all the different log files -------------------------------------
# Log git diffs for provenance
import git # gitpython
repo = git.Repo(search_parent_directories=True)
GitCommit = repo.head.object.hexsha
GitChangedFiles = [fn.a_path for fn in repo.index.diff(None)]
GitPatch = [fn.diff for fn in repo.index.diff(None, create_patch=True)]
with open(os.path.join(log_directory, 'ExperimentCodeDiffs.txt'), 'w') as git_file:
print(f' Git Commit: {GitCommit}',file=git_file)
if GitChangedFiles:
print(f' ChangedFiles: {GitChangedFiles}',file=git_file)
print(f'Patch:\n{GitPatch}',file=git_file)
# Log config file used
with open(os.path.join(log_directory, 'ParsedConfig.yaml'), 'w') as yaml_file:
yaml.dump(Config, yaml_file, indent=4)
# Create data log file and write header
log_file = stack.enter_context(open(os.path.join(log_directory, 'DataLog.csv'), 'w', newline=''))
print(f'Experiment Data File.\n Version {NamedVersion}',file=log_file)
log_writer = csv.writer(log_file) # logging is actually CSV format
if StateMachine and DoLogCommands:
# Create state machine log file and write header
state_machine_log = stack.enter_context(open(os.path.join(log_directory, 'StatemachineLog.csv'), 'w', newline=''))
print(f'State Machine Log File.\n Version {NamedVersion}',file=state_machine_log)
state_log_writer = csv.writer(state_machine_log)
if RewardZones and DoLogCommands:
# Create state machine log file and write header
reward_zone_log = stack.enter_context(open(os.path.join(log_directory, 'RewardzoneLog.csv'), 'w', newline='', buffering=1))
print(f'Reward Zone Log File.\n Version {NamedVersion}',file=reward_zone_log)
reward_zone_writer = csv.writer(reward_zone_log)
if Profiling:
execution_log = stack.enter_context(open(os.path.join(log_directory, 'execution.csv'), 'w', newline=''))
execution_writer = csv.writer(execution_log)
# ------------------- Webcam Video Recording. ------------------------------------------------------------------
if 'Cameras' in Config:
from treadmillio.uvccam.uvccam import RunCameraInterface
if DoLogCommands:
for cameraname, camera in Config['Cameras'].items():
camera['LogDirectory'] = log_directory
else:
for cameraname, camera in Config['Cameras'].items():
if camera['RecordVideo']:
print('Over-riding camera configuration to not record video or timestamps!!!')
camera['RecordVideo'] = False
for cameraname, camera in Config['Cameras'].items():
shared_termination_flag = RunCameraInterface(camera) # this starts a bunch of processes
# ------------------- Webcam Video Recording. ------------------------------------------------------------------
if 'GigE-Cameras' in Config:
from treadmillio.camera.gigecam import RunCameraInterface
if DoLogCommands:
for cameraname, camera in Config['GigE-Cameras'].items():
camera['LogDirectory'] = log_directory
else:
for cameraname, camera in Config['GigE-Cameras'].items():
if camera['RecordVideo']:
print('Over-riding camera configuration to not record video or timestamps!!!')
camera['RecordVideo'] = False
for cameraname, camera in Config['GigE-Cameras'].items():
shared_termination_flag = RunCameraInterface(camera) # this starts a bunch of processes
# TODO: Figure out how to handle errors below. The shared termination flag should work, but it doesn't
# ----------------- Initialization
##### Actually connect to IO device. We wait until here so that data doesn't get lost/confused in serial buffer
Interface.connect()
FlagChar, StructSize, MasterTime, InitialEncoder, InitialUnwrappedEncoder, InitialGPIO, AuxGPIO = Interface.read_data() # This will initialize encoder
if SoundController:
SoundController.start_capture() # TODO: This doesn't currently do anything
if StateMachine:
StateMachine.start(MasterTime)
first_sample = True
while(True):
## every 2 ms happens:
FlagChar, StructSize, MasterTime, Encoder, UnwrappedEncoder, GPIO, AuxGPIO = Interface.read_data()
last_ts = time.monotonic() # to match with miniscope timestamps (which is written in msec, here is sec)
# since read_data() is blocking, this is a farther bound (i.e., ts AFTER) data
if DoLogCommands:
if not first_sample:
log_writer.writerow([MasterTime, GPIO, Encoder, UnwrappedEncoder, last_ts, Interface.pos, Interface.velocity]) # Log data from serial interface
else: # for ths first sample, to synchronize to a meaningful clock, we the CLOCK_REALTIME time, in the first row
sys_ts = time.time()
log_writer.writerow([0, InitialGPIO, InitialEncoder, UnwrappedEncoder, sys_ts, 0, 0])
log_writer.writerow([MasterTime, GPIO, Encoder, UnwrappedEncoder, last_ts, Interface.pos, Interface.velocity])
first_sample = False
# -------------------- Updates --------------------
Interface.update_pulses() # lower any outstanding GPIO pulses
if SoundController:
SoundController.update_beeps(MasterTime) # stop any outstanding beeps
if StateMachine:
|
# unwrapped_pos = (UnwrappedEncoder - initialUnwrappedencoder) / encoder_gain *d *np.pi
# pos = unwrapped_pos % virtual_track_length
if "Maze" in Config:
if (MasterTime % Config['Preferences']['HeartBeat']) == 0:
print(f'Heartbeat {MasterTime} - 0x{GPIO:012b}. Pos - {Interface.pos}. Lap: {Interface.unwrapped_pos // Interface.virtual_track_length}. Speed: {Interface.velocity}')
if StateMachine:
print(StateMachine.CurrentState.label)
if SoundController:
SoundController.update_localized(Interface.pos, Interface.unwrapped_pos) # update VR-position-dependent sounds
if RewardZones:
if DoLogCommands:
RewardZones.update_reward_zones(MasterTime, Interface.pos, GPIO, reward_zone_writer.writerow) # update any VR-position rewards
else:
RewardZones.update_reward_zones(MasterTime, Interface.pos, GPIO) # update any VR-position rewards
if Profiling and DoLogCommands:
exec_time = time.monotonic() - last_ts
execution_writer.writerow([exec_time])
# %%
| if DoLogCommands:
StateMachine.update_statemachine(state_log_writer.writerow) # update the state machine
else:
StateMachine.update_statemachine(None) # update the state machine | conditional_block |
single_iceberg_model.py | # -*- coding: utf-8 -*-
"""
Created on Fri Sep 3 16:20:04 2021
This model is run from a GUI in which there is a run program button.
Also appearing on the GUI are 'data inputs', 'show results' and
'close window' buttons.
Data inputs shows the two input files in a graphical format.
Show results shows the calculation results, only after the program is run.
Close window closes the window, writes output data to a file and
ends the program.
When you run the code, a message will appear on the GUI 'Program Running'.
The buttons are for the user to interact with the program inputs/ outputs.
Program functions:
download
pull_heights
calculations
show_graphs
show_results
quit
"""
print ("Program Starting")
#Import statements
import tkinter
import requests
import matplotlib
matplotlib.use ('TkAgg')
import matplotlib.pyplot as plt
import csv
import time
"""
Stage 1: Initialise GUI main window
"""
root = tkinter.Tk ()
root.wm_title ("Iceberg-towing Model")
"""
Stage 2: Download data from Web to files
"""
#Download timing start
start_download = time.time ()
print ("Downloading Web data to files")
#Downloading data and writing to files function
def download (url):
"""
Downloads web data and writes it to a file
Parameters
----------
url : String
Web address used as the data source.
Returns
-------
None.
"""
path, url = url
r = requests.get (url, stream = True)
content = r.text
#print (content)
with open (path + '.txt', 'w') as f:
f.write (content)
#List of url web data
urls = [('Lidar', 'https://www.geog.leeds.ac.uk/courses/computing/study/core-python-odl2/assessment2/white1.lidar'),
('Radar', 'https://www.geog.leeds.ac.uk/courses/computing/study/core-python-odl2/assessment2/white1.radar')]
#Call download function
for x in urls:
download (x)
"""
Stage 3: Reading csv data into lists
"""
print ("Reading csv data")
#Reading csv into lidar
#Lidar represents the height of the pixel within the environment
lidar = []
with open ('Lidar.txt', newline = '') as f1:
reader = csv.reader (f1, quoting=csv.QUOTE_NONNUMERIC)
for row in reader:
#print (row)
rowlist_L = []
for value in row:
#print (value)
rowlist_L.append (int (value))
lidar.append (rowlist_L)
#for row in lidar:
#print ("length of row:", len (row))
#List contains empty rows
#Remove empty rows
lidar_clean = [x for x in lidar if x != []]
#print ("lidar2 length =", len(lidar_clean))
# for row in lidar_clean:
# print ("length of row:", len (row))
#Data familiaristation
# print (lidar_clean)
# print (type(lidar_clean))
# print ("Lidar length = ", len(lidar_clean))
#Reading csv into radar
#Radar represents ice or water within the environment
radar = []
with open ('Radar.txt', newline = '') as f2:
reader2 = csv.reader (f2, quoting=csv.QUOTE_NONNUMERIC)
for row in reader2:
#print (row)
rowlist_R = []
for value in row:
#print (value)
rowlist_R.append (int (value))
radar.append (rowlist_R)
#for row in radar:
#print ("length of row:", len (row))
#list contains empty rows
#remove empty rows
radar_clean = [x for x in radar if x != []]
#for row in radar_clean:
#print ("length of row:", len (row))
#Data familiarisation
# print (radar_clean)
# print (type(radar_clean))
# print ("Radar length = ", len(radar_clean))
#Download and list creation timing
end_download = time.time ()
download_time = (end_download - start_download)
print ("Download to reading in time: " + str(download_time))
# """
# Displaying lidar and radar data
# """
# #Code based on https://www.kite.com/python/answers/how-to-show-two-figures-at-once-in-matplotlib-in-python
# #Define axes
# plt.ylim = (0, 300)
# plt.xlim = (0, 300)
# #Lidar plot
# lidar_plot = plt.figure (1)
# #Assign title
# plt.title ('Lidar data')
# plt.imshow (lidar_clean)
# #Radar plot
# radar_plot = plt.figure (2)
# #Assign title
# plt.title ('Radar data')
# plt.imshow (radar_clean)
# #Show plots
# plt.show ()
#Commented code above has been moved into the GUI so removed here
"""
Stage 4: Finding ice areas and pulling their heights
"""
#Calculation timing
start_calculation = time.time ()
#Data familiarisation
# print (radar_clean [145][150])
# print (radar_clean [145])
# print (lidar_clean [145][150])
# print (lidar_clean [145])
print ("Locating ice and pulling heights")
#Pulling heights from lidar data
def pull_heights ():
"""
Pulls height values from the lidar data from ice locations within the
radar data, appending the heights to an ice list
Returns
-------
None.
"""
global ice
ice = []
for i in range (len (radar_clean)):
for j in range (len (radar_clean)):
|
#print (ice)
#print (len (ice))
print ("Ice located and heights pulled")
"""
Stage 5: Calculating ice mass
"""
print ("Determining iceberg size")
#Pull heights
def calculations ():
pull_heights ()
ice_size = (len(ice))
#print ("ice size:", ice_size)
#Calculating ice volume above sea level
print ("Calculating ice mass")
#Convert ice values from cm to m
ice_m = ((sum (ice)) * 0.1)
#print (ice_m)
#Calculating ice volume above surface
global ice_volume_positive
ice_volume_positive = (ice_size * ice_m)
#print (ice_volume_positive)
#Calculating sub-surface ice volume
global ice_volume_subsurface
ice_volume_subsurface = ice_volume_positive * 10
#print (ice_volume_subsurface)
#Calculating total ice volume
global ice_volume
ice_volume = ice_volume_positive + ice_volume_subsurface
#print (ice_volume)
#Calculating ice mass
global ice_mass
ice_mass = 900 * ice_volume
#print (ice_mass)
print ("Ice mass calculated")
run = tkinter.Label\
(text= ("Running Program" + "\n"))
run.pack ()
results_btn ['state'] = 'normal'
# """
# Stage 6: Calculating towability
# """
# print ("Calculating towability")
# def towability ():
# """
# Determines the towability of the iceberg
# Returns
# -------
# None.
# """
# if ice_mass > 36000000:
# print ("Iceberg cannot be towed")
# else:
# print ("Iceberg can be towed")
# print ("Towability calculated")
#Commented towability code above moved into GUI so removed here
#Calculation timing
end_calculation = time.time ()
calculation_time = (end_calculation - start_calculation)
print ("Calculation time: ", calculation_time)
# """
# Stage 6: Writing data out to a file
# """
# with open ("Data_out.txt", 'w') as FO:
# FO.write ("Above surface volume: " + str(ice_volume_positive) + '\n')
# FO.write ("Subsurface volume: " + str(ice_volume_subsurface) + '\n')
# FO.write ("Total ice volume: " + str(ice_volume) + '\n')
# FO.write ("Total mass: " + str(ice_mass) + '\n')
# Section moved to after GUI is completed as to access global variables
# within calculations function
"""
Stage 6: Initialise and populate GUI
"""
#Label code based on https://www.python-course.eu/tkinter_text_widget.php
#Create GUI description
description = tkinter.Label\
(text="This GUI runs the Iceberg Towability Model and shows its inputs and results"\
+ '\n')
description.pack ()
#Create user defined run
# menu_bar = tkinter.Menu (root)
# root.config (menu = menu_bar)
# model_menu = tkinter.Menu (menu_bar)
# menu_bar.add_cascade (label= "Model", menu= model_menu)
# model_menu.add_command (label= "Run Model", command= calculations)
run_btn = tkinter.Button (root, text= 'Run Program', command = calculations)
run_btn.pack (side= 'top', fill= 'both')
#Show input data graphs
def show_graphs ():
"""
Generates two figures simultaneously displaying lidar and radar input
datasets.
Figures will appear in individual pop-up windows.
Returns
-------
None.
"""
plt.ylim = (0, 300)
plt.xlim = (0, 300)
#Set up lidar plot to figure 1
lidar_plot = plt.figure (1)
#Assign title
plt.title ('Lidar data')
#Assign data
plt.imshow (lidar_clean)
#Set up radar plot to figure 2
radar_plot = plt.figure (2)
#Assign title
plt.title ('Radar data')
#Assign data
plt.imshow (radar_clean)
#Show plots
plt.show ()
#Display the results
def show_results ():
"""
Generates the program results, calculates towability
and disables the results button post-execution
Returns
-------
None.
"""
#Total volume
vol = tkinter.Label\
(text= ("Total volume: " + str (ice_volume) + " m\u00b2"))
vol.pack ()
#Total mass
mass = tkinter.Label\
(text= ("Total mass: " + str (ice_mass) + " kg"))
mass.pack ()
#Towability
print ("Calculating towability")
if ice_mass > 36000000:
tow = tkinter.Label (text = "Iceberg cannot be towed")
else:
tow = tkinter.Label (text = "Iceberg can be towed")
print ("Towability calculated")
tow.pack ()
#Disable button after 1 click
#Code based on https://www.youtube.com/watch?v=QfTo3rK3e48
results_btn ['state'] = 'disabled'
#Close window
def quit (event=None):
"""
Quits and closes the GUI to end the program at a time of the user's choice
Parameters
----------
event : TYPE, optional
DESCRIPTION. The default is None.
Returns
-------
None.
"""
root.destroy ()
#Create and pack buttons
#Button to quit window
quit_btn = tkinter.Button (root, text= 'Close Window', command = quit)
quit_btn.pack (side='bottom', fill= 'both')
#Button to show results
results_btn = tkinter.Button (root, text= 'Show Results', \
command = show_results, state= "disabled")
results_btn.pack (side='bottom', fill= 'both')
#Button to show graphs
graph_btn = tkinter.Button (root, text= 'Data inputs', command = show_graphs)
graph_btn.pack (side= 'bottom', fill= 'both')
#Activate window
root.mainloop ()
"""
Stage 7: Writing data out to a file
"""
with open ("Data_out.txt", 'w') as FO:
FO.write ("Above surface volume: " + str(ice_volume_positive) + '\n')
FO.write ("Subsurface volume: " + str(ice_volume_subsurface) + '\n')
FO.write ("Total ice volume: " + str(ice_volume) + '\n')
FO.write ("Total mass: " + str(ice_mass) + '\n')
if ice_mass > 36000000:
FO.write ("Iceberg cannot be towed")
else:
FO.write ("Iceberg can be towed")
#Finish program
print ("Program Ended")
print ("Thank you for running the program") | radar_clean [i][j] = lidar_clean [i][j]
#print (radar_clean [i][j])
if (radar_clean [i][j]) > 100:
#print (radar_clean [i][j])
ice.append (lidar_clean [i][j]) | conditional_block |
single_iceberg_model.py | # -*- coding: utf-8 -*-
"""
Created on Fri Sep 3 16:20:04 2021
This model is run from a GUI in which there is a run program button.
Also appearing on the GUI are 'data inputs', 'show results' and
'close window' buttons.
Data inputs shows the two input files in a graphical format.
Show results shows the calculation results, only after the program is run.
Close window closes the window, writes output data to a file and
ends the program.
When you run the code, a message will appear on the GUI 'Program Running'.
The buttons are for the user to interact with the program inputs/ outputs.
Program functions:
download
pull_heights
calculations
show_graphs
show_results
quit
"""
print ("Program Starting")
#Import statements
import tkinter
import requests
import matplotlib
matplotlib.use ('TkAgg')
import matplotlib.pyplot as plt
import csv
import time
"""
Stage 1: Initialise GUI main window
"""
root = tkinter.Tk ()
root.wm_title ("Iceberg-towing Model")
"""
Stage 2: Download data from Web to files
"""
#Download timing start
start_download = time.time ()
print ("Downloading Web data to files")
#Downloading data and writing to files function
def download (url):
"""
Downloads web data and writes it to a file
Parameters
----------
url : String
Web address used as the data source.
Returns
-------
None.
"""
path, url = url
r = requests.get (url, stream = True)
content = r.text
#print (content)
with open (path + '.txt', 'w') as f:
f.write (content)
#List of url web data
urls = [('Lidar', 'https://www.geog.leeds.ac.uk/courses/computing/study/core-python-odl2/assessment2/white1.lidar'),
('Radar', 'https://www.geog.leeds.ac.uk/courses/computing/study/core-python-odl2/assessment2/white1.radar')]
#Call download function
for x in urls:
download (x)
"""
Stage 3: Reading csv data into lists
"""
print ("Reading csv data")
#Reading csv into lidar
#Lidar represents the height of the pixel within the environment
lidar = []
with open ('Lidar.txt', newline = '') as f1:
reader = csv.reader (f1, quoting=csv.QUOTE_NONNUMERIC)
for row in reader:
#print (row)
rowlist_L = []
for value in row:
#print (value)
rowlist_L.append (int (value))
lidar.append (rowlist_L)
#for row in lidar:
#print ("length of row:", len (row))
#List contains empty rows
#Remove empty rows
lidar_clean = [x for x in lidar if x != []]
#print ("lidar2 length =", len(lidar_clean))
# for row in lidar_clean:
# print ("length of row:", len (row))
#Data familiaristation
# print (lidar_clean)
# print (type(lidar_clean))
# print ("Lidar length = ", len(lidar_clean))
#Reading csv into radar
#Radar represents ice or water within the environment
radar = []
with open ('Radar.txt', newline = '') as f2:
reader2 = csv.reader (f2, quoting=csv.QUOTE_NONNUMERIC)
for row in reader2:
#print (row)
rowlist_R = []
for value in row:
#print (value)
rowlist_R.append (int (value))
radar.append (rowlist_R)
#for row in radar:
#print ("length of row:", len (row))
#list contains empty rows
#remove empty rows
radar_clean = [x for x in radar if x != []]
#for row in radar_clean:
#print ("length of row:", len (row))
#Data familiarisation
# print (radar_clean)
# print (type(radar_clean))
# print ("Radar length = ", len(radar_clean))
#Download and list creation timing
end_download = time.time ()
download_time = (end_download - start_download)
print ("Download to reading in time: " + str(download_time))
# """
# Displaying lidar and radar data
# """
# #Code based on https://www.kite.com/python/answers/how-to-show-two-figures-at-once-in-matplotlib-in-python
# #Define axes
# plt.ylim = (0, 300)
# plt.xlim = (0, 300)
# #Lidar plot
# lidar_plot = plt.figure (1)
# #Assign title
# plt.title ('Lidar data')
# plt.imshow (lidar_clean)
# #Radar plot
# radar_plot = plt.figure (2)
# #Assign title
# plt.title ('Radar data')
# plt.imshow (radar_clean)
# #Show plots
# plt.show ()
#Commented code above has been moved into the GUI so removed here
"""
Stage 4: Finding ice areas and pulling their heights
"""
#Calculation timing
start_calculation = time.time ()
#Data familiarisation
# print (radar_clean [145][150])
# print (radar_clean [145])
# print (lidar_clean [145][150])
# print (lidar_clean [145])
print ("Locating ice and pulling heights")
#Pulling heights from lidar data
def pull_heights ():
"""
Pulls height values from the lidar data from ice locations within the
radar data, appending the heights to an ice list
Returns
-------
None. | for i in range (len (radar_clean)):
for j in range (len (radar_clean)):
radar_clean [i][j] = lidar_clean [i][j]
#print (radar_clean [i][j])
if (radar_clean [i][j]) > 100:
#print (radar_clean [i][j])
ice.append (lidar_clean [i][j])
#print (ice)
#print (len (ice))
print ("Ice located and heights pulled")
"""
Stage 5: Calculating ice mass
"""
print ("Determining iceberg size")
#Pull heights
def calculations ():
pull_heights ()
ice_size = (len(ice))
#print ("ice size:", ice_size)
#Calculating ice volume above sea level
print ("Calculating ice mass")
#Convert ice values from cm to m
ice_m = ((sum (ice)) * 0.1)
#print (ice_m)
#Calculating ice volume above surface
global ice_volume_positive
ice_volume_positive = (ice_size * ice_m)
#print (ice_volume_positive)
#Calculating sub-surface ice volume
global ice_volume_subsurface
ice_volume_subsurface = ice_volume_positive * 10
#print (ice_volume_subsurface)
#Calculating total ice volume
global ice_volume
ice_volume = ice_volume_positive + ice_volume_subsurface
#print (ice_volume)
#Calculating ice mass
global ice_mass
ice_mass = 900 * ice_volume
#print (ice_mass)
print ("Ice mass calculated")
run = tkinter.Label\
(text= ("Running Program" + "\n"))
run.pack ()
results_btn ['state'] = 'normal'
# """
# Stage 6: Calculating towability
# """
# print ("Calculating towability")
# def towability ():
# """
# Determines the towability of the iceberg
# Returns
# -------
# None.
# """
# if ice_mass > 36000000:
# print ("Iceberg cannot be towed")
# else:
# print ("Iceberg can be towed")
# print ("Towability calculated")
#Commented towability code above moved into GUI so removed here
#Calculation timing
end_calculation = time.time ()
calculation_time = (end_calculation - start_calculation)
print ("Calculation time: ", calculation_time)
# """
# Stage 6: Writing data out to a file
# """
# with open ("Data_out.txt", 'w') as FO:
# FO.write ("Above surface volume: " + str(ice_volume_positive) + '\n')
# FO.write ("Subsurface volume: " + str(ice_volume_subsurface) + '\n')
# FO.write ("Total ice volume: " + str(ice_volume) + '\n')
# FO.write ("Total mass: " + str(ice_mass) + '\n')
# Section moved to after GUI is completed as to access global variables
# within calculations function
"""
Stage 6: Initialise and populate GUI
"""
#Label code based on https://www.python-course.eu/tkinter_text_widget.php
#Create GUI description
description = tkinter.Label\
(text="This GUI runs the Iceberg Towability Model and shows its inputs and results"\
+ '\n')
description.pack ()
#Create user defined run
# menu_bar = tkinter.Menu (root)
# root.config (menu = menu_bar)
# model_menu = tkinter.Menu (menu_bar)
# menu_bar.add_cascade (label= "Model", menu= model_menu)
# model_menu.add_command (label= "Run Model", command= calculations)
run_btn = tkinter.Button (root, text= 'Run Program', command = calculations)
run_btn.pack (side= 'top', fill= 'both')
#Show input data graphs
def show_graphs ():
"""
Generates two figures simultaneously displaying lidar and radar input
datasets.
Figures will appear in individual pop-up windows.
Returns
-------
None.
"""
plt.ylim = (0, 300)
plt.xlim = (0, 300)
#Set up lidar plot to figure 1
lidar_plot = plt.figure (1)
#Assign title
plt.title ('Lidar data')
#Assign data
plt.imshow (lidar_clean)
#Set up radar plot to figure 2
radar_plot = plt.figure (2)
#Assign title
plt.title ('Radar data')
#Assign data
plt.imshow (radar_clean)
#Show plots
plt.show ()
#Display the results
def show_results ():
"""
Generates the program results, calculates towability
and disables the results button post-execution
Returns
-------
None.
"""
#Total volume
vol = tkinter.Label\
(text= ("Total volume: " + str (ice_volume) + " m\u00b2"))
vol.pack ()
#Total mass
mass = tkinter.Label\
(text= ("Total mass: " + str (ice_mass) + " kg"))
mass.pack ()
#Towability
print ("Calculating towability")
if ice_mass > 36000000:
tow = tkinter.Label (text = "Iceberg cannot be towed")
else:
tow = tkinter.Label (text = "Iceberg can be towed")
print ("Towability calculated")
tow.pack ()
#Disable button after 1 click
#Code based on https://www.youtube.com/watch?v=QfTo3rK3e48
results_btn ['state'] = 'disabled'
#Close window
def quit (event=None):
"""
Quits and closes the GUI to end the program at a time of the user's choice
Parameters
----------
event : TYPE, optional
DESCRIPTION. The default is None.
Returns
-------
None.
"""
root.destroy ()
#Create and pack buttons
#Button to quit window
quit_btn = tkinter.Button (root, text= 'Close Window', command = quit)
quit_btn.pack (side='bottom', fill= 'both')
#Button to show results
results_btn = tkinter.Button (root, text= 'Show Results', \
command = show_results, state= "disabled")
results_btn.pack (side='bottom', fill= 'both')
#Button to show graphs
graph_btn = tkinter.Button (root, text= 'Data inputs', command = show_graphs)
graph_btn.pack (side= 'bottom', fill= 'both')
#Activate window
root.mainloop ()
"""
Stage 7: Writing data out to a file
"""
with open ("Data_out.txt", 'w') as FO:
FO.write ("Above surface volume: " + str(ice_volume_positive) + '\n')
FO.write ("Subsurface volume: " + str(ice_volume_subsurface) + '\n')
FO.write ("Total ice volume: " + str(ice_volume) + '\n')
FO.write ("Total mass: " + str(ice_mass) + '\n')
if ice_mass > 36000000:
FO.write ("Iceberg cannot be towed")
else:
FO.write ("Iceberg can be towed")
#Finish program
print ("Program Ended")
print ("Thank you for running the program") |
"""
global ice
ice = [] | random_line_split |
single_iceberg_model.py | # -*- coding: utf-8 -*-
"""
Created on Fri Sep 3 16:20:04 2021
This model is run from a GUI in which there is a run program button.
Also appearing on the GUI are 'data inputs', 'show results' and
'close window' buttons.
Data inputs shows the two input files in a graphical format.
Show results shows the calculation results, only after the program is run.
Close window closes the window, writes output data to a file and
ends the program.
When you run the code, a message will appear on the GUI 'Program Running'.
The buttons are for the user to interact with the program inputs/ outputs.
Program functions:
download
pull_heights
calculations
show_graphs
show_results
quit
"""
print ("Program Starting")
#Import statements
import tkinter
import requests
import matplotlib
matplotlib.use ('TkAgg')
import matplotlib.pyplot as plt
import csv
import time
"""
Stage 1: Initialise GUI main window
"""
root = tkinter.Tk ()
root.wm_title ("Iceberg-towing Model")
"""
Stage 2: Download data from Web to files
"""
#Download timing start
start_download = time.time ()
print ("Downloading Web data to files")
#Downloading data and writing to files function
def download (url):
|
#List of url web data
urls = [('Lidar', 'https://www.geog.leeds.ac.uk/courses/computing/study/core-python-odl2/assessment2/white1.lidar'),
('Radar', 'https://www.geog.leeds.ac.uk/courses/computing/study/core-python-odl2/assessment2/white1.radar')]
#Call download function
for x in urls:
download (x)
"""
Stage 3: Reading csv data into lists
"""
print ("Reading csv data")
#Reading csv into lidar
#Lidar represents the height of the pixel within the environment
lidar = []
with open ('Lidar.txt', newline = '') as f1:
reader = csv.reader (f1, quoting=csv.QUOTE_NONNUMERIC)
for row in reader:
#print (row)
rowlist_L = []
for value in row:
#print (value)
rowlist_L.append (int (value))
lidar.append (rowlist_L)
#for row in lidar:
#print ("length of row:", len (row))
#List contains empty rows
#Remove empty rows
lidar_clean = [x for x in lidar if x != []]
#print ("lidar2 length =", len(lidar_clean))
# for row in lidar_clean:
# print ("length of row:", len (row))
#Data familiaristation
# print (lidar_clean)
# print (type(lidar_clean))
# print ("Lidar length = ", len(lidar_clean))
#Reading csv into radar
#Radar represents ice or water within the environment
radar = []
with open ('Radar.txt', newline = '') as f2:
reader2 = csv.reader (f2, quoting=csv.QUOTE_NONNUMERIC)
for row in reader2:
#print (row)
rowlist_R = []
for value in row:
#print (value)
rowlist_R.append (int (value))
radar.append (rowlist_R)
#for row in radar:
#print ("length of row:", len (row))
#list contains empty rows
#remove empty rows
radar_clean = [x for x in radar if x != []]
#for row in radar_clean:
#print ("length of row:", len (row))
#Data familiarisation
# print (radar_clean)
# print (type(radar_clean))
# print ("Radar length = ", len(radar_clean))
#Download and list creation timing
end_download = time.time ()
download_time = (end_download - start_download)
print ("Download to reading in time: " + str(download_time))
# """
# Displaying lidar and radar data
# """
# #Code based on https://www.kite.com/python/answers/how-to-show-two-figures-at-once-in-matplotlib-in-python
# #Define axes
# plt.ylim = (0, 300)
# plt.xlim = (0, 300)
# #Lidar plot
# lidar_plot = plt.figure (1)
# #Assign title
# plt.title ('Lidar data')
# plt.imshow (lidar_clean)
# #Radar plot
# radar_plot = plt.figure (2)
# #Assign title
# plt.title ('Radar data')
# plt.imshow (radar_clean)
# #Show plots
# plt.show ()
#Commented code above has been moved into the GUI so removed here
"""
Stage 4: Finding ice areas and pulling their heights
"""
#Calculation timing
start_calculation = time.time ()
#Data familiarisation
# print (radar_clean [145][150])
# print (radar_clean [145])
# print (lidar_clean [145][150])
# print (lidar_clean [145])
print ("Locating ice and pulling heights")
#Pulling heights from lidar data
def pull_heights ():
"""
Pulls height values from the lidar data from ice locations within the
radar data, appending the heights to an ice list
Returns
-------
None.
"""
global ice
ice = []
for i in range (len (radar_clean)):
for j in range (len (radar_clean)):
radar_clean [i][j] = lidar_clean [i][j]
#print (radar_clean [i][j])
if (radar_clean [i][j]) > 100:
#print (radar_clean [i][j])
ice.append (lidar_clean [i][j])
#print (ice)
#print (len (ice))
print ("Ice located and heights pulled")
"""
Stage 5: Calculating ice mass
"""
print ("Determining iceberg size")
#Pull heights
def calculations ():
pull_heights ()
ice_size = (len(ice))
#print ("ice size:", ice_size)
#Calculating ice volume above sea level
print ("Calculating ice mass")
#Convert ice values from cm to m
ice_m = ((sum (ice)) * 0.1)
#print (ice_m)
#Calculating ice volume above surface
global ice_volume_positive
ice_volume_positive = (ice_size * ice_m)
#print (ice_volume_positive)
#Calculating sub-surface ice volume
global ice_volume_subsurface
ice_volume_subsurface = ice_volume_positive * 10
#print (ice_volume_subsurface)
#Calculating total ice volume
global ice_volume
ice_volume = ice_volume_positive + ice_volume_subsurface
#print (ice_volume)
#Calculating ice mass
global ice_mass
ice_mass = 900 * ice_volume
#print (ice_mass)
print ("Ice mass calculated")
run = tkinter.Label\
(text= ("Running Program" + "\n"))
run.pack ()
results_btn ['state'] = 'normal'
# """
# Stage 6: Calculating towability
# """
# print ("Calculating towability")
# def towability ():
# """
# Determines the towability of the iceberg
# Returns
# -------
# None.
# """
# if ice_mass > 36000000:
# print ("Iceberg cannot be towed")
# else:
# print ("Iceberg can be towed")
# print ("Towability calculated")
#Commented towability code above moved into GUI so removed here
#Calculation timing
end_calculation = time.time ()
calculation_time = (end_calculation - start_calculation)
print ("Calculation time: ", calculation_time)
# """
# Stage 6: Writing data out to a file
# """
# with open ("Data_out.txt", 'w') as FO:
# FO.write ("Above surface volume: " + str(ice_volume_positive) + '\n')
# FO.write ("Subsurface volume: " + str(ice_volume_subsurface) + '\n')
# FO.write ("Total ice volume: " + str(ice_volume) + '\n')
# FO.write ("Total mass: " + str(ice_mass) + '\n')
# Section moved to after GUI is completed as to access global variables
# within calculations function
"""
Stage 6: Initialise and populate GUI
"""
#Label code based on https://www.python-course.eu/tkinter_text_widget.php
#Create GUI description
description = tkinter.Label\
(text="This GUI runs the Iceberg Towability Model and shows its inputs and results"\
+ '\n')
description.pack ()
#Create user defined run
# menu_bar = tkinter.Menu (root)
# root.config (menu = menu_bar)
# model_menu = tkinter.Menu (menu_bar)
# menu_bar.add_cascade (label= "Model", menu= model_menu)
# model_menu.add_command (label= "Run Model", command= calculations)
run_btn = tkinter.Button (root, text= 'Run Program', command = calculations)
run_btn.pack (side= 'top', fill= 'both')
#Show input data graphs
def show_graphs ():
"""
Generates two figures simultaneously displaying lidar and radar input
datasets.
Figures will appear in individual pop-up windows.
Returns
-------
None.
"""
plt.ylim = (0, 300)
plt.xlim = (0, 300)
#Set up lidar plot to figure 1
lidar_plot = plt.figure (1)
#Assign title
plt.title ('Lidar data')
#Assign data
plt.imshow (lidar_clean)
#Set up radar plot to figure 2
radar_plot = plt.figure (2)
#Assign title
plt.title ('Radar data')
#Assign data
plt.imshow (radar_clean)
#Show plots
plt.show ()
#Display the results
def show_results ():
"""
Generates the program results, calculates towability
and disables the results button post-execution
Returns
-------
None.
"""
#Total volume
vol = tkinter.Label\
(text= ("Total volume: " + str (ice_volume) + " m\u00b2"))
vol.pack ()
#Total mass
mass = tkinter.Label\
(text= ("Total mass: " + str (ice_mass) + " kg"))
mass.pack ()
#Towability
print ("Calculating towability")
if ice_mass > 36000000:
tow = tkinter.Label (text = "Iceberg cannot be towed")
else:
tow = tkinter.Label (text = "Iceberg can be towed")
print ("Towability calculated")
tow.pack ()
#Disable button after 1 click
#Code based on https://www.youtube.com/watch?v=QfTo3rK3e48
results_btn ['state'] = 'disabled'
#Close window
def quit (event=None):
"""
Quits and closes the GUI to end the program at a time of the user's choice
Parameters
----------
event : TYPE, optional
DESCRIPTION. The default is None.
Returns
-------
None.
"""
root.destroy ()
#Create and pack buttons
#Button to quit window
quit_btn = tkinter.Button (root, text= 'Close Window', command = quit)
quit_btn.pack (side='bottom', fill= 'both')
#Button to show results
results_btn = tkinter.Button (root, text= 'Show Results', \
command = show_results, state= "disabled")
results_btn.pack (side='bottom', fill= 'both')
#Button to show graphs
graph_btn = tkinter.Button (root, text= 'Data inputs', command = show_graphs)
graph_btn.pack (side= 'bottom', fill= 'both')
#Activate window
root.mainloop ()
"""
Stage 7: Writing data out to a file
"""
with open ("Data_out.txt", 'w') as FO:
FO.write ("Above surface volume: " + str(ice_volume_positive) + '\n')
FO.write ("Subsurface volume: " + str(ice_volume_subsurface) + '\n')
FO.write ("Total ice volume: " + str(ice_volume) + '\n')
FO.write ("Total mass: " + str(ice_mass) + '\n')
if ice_mass > 36000000:
FO.write ("Iceberg cannot be towed")
else:
FO.write ("Iceberg can be towed")
#Finish program
print ("Program Ended")
print ("Thank you for running the program") | """
Downloads web data and writes it to a file
Parameters
----------
url : String
Web address used as the data source.
Returns
-------
None.
"""
path, url = url
r = requests.get (url, stream = True)
content = r.text
#print (content)
with open (path + '.txt', 'w') as f:
f.write (content) | identifier_body |
single_iceberg_model.py | # -*- coding: utf-8 -*-
"""
Created on Fri Sep 3 16:20:04 2021
This model is run from a GUI in which there is a run program button.
Also appearing on the GUI are 'data inputs', 'show results' and
'close window' buttons.
Data inputs shows the two input files in a graphical format.
Show results shows the calculation results, only after the program is run.
Close window closes the window, writes output data to a file and
ends the program.
When you run the code, a message will appear on the GUI 'Program Running'.
The buttons are for the user to interact with the program inputs/ outputs.
Program functions:
download
pull_heights
calculations
show_graphs
show_results
quit
"""
print ("Program Starting")
#Import statements
import tkinter
import requests
import matplotlib
matplotlib.use ('TkAgg')
import matplotlib.pyplot as plt
import csv
import time
"""
Stage 1: Initialise GUI main window
"""
root = tkinter.Tk ()
root.wm_title ("Iceberg-towing Model")
"""
Stage 2: Download data from Web to files
"""
#Download timing start
start_download = time.time ()
print ("Downloading Web data to files")
#Downloading data and writing to files function
def download (url):
"""
Downloads web data and writes it to a file
Parameters
----------
url : String
Web address used as the data source.
Returns
-------
None.
"""
path, url = url
r = requests.get (url, stream = True)
content = r.text
#print (content)
with open (path + '.txt', 'w') as f:
f.write (content)
#List of url web data
urls = [('Lidar', 'https://www.geog.leeds.ac.uk/courses/computing/study/core-python-odl2/assessment2/white1.lidar'),
('Radar', 'https://www.geog.leeds.ac.uk/courses/computing/study/core-python-odl2/assessment2/white1.radar')]
#Call download function
for x in urls:
download (x)
"""
Stage 3: Reading csv data into lists
"""
print ("Reading csv data")
#Reading csv into lidar
#Lidar represents the height of the pixel within the environment
lidar = []
with open ('Lidar.txt', newline = '') as f1:
reader = csv.reader (f1, quoting=csv.QUOTE_NONNUMERIC)
for row in reader:
#print (row)
rowlist_L = []
for value in row:
#print (value)
rowlist_L.append (int (value))
lidar.append (rowlist_L)
#for row in lidar:
#print ("length of row:", len (row))
#List contains empty rows
#Remove empty rows
lidar_clean = [x for x in lidar if x != []]
#print ("lidar2 length =", len(lidar_clean))
# for row in lidar_clean:
# print ("length of row:", len (row))
#Data familiaristation
# print (lidar_clean)
# print (type(lidar_clean))
# print ("Lidar length = ", len(lidar_clean))
#Reading csv into radar
#Radar represents ice or water within the environment
radar = []
with open ('Radar.txt', newline = '') as f2:
reader2 = csv.reader (f2, quoting=csv.QUOTE_NONNUMERIC)
for row in reader2:
#print (row)
rowlist_R = []
for value in row:
#print (value)
rowlist_R.append (int (value))
radar.append (rowlist_R)
#for row in radar:
#print ("length of row:", len (row))
#list contains empty rows
#remove empty rows
radar_clean = [x for x in radar if x != []]
#for row in radar_clean:
#print ("length of row:", len (row))
#Data familiarisation
# print (radar_clean)
# print (type(radar_clean))
# print ("Radar length = ", len(radar_clean))
#Download and list creation timing
end_download = time.time ()
download_time = (end_download - start_download)
print ("Download to reading in time: " + str(download_time))
# """
# Displaying lidar and radar data
# """
# #Code based on https://www.kite.com/python/answers/how-to-show-two-figures-at-once-in-matplotlib-in-python
# #Define axes
# plt.ylim = (0, 300)
# plt.xlim = (0, 300)
# #Lidar plot
# lidar_plot = plt.figure (1)
# #Assign title
# plt.title ('Lidar data')
# plt.imshow (lidar_clean)
# #Radar plot
# radar_plot = plt.figure (2)
# #Assign title
# plt.title ('Radar data')
# plt.imshow (radar_clean)
# #Show plots
# plt.show ()
#Commented code above has been moved into the GUI so removed here
"""
Stage 4: Finding ice areas and pulling their heights
"""
#Calculation timing
start_calculation = time.time ()
#Data familiarisation
# print (radar_clean [145][150])
# print (radar_clean [145])
# print (lidar_clean [145][150])
# print (lidar_clean [145])
print ("Locating ice and pulling heights")
#Pulling heights from lidar data
def pull_heights ():
"""
Pulls height values from the lidar data from ice locations within the
radar data, appending the heights to an ice list
Returns
-------
None.
"""
global ice
ice = []
for i in range (len (radar_clean)):
for j in range (len (radar_clean)):
radar_clean [i][j] = lidar_clean [i][j]
#print (radar_clean [i][j])
if (radar_clean [i][j]) > 100:
#print (radar_clean [i][j])
ice.append (lidar_clean [i][j])
#print (ice)
#print (len (ice))
print ("Ice located and heights pulled")
"""
Stage 5: Calculating ice mass
"""
print ("Determining iceberg size")
#Pull heights
def calculations ():
pull_heights ()
ice_size = (len(ice))
#print ("ice size:", ice_size)
#Calculating ice volume above sea level
print ("Calculating ice mass")
#Convert ice values from cm to m
ice_m = ((sum (ice)) * 0.1)
#print (ice_m)
#Calculating ice volume above surface
global ice_volume_positive
ice_volume_positive = (ice_size * ice_m)
#print (ice_volume_positive)
#Calculating sub-surface ice volume
global ice_volume_subsurface
ice_volume_subsurface = ice_volume_positive * 10
#print (ice_volume_subsurface)
#Calculating total ice volume
global ice_volume
ice_volume = ice_volume_positive + ice_volume_subsurface
#print (ice_volume)
#Calculating ice mass
global ice_mass
ice_mass = 900 * ice_volume
#print (ice_mass)
print ("Ice mass calculated")
run = tkinter.Label\
(text= ("Running Program" + "\n"))
run.pack ()
results_btn ['state'] = 'normal'
# """
# Stage 6: Calculating towability
# """
# print ("Calculating towability")
# def towability ():
# """
# Determines the towability of the iceberg
# Returns
# -------
# None.
# """
# if ice_mass > 36000000:
# print ("Iceberg cannot be towed")
# else:
# print ("Iceberg can be towed")
# print ("Towability calculated")
#Commented towability code above moved into GUI so removed here
#Calculation timing
end_calculation = time.time ()
calculation_time = (end_calculation - start_calculation)
print ("Calculation time: ", calculation_time)
# """
# Stage 6: Writing data out to a file
# """
# with open ("Data_out.txt", 'w') as FO:
# FO.write ("Above surface volume: " + str(ice_volume_positive) + '\n')
# FO.write ("Subsurface volume: " + str(ice_volume_subsurface) + '\n')
# FO.write ("Total ice volume: " + str(ice_volume) + '\n')
# FO.write ("Total mass: " + str(ice_mass) + '\n')
# Section moved to after GUI is completed as to access global variables
# within calculations function
"""
Stage 6: Initialise and populate GUI
"""
#Label code based on https://www.python-course.eu/tkinter_text_widget.php
#Create GUI description
description = tkinter.Label\
(text="This GUI runs the Iceberg Towability Model and shows its inputs and results"\
+ '\n')
description.pack ()
#Create user defined run
# menu_bar = tkinter.Menu (root)
# root.config (menu = menu_bar)
# model_menu = tkinter.Menu (menu_bar)
# menu_bar.add_cascade (label= "Model", menu= model_menu)
# model_menu.add_command (label= "Run Model", command= calculations)
run_btn = tkinter.Button (root, text= 'Run Program', command = calculations)
run_btn.pack (side= 'top', fill= 'both')
#Show input data graphs
def | ():
"""
Generates two figures simultaneously displaying lidar and radar input
datasets.
Figures will appear in individual pop-up windows.
Returns
-------
None.
"""
plt.ylim = (0, 300)
plt.xlim = (0, 300)
#Set up lidar plot to figure 1
lidar_plot = plt.figure (1)
#Assign title
plt.title ('Lidar data')
#Assign data
plt.imshow (lidar_clean)
#Set up radar plot to figure 2
radar_plot = plt.figure (2)
#Assign title
plt.title ('Radar data')
#Assign data
plt.imshow (radar_clean)
#Show plots
plt.show ()
#Display the results
def show_results ():
"""
Generates the program results, calculates towability
and disables the results button post-execution
Returns
-------
None.
"""
#Total volume
vol = tkinter.Label\
(text= ("Total volume: " + str (ice_volume) + " m\u00b2"))
vol.pack ()
#Total mass
mass = tkinter.Label\
(text= ("Total mass: " + str (ice_mass) + " kg"))
mass.pack ()
#Towability
print ("Calculating towability")
if ice_mass > 36000000:
tow = tkinter.Label (text = "Iceberg cannot be towed")
else:
tow = tkinter.Label (text = "Iceberg can be towed")
print ("Towability calculated")
tow.pack ()
#Disable button after 1 click
#Code based on https://www.youtube.com/watch?v=QfTo3rK3e48
results_btn ['state'] = 'disabled'
#Close window
def quit (event=None):
"""
Quits and closes the GUI to end the program at a time of the user's choice
Parameters
----------
event : TYPE, optional
DESCRIPTION. The default is None.
Returns
-------
None.
"""
root.destroy ()
#Create and pack buttons
#Button to quit window
quit_btn = tkinter.Button (root, text= 'Close Window', command = quit)
quit_btn.pack (side='bottom', fill= 'both')
#Button to show results
results_btn = tkinter.Button (root, text= 'Show Results', \
command = show_results, state= "disabled")
results_btn.pack (side='bottom', fill= 'both')
#Button to show graphs
graph_btn = tkinter.Button (root, text= 'Data inputs', command = show_graphs)
graph_btn.pack (side= 'bottom', fill= 'both')
#Activate window
root.mainloop ()
"""
Stage 7: Writing data out to a file
"""
with open ("Data_out.txt", 'w') as FO:
FO.write ("Above surface volume: " + str(ice_volume_positive) + '\n')
FO.write ("Subsurface volume: " + str(ice_volume_subsurface) + '\n')
FO.write ("Total ice volume: " + str(ice_volume) + '\n')
FO.write ("Total mass: " + str(ice_mass) + '\n')
if ice_mass > 36000000:
FO.write ("Iceberg cannot be towed")
else:
FO.write ("Iceberg can be towed")
#Finish program
print ("Program Ended")
print ("Thank you for running the program") | show_graphs | identifier_name |
buffio_test.go | package buffio
import (
"bufio"
"bytes"
"fmt"
"io"
"strings"
"testing"
)
func TestPeek1(t *testing.T) {
/*
func NewReaderSize(rd io.Reader, size int) *Reader 将 rd 封装成一个带缓存的 bufio.Reader 对象,
缓存大小由 size 指定(如果小于 16 则会被设置为 16)。minReadBufferSize = 16
如果 rd 的基类型就是有足够缓存的 bufio.Reader 类型,则直接将rd 转换为基类型返回。
NewReader()方法返回一个默认大小的带缓存的bufio.Reader对象,即 NewReaderSize(rd, 4096)
*/
s := strings.NewReader("hello world")
// func (b *Reader) Reset(r io.Reader)
// Reset丢弃缓冲中的数据,清除任何错误,将b重设为其下层从r读取数据。
comment := "Package io provides basic interfaces to I/O primitives. " +
"Its primary job is to wrap existing implementations of such primitives, " +
"such as those in package os, " +
"into shared public interfaces that abstract the functionality, " +
"plus some other related primitives."
s.Reset(comment)
buf1 := make([]byte, 10)
reader1 := io.LimitReader(s, 19)
for i := 0; i < 3; i++ {
n, _ := reader1.Read(buf1) //每一次读取都是将buf1元素从左到右按位覆盖
fmt.Println(n)
fmt.Println(string(buf1))
/*
10
Package io
9
provideso
0
provideso
*/
}
// func (b *Reader) Peek(n int) ([]byte, error)
// Peek 返回Reader的一个切片,该切片引用Reader中从当前起始索引位置开始的 n 个字节的数据,
// 该操作不会将数据读出,只是引用,引用的数据在下一次读取操作之
// 前是有效的。如果切片长度小于 n,则返回一个错误信息说明原因。
// 如果 n 大于缓存的总大小,则返回 ErrBufferFull。
// 看源码,缓存范围:16~4096
br := bufio.NewReader(s)
b, _ := br.Peek(15)
fmt.Printf("%q\n", b) // " basic interfac"
b[0] = 'a'
b, _ = br.Peek(5)
fmt.Printf("%q\n", b) // "abasi"
}
func TestRead1(t *testing.T) {
// Read 从 b 中读出数据到 p 中,返回写入p的字节数
// 读取到达结尾时,返回值n将为0而err将为io.EOF。
// 如果缓存不为空,则只能读出缓存中的数据,不会从底层 io.Reader
// 中提取数据,如果缓存为空,则:
// 1、len(p) >= 缓存大小,则跳过缓存,直接从底层 io.Reader 中读
// 出到 p 中。
// 2、len(p) < 缓存大小,则先将数据从底层 io.Reader 中读取到缓存
// 中,再从缓存读取到 p 中。
// func (b *Reader) Read(p []byte) (n int, err error)
s := strings.NewReader("123456789")
br := bufio.NewReader(s)
b := make([]byte, 4)
n, err := br.Read(b)
fmt.Printf("%s %v %v\n", b[:n], n, err) // 1234 4
n, err = br.Read(b)
fmt.Printf("%s %v %v\n", b[:n], n, err) // 5678 4
n, err = br.Read(b)
fmt.Printf("%s %v %v\n", b[:n], n, err) // 9 1
n, err = br.Read(b)
fmt.Printf("%s %v %v\n", b[:n], n, err) // 0 EOF
}
func TestBuffered1(t *testing.T) {
// 返回可以从缓存中读取的字节数
// func (b *Reader) Buffered() int { return b.w - b.r }
s := strings.NewReader("123456789")
br := bufio.NewReader(s)
b := make([]byte, 3)
br.Read(b)
fmt.Println(br.Buffered()) // 6
br.Read(b)
fmt.Println(br.Buffered()) // 3
}
func TestReadByte1(t *testing.T) {
// ReadByte读取并返回一个字节。如果没有可用的数据,会返回错误。
// func (b *Reader) ReadByte() (c byte, err error)
origin := "abcd"
s := strings.NewReader(origin)
br := bufio.NewReader(s)
// 第一次读取
tmp, err := br.ReadByte()
if err != nil {
panic(err)
}
fmt.Printf("%q\n", tmp) // 'a'
// Buffered returns the number of bytes that can be read from the current buffer.
fmt.Println(br.Buffered()) // 3
for i := 0; i < len(origin); i++ {
tmp, err = br.ReadByte()
if err != nil {
// panic: EOF 因为已经读取了1个字符 缓存中只剩下3个
// 所以在读取第4个字符报错EOF
panic(err)
}
}
}
func TestUnreadByte1(t *testing.T) {
// 撤消最后读出的字节
s := strings.NewReader("abcde")
br := bufio.NewReader(s)
tmp, _ := br.ReadByte()
fmt.Printf("%q\n", tmp) // 'a'
fmt.Println(br.Buffered()) // 4
br.UnreadByte() // 撤销吐出,即栈中弹出的a元素又放回来了
fmt.Println(br.Buffered()) // 5
tmp, _ = br.ReadByte()
fmt.Printf("%q\n", tmp) // 'a'
}
func TestReadRune1(t *testing.T) {
// ReadRune读取一个utf-8编码的unicode码值
chinese := "中国人"
s := strings.NewReader(chinese)
br := bufio.NewReader(s)
tmp, _, err := br.ReadRune()
if err != nil {
panic(err)
}
fmt.Printf("%q\n", tmp) // '中'
}
func TestUnReadRune1(t *testing.T) {
chinese := "中国人"
s := strings.NewReader(chinese)
br := bufio.NewReader(s)
tmp, _, err := br.ReadRune()
if err != nil {
panic(err)
}
fmt.Printf("%q\n", tmp) // '中'
br.UnreadRune()
tmp, _, err = br.ReadRune()
if err != nil {
panic(err)
}
fmt.Printf("%q\n", tmp) // '中'
}
func TestReadLine1(t *testing.T) {
// ReadLine 是一个低水平的行读取原语,大多数情况下,应该使用
// ReadBytes('\n') 或 ReadString('\n'),或者使用一个 Scanner。
//
// ReadLine 通过调用 ReadSlice 方法实现,返回的也是缓存的切片。用于
// 读取一行数据,不包括行尾标记(\n 或 \r\n)。
//
// 只要能读出数据,err 就为 nil。如果没有数据可读,则 isPrefix 返回
// false,err 返回 io.EOF。
//
// 如果找到行尾标记,则返回查找结果,isPrefix 返回 false。
// 如果未找到行尾标记,则:
// 1、缓存不满,则将缓存填满后再次查找。
// 2、缓存是满的,则返回整个缓存,isPrefix 返回 true。
//
// 整个数据尾部“有一个换行标记”和“没有换行标记”的读取结果是一样。
//
// 如果 ReadLine 读取到换行标记,则调用 UnreadByte 撤销的是换行标记,
// 而不是返回的数据。
// func (b *Reader) ReadLine() (line []byte, isPrefix bool, err error)
s := strings.NewReader("123\nzzz")
br := bufio.NewReader(s)
for line, isPrefix, err := []byte{0}, false, error(nil); len(line) > 0 && err == nil; {
line, isPrefix, err = br.ReadLine()
fmt.Printf("%q %t %v\n", line, isPrefix, err)
/*
"123" false <nil>
"zzz" false <nil>
"" false EOF
*/
}
}
func TestReadSlice1(t *testing.T) {
// ReadSlice 在 b 中查找 delim 并返回 delim 及其之前的所有数据。
// 该操作会读出数据,返回的切片是已读出的数据的引用,切片中的数据
// 在下一次读取操作之前是有效的。
//
// 如果找到 delim,则返回查找结果,err 返回 nil。
// 如果未找到 delim,则:
// 1、缓存不满,则将缓存填满后再次查找。
// 2、缓存是满的,则返回整个缓存,err 返回 ErrBufferFull。
//
// 如果未找到 delim 且遇到错误(通常是 io.EOF),则返回缓存中的所
// 有数据和遇到的错误。
//
// 因为返回的数据有可能被下一次的读写操作修改,所以大多数操作应该
// 使用 ReadBytes 或 ReadString,它们返回的是数据的拷贝。
// func (b *Reader) ReadSlice(delim byte) (line []byte, err error)
s := strings.NewReader("ABC DEF GHI")
br := bufio.NewReader(s)
w, err := br.ReadSlice(' ')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", w) // "ABC "
w, err = br.ReadSlice(' ')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", w) // "DEF "
w, err = br.ReadSlice(' ')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", w) // panic: EOF
}
func TestReadBytes1(t *testing.T) {
// ReadBytes 功能同 ReadSlice,只不过返回的是缓存的拷贝。
// func (b *Reader) ReadBytes(delim byte) (line []byte, err error)
s := strings.NewReader("ABC,EFG,HIJ")
br := bufio.NewReader(s)
line, err := br.ReadBytes(',')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", line) // "ABC,"
line, err = br.ReadBytes(',')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", line) // "EFG,"
line, err = br.ReadBytes(',')
if err != nil {
panic(err) // panic: EOF
}
fmt.Printf("%q\n", line)
}
func TestReadString1(t *testing.T) {
// ReadString 功能同 ReadBytes,只不过返回的是字符串。
// func (b *Reader) ReadString(delim byte) (line string, err error)
s := strings.NewReader("你好,我是卡牌")
br := bufio.NewReader(s)
line, err := br.ReadString(',')
if err != nil {
panic(err)
}
fmt.Printf("%s\n", line) // 你好,
line, err = br.ReadString(',')
if err != nil {
panic(err) // panic: EOF
}
fmt.Printf("%s\n", line)
}
// bufio.Reader的WriteTo()和 bufio.Writer的ReadFrom()
// WriteTo方法实现了io.WriterTo接口。
// func (b *Reader) WriteTo(w io.Writer) (n int64, err error)
func TestWriteTo1(t *testing.T) {
b := bytes.NewBuffer(make([]byte, 0))
s := strings.NewReader("ABCDEFG")
br := bufio.NewReader(s)
br.WriteTo(b)
fmt.Printf("%s\n", b.String()) // ABCDEFG
fmt.Printf("%q\n", b) // "ABCDEFG"
fmt.Println(string(65)) // A
}
// ReadFrom实现了io.ReaderFrom接口。
// func (b *Writer) ReadFrom(r io.Reader) (n int64, err error)
// ReadFrom无需使用Flush
func TestReadFrom1(t *testing.T) {
s := strings.NewReader("hello world")
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.ReadFrom(s)
fmt.Println(b)
}
//go test -run=buffio_test.go -bench="BenchmarkReadFrom1" -benchtime="3s" -cpuprofile cpu.out
// linux 是 -bench=.
func BenchmarkReadFrom1(b *testing.B) {
b.ReportAllocs()
b.ResetTimer()
for idx := 0; idx < b.N; idx++ {
s := strings.NewReader("hello world")
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.ReadFrom(s)
}
b.StopTimer()
}
// Writer实现了为io.Writer接口对象提供缓冲。
// 如果在向一个Writer类型值写入时遇到了错误,
// 该对象将不再接受任何数据,返回该错误
// 数据都写入后,调用者有义务调用Flush方法,
// 保证所有的数据都交给了下层的io.Writer。
func TestNewWriter1(t *testing.T) {
// NewWriter创建一个具有默认大小缓冲、写入w的*Writer。 相当于 NewWriterSize(wr, 4096)
// func NewWriter(w io.Writer) *Writer
// Buffered()返回缓冲中已使用的字节数。
// func (b *Writer) Buffered() int
// Available()返回缓冲中还有多少字节未使用。
// func (b *Writer) Available() int
// Reset丢弃缓冲中的数据,清除任何错误,将b重设为将其输出写入w。
// func (b *Writer) Reset(w io.Writer)
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
fmt.Println(bw.Available(), bw.Buffered()) // 4096 0
bw.WriteString("card")
fmt.Println(bw.Available(), bw.Buffered()) // 4092 4
bw.Reset(b)
fmt.Println(bw.Available(), bw.Buffered()) // 4096 0
}
func TestWrite1(t *testing.T) {
// Write 将 p 中的数据写入 b 中,返回写入的字节数
// 如果写入的字节数小于 p 的长度,则返回一个错误信息
// func (b *Writer) Write(p []byte) (nn int, err error)
// Flush 将缓存中的数据提交到底层的 io.Writer 中
// func (b *Writer) Flush() error
p := [...]byte{'a', 'b', 'c'}
| ewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.Write(p[:])
bw.Flush()
fmt.Printf("%q\n", b) //"abc"
}
func TestWriteString1(t *testing.T) {
// WriteString 同 Write,只不过写入的是字符串
// func (b *Writer) WriteString(s string) (int, error)
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.WriteString("hello world")
bw.Flush()
fmt.Printf("%s\n", b) //hello world
}
func TestWriteByte1(t *testing.T) {
// WriteByte写入单个字节。
// func (b *Writer) WriteByte(c byte) error
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.WriteByte('c')
bw.Flush()
fmt.Println(b)
}
func TestWriteRune1(t *testing.T) {
// WriteRune写入一个unicode码值(的utf-8编码),返回写入的字节数和可能的错误。
// func (b *Writer) WriteRune(r rune) (size int, err error)
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
size, err := bw.WriteRune('周')
if err != nil {
panic(err)
}
fmt.Println(size) // 3
bw.Flush()
fmt.Println(b) // 周
}
func TestReadWriter1(t *testing.T) {
// ReadWriter类型保管了指向Reader和Writer类型的指针
// 实现了io.ReadWriter接口。
// NewReadWriter 生成bufio.ReadWriter对象
// func NewReadWriter(r *Reader, w *Writer) *ReadWriter
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
s := strings.NewReader("hello world")
br := bufio.NewReader(s)
rw := bufio.NewReadWriter(br, bw)
word, err := rw.ReadString(' ')
if err != nil {
panic(err)
}
fmt.Printf("%s\n", word) // hello
_, err = rw.WriteString(",I'm coming")
if err != nil {
panic(err)
}
rw.Flush()
fmt.Println(b)
}
func TestNewScanner1(t *testing.T) {
// Scanner 提供了一个方便的接口来读取数据,例如遍历多行文本中的行。Scan 方法会通过
// 一个“匹配函数”读取数据中符合要求的部分,跳过不符合要求的部分。“匹配函数”由调
// 用者指定。本包中提供的匹配函数有“行匹配函数”、“字节匹配函数”、“字符匹配函数”
// 和“单词匹配函数”,用户也可以自定义“匹配函数”。默认的“匹配函数”为“行匹配函
// 数”,用于获取数据中的一行内容(不包括行尾标记)
//
// Scanner 使用了缓存,所以匹配部分的长度不能超出缓存的容量。默认缓存容量为 4096 -
// bufio.MaxScanTokenSize,用户可以通过 Buffer 方法指定自定义缓存及其最大容量。
//
// Scan 在遇到下面的情况时会终止扫描并返回 false(扫描一旦终止,将无法再继续):
// 1、遇到 io.EOF
// 2、遇到读写错误
// 3、“匹配部分”的长度超过了缓存的长度
//
// 如果需要对错误进行更多的控制,
// 或“匹配部分”超出缓存容量,或需要连续扫描,
// 则应该使用 bufio.Reader
// func NewScanner(r io.Reader) *Scanner
// Bytes方法返回最近一次Scan调用生成的token。
// 底层数组指向的数据可能会被下一次Scan的调用重写。
// func (s *Scanner) Bytes() []byte
// Buffer()方法设置扫描时使用的初始缓冲区和最大值
// 默认情况下,Scan使用内部缓冲区并设置MaxScanTokenSize的最大令牌大小
s := strings.NewReader("周起\n卡牌\n程序员\n")
bs := bufio.NewScanner(s)
bs.Buffer(make([]byte, 0), bufio.MaxScanTokenSize)
for bs.Scan() {
// 周起
// 卡牌
// 程序员
fmt.Printf("%s\n", bs.Bytes())
}
}
func TestSplit1(t *testing.T) {
// Split设置该Scanner的分割函数。默认设置为 bufio.ScanLines()
// 本方法必须在Scan之前调用。
// func (s *Scanner) Split(split SplitFunc)
s := strings.NewReader("周起 卡牌 程序员")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanWords)
// Text返回由Scan调用生成的最新标记,
// 作为保存其字节的新分配字符串。
for bs.Scan() {
fmt.Printf("%s\n", bs.Text())
}
}
func TestScan1(t *testing.T) {
// Scan方法获取当前位置的token(该token可以通过Bytes或Text方法获得),
// 并让Scanner的扫描位置移动到下一个token。
// 当扫描因为抵达输入流结尾或者遇到错误而停止时,
// 本方法会返回false。在Scan方法返回false后,
// Err方法将返回扫描时遇到的任何错误;
// 除非是io.EOF,此时Err会返回nil。
// func (s *Scanner) Scan() bool
s := strings.NewReader("周起 卡牌 程序员")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanWords)
for bs.Scan() {
fmt.Printf("%s %s\n", bs.Text(), bs.Bytes())
}
}
func TestScanBytes1(t *testing.T) {
// Bytes方法返回最近一次Scan调用生成的token。
// 底层数组指向的数据可能会被下一次Scan的调用重写。
s := strings.NewReader("abcd")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanBytes)
for bs.Scan() {
// a
// b
// c
// d
fmt.Printf("%s\n", bs.Bytes())
}
}
func TestScanRunes1(t *testing.T) {
// ScanRunes是用于Scanner类型的分割函数(符合SplitFunc),
// 本函数会将每个utf-8编码的unicode码值作为一个token返回。
s := strings.NewReader("周起卡牌程序员")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanRunes)
for bs.Scan() {
// 周
// 起
// 卡
// 牌
// 程
// 序
// 员
fmt.Printf("%s\n", bs.Text())
}
}
func TestScanWords1(t *testing.T) {
// ScanRunes是用于Scanner类型的分割函数(符合SplitFunc),
// 本函数会将空白(参见unicode.IsSpace)
// 分隔的片段(去掉前后空白后)作为一个token返回。
// 本函数永远不会返回空字符串。
s := strings.NewReader("我 是 卡 牌")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanWords)
for bs.Scan() {
// 我
// 是
// 卡
// 牌
fmt.Printf("%s\n", bs.Text())
}
}
func TestScanLines1(t *testing.T) {
// 将每一行文本去掉末尾的换行标记作为一个token返回
// 此函数的bs.Scan()的默认值
s := strings.NewReader("卡牌\n周起\n程序员\n")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanLines)
for bs.Scan() {
// 卡牌
// 周起
// 程序员
fmt.Printf("%s\n", bs.Text())
}
}
| b := bytes.N | identifier_name |
buffio_test.go | package buffio
import (
"bufio"
"bytes"
"fmt"
"io"
"strings"
"testing"
)
func TestPeek1(t *testing.T) {
/*
func NewReaderSize(rd io.Reader, size int) *Reader 将 rd 封装成一个带缓存的 bufio.Reader 对象,
缓存大小由 size 指定(如果小于 16 则会被设置为 16)。minReadBufferSize = 16
如果 rd 的基类型就是有足够缓存的 bufio.Reader 类型,则直接将rd 转换为基类型返回。
NewReader()方法返回一个默认大小的带缓存的bufio.Reader对象,即 NewReaderSize(rd, 4096)
*/
s := strings.NewReader("hello world")
// func (b *Reader) Reset(r io.Reader)
// Reset丢弃缓冲中的数据,清除任何错误,将b重设为其下层从r读取数据。
comment := "Package io provides basic interfaces to I/O primitives. " +
"Its primary job is to wrap existing implementations of such primitives, " +
"such as those in package os, " +
"into shared public interfaces that abstract the functionality, " +
"plus some other related primitives."
s.Reset(comment)
buf1 := make([]byte, 10)
reader1 := io.LimitReader(s, 19)
for i := 0; i < 3; i++ {
n, _ := reader1.Read(buf1) //每一次读取都是将buf1元素从左到右按位覆盖
fmt.Println(n)
fmt.Println(string(buf1))
/*
10
Package io
9
provideso
0
provideso
*/
}
// func (b *Reader) Peek(n int) ([]byte, error)
// Peek 返回Reader的一个切片,该切片引用Reader中从当前起始索引位置开始的 n 个字节的数据,
// 该操作不会将数据读出,只是引用,引用的数据在下一次读取操作之
// 前是有效的。如果切片长度小于 n,则返回一个错误信息说明原因。
// 如果 n 大于缓存的总大小,则返回 ErrBufferFull。
// 看源码,缓存范围:16~4096
br := bufio.NewReader(s)
b, _ := br.Peek(15)
fmt.Printf("%q\n", b) // " basic interfac"
b[0] = 'a'
b, _ = br.Peek(5)
fmt.Printf("%q\n", b) // "abasi"
}
func TestRead1(t *testing.T) {
// Read 从 b 中读出数据到 p 中,返回写入p的字节数
// 读取到达结尾时,返回值n将为0而err将为io.EOF。
// 如果缓存不为空,则只能读出缓存中的数据,不会从底层 io.Reader
// 中提取数据,如果缓存为空,则:
// 1、len(p) >= 缓存大小,则跳过缓存,直接从底层 io.Reader 中读
// 出到 p 中。
// 2、len(p) < 缓存大小,则先将数据从底层 io.Reader 中读取到缓存
// 中,再从缓存读取到 p 中。
// func (b *Reader) Read(p []byte) (n int, err error)
s := strings.NewReader("123456789")
br := bufio.NewReader(s)
b := make([]byte, 4)
n, err := br.Read(b)
fmt.Printf("%s %v %v\n", b[:n], n, err) // 1234 4
n, err = br.Read(b)
fmt.Printf("%s %v %v\n", b[:n], n, err) // 5678 4
n, err = br.Read(b)
fmt.Printf("%s %v %v\n", b[:n], n, err) // 9 1
n, err = br.Read(b)
fmt.Printf("%s %v %v\n", b[:n], n, err) // 0 EOF
}
func TestBuffered1(t *testing.T) {
// 返回可以从缓存中读取的字节数
// func (b *Reader) Buffered() int { return b.w - b.r }
s := strings.NewReader("123456789")
br := bufio.NewReader(s)
b := make([]byte, 3)
br.Read(b)
fmt.Println(br.Buffered()) // 6
br.Read(b)
fmt.Println(br.Buffered()) // 3
}
func TestReadByte1(t *testing.T) {
// ReadByte读取并返回一个字节。如果没有可用的数据,会返回错误。
// func (b *Reader) ReadByte() (c byte, err error)
origin := "abcd"
s := strings.NewReader(origin)
br := bufio.NewReader(s)
// 第一次读取
tmp, err := br.ReadByte()
if err != nil {
panic(err)
}
fmt.Printf("%q\n", tmp) // 'a'
// Buffered returns the number of bytes that can be read from the current buffer.
fmt.Println(br.Buffered()) // 3
for i := 0; i < len(origin); i++ {
tmp, err = br.ReadByte()
if err != nil {
// panic: EOF 因为已经读取了1个字符 缓存中只剩下3个
// 所以在读取第4个字符报错EOF
panic(err)
}
}
}
func TestUnreadByte1(t *testing.T) {
// 撤消最后读出的字节
s := strings.NewReader("abcde")
br := bufio.NewReader(s)
tmp, _ := br.ReadByte()
fmt.Printf("%q\n", tmp) // 'a'
fmt.Println(br.Buffered()) // 4
br.UnreadByte() // 撤销吐出,即栈中弹出的a元素又放回来了
fmt.Println(br.Buffered()) // 5
tmp, _ = br.ReadByte()
fmt.Printf("%q\n", tmp) // 'a'
}
func TestReadRune1(t *testing.T) {
// ReadRune读取一个utf-8编码的unicode码值
chinese := "中国人"
s := strings.NewReader(chinese)
br := bufio.NewReader(s)
tmp, _, err := br.ReadRune()
if err != nil {
panic(err)
}
fmt.Printf("%q\n", tmp) // '中'
}
func TestUnReadRune1(t *testing.T) {
chinese := "中国人"
s := strings.NewReader(chinese)
br := bufio.NewReader(s)
tmp, _, err := br.ReadRune()
if err != nil {
panic(err)
}
fmt.Printf("%q\n", tmp) // '中'
br.UnreadRune()
tmp, _, err = br.ReadRune()
if err != nil {
panic(err)
}
fmt.Printf("%q\n", tmp) // '中'
}
func TestReadLine1(t *testing.T) {
// ReadLine 是一个低水平的行读取原语,大多数情况下,应该使用
// ReadBytes('\n') 或 ReadString('\n'),或者使用一个 Scanner。
//
// ReadLine 通过调用 ReadSlice 方法实现,返回的也是缓存的切片。用于
// 读取一行数据,不包括行尾标记(\n 或 \r\n)。
//
// 只要能读出数据,err 就为 nil。如果没有数据可读,则 isPrefix 返回
// false,err 返回 io.EOF。
//
// 如果找到行尾标记,则返回查找结果,isPrefix 返回 false。
// 如果未找到行尾标记,则: | // 1、缓存不满,则将缓存填满后再次查找。
// 2、缓存是满的,则返回整个缓存,isPrefix 返回 true。
//
// 整个数据尾部“有一个换行标记”和“没有换行标记”的读取结果是一样。
//
// 如果 ReadLine 读取到换行标记,则调用 UnreadByte 撤销的是换行标记,
// 而不是返回的数据。
// func (b *Reader) ReadLine() (line []byte, isPrefix bool, err error)
s := strings.NewReader("123\nzzz")
br := bufio.NewReader(s)
for line, isPrefix, err := []byte{0}, false, error(nil); len(line) > 0 && err == nil; {
line, isPrefix, err = br.ReadLine()
fmt.Printf("%q %t %v\n", line, isPrefix, err)
/*
"123" false <nil>
"zzz" false <nil>
"" false EOF
*/
}
}
func TestReadSlice1(t *testing.T) {
// ReadSlice 在 b 中查找 delim 并返回 delim 及其之前的所有数据。
// 该操作会读出数据,返回的切片是已读出的数据的引用,切片中的数据
// 在下一次读取操作之前是有效的。
//
// 如果找到 delim,则返回查找结果,err 返回 nil。
// 如果未找到 delim,则:
// 1、缓存不满,则将缓存填满后再次查找。
// 2、缓存是满的,则返回整个缓存,err 返回 ErrBufferFull。
//
// 如果未找到 delim 且遇到错误(通常是 io.EOF),则返回缓存中的所
// 有数据和遇到的错误。
//
// 因为返回的数据有可能被下一次的读写操作修改,所以大多数操作应该
// 使用 ReadBytes 或 ReadString,它们返回的是数据的拷贝。
// func (b *Reader) ReadSlice(delim byte) (line []byte, err error)
s := strings.NewReader("ABC DEF GHI")
br := bufio.NewReader(s)
w, err := br.ReadSlice(' ')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", w) // "ABC "
w, err = br.ReadSlice(' ')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", w) // "DEF "
w, err = br.ReadSlice(' ')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", w) // panic: EOF
}
func TestReadBytes1(t *testing.T) {
// ReadBytes 功能同 ReadSlice,只不过返回的是缓存的拷贝。
// func (b *Reader) ReadBytes(delim byte) (line []byte, err error)
s := strings.NewReader("ABC,EFG,HIJ")
br := bufio.NewReader(s)
line, err := br.ReadBytes(',')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", line) // "ABC,"
line, err = br.ReadBytes(',')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", line) // "EFG,"
line, err = br.ReadBytes(',')
if err != nil {
panic(err) // panic: EOF
}
fmt.Printf("%q\n", line)
}
func TestReadString1(t *testing.T) {
// ReadString 功能同 ReadBytes,只不过返回的是字符串。
// func (b *Reader) ReadString(delim byte) (line string, err error)
s := strings.NewReader("你好,我是卡牌")
br := bufio.NewReader(s)
line, err := br.ReadString(',')
if err != nil {
panic(err)
}
fmt.Printf("%s\n", line) // 你好,
line, err = br.ReadString(',')
if err != nil {
panic(err) // panic: EOF
}
fmt.Printf("%s\n", line)
}
// bufio.Reader的WriteTo()和 bufio.Writer的ReadFrom()
// WriteTo方法实现了io.WriterTo接口。
// func (b *Reader) WriteTo(w io.Writer) (n int64, err error)
func TestWriteTo1(t *testing.T) {
b := bytes.NewBuffer(make([]byte, 0))
s := strings.NewReader("ABCDEFG")
br := bufio.NewReader(s)
br.WriteTo(b)
fmt.Printf("%s\n", b.String()) // ABCDEFG
fmt.Printf("%q\n", b) // "ABCDEFG"
fmt.Println(string(65)) // A
}
// ReadFrom实现了io.ReaderFrom接口。
// func (b *Writer) ReadFrom(r io.Reader) (n int64, err error)
// ReadFrom无需使用Flush
func TestReadFrom1(t *testing.T) {
s := strings.NewReader("hello world")
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.ReadFrom(s)
fmt.Println(b)
}
//go test -run=buffio_test.go -bench="BenchmarkReadFrom1" -benchtime="3s" -cpuprofile cpu.out
// linux 是 -bench=.
func BenchmarkReadFrom1(b *testing.B) {
b.ReportAllocs()
b.ResetTimer()
for idx := 0; idx < b.N; idx++ {
s := strings.NewReader("hello world")
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.ReadFrom(s)
}
b.StopTimer()
}
// Writer实现了为io.Writer接口对象提供缓冲。
// 如果在向一个Writer类型值写入时遇到了错误,
// 该对象将不再接受任何数据,返回该错误
// 数据都写入后,调用者有义务调用Flush方法,
// 保证所有的数据都交给了下层的io.Writer。
func TestNewWriter1(t *testing.T) {
// NewWriter创建一个具有默认大小缓冲、写入w的*Writer。 相当于 NewWriterSize(wr, 4096)
// func NewWriter(w io.Writer) *Writer
// Buffered()返回缓冲中已使用的字节数。
// func (b *Writer) Buffered() int
// Available()返回缓冲中还有多少字节未使用。
// func (b *Writer) Available() int
// Reset丢弃缓冲中的数据,清除任何错误,将b重设为将其输出写入w。
// func (b *Writer) Reset(w io.Writer)
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
fmt.Println(bw.Available(), bw.Buffered()) // 4096 0
bw.WriteString("card")
fmt.Println(bw.Available(), bw.Buffered()) // 4092 4
bw.Reset(b)
fmt.Println(bw.Available(), bw.Buffered()) // 4096 0
}
func TestWrite1(t *testing.T) {
// Write 将 p 中的数据写入 b 中,返回写入的字节数
// 如果写入的字节数小于 p 的长度,则返回一个错误信息
// func (b *Writer) Write(p []byte) (nn int, err error)
// Flush 将缓存中的数据提交到底层的 io.Writer 中
// func (b *Writer) Flush() error
p := [...]byte{'a', 'b', 'c'}
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.Write(p[:])
bw.Flush()
fmt.Printf("%q\n", b) //"abc"
}
func TestWriteString1(t *testing.T) {
// WriteString 同 Write,只不过写入的是字符串
// func (b *Writer) WriteString(s string) (int, error)
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.WriteString("hello world")
bw.Flush()
fmt.Printf("%s\n", b) //hello world
}
func TestWriteByte1(t *testing.T) {
// WriteByte写入单个字节。
// func (b *Writer) WriteByte(c byte) error
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.WriteByte('c')
bw.Flush()
fmt.Println(b)
}
func TestWriteRune1(t *testing.T) {
// WriteRune写入一个unicode码值(的utf-8编码),返回写入的字节数和可能的错误。
// func (b *Writer) WriteRune(r rune) (size int, err error)
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
size, err := bw.WriteRune('周')
if err != nil {
panic(err)
}
fmt.Println(size) // 3
bw.Flush()
fmt.Println(b) // 周
}
func TestReadWriter1(t *testing.T) {
// ReadWriter类型保管了指向Reader和Writer类型的指针
// 实现了io.ReadWriter接口。
// NewReadWriter 生成bufio.ReadWriter对象
// func NewReadWriter(r *Reader, w *Writer) *ReadWriter
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
s := strings.NewReader("hello world")
br := bufio.NewReader(s)
rw := bufio.NewReadWriter(br, bw)
word, err := rw.ReadString(' ')
if err != nil {
panic(err)
}
fmt.Printf("%s\n", word) // hello
_, err = rw.WriteString(",I'm coming")
if err != nil {
panic(err)
}
rw.Flush()
fmt.Println(b)
}
func TestNewScanner1(t *testing.T) {
// Scanner 提供了一个方便的接口来读取数据,例如遍历多行文本中的行。Scan 方法会通过
// 一个“匹配函数”读取数据中符合要求的部分,跳过不符合要求的部分。“匹配函数”由调
// 用者指定。本包中提供的匹配函数有“行匹配函数”、“字节匹配函数”、“字符匹配函数”
// 和“单词匹配函数”,用户也可以自定义“匹配函数”。默认的“匹配函数”为“行匹配函
// 数”,用于获取数据中的一行内容(不包括行尾标记)
//
// Scanner 使用了缓存,所以匹配部分的长度不能超出缓存的容量。默认缓存容量为 4096 -
// bufio.MaxScanTokenSize,用户可以通过 Buffer 方法指定自定义缓存及其最大容量。
//
// Scan 在遇到下面的情况时会终止扫描并返回 false(扫描一旦终止,将无法再继续):
// 1、遇到 io.EOF
// 2、遇到读写错误
// 3、“匹配部分”的长度超过了缓存的长度
//
// 如果需要对错误进行更多的控制,
// 或“匹配部分”超出缓存容量,或需要连续扫描,
// 则应该使用 bufio.Reader
// func NewScanner(r io.Reader) *Scanner
// Bytes方法返回最近一次Scan调用生成的token。
// 底层数组指向的数据可能会被下一次Scan的调用重写。
// func (s *Scanner) Bytes() []byte
// Buffer()方法设置扫描时使用的初始缓冲区和最大值
// 默认情况下,Scan使用内部缓冲区并设置MaxScanTokenSize的最大令牌大小
s := strings.NewReader("周起\n卡牌\n程序员\n")
bs := bufio.NewScanner(s)
bs.Buffer(make([]byte, 0), bufio.MaxScanTokenSize)
for bs.Scan() {
// 周起
// 卡牌
// 程序员
fmt.Printf("%s\n", bs.Bytes())
}
}
func TestSplit1(t *testing.T) {
// Split设置该Scanner的分割函数。默认设置为 bufio.ScanLines()
// 本方法必须在Scan之前调用。
// func (s *Scanner) Split(split SplitFunc)
s := strings.NewReader("周起 卡牌 程序员")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanWords)
// Text返回由Scan调用生成的最新标记,
// 作为保存其字节的新分配字符串。
for bs.Scan() {
fmt.Printf("%s\n", bs.Text())
}
}
func TestScan1(t *testing.T) {
// Scan方法获取当前位置的token(该token可以通过Bytes或Text方法获得),
// 并让Scanner的扫描位置移动到下一个token。
// 当扫描因为抵达输入流结尾或者遇到错误而停止时,
// 本方法会返回false。在Scan方法返回false后,
// Err方法将返回扫描时遇到的任何错误;
// 除非是io.EOF,此时Err会返回nil。
// func (s *Scanner) Scan() bool
s := strings.NewReader("周起 卡牌 程序员")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanWords)
for bs.Scan() {
fmt.Printf("%s %s\n", bs.Text(), bs.Bytes())
}
}
func TestScanBytes1(t *testing.T) {
// Bytes方法返回最近一次Scan调用生成的token。
// 底层数组指向的数据可能会被下一次Scan的调用重写。
s := strings.NewReader("abcd")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanBytes)
for bs.Scan() {
// a
// b
// c
// d
fmt.Printf("%s\n", bs.Bytes())
}
}
func TestScanRunes1(t *testing.T) {
// ScanRunes是用于Scanner类型的分割函数(符合SplitFunc),
// 本函数会将每个utf-8编码的unicode码值作为一个token返回。
s := strings.NewReader("周起卡牌程序员")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanRunes)
for bs.Scan() {
// 周
// 起
// 卡
// 牌
// 程
// 序
// 员
fmt.Printf("%s\n", bs.Text())
}
}
func TestScanWords1(t *testing.T) {
// ScanRunes是用于Scanner类型的分割函数(符合SplitFunc),
// 本函数会将空白(参见unicode.IsSpace)
// 分隔的片段(去掉前后空白后)作为一个token返回。
// 本函数永远不会返回空字符串。
s := strings.NewReader("我 是 卡 牌")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanWords)
for bs.Scan() {
// 我
// 是
// 卡
// 牌
fmt.Printf("%s\n", bs.Text())
}
}
func TestScanLines1(t *testing.T) {
// 将每一行文本去掉末尾的换行标记作为一个token返回
// 此函数的bs.Scan()的默认值
s := strings.NewReader("卡牌\n周起\n程序员\n")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanLines)
for bs.Scan() {
// 卡牌
// 周起
// 程序员
fmt.Printf("%s\n", bs.Text())
}
} | random_line_split | |
buffio_test.go | package buffio
import (
"bufio"
"bytes"
"fmt"
"io"
"strings"
"testing"
)
func TestPeek1(t *testing.T) {
/*
func NewReaderSize(rd io.Reader, size int) *Reader 将 rd 封装成一个带缓存的 bufio.Reader 对象,
缓存大小由 size 指定(如果小于 16 则会被设置为 16)。minReadBufferSize = 16
如果 rd 的基类型就是有足够缓存的 bufio.Reader 类型,则直接将rd 转换为基类型返回。
NewReader()方法返回一个默认大小的带缓存的bufio.Reader对象,即 NewReaderSize(rd, 4096)
*/
s := strings.NewReader("hello world")
// func (b *Reader) Reset(r io.Reader)
// Reset丢弃缓冲中的数据,清除任何错误,将b重设为其下层从r读取数据。
comment := "Package io provides basic interfaces to I/O primitives. " +
"Its primary job is to wrap existing implementations of such primitives, " +
"such as those in package os, " +
"into shared public interfaces that abstract the functionality, " +
"plus some other related primitives."
s.Reset(comment)
buf1 := make([]byte, 10)
reader1 := io.LimitReader(s, 19)
for i := 0; i < 3; i++ {
n, _ := reader1.Read(buf1) //每一次读取都是将buf1元素从左到右按位覆盖
fmt.Println(n)
fmt.Println(string(buf1))
/*
10
Package io
9
provideso
0
provideso
*/
}
// func (b *Reader) Peek(n int) ([]byte, error)
// Peek 返回Reader的一个切片,该切片引用Reader中从当前起始索引位置开始的 n 个字节的数据,
// 该操作不会将数据读出,只是引用,引用的数据在下一次读取操作之
// 前是有效的。如果切片长度小于 n,则返回一个错误信息说明原因。
// 如果 n 大于缓存的总大小,则返回 ErrBufferFull。
// 看源码,缓存范围:16~4096
br := bufio.NewReader(s)
b, _ := br.Peek(15)
fmt.Printf("%q\n", b) // " basic interfac"
b[0] = 'a'
b, _ = br.Peek(5)
fmt.Printf("%q\n", b) // "abasi"
}
func TestRead1(t *testing.T) {
// Read 从 b 中读出数据到 p 中,返回写入p的字节数
// 读取到达结尾时,返回值n将为0而err将为io.EOF。
// 如果缓存不为空,则只能读出缓存中的数据,不会从底层 io.Reader
// 中提取数据,如果缓存为空,则:
// 1、len(p) >= 缓存大小,则跳过缓存,直接从底层 io.Reader 中读
// 出到 p 中。
// 2、len(p) < 缓存大小,则先将数据从底层 io.Reader 中读取到缓存
// 中,再从缓存读取到 p 中。
// func (b *Reader) Read(p []byte) (n int, err error)
s := strings.NewReader("123456789")
br := bufio.NewReader(s)
b := make([]byte, 4)
n, err := br.Read(b)
fmt.Printf("%s %v %v\n", b[:n], n, err) // 1234 4
n, err = br.Read(b)
fmt.Printf("%s %v %v\n", b[:n], n, err) // 5678 4
n, err = br.Read(b)
fmt.Printf("%s %v %v\n", b[:n], n, err) // 9 1
n, err = br.Read(b)
fmt.Printf("%s %v %v\n", b[:n], n, err) // 0 EOF
}
func TestBuffered1(t *testing.T) {
// 返回可以从缓存中读取的字节数
// func (b *Reader) Buffered() int { return b.w - b.r }
s := strings.NewReader("123456789")
br := bufio.NewReader(s)
b := make([]byte, 3)
br.Read(b)
fmt.Println(br.Buffered()) // 6
br.Read(b)
fmt.Println(br.Buffered()) // 3
}
func TestReadByte1(t *testing.T) {
// ReadByte读取并返回一个字节。如果没有可用的数据,会返回错误。
// func (b *Reader) ReadByte() (c byte, err error)
origin := "abcd"
s := strings.NewReader(origin)
br := bufio.NewReader(s)
// 第一次读取
tmp, err := br.ReadByte()
if err != nil {
panic(err)
}
fmt.Printf("%q\n", tmp) // 'a'
// Buffered returns the number of bytes that can be read from the current buffer.
fmt.Println(br.Buffered()) // 3
for i := 0; i < len(origin); i++ {
tmp, err = br.ReadByte()
if err != nil {
// panic: EOF 因为已经读取了1个字符 缓存中只剩下3个
// 所以在读取第4个字符报错EOF
panic(err)
}
}
}
func TestUnreadByte1(t *testing.T) {
// 撤消最后读出的字节
s := strings.NewReader("abcde")
br := bufio.NewReader(s)
tmp, _ := br.ReadByte()
fmt.Printf("%q\n", tmp) // 'a'
fmt.Println(br.Buffered()) // 4
br.UnreadByte() // 撤销吐出,即栈中弹出的a元素又放回来了
fmt.Println(br.Buffered()) // 5
tmp, _ = br.ReadByte()
fmt.Printf("%q\n", tmp) // 'a'
}
func TestReadRune1(t *testing.T) {
// ReadRune读取一个utf-8编码的unicode码值
chinese := "中国人"
s := strings.NewReader(chinese)
br := bufio.NewReader(s)
tmp, _, err := br.ReadRune()
if err != nil {
panic(err)
}
fmt.Printf("%q\n", tmp) // '中'
}
func TestUnReadRune1(t *testing.T) {
chinese := "中国人"
s := strings.NewReader(chinese)
br := bufio.NewReader(s)
tmp, _, err := br.ReadRune()
if err != nil {
panic(err)
}
fmt.Printf("%q\n", tmp) // '中'
br.UnreadRune()
tmp, _, err = br.ReadRune()
if err != nil {
panic(err)
}
fmt.Printf("%q\n", tmp) // '中'
}
func TestReadLine1(t *testing.T) {
// ReadLine 是一个低水平的行读取原语,大多数情况下,应该使用
// ReadBytes('\n') 或 ReadString('\n'),或者使用一个 Scanner。
//
// ReadLine 通过调用 ReadSlice 方法实现,返回的也是缓存的切片。用于
// 读取一行数据,不包括行尾标记(\n 或 \r\n)。
//
// 只要能读出数据,err 就为 nil。如果没有数据可读,则 isPrefix 返回
// false,err 返回 io.EOF。
//
// 如果找到行尾标记,则返回查找结果,isPrefix 返回 false。
// 如果未找到行尾标记,则:
// 1、缓存不满,则将缓存填满后再次查找。
// 2、缓存是满的,则返回整个缓存,isPrefix 返回 true。
//
// 整个数据尾部“有一个换行标记”和“没有换行标记”的读取结果是一样。
//
// 如果 ReadLine 读取到换行标记,则调用 UnreadByte 撤销的是换行标记,
// 而不是返回的数据。
// func (b *Reader) ReadLine() (line []byte, isPrefix bool, err error)
s := strings.NewReader("123\nzzz")
br := bufio.NewReader(s)
for line, isPrefix, err := []byte{0}, false, error(nil); len(line) > 0 && err == nil; {
line, isPrefix, err = br.ReadLine()
fmt.Printf("%q %t %v\n", line, isPrefix, err)
/*
"123" false <nil>
"zzz" false <nil>
"" false EOF
*/
}
}
func TestReadSlice1(t *testing.T) {
// ReadSlice 在 b 中查找 delim 并返回 delim 及其之前的所有数据。
// 该操作会读出数据,返回的切片是已读出的数据的引用,切片中的数据
// 在下一次读取操作之前是有效的。
//
// 如果找到 delim,则返回查找结果,err 返回 nil。
// 如果未找到 delim,则:
// 1、缓存不满,则将缓存填满后再次查找。
// 2、缓存是满的,则返回整个缓存,err 返回 ErrBufferFull。
//
// 如果未找到 delim 且遇到错误(通常是 io.EOF),则返回缓存中的所
// 有数据和遇到的错误。
//
// 因为返回的数据有可能被下一次的读写操作修改,所以大多数操作应该
// 使用 ReadBytes 或 ReadString,它们返回的是数据的拷贝。
// func (b *Reader) ReadSlice(delim byte) (line []byte, err error)
s := strings.NewReader("ABC DEF GHI")
br := bufio.NewReader(s)
w, err := br.ReadSlice(' ')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", w) // "ABC "
w, err = br.ReadSlice(' ')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", w) // "DEF "
w, err = br.ReadSlice(' ')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", w) // panic: EOF
}
func TestReadBytes1(t *testing.T) {
// ReadBytes 功能同 ReadSlice,只不过返回的是缓存的拷贝。
// func (b *Reader) ReadBytes(delim byte) (line []byte, err error)
s := strings.NewReader("ABC,EFG,HIJ")
br := bufio.NewReader(s)
line, err := br.ReadBytes(',')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", line) // "ABC,"
line, err = br.ReadBytes(',')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", line) // "EFG,"
line, err = br.ReadBytes(',')
if err != nil {
panic(err) // panic: EOF
}
fmt.Printf("%q\n", line)
}
func TestReadString1(t *testing.T) {
// ReadString 功能同 ReadBytes,只不过返回的是字符串。
// func (b *Reader) ReadString(delim byte) (line string, err error)
s := strings.NewReader("你好,我是卡牌")
br := bufio.NewReader(s)
line, err := br.ReadString(',')
if err != nil {
panic(err)
}
fmt.Printf("%s\n", line) // 你好,
line, err = br.ReadString(',')
if err != nil {
panic(err) // panic: EOF
}
fmt.Printf("%s\n", line)
}
// bufio.Reader的WriteTo()和 bufio.Writer的ReadFrom()
// WriteTo方法实现了io.WriterTo接口。
// func (b *Reader) WriteTo(w io.Writer) (n int64, err error)
func TestWriteTo1(t *testing.T) {
b := bytes.NewBuffer(make([]byte, 0))
s := strings.NewReader("ABCDEFG")
br := bufio.NewReader(s)
br.WriteTo(b)
fmt.Printf("%s\n", b.String()) // ABCDEFG
fmt.Printf("%q\n", b) // "ABCDEFG"
fmt.Println(string(65)) // A
}
// ReadFrom实现了io.ReaderFrom接口。
// func (b *Writer) ReadFrom(r io.Reader) (n int64, err error)
// ReadFrom无需使用Flush
func TestReadFrom1(t *testing.T) {
s := strings.NewReader("hello world")
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.ReadFrom(s)
fmt.Println(b)
}
//go test -run=buffio_test.go -bench="BenchmarkReadFrom1" -benchtime="3s" -cpuprofile cpu.out
// linux 是 -bench=.
func BenchmarkReadFrom1(b *testing.B) {
b.ReportAllocs()
b.ResetTimer()
for idx := 0; idx < b.N; idx++ {
s := strings.NewReader("hello world")
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.ReadFrom(s)
}
b.StopTimer()
}
// Writer实现了为io.Writer接口对象提供缓冲。
// 如果在向一个Writer类型值写入时遇到了错误,
// 该对象将不再接受任何数据,返回该错误
// 数据都写入后,调用者有义务调用Flush方法,
// 保证所有的数据都交给了下层的io.Writer。
func TestNewWriter1(t *testing.T) {
// NewWriter创建一个具有默认大小缓冲、写入w的*Writer。 相当于 NewWriterSize(w | NewWriter(w io.Writer) *Writer
// Buffered()返回缓冲中已使用的字节数。
// func (b *Writer) Buffered() int
// Available()返回缓冲中还有多少字节未使用。
// func (b *Writer) Available() int
// Reset丢弃缓冲中的数据,清除任何错误,将b重设为将其输出写入w。
// func (b *Writer) Reset(w io.Writer)
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
fmt.Println(bw.Available(), bw.Buffered()) // 4096 0
bw.WriteString("card")
fmt.Println(bw.Available(), bw.Buffered()) // 4092 4
bw.Reset(b)
fmt.Println(bw.Available(), bw.Buffered()) // 4096 0
}
func TestWrite1(t *testing.T) {
// Write 将 p 中的数据写入 b 中,返回写入的字节数
// 如果写入的字节数小于 p 的长度,则返回一个错误信息
// func (b *Writer) Write(p []byte) (nn int, err error)
// Flush 将缓存中的数据提交到底层的 io.Writer 中
// func (b *Writer) Flush() error
p := [...]byte{'a', 'b', 'c'}
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.Write(p[:])
bw.Flush()
fmt.Printf("%q\n", b) //"abc"
}
func TestWriteString1(t *testing.T) {
// WriteString 同 Write,只不过写入的是字符串
// func (b *Writer) WriteString(s string) (int, error)
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.WriteString("hello world")
bw.Flush()
fmt.Printf("%s\n", b) //hello world
}
func TestWriteByte1(t *testing.T) {
// WriteByte写入单个字节。
// func (b *Writer) WriteByte(c byte) error
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.WriteByte('c')
bw.Flush()
fmt.Println(b)
}
func TestWriteRune1(t *testing.T) {
// WriteRune写入一个unicode码值(的utf-8编码),返回写入的字节数和可能的错误。
// func (b *Writer) WriteRune(r rune) (size int, err error)
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
size, err := bw.WriteRune('周')
if err != nil {
panic(err)
}
fmt.Println(size) // 3
bw.Flush()
fmt.Println(b) // 周
}
func TestReadWriter1(t *testing.T) {
// ReadWriter类型保管了指向Reader和Writer类型的指针
// 实现了io.ReadWriter接口。
// NewReadWriter 生成bufio.ReadWriter对象
// func NewReadWriter(r *Reader, w *Writer) *ReadWriter
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
s := strings.NewReader("hello world")
br := bufio.NewReader(s)
rw := bufio.NewReadWriter(br, bw)
word, err := rw.ReadString(' ')
if err != nil {
panic(err)
}
fmt.Printf("%s\n", word) // hello
_, err = rw.WriteString(",I'm coming")
if err != nil {
panic(err)
}
rw.Flush()
fmt.Println(b)
}
func TestNewScanner1(t *testing.T) {
// Scanner 提供了一个方便的接口来读取数据,例如遍历多行文本中的行。Scan 方法会通过
// 一个“匹配函数”读取数据中符合要求的部分,跳过不符合要求的部分。“匹配函数”由调
// 用者指定。本包中提供的匹配函数有“行匹配函数”、“字节匹配函数”、“字符匹配函数”
// 和“单词匹配函数”,用户也可以自定义“匹配函数”。默认的“匹配函数”为“行匹配函
// 数”,用于获取数据中的一行内容(不包括行尾标记)
//
// Scanner 使用了缓存,所以匹配部分的长度不能超出缓存的容量。默认缓存容量为 4096 -
// bufio.MaxScanTokenSize,用户可以通过 Buffer 方法指定自定义缓存及其最大容量。
//
// Scan 在遇到下面的情况时会终止扫描并返回 false(扫描一旦终止,将无法再继续):
// 1、遇到 io.EOF
// 2、遇到读写错误
// 3、“匹配部分”的长度超过了缓存的长度
//
// 如果需要对错误进行更多的控制,
// 或“匹配部分”超出缓存容量,或需要连续扫描,
// 则应该使用 bufio.Reader
// func NewScanner(r io.Reader) *Scanner
// Bytes方法返回最近一次Scan调用生成的token。
// 底层数组指向的数据可能会被下一次Scan的调用重写。
// func (s *Scanner) Bytes() []byte
// Buffer()方法设置扫描时使用的初始缓冲区和最大值
// 默认情况下,Scan使用内部缓冲区并设置MaxScanTokenSize的最大令牌大小
s := strings.NewReader("周起\n卡牌\n程序员\n")
bs := bufio.NewScanner(s)
bs.Buffer(make([]byte, 0), bufio.MaxScanTokenSize)
for bs.Scan() {
// 周起
// 卡牌
// 程序员
fmt.Printf("%s\n", bs.Bytes())
}
}
func TestSplit1(t *testing.T) {
// Split设置该Scanner的分割函数。默认设置为 bufio.ScanLines()
// 本方法必须在Scan之前调用。
// func (s *Scanner) Split(split SplitFunc)
s := strings.NewReader("周起 卡牌 程序员")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanWords)
// Text返回由Scan调用生成的最新标记,
// 作为保存其字节的新分配字符串。
for bs.Scan() {
fmt.Printf("%s\n", bs.Text())
}
}
func TestScan1(t *testing.T) {
// Scan方法获取当前位置的token(该token可以通过Bytes或Text方法获得),
// 并让Scanner的扫描位置移动到下一个token。
// 当扫描因为抵达输入流结尾或者遇到错误而停止时,
// 本方法会返回false。在Scan方法返回false后,
// Err方法将返回扫描时遇到的任何错误;
// 除非是io.EOF,此时Err会返回nil。
// func (s *Scanner) Scan() bool
s := strings.NewReader("周起 卡牌 程序员")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanWords)
for bs.Scan() {
fmt.Printf("%s %s\n", bs.Text(), bs.Bytes())
}
}
func TestScanBytes1(t *testing.T) {
// Bytes方法返回最近一次Scan调用生成的token。
// 底层数组指向的数据可能会被下一次Scan的调用重写。
s := strings.NewReader("abcd")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanBytes)
for bs.Scan() {
// a
// b
// c
// d
fmt.Printf("%s\n", bs.Bytes())
}
}
func TestScanRunes1(t *testing.T) {
// ScanRunes是用于Scanner类型的分割函数(符合SplitFunc),
// 本函数会将每个utf-8编码的unicode码值作为一个token返回。
s := strings.NewReader("周起卡牌程序员")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanRunes)
for bs.Scan() {
// 周
// 起
// 卡
// 牌
// 程
// 序
// 员
fmt.Printf("%s\n", bs.Text())
}
}
func TestScanWords1(t *testing.T) {
// ScanRunes是用于Scanner类型的分割函数(符合SplitFunc),
// 本函数会将空白(参见unicode.IsSpace)
// 分隔的片段(去掉前后空白后)作为一个token返回。
// 本函数永远不会返回空字符串。
s := strings.NewReader("我 是 卡 牌")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanWords)
for bs.Scan() {
// 我
// 是
// 卡
// 牌
fmt.Printf("%s\n", bs.Text())
}
}
func TestScanLines1(t *testing.T) {
// 将每一行文本去掉末尾的换行标记作为一个token返回
// 此函数的bs.Scan()的默认值
s := strings.NewReader("卡牌\n周起\n程序员\n")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanLines)
for bs.Scan() {
// 卡牌
// 周起
// 程序员
fmt.Printf("%s\n", bs.Text())
}
}
| r, 4096)
// func | conditional_block |
buffio_test.go | package buffio
import (
"bufio"
"bytes"
"fmt"
"io"
"strings"
"testing"
)
func TestPeek1(t *testing.T) {
/*
func NewReaderSize(rd io.Reader, size int) *Reader 将 rd 封装成一个带缓存的 bufio.Reader 对象,
缓存大小由 size 指定(如果小于 16 则会被设置为 16)。minReadBufferSize = 16
如果 rd 的基类型就是有足够缓存的 bufio.Reader 类型,则直接将rd 转换为基类型返回。
NewReader()方法返回一个默认大小的带缓存的bufio.Reader对象,即 NewReaderSize(rd, 4096)
*/
s := strings.NewReader("hello world")
// func (b *Reader) Reset(r io.Reader)
// Reset丢弃缓冲中的数据,清除任何错误,将b重设为其下层从r读取数据。
comment := "Package io provides basic interfaces to I/O primitives. " +
"Its primary job is to wrap existing implementations of such primitives, " +
"such as those in package os, " +
"into shared public interfaces that abstract the functionality, " +
"plus some other related primitives."
s.Reset(comment)
buf1 := make([]byte, 10)
reader1 := io.LimitReader(s, 19)
for i := 0; i < 3; i++ {
n, _ := reader1.Read(buf1) //每一次读取都是将buf1元素从左到右按位覆盖
fmt.Println(n)
fmt.Println(string(buf1))
/*
10
Package io
9
provideso
0
provideso
*/
}
// func (b *Reader) Peek(n int) ([]byte, error)
// Peek 返回Reader的一个切片,该切片引用Reader中从当前起始索引位置开始的 n 个字节的数据,
// 该操作不会将数据读出,只是引用,引用的数据在下一次读取操作之
// 前是有效的。如果切片长度小于 n,则返回一个错误信息说明原因。
// 如果 n 大于缓存的总大小,则返回 ErrBufferFull。
// 看源码,缓存范围:16~4096
br := bufio.NewReader(s)
b, _ := br.Peek(15)
fmt.Printf("%q\n", b) // " basic interfac"
b[0] = 'a'
b, _ = br.Peek(5)
fmt.Printf("%q\n", b) // "abasi"
}
func TestRead1(t *testing.T) {
// Read 从 b 中读出数据到 p 中,返回写入p的字节数
// 读取到达结尾时,返回值n将为0而err将为io.EOF。
// 如果缓存不为空,则只能读出缓存中的数据,不会从底层 io.Reader
// 中提取数据,如果缓存为空,则:
// 1、len(p) >= 缓存大小,则跳过缓存,直接从底层 io.Reader 中读
// 出到 p 中。
// 2、len(p) < 缓存大小,则先将数据从底层 io.Reader 中读取到缓存
// 中,再从缓存读取到 p 中。
// func (b *Reader) Read(p []byte) (n int, err error)
s := strings.NewReader("123456789")
br := bufio.NewReader(s)
b := make([]byte, 4)
n, err := br.Read(b)
fmt.Printf("%s %v %v\n", b[:n], n, err) // 1234 4
n, err = br.Read(b)
fmt.Printf("%s %v %v\n", b[:n], n, err) // 5678 4
n, err = br.Read(b)
fmt.Printf("%s %v %v\n", b[:n], n, err) // 9 1
n, err = br.Read(b)
fmt.Printf("%s %v %v\n", b[:n], n, err) // 0 EOF
}
func TestBuffered1(t *testing.T) {
// 返回可以从缓存中读取的字节数
// func (b *Reader) Buffered() int { return b.w - b.r }
s := strings.NewReader("123456789")
br := bufio.NewReader(s)
b := make([]byte, 3)
br.Read(b)
fmt.Println(br.Buffered()) // 6
br.Read(b)
fmt.Println(br.Buffered()) // 3
}
func TestReadByte1(t *testing.T) {
// ReadByte读取并返回一个字节。如果没有可用的数据,会返回错误。
// func (b *Reader) ReadByte() (c byte, err error)
origin := "abcd"
s := strings.NewReader(origin)
br := bufio.NewReader(s)
// 第一次读取
tmp, err := br.ReadByte()
if err != nil {
panic(err)
}
fmt.Printf("%q\n", tmp) // 'a'
// Buffered returns the number of bytes that can be read from the current buffer.
fmt.Println(br.Buffered()) // 3
for i := 0; i < len(origin); i++ {
tmp, err = br.ReadByte()
if err != nil {
// panic: EOF 因为已经读取了1个字符 缓存中只剩下3个
// 所以在读取第4个字符报错EOF
panic(err)
}
}
}
func TestUnreadByte1(t *testing.T) {
// 撤消最后读出的字节
s := strings.NewReader("abcde")
br := bufio.NewReader(s)
tmp, _ := br.ReadByte()
fmt.Printf("%q\n", tmp) // 'a'
fmt.Println(br.Buffered()) // 4
br.UnreadByte() // 撤销吐出,即栈中弹出的a元素又放回来了
fmt.Println(br.Buffered()) // 5
tmp, _ = br.ReadByte()
fmt.Printf("%q\n", tmp) // 'a'
}
func TestReadRune1(t *testing.T) {
// ReadRune读取一个utf-8编码的unicode码值
chinese := "中国人"
s := strings.NewReader(chinese)
br := bufio.NewReader(s)
tmp, _, err := br.ReadRune()
if err != nil {
panic(err)
}
fmt.Printf("%q\n", tmp) // '中'
}
func TestUnReadRune1(t *testing.T) {
chinese := "中国人"
s := strings.NewReader(chinese)
br := bufio.NewReader(s)
tmp, _, err := br.ReadRune()
if err != nil {
panic(err)
}
fmt.Printf("%q\n", tmp) // '中'
br.UnreadRune()
tmp, _, err = br.ReadRune()
if err != nil {
panic(err)
}
fmt.Printf("%q\n", tmp) // '中'
}
func TestReadLine1(t *testing.T) {
// ReadLine 是一个低水平的行读取原语,大多数情况下,应该使用
// ReadBytes('\n') 或 ReadString('\n'),或者使用一个 Scanner。
//
// ReadLine 通过调用 ReadSlice 方法实现,返回的也是缓存的切片。用于
// 读取一行数据,不包括行尾标记(\n 或 \r\n)。
//
// 只要能读出数据,err 就为 nil。如果没有数据可读,则 isPrefix 返回
// false,err 返回 io.EOF。
//
// 如果找到行尾标记,则返回查找结果,isPrefix 返回 false。
// 如果未找到行尾标记,则:
// 1、缓存不满,则将缓存填满后再次查找。
// 2、缓存是满的,则返回整个缓存,isPrefix 返回 true。
//
// 整个数据尾部“有一个换行标记”和“没有换行标记”的读取结果是一样。
//
// 如果 ReadLine 读取到换行标记,则调用 UnreadByte 撤销的是换行标记,
// 而不是返回的数据。
// func (b *Reader) ReadLine() (line []byte, isPrefix bool, err error)
s := strings.NewReader("123\nzzz")
br := bufio.NewReader(s)
for line, isPrefix, err := []byte{0}, false, error(nil); len(line) > 0 && err == nil; {
line, isPrefix, err = br.ReadLine()
fmt.Printf("%q %t %v\n", line, isPrefix, err)
/*
"123" false <nil>
"zzz" false <nil>
"" false EOF
*/
}
}
func TestReadSlice1(t *testing.T) {
// ReadSlice 在 b 中查找 delim 并返回 delim 及其之前的所有数据。
// 该操作会读出数据,返回的切片是已读出的数据的引用,切片中的数据
// 在下一次读取操作之前是有效的。
//
// 如果找到 delim,则返回查找结果,err 返回 nil。
// 如果未找到 delim,则:
// 1、缓存不满,则将缓存填满后再次查找。
// 2、缓存是满的,则返回整个缓存,err 返回 ErrBufferFull。
//
// 如果未找到 delim 且遇到错误(通常是 io.EOF),则返回缓存中的所
// 有数据和遇到的错误。
//
// 因为返回的数据有可能被下一次的读写操作修改,所以大多数操作应该
// 使用 ReadBytes 或 ReadString,它们返回的是数据的拷贝。
// func (b *Reader) ReadSlice(delim byte) (line []byte, err error)
s := strings.NewReader("ABC DEF GHI")
br := bufio.NewReader(s)
w, err := br.ReadSlice(' ')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", w) // "ABC "
w, err = br.ReadSlice(' ')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", w) // "DEF "
w, err = br.ReadSlice(' ')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", w) // panic: EOF
}
func TestReadBytes1(t *testing.T) {
// ReadBytes 功能同 ReadSlice,只不过返回的是缓存的拷贝。
// func (b *Reader) ReadBytes(delim byte) (line []byte, err error)
s := strings.NewReader("ABC,EFG,HIJ")
br := bufio.NewReader(s)
line, err := br.ReadBytes(',')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", line) // "ABC,"
line, err = br.ReadBytes(',')
if err != nil {
panic(err)
}
fmt.Printf("%q\n", line) // "EFG,"
line, err = br.ReadBytes(',')
if err != nil {
panic(err) // panic: EOF
}
fmt.Printf("%q\n", line)
}
func TestReadString1(t *testing.T) {
// ReadString 功能同 ReadBytes,只不过返回的是字符串。
// func (b *Reader) ReadString(delim byte) (line string, err error)
s := strings.NewReader("你好,我是卡牌")
br := bufio.NewReader(s)
line, err := br.ReadString(',')
if err != nil {
panic(err)
}
fmt.Printf("%s\n", line) // 你好,
line, err = br.ReadString(',')
if err != nil {
panic(err) // panic: EOF
}
fmt.Printf("%s\n", line)
}
// bufio.Reader的WriteTo()和 bufio.Writer的ReadFrom()
// WriteTo方法实现了io.WriterTo接口。
// func (b *Reader) WriteTo(w io.Writer) (n int64, err error)
func TestWriteTo1(t *testing.T) {
b := bytes.NewBuffer(make([]byte, 0))
s := strings.NewReader("ABCDEFG")
br := bufio.NewReader(s)
br.WriteTo(b)
fmt.Printf("%s\n", b.String()) // ABCDEFG
fmt.Printf("%q\n", b) // "ABCDEFG"
fmt.Println(string(65)) // A
}
// ReadFrom实现了io.ReaderFrom接口。
// func (b *Writer) ReadFrom(r io.Reader) (n int64, err error)
// ReadFrom无需使用Flush
func TestReadFrom1(t *testing.T) {
s := strings.NewReader("hello world")
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.ReadFrom(s)
fmt.Println(b)
}
//go test -run=buffio_test.go -bench="BenchmarkReadFrom1" -benchtime="3s" -cpuprofile cpu.out
// linux 是 -bench=.
func BenchmarkReadFrom1(b *testing.B) {
b.ReportAllocs()
b.ResetTimer()
for idx := 0; idx < b.N; idx++ {
s := strings.NewReader("hello world")
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.ReadFrom(s)
}
b.StopTimer()
}
// Writer实现了为io.Writer接口对象提供缓冲。
// 如果在向一个Writer类型值写入时遇到了错误,
// 该对象将不再接受任何数据,返回该错误
// 数据都写入后,调用者有义务调用Flush方法,
// 保证所有的数据都交给了下层的io.Writer。
func TestNewWriter1(t *testing.T) {
// NewWriter创建一个具有默认大小缓冲、写入w的*Writer。 相当于 NewWriterSize(wr, 4096)
// func NewWriter(w io.Writer) *Writer
// Buffered()返回缓冲中已使用的字节数。
// func (b *Writer) Buffered() int
// Available()返回缓冲中还有多少字节未使用。
// func (b *Writer) Available() int
// Reset丢弃缓冲中的数据,清除任何错误,将b重设为将其输出写入w。
// func (b *Writer) Reset(w io.Writer)
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
fmt.Println(bw.Available(), bw.Buffered()) // 4096 0
bw.WriteString("card")
fmt.Println(bw.Available(), bw.Buffered()) // 4092 4
bw.Reset(b)
fmt.Println(bw.Available(), bw.Buffered()) // 4096 0
}
func TestWrite1(t *testing.T) {
// Write 将 p 中的数据写入 b 中,返回写入的字节数
// 如果写入的字节数小于 p 的长度,则返回一个错误信息
// func (b *Writer) Write(p []byte) (nn int, err error)
// Flush 将缓存中的数据提交到底层的 io.Writer 中
// func (b *Writer) Flush() error
p := [...]byte{'a', 'b', 'c'}
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.Write(p[:])
bw.Flush()
fmt.Printf("%q\n", b) //"abc"
}
func TestWriteString1(t *testing.T) {
// WriteString 同 Write,只不过写入的是字符串
// func (b *Writer) WriteString(s string) (int, error)
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.WriteString("hello world")
bw.Flush()
fmt.Printf("%s\n", b) //hello world
}
func TestWriteByte1(t *testing.T) {
// WriteByte写入单个字节。
// func (b *Writer) WriteByte(c byte) error
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
bw.WriteByte('c')
bw.Flush()
fmt.Println(b)
}
func TestWriteRune1(t *testing.T) {
// WriteRune写入一个unicode码值(的utf-8编码),返回写入的字节数和可能的错误。
// func (b *Writer) WriteRune(r rune) (size int, err error)
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
size, err := bw.WriteRune('周')
if err != nil {
panic(err)
}
fmt.Println(size) // 3
bw.Flush()
fmt.Println(b) // 周
}
func TestReadWriter1(t *testing.T) {
// ReadWriter类型保管了指向Reader和Writer类型的指针
// 实现了io.ReadWriter接口。
// NewReadWriter 生成bufio.ReadWriter对象
// func NewReadWriter(r *Reader, w *Writer) *ReadWriter
b := bytes.NewBuffer(make([]byte, 0))
bw := bufio.NewWriter(b)
s := strings.NewReader("hello world")
br := bufio.NewReade | // 或“匹配部分”超出缓存容量,或需要连续扫描,
// 则应该使用 bufio.Reader
// func NewScanner(r io.Reader) *Scanner
// Bytes方法返回最近一次Scan调用生成的token。
// 底层数组指向的数据可能会被下一次Scan的调用重写。
// func (s *Scanner) Bytes() []byte
// Buffer()方法设置扫描时使用的初始缓冲区和最大值
// 默认情况下,Scan使用内部缓冲区并设置MaxScanTokenSize的最大令牌大小
s := strings.NewReader("周起\n卡牌\n程序员\n")
bs := bufio.NewScanner(s)
bs.Buffer(make([]byte, 0), bufio.MaxScanTokenSize)
for bs.Scan() {
// 周起
// 卡牌
// 程序员
fmt.Printf("%s\n", bs.Bytes())
}
}
func TestSplit1(t *testing.T) {
// Split设置该Scanner的分割函数。默认设置为 bufio.ScanLines()
// 本方法必须在Scan之前调用。
// func (s *Scanner) Split(split SplitFunc)
s := strings.NewReader("周起 卡牌 程序员")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanWords)
// Text返回由Scan调用生成的最新标记,
// 作为保存其字节的新分配字符串。
for bs.Scan() {
fmt.Printf("%s\n", bs.Text())
}
}
func TestScan1(t *testing.T) {
// Scan方法获取当前位置的token(该token可以通过Bytes或Text方法获得),
// 并让Scanner的扫描位置移动到下一个token。
// 当扫描因为抵达输入流结尾或者遇到错误而停止时,
// 本方法会返回false。在Scan方法返回false后,
// Err方法将返回扫描时遇到的任何错误;
// 除非是io.EOF,此时Err会返回nil。
// func (s *Scanner) Scan() bool
s := strings.NewReader("周起 卡牌 程序员")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanWords)
for bs.Scan() {
fmt.Printf("%s %s\n", bs.Text(), bs.Bytes())
}
}
func TestScanBytes1(t *testing.T) {
// Bytes方法返回最近一次Scan调用生成的token。
// 底层数组指向的数据可能会被下一次Scan的调用重写。
s := strings.NewReader("abcd")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanBytes)
for bs.Scan() {
// a
// b
// c
// d
fmt.Printf("%s\n", bs.Bytes())
}
}
func TestScanRunes1(t *testing.T) {
// ScanRunes是用于Scanner类型的分割函数(符合SplitFunc),
// 本函数会将每个utf-8编码的unicode码值作为一个token返回。
s := strings.NewReader("周起卡牌程序员")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanRunes)
for bs.Scan() {
// 周
// 起
// 卡
// 牌
// 程
// 序
// 员
fmt.Printf("%s\n", bs.Text())
}
}
func TestScanWords1(t *testing.T) {
// ScanRunes是用于Scanner类型的分割函数(符合SplitFunc),
// 本函数会将空白(参见unicode.IsSpace)
// 分隔的片段(去掉前后空白后)作为一个token返回。
// 本函数永远不会返回空字符串。
s := strings.NewReader("我 是 卡 牌")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanWords)
for bs.Scan() {
// 我
// 是
// 卡
// 牌
fmt.Printf("%s\n", bs.Text())
}
}
func TestScanLines1(t *testing.T) {
// 将每一行文本去掉末尾的换行标记作为一个token返回
// 此函数的bs.Scan()的默认值
s := strings.NewReader("卡牌\n周起\n程序员\n")
bs := bufio.NewScanner(s)
bs.Split(bufio.ScanLines)
for bs.Scan() {
// 卡牌
// 周起
// 程序员
fmt.Printf("%s\n", bs.Text())
}
}
| r(s)
rw := bufio.NewReadWriter(br, bw)
word, err := rw.ReadString(' ')
if err != nil {
panic(err)
}
fmt.Printf("%s\n", word) // hello
_, err = rw.WriteString(",I'm coming")
if err != nil {
panic(err)
}
rw.Flush()
fmt.Println(b)
}
func TestNewScanner1(t *testing.T) {
// Scanner 提供了一个方便的接口来读取数据,例如遍历多行文本中的行。Scan 方法会通过
// 一个“匹配函数”读取数据中符合要求的部分,跳过不符合要求的部分。“匹配函数”由调
// 用者指定。本包中提供的匹配函数有“行匹配函数”、“字节匹配函数”、“字符匹配函数”
// 和“单词匹配函数”,用户也可以自定义“匹配函数”。默认的“匹配函数”为“行匹配函
// 数”,用于获取数据中的一行内容(不包括行尾标记)
//
// Scanner 使用了缓存,所以匹配部分的长度不能超出缓存的容量。默认缓存容量为 4096 -
// bufio.MaxScanTokenSize,用户可以通过 Buffer 方法指定自定义缓存及其最大容量。
//
// Scan 在遇到下面的情况时会终止扫描并返回 false(扫描一旦终止,将无法再继续):
// 1、遇到 io.EOF
// 2、遇到读写错误
// 3、“匹配部分”的长度超过了缓存的长度
//
// 如果需要对错误进行更多的控制,
| identifier_body |
particle.rs | use std::{
mem::size_of,
num::{NonZeroU32, NonZeroU8},
};
use crate::{
client::{
entity::particle::Particle,
render::{
create_texture,
pipeline::{Pipeline, PushConstantUpdate},
world::{Camera, WorldPipelineBase},
Palette, TextureData,
},
},
common::{math::Angles, util::any_slice_as_bytes},
};
use bumpalo::Bump;
use cgmath::Matrix4;
lazy_static! {
static ref VERTEX_BUFFER_ATTRIBUTES: [Vec<wgpu::VertexAttribute>; 1] = [
wgpu::vertex_attr_array![
// position
0 => Float32x3,
// texcoord
1 => Float32x2,
].to_vec(),
];
}
#[rustfmt::skip]
const PARTICLE_TEXTURE_PIXELS: [u8; 64] = [
0, 0, 1, 1, 1, 1, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
0, 1, 1, 1, 1, 1, 1, 0,
0, 0, 1, 1, 1, 1, 0, 0,
];
pub struct ParticlePipeline {
pipeline: wgpu::RenderPipeline,
bind_group_layouts: Vec<wgpu::BindGroupLayout>,
vertex_buffer: wgpu::Buffer,
sampler: wgpu::Sampler,
textures: Vec<wgpu::Texture>,
texture_views: Vec<wgpu::TextureView>,
bind_group: wgpu::BindGroup,
}
impl ParticlePipeline {
pub fn new(
device: &wgpu::Device,
queue: &wgpu::Queue,
compiler: &mut shaderc::Compiler,
sample_count: u32,
palette: &Palette,
) -> ParticlePipeline {
let (pipeline, bind_group_layouts) =
ParticlePipeline::create(device, compiler, &[], sample_count);
use wgpu::util::DeviceExt as _;
let vertex_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: None,
contents: unsafe { any_slice_as_bytes(&VERTICES) },
usage: wgpu::BufferUsage::VERTEX,
});
let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
label: Some("particle sampler"),
address_mode_u: wgpu::AddressMode::ClampToEdge,
address_mode_v: wgpu::AddressMode::ClampToEdge,
address_mode_w: wgpu::AddressMode::ClampToEdge,
mag_filter: wgpu::FilterMode::Nearest,
min_filter: wgpu::FilterMode::Linear,
mipmap_filter: wgpu::FilterMode::Linear,
lod_min_clamp: -1000.0,
lod_max_clamp: 1000.0,
compare: None,
anisotropy_clamp: NonZeroU8::new(16),
border_color: None,
});
let textures: Vec<wgpu::Texture> = (0..256)
.map(|i| {
let mut pixels = PARTICLE_TEXTURE_PIXELS;
// set up palette translation
for pix in pixels.iter_mut() {
if *pix == 0 | else {
*pix *= i as u8;
}
}
let (diffuse_data, _) = palette.translate(&pixels);
create_texture(
device,
queue,
Some(&format!("particle texture {}", i)),
8,
8,
&TextureData::Diffuse(diffuse_data),
)
})
.collect();
let texture_views: Vec<wgpu::TextureView> = textures
.iter()
.map(|t| t.create_view(&Default::default()))
.collect();
let texture_view_refs = texture_views.iter().collect::<Vec<_>>();
let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("particle bind group"),
layout: &bind_group_layouts[0],
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::Sampler(&sampler),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::TextureViewArray(&texture_view_refs[..]),
},
],
});
ParticlePipeline {
pipeline,
bind_group_layouts,
sampler,
textures,
texture_views,
bind_group,
vertex_buffer,
}
}
pub fn rebuild(
&mut self,
device: &wgpu::Device,
compiler: &mut shaderc::Compiler,
sample_count: u32,
) {
let layout_refs: Vec<_> = self.bind_group_layouts.iter().collect();
self.pipeline = ParticlePipeline::recreate(device, compiler, &layout_refs, sample_count);
}
pub fn pipeline(&self) -> &wgpu::RenderPipeline {
&self.pipeline
}
pub fn bind_group_layouts(&self) -> &[wgpu::BindGroupLayout] {
&self.bind_group_layouts
}
pub fn vertex_buffer(&self) -> &wgpu::Buffer {
&self.vertex_buffer
}
pub fn record_draw<'a, 'b, P>(
&'a self,
pass: &mut wgpu::RenderPass<'a>,
bump: &'a Bump,
camera: &Camera,
particles: P,
) where
P: Iterator<Item = &'b Particle>,
{
use PushConstantUpdate::*;
pass.set_pipeline(self.pipeline());
pass.set_vertex_buffer(0, self.vertex_buffer.slice(..));
pass.set_bind_group(0, &self.bind_group, &[]);
// face toward camera
let Angles { pitch, yaw, roll } = camera.angles();
let rotation = Angles {
pitch: -pitch,
yaw: -yaw,
roll: -roll,
}
.mat4_wgpu();
for particle in particles {
let q_origin = particle.origin();
let translation =
Matrix4::from_translation([-q_origin.y, q_origin.z, -q_origin.x].into());
Self::set_push_constants(
pass,
Update(bump.alloc(VertexPushConstants {
transform: camera.view_projection() * translation * rotation,
})),
Retain,
Update(bump.alloc(FragmentPushConstants {
color: particle.color() as u32,
})),
);
pass.draw(0..6, 0..1);
}
}
}
#[derive(Copy, Clone, Debug)]
pub struct VertexPushConstants {
pub transform: Matrix4<f32>,
}
#[derive(Copy, Clone, Debug)]
pub struct FragmentPushConstants {
pub color: u32,
}
const BIND_GROUP_LAYOUT_ENTRIES: &[wgpu::BindGroupLayoutEntry] = &[
wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStage::FRAGMENT,
ty: wgpu::BindingType::Sampler {
filtering: true,
comparison: false,
},
count: None,
},
// per-index texture array
wgpu::BindGroupLayoutEntry {
binding: 1,
visibility: wgpu::ShaderStage::FRAGMENT,
ty: wgpu::BindingType::Texture {
view_dimension: wgpu::TextureViewDimension::D2,
sample_type: wgpu::TextureSampleType::Float { filterable: true },
multisampled: false,
},
count: NonZeroU32::new(256),
},
];
lazy_static! {
static ref VERTEX_ATTRIBUTES: [[wgpu::VertexAttribute; 2]; 2] = [
wgpu::vertex_attr_array![
// position
0 => Float32x3,
// texcoord
1 => Float32x2,
],
wgpu::vertex_attr_array![
// instance position
2 => Float32x3,
// color index
3 => Uint32,
]
];
}
impl Pipeline for ParticlePipeline {
type VertexPushConstants = VertexPushConstants;
type SharedPushConstants = ();
type FragmentPushConstants = FragmentPushConstants;
fn name() -> &'static str {
"particle"
}
fn vertex_shader() -> &'static str {
include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/shaders/particle.vert"
))
}
fn fragment_shader() -> &'static str {
include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/shaders/particle.frag"
))
}
// NOTE: if any of the binding indices are changed, they must also be changed in
// the corresponding shaders and the BindGroupLayout generation functions.
fn bind_group_layout_descriptors() -> Vec<wgpu::BindGroupLayoutDescriptor<'static>> {
vec![
// group 0
wgpu::BindGroupLayoutDescriptor {
label: Some("particle bind group layout"),
entries: BIND_GROUP_LAYOUT_ENTRIES,
},
]
}
fn primitive_state() -> wgpu::PrimitiveState {
WorldPipelineBase::primitive_state()
}
fn color_target_states() -> Vec<wgpu::ColorTargetState> {
WorldPipelineBase::color_target_states()
}
fn depth_stencil_state() -> Option<wgpu::DepthStencilState> {
let mut desc = WorldPipelineBase::depth_stencil_state().unwrap();
desc.depth_write_enabled = false;
Some(desc)
}
// NOTE: if the vertex format is changed, this descriptor must also be changed accordingly.
fn vertex_buffer_layouts() -> Vec<wgpu::VertexBufferLayout<'static>> {
vec![wgpu::VertexBufferLayout {
array_stride: size_of::<ParticleVertex>() as u64,
step_mode: wgpu::InputStepMode::Vertex,
attributes: &VERTEX_ATTRIBUTES[0],
}]
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
pub struct ParticleVertex {
position: [f32; 3],
texcoord: [f32; 2],
}
pub const VERTICES: [ParticleVertex; 6] = [
ParticleVertex {
position: [-1.0, -1.0, 0.0],
texcoord: [0.0, 1.0],
},
ParticleVertex {
position: [-1.0, 1.0, 0.0],
texcoord: [0.0, 0.0],
},
ParticleVertex {
position: [1.0, 1.0, 0.0],
texcoord: [1.0, 0.0],
},
ParticleVertex {
position: [-1.0, -1.0, 0.0],
texcoord: [0.0, 1.0],
},
ParticleVertex {
position: [1.0, 1.0, 0.0],
texcoord: [1.0, 0.0],
},
ParticleVertex {
position: [1.0, -1.0, 0.0],
texcoord: [1.0, 1.0],
},
];
#[repr(C)]
pub struct ParticleInstance {
color: u32,
}
| {
*pix = 0xFF;
} | conditional_block |
particle.rs | use std::{
mem::size_of,
num::{NonZeroU32, NonZeroU8},
};
use crate::{
client::{
entity::particle::Particle,
render::{
create_texture,
pipeline::{Pipeline, PushConstantUpdate},
world::{Camera, WorldPipelineBase},
Palette, TextureData,
},
},
common::{math::Angles, util::any_slice_as_bytes},
};
use bumpalo::Bump;
use cgmath::Matrix4;
lazy_static! {
static ref VERTEX_BUFFER_ATTRIBUTES: [Vec<wgpu::VertexAttribute>; 1] = [
wgpu::vertex_attr_array![
// position
0 => Float32x3,
// texcoord
1 => Float32x2,
].to_vec(),
];
}
#[rustfmt::skip]
const PARTICLE_TEXTURE_PIXELS: [u8; 64] = [
0, 0, 1, 1, 1, 1, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
0, 1, 1, 1, 1, 1, 1, 0,
0, 0, 1, 1, 1, 1, 0, 0,
];
pub struct ParticlePipeline {
pipeline: wgpu::RenderPipeline,
bind_group_layouts: Vec<wgpu::BindGroupLayout>,
vertex_buffer: wgpu::Buffer,
sampler: wgpu::Sampler,
textures: Vec<wgpu::Texture>,
texture_views: Vec<wgpu::TextureView>,
bind_group: wgpu::BindGroup,
}
impl ParticlePipeline {
pub fn new(
device: &wgpu::Device,
queue: &wgpu::Queue,
compiler: &mut shaderc::Compiler,
sample_count: u32,
palette: &Palette,
) -> ParticlePipeline {
let (pipeline, bind_group_layouts) =
ParticlePipeline::create(device, compiler, &[], sample_count);
use wgpu::util::DeviceExt as _;
let vertex_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: None,
contents: unsafe { any_slice_as_bytes(&VERTICES) },
usage: wgpu::BufferUsage::VERTEX,
});
let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
label: Some("particle sampler"),
address_mode_u: wgpu::AddressMode::ClampToEdge,
address_mode_v: wgpu::AddressMode::ClampToEdge,
address_mode_w: wgpu::AddressMode::ClampToEdge,
mag_filter: wgpu::FilterMode::Nearest,
min_filter: wgpu::FilterMode::Linear,
mipmap_filter: wgpu::FilterMode::Linear,
lod_min_clamp: -1000.0,
lod_max_clamp: 1000.0,
compare: None,
anisotropy_clamp: NonZeroU8::new(16),
border_color: None,
});
let textures: Vec<wgpu::Texture> = (0..256)
.map(|i| {
let mut pixels = PARTICLE_TEXTURE_PIXELS;
// set up palette translation
for pix in pixels.iter_mut() {
if *pix == 0 {
*pix = 0xFF;
} else {
*pix *= i as u8;
}
}
let (diffuse_data, _) = palette.translate(&pixels);
create_texture(
device,
queue,
Some(&format!("particle texture {}", i)),
8,
8,
&TextureData::Diffuse(diffuse_data),
)
})
.collect();
let texture_views: Vec<wgpu::TextureView> = textures
.iter()
.map(|t| t.create_view(&Default::default()))
.collect();
let texture_view_refs = texture_views.iter().collect::<Vec<_>>();
let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("particle bind group"),
layout: &bind_group_layouts[0],
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::Sampler(&sampler),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::TextureViewArray(&texture_view_refs[..]),
},
],
});
ParticlePipeline {
pipeline,
bind_group_layouts,
sampler,
textures,
texture_views,
bind_group,
vertex_buffer,
}
}
pub fn rebuild(
&mut self,
device: &wgpu::Device,
compiler: &mut shaderc::Compiler,
sample_count: u32,
) {
let layout_refs: Vec<_> = self.bind_group_layouts.iter().collect();
self.pipeline = ParticlePipeline::recreate(device, compiler, &layout_refs, sample_count);
}
pub fn | (&self) -> &wgpu::RenderPipeline {
&self.pipeline
}
pub fn bind_group_layouts(&self) -> &[wgpu::BindGroupLayout] {
&self.bind_group_layouts
}
pub fn vertex_buffer(&self) -> &wgpu::Buffer {
&self.vertex_buffer
}
pub fn record_draw<'a, 'b, P>(
&'a self,
pass: &mut wgpu::RenderPass<'a>,
bump: &'a Bump,
camera: &Camera,
particles: P,
) where
P: Iterator<Item = &'b Particle>,
{
use PushConstantUpdate::*;
pass.set_pipeline(self.pipeline());
pass.set_vertex_buffer(0, self.vertex_buffer.slice(..));
pass.set_bind_group(0, &self.bind_group, &[]);
// face toward camera
let Angles { pitch, yaw, roll } = camera.angles();
let rotation = Angles {
pitch: -pitch,
yaw: -yaw,
roll: -roll,
}
.mat4_wgpu();
for particle in particles {
let q_origin = particle.origin();
let translation =
Matrix4::from_translation([-q_origin.y, q_origin.z, -q_origin.x].into());
Self::set_push_constants(
pass,
Update(bump.alloc(VertexPushConstants {
transform: camera.view_projection() * translation * rotation,
})),
Retain,
Update(bump.alloc(FragmentPushConstants {
color: particle.color() as u32,
})),
);
pass.draw(0..6, 0..1);
}
}
}
#[derive(Copy, Clone, Debug)]
pub struct VertexPushConstants {
pub transform: Matrix4<f32>,
}
#[derive(Copy, Clone, Debug)]
pub struct FragmentPushConstants {
pub color: u32,
}
const BIND_GROUP_LAYOUT_ENTRIES: &[wgpu::BindGroupLayoutEntry] = &[
wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStage::FRAGMENT,
ty: wgpu::BindingType::Sampler {
filtering: true,
comparison: false,
},
count: None,
},
// per-index texture array
wgpu::BindGroupLayoutEntry {
binding: 1,
visibility: wgpu::ShaderStage::FRAGMENT,
ty: wgpu::BindingType::Texture {
view_dimension: wgpu::TextureViewDimension::D2,
sample_type: wgpu::TextureSampleType::Float { filterable: true },
multisampled: false,
},
count: NonZeroU32::new(256),
},
];
lazy_static! {
static ref VERTEX_ATTRIBUTES: [[wgpu::VertexAttribute; 2]; 2] = [
wgpu::vertex_attr_array![
// position
0 => Float32x3,
// texcoord
1 => Float32x2,
],
wgpu::vertex_attr_array![
// instance position
2 => Float32x3,
// color index
3 => Uint32,
]
];
}
impl Pipeline for ParticlePipeline {
type VertexPushConstants = VertexPushConstants;
type SharedPushConstants = ();
type FragmentPushConstants = FragmentPushConstants;
fn name() -> &'static str {
"particle"
}
fn vertex_shader() -> &'static str {
include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/shaders/particle.vert"
))
}
fn fragment_shader() -> &'static str {
include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/shaders/particle.frag"
))
}
// NOTE: if any of the binding indices are changed, they must also be changed in
// the corresponding shaders and the BindGroupLayout generation functions.
fn bind_group_layout_descriptors() -> Vec<wgpu::BindGroupLayoutDescriptor<'static>> {
vec![
// group 0
wgpu::BindGroupLayoutDescriptor {
label: Some("particle bind group layout"),
entries: BIND_GROUP_LAYOUT_ENTRIES,
},
]
}
fn primitive_state() -> wgpu::PrimitiveState {
WorldPipelineBase::primitive_state()
}
fn color_target_states() -> Vec<wgpu::ColorTargetState> {
WorldPipelineBase::color_target_states()
}
fn depth_stencil_state() -> Option<wgpu::DepthStencilState> {
let mut desc = WorldPipelineBase::depth_stencil_state().unwrap();
desc.depth_write_enabled = false;
Some(desc)
}
// NOTE: if the vertex format is changed, this descriptor must also be changed accordingly.
fn vertex_buffer_layouts() -> Vec<wgpu::VertexBufferLayout<'static>> {
vec![wgpu::VertexBufferLayout {
array_stride: size_of::<ParticleVertex>() as u64,
step_mode: wgpu::InputStepMode::Vertex,
attributes: &VERTEX_ATTRIBUTES[0],
}]
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
pub struct ParticleVertex {
position: [f32; 3],
texcoord: [f32; 2],
}
pub const VERTICES: [ParticleVertex; 6] = [
ParticleVertex {
position: [-1.0, -1.0, 0.0],
texcoord: [0.0, 1.0],
},
ParticleVertex {
position: [-1.0, 1.0, 0.0],
texcoord: [0.0, 0.0],
},
ParticleVertex {
position: [1.0, 1.0, 0.0],
texcoord: [1.0, 0.0],
},
ParticleVertex {
position: [-1.0, -1.0, 0.0],
texcoord: [0.0, 1.0],
},
ParticleVertex {
position: [1.0, 1.0, 0.0],
texcoord: [1.0, 0.0],
},
ParticleVertex {
position: [1.0, -1.0, 0.0],
texcoord: [1.0, 1.0],
},
];
#[repr(C)]
pub struct ParticleInstance {
color: u32,
}
| pipeline | identifier_name |
particle.rs | use std::{
mem::size_of,
num::{NonZeroU32, NonZeroU8},
};
use crate::{
client::{
entity::particle::Particle,
render::{
create_texture,
pipeline::{Pipeline, PushConstantUpdate},
world::{Camera, WorldPipelineBase},
Palette, TextureData,
},
},
common::{math::Angles, util::any_slice_as_bytes},
};
use bumpalo::Bump;
use cgmath::Matrix4;
lazy_static! {
static ref VERTEX_BUFFER_ATTRIBUTES: [Vec<wgpu::VertexAttribute>; 1] = [
wgpu::vertex_attr_array![
// position
0 => Float32x3,
// texcoord
1 => Float32x2,
].to_vec(),
];
}
#[rustfmt::skip]
const PARTICLE_TEXTURE_PIXELS: [u8; 64] = [
0, 0, 1, 1, 1, 1, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
0, 1, 1, 1, 1, 1, 1, 0,
0, 0, 1, 1, 1, 1, 0, 0,
];
pub struct ParticlePipeline {
pipeline: wgpu::RenderPipeline,
bind_group_layouts: Vec<wgpu::BindGroupLayout>,
vertex_buffer: wgpu::Buffer,
sampler: wgpu::Sampler,
textures: Vec<wgpu::Texture>,
texture_views: Vec<wgpu::TextureView>,
bind_group: wgpu::BindGroup,
}
impl ParticlePipeline {
pub fn new(
device: &wgpu::Device,
queue: &wgpu::Queue,
compiler: &mut shaderc::Compiler,
sample_count: u32,
palette: &Palette,
) -> ParticlePipeline {
let (pipeline, bind_group_layouts) =
ParticlePipeline::create(device, compiler, &[], sample_count);
use wgpu::util::DeviceExt as _;
let vertex_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: None,
contents: unsafe { any_slice_as_bytes(&VERTICES) },
usage: wgpu::BufferUsage::VERTEX,
});
let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
label: Some("particle sampler"),
address_mode_u: wgpu::AddressMode::ClampToEdge,
address_mode_v: wgpu::AddressMode::ClampToEdge,
address_mode_w: wgpu::AddressMode::ClampToEdge,
mag_filter: wgpu::FilterMode::Nearest,
min_filter: wgpu::FilterMode::Linear,
mipmap_filter: wgpu::FilterMode::Linear,
lod_min_clamp: -1000.0,
lod_max_clamp: 1000.0,
compare: None,
anisotropy_clamp: NonZeroU8::new(16),
border_color: None,
});
let textures: Vec<wgpu::Texture> = (0..256)
.map(|i| {
let mut pixels = PARTICLE_TEXTURE_PIXELS;
// set up palette translation
for pix in pixels.iter_mut() {
if *pix == 0 {
*pix = 0xFF;
} else {
*pix *= i as u8;
}
}
let (diffuse_data, _) = palette.translate(&pixels);
create_texture(
device,
queue,
Some(&format!("particle texture {}", i)),
8,
8,
&TextureData::Diffuse(diffuse_data),
)
})
.collect();
let texture_views: Vec<wgpu::TextureView> = textures
.iter()
.map(|t| t.create_view(&Default::default()))
.collect();
let texture_view_refs = texture_views.iter().collect::<Vec<_>>();
let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("particle bind group"),
layout: &bind_group_layouts[0],
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::Sampler(&sampler),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::TextureViewArray(&texture_view_refs[..]),
},
],
});
ParticlePipeline {
pipeline,
bind_group_layouts,
sampler,
textures,
texture_views,
bind_group,
vertex_buffer,
}
}
pub fn rebuild(
&mut self,
device: &wgpu::Device,
compiler: &mut shaderc::Compiler,
sample_count: u32,
) {
let layout_refs: Vec<_> = self.bind_group_layouts.iter().collect();
self.pipeline = ParticlePipeline::recreate(device, compiler, &layout_refs, sample_count);
}
pub fn pipeline(&self) -> &wgpu::RenderPipeline {
&self.pipeline
}
pub fn bind_group_layouts(&self) -> &[wgpu::BindGroupLayout] {
&self.bind_group_layouts
}
pub fn vertex_buffer(&self) -> &wgpu::Buffer {
&self.vertex_buffer
}
pub fn record_draw<'a, 'b, P>(
&'a self,
pass: &mut wgpu::RenderPass<'a>,
bump: &'a Bump,
camera: &Camera,
particles: P,
) where
P: Iterator<Item = &'b Particle>,
{
use PushConstantUpdate::*;
pass.set_pipeline(self.pipeline());
pass.set_vertex_buffer(0, self.vertex_buffer.slice(..));
pass.set_bind_group(0, &self.bind_group, &[]);
// face toward camera
let Angles { pitch, yaw, roll } = camera.angles();
let rotation = Angles {
pitch: -pitch,
yaw: -yaw,
roll: -roll,
}
.mat4_wgpu();
for particle in particles {
let q_origin = particle.origin();
let translation =
Matrix4::from_translation([-q_origin.y, q_origin.z, -q_origin.x].into());
Self::set_push_constants(
pass,
Update(bump.alloc(VertexPushConstants {
transform: camera.view_projection() * translation * rotation,
})),
Retain,
Update(bump.alloc(FragmentPushConstants {
color: particle.color() as u32,
})),
);
pass.draw(0..6, 0..1);
}
}
}
#[derive(Copy, Clone, Debug)]
pub struct VertexPushConstants {
pub transform: Matrix4<f32>,
}
#[derive(Copy, Clone, Debug)]
pub struct FragmentPushConstants {
pub color: u32, | visibility: wgpu::ShaderStage::FRAGMENT,
ty: wgpu::BindingType::Sampler {
filtering: true,
comparison: false,
},
count: None,
},
// per-index texture array
wgpu::BindGroupLayoutEntry {
binding: 1,
visibility: wgpu::ShaderStage::FRAGMENT,
ty: wgpu::BindingType::Texture {
view_dimension: wgpu::TextureViewDimension::D2,
sample_type: wgpu::TextureSampleType::Float { filterable: true },
multisampled: false,
},
count: NonZeroU32::new(256),
},
];
lazy_static! {
static ref VERTEX_ATTRIBUTES: [[wgpu::VertexAttribute; 2]; 2] = [
wgpu::vertex_attr_array![
// position
0 => Float32x3,
// texcoord
1 => Float32x2,
],
wgpu::vertex_attr_array![
// instance position
2 => Float32x3,
// color index
3 => Uint32,
]
];
}
impl Pipeline for ParticlePipeline {
type VertexPushConstants = VertexPushConstants;
type SharedPushConstants = ();
type FragmentPushConstants = FragmentPushConstants;
fn name() -> &'static str {
"particle"
}
fn vertex_shader() -> &'static str {
include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/shaders/particle.vert"
))
}
fn fragment_shader() -> &'static str {
include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/shaders/particle.frag"
))
}
// NOTE: if any of the binding indices are changed, they must also be changed in
// the corresponding shaders and the BindGroupLayout generation functions.
fn bind_group_layout_descriptors() -> Vec<wgpu::BindGroupLayoutDescriptor<'static>> {
vec![
// group 0
wgpu::BindGroupLayoutDescriptor {
label: Some("particle bind group layout"),
entries: BIND_GROUP_LAYOUT_ENTRIES,
},
]
}
fn primitive_state() -> wgpu::PrimitiveState {
WorldPipelineBase::primitive_state()
}
fn color_target_states() -> Vec<wgpu::ColorTargetState> {
WorldPipelineBase::color_target_states()
}
fn depth_stencil_state() -> Option<wgpu::DepthStencilState> {
let mut desc = WorldPipelineBase::depth_stencil_state().unwrap();
desc.depth_write_enabled = false;
Some(desc)
}
// NOTE: if the vertex format is changed, this descriptor must also be changed accordingly.
fn vertex_buffer_layouts() -> Vec<wgpu::VertexBufferLayout<'static>> {
vec![wgpu::VertexBufferLayout {
array_stride: size_of::<ParticleVertex>() as u64,
step_mode: wgpu::InputStepMode::Vertex,
attributes: &VERTEX_ATTRIBUTES[0],
}]
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
pub struct ParticleVertex {
position: [f32; 3],
texcoord: [f32; 2],
}
pub const VERTICES: [ParticleVertex; 6] = [
ParticleVertex {
position: [-1.0, -1.0, 0.0],
texcoord: [0.0, 1.0],
},
ParticleVertex {
position: [-1.0, 1.0, 0.0],
texcoord: [0.0, 0.0],
},
ParticleVertex {
position: [1.0, 1.0, 0.0],
texcoord: [1.0, 0.0],
},
ParticleVertex {
position: [-1.0, -1.0, 0.0],
texcoord: [0.0, 1.0],
},
ParticleVertex {
position: [1.0, 1.0, 0.0],
texcoord: [1.0, 0.0],
},
ParticleVertex {
position: [1.0, -1.0, 0.0],
texcoord: [1.0, 1.0],
},
];
#[repr(C)]
pub struct ParticleInstance {
color: u32,
} | }
const BIND_GROUP_LAYOUT_ENTRIES: &[wgpu::BindGroupLayoutEntry] = &[
wgpu::BindGroupLayoutEntry {
binding: 0, | random_line_split |
particle.rs | use std::{
mem::size_of,
num::{NonZeroU32, NonZeroU8},
};
use crate::{
client::{
entity::particle::Particle,
render::{
create_texture,
pipeline::{Pipeline, PushConstantUpdate},
world::{Camera, WorldPipelineBase},
Palette, TextureData,
},
},
common::{math::Angles, util::any_slice_as_bytes},
};
use bumpalo::Bump;
use cgmath::Matrix4;
lazy_static! {
static ref VERTEX_BUFFER_ATTRIBUTES: [Vec<wgpu::VertexAttribute>; 1] = [
wgpu::vertex_attr_array![
// position
0 => Float32x3,
// texcoord
1 => Float32x2,
].to_vec(),
];
}
#[rustfmt::skip]
const PARTICLE_TEXTURE_PIXELS: [u8; 64] = [
0, 0, 1, 1, 1, 1, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
0, 1, 1, 1, 1, 1, 1, 0,
0, 0, 1, 1, 1, 1, 0, 0,
];
pub struct ParticlePipeline {
pipeline: wgpu::RenderPipeline,
bind_group_layouts: Vec<wgpu::BindGroupLayout>,
vertex_buffer: wgpu::Buffer,
sampler: wgpu::Sampler,
textures: Vec<wgpu::Texture>,
texture_views: Vec<wgpu::TextureView>,
bind_group: wgpu::BindGroup,
}
impl ParticlePipeline {
pub fn new(
device: &wgpu::Device,
queue: &wgpu::Queue,
compiler: &mut shaderc::Compiler,
sample_count: u32,
palette: &Palette,
) -> ParticlePipeline {
let (pipeline, bind_group_layouts) =
ParticlePipeline::create(device, compiler, &[], sample_count);
use wgpu::util::DeviceExt as _;
let vertex_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: None,
contents: unsafe { any_slice_as_bytes(&VERTICES) },
usage: wgpu::BufferUsage::VERTEX,
});
let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
label: Some("particle sampler"),
address_mode_u: wgpu::AddressMode::ClampToEdge,
address_mode_v: wgpu::AddressMode::ClampToEdge,
address_mode_w: wgpu::AddressMode::ClampToEdge,
mag_filter: wgpu::FilterMode::Nearest,
min_filter: wgpu::FilterMode::Linear,
mipmap_filter: wgpu::FilterMode::Linear,
lod_min_clamp: -1000.0,
lod_max_clamp: 1000.0,
compare: None,
anisotropy_clamp: NonZeroU8::new(16),
border_color: None,
});
let textures: Vec<wgpu::Texture> = (0..256)
.map(|i| {
let mut pixels = PARTICLE_TEXTURE_PIXELS;
// set up palette translation
for pix in pixels.iter_mut() {
if *pix == 0 {
*pix = 0xFF;
} else {
*pix *= i as u8;
}
}
let (diffuse_data, _) = palette.translate(&pixels);
create_texture(
device,
queue,
Some(&format!("particle texture {}", i)),
8,
8,
&TextureData::Diffuse(diffuse_data),
)
})
.collect();
let texture_views: Vec<wgpu::TextureView> = textures
.iter()
.map(|t| t.create_view(&Default::default()))
.collect();
let texture_view_refs = texture_views.iter().collect::<Vec<_>>();
let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("particle bind group"),
layout: &bind_group_layouts[0],
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::Sampler(&sampler),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::TextureViewArray(&texture_view_refs[..]),
},
],
});
ParticlePipeline {
pipeline,
bind_group_layouts,
sampler,
textures,
texture_views,
bind_group,
vertex_buffer,
}
}
pub fn rebuild(
&mut self,
device: &wgpu::Device,
compiler: &mut shaderc::Compiler,
sample_count: u32,
) {
let layout_refs: Vec<_> = self.bind_group_layouts.iter().collect();
self.pipeline = ParticlePipeline::recreate(device, compiler, &layout_refs, sample_count);
}
pub fn pipeline(&self) -> &wgpu::RenderPipeline {
&self.pipeline
}
pub fn bind_group_layouts(&self) -> &[wgpu::BindGroupLayout] {
&self.bind_group_layouts
}
pub fn vertex_buffer(&self) -> &wgpu::Buffer {
&self.vertex_buffer
}
pub fn record_draw<'a, 'b, P>(
&'a self,
pass: &mut wgpu::RenderPass<'a>,
bump: &'a Bump,
camera: &Camera,
particles: P,
) where
P: Iterator<Item = &'b Particle>,
|
}
#[derive(Copy, Clone, Debug)]
pub struct VertexPushConstants {
pub transform: Matrix4<f32>,
}
#[derive(Copy, Clone, Debug)]
pub struct FragmentPushConstants {
pub color: u32,
}
const BIND_GROUP_LAYOUT_ENTRIES: &[wgpu::BindGroupLayoutEntry] = &[
wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStage::FRAGMENT,
ty: wgpu::BindingType::Sampler {
filtering: true,
comparison: false,
},
count: None,
},
// per-index texture array
wgpu::BindGroupLayoutEntry {
binding: 1,
visibility: wgpu::ShaderStage::FRAGMENT,
ty: wgpu::BindingType::Texture {
view_dimension: wgpu::TextureViewDimension::D2,
sample_type: wgpu::TextureSampleType::Float { filterable: true },
multisampled: false,
},
count: NonZeroU32::new(256),
},
];
lazy_static! {
static ref VERTEX_ATTRIBUTES: [[wgpu::VertexAttribute; 2]; 2] = [
wgpu::vertex_attr_array![
// position
0 => Float32x3,
// texcoord
1 => Float32x2,
],
wgpu::vertex_attr_array![
// instance position
2 => Float32x3,
// color index
3 => Uint32,
]
];
}
impl Pipeline for ParticlePipeline {
type VertexPushConstants = VertexPushConstants;
type SharedPushConstants = ();
type FragmentPushConstants = FragmentPushConstants;
fn name() -> &'static str {
"particle"
}
fn vertex_shader() -> &'static str {
include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/shaders/particle.vert"
))
}
fn fragment_shader() -> &'static str {
include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/shaders/particle.frag"
))
}
// NOTE: if any of the binding indices are changed, they must also be changed in
// the corresponding shaders and the BindGroupLayout generation functions.
fn bind_group_layout_descriptors() -> Vec<wgpu::BindGroupLayoutDescriptor<'static>> {
vec![
// group 0
wgpu::BindGroupLayoutDescriptor {
label: Some("particle bind group layout"),
entries: BIND_GROUP_LAYOUT_ENTRIES,
},
]
}
fn primitive_state() -> wgpu::PrimitiveState {
WorldPipelineBase::primitive_state()
}
fn color_target_states() -> Vec<wgpu::ColorTargetState> {
WorldPipelineBase::color_target_states()
}
fn depth_stencil_state() -> Option<wgpu::DepthStencilState> {
let mut desc = WorldPipelineBase::depth_stencil_state().unwrap();
desc.depth_write_enabled = false;
Some(desc)
}
// NOTE: if the vertex format is changed, this descriptor must also be changed accordingly.
fn vertex_buffer_layouts() -> Vec<wgpu::VertexBufferLayout<'static>> {
vec![wgpu::VertexBufferLayout {
array_stride: size_of::<ParticleVertex>() as u64,
step_mode: wgpu::InputStepMode::Vertex,
attributes: &VERTEX_ATTRIBUTES[0],
}]
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
pub struct ParticleVertex {
position: [f32; 3],
texcoord: [f32; 2],
}
pub const VERTICES: [ParticleVertex; 6] = [
ParticleVertex {
position: [-1.0, -1.0, 0.0],
texcoord: [0.0, 1.0],
},
ParticleVertex {
position: [-1.0, 1.0, 0.0],
texcoord: [0.0, 0.0],
},
ParticleVertex {
position: [1.0, 1.0, 0.0],
texcoord: [1.0, 0.0],
},
ParticleVertex {
position: [-1.0, -1.0, 0.0],
texcoord: [0.0, 1.0],
},
ParticleVertex {
position: [1.0, 1.0, 0.0],
texcoord: [1.0, 0.0],
},
ParticleVertex {
position: [1.0, -1.0, 0.0],
texcoord: [1.0, 1.0],
},
];
#[repr(C)]
pub struct ParticleInstance {
color: u32,
}
| {
use PushConstantUpdate::*;
pass.set_pipeline(self.pipeline());
pass.set_vertex_buffer(0, self.vertex_buffer.slice(..));
pass.set_bind_group(0, &self.bind_group, &[]);
// face toward camera
let Angles { pitch, yaw, roll } = camera.angles();
let rotation = Angles {
pitch: -pitch,
yaw: -yaw,
roll: -roll,
}
.mat4_wgpu();
for particle in particles {
let q_origin = particle.origin();
let translation =
Matrix4::from_translation([-q_origin.y, q_origin.z, -q_origin.x].into());
Self::set_push_constants(
pass,
Update(bump.alloc(VertexPushConstants {
transform: camera.view_projection() * translation * rotation,
})),
Retain,
Update(bump.alloc(FragmentPushConstants {
color: particle.color() as u32,
})),
);
pass.draw(0..6, 0..1);
}
} | identifier_body |
train_transformer.py | import os
import math
import time
from tqdm import tqdm
import torch
from torch import nn
import torch.optim as optim
from torch.nn import functional as F
from torch.nn.utils import clip_grad_norm_
from torch.utils.data import DataLoader
from nag.modules import Transformer, TransformerTorch
from nag.logger import LogManager, SummaryHelper
from nag.metric import BLEUMetric, DistinctNGram
from nag.vocab_helper import VocabBulider
from nag.utils import PadCollate, get_index, restore_best_state, init_seed
from nag.dataset import OpenSubDataset, IMSDBDataset
from nag.optimizer import RAdam
from nag.options import parse_args
from nag.criterion import similarity_regularization, LabelSmoothedCrossEntropyLoss
def train(epoch, model, dataloader, criterion, optimizer, scheduler):
global global_train_step
model.train()
total_loss = 0.
bleu_score = 0.
distinct_1_score, distinct_2_score = 0., 0.
for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0), desc='train', total=len(opensub_dataset)//opt.realbatch):
tgt_input = tgt[:, :-1]
tgt_gold = tgt[:, 1:]
tgt_lens = tgt_lens - 1
decoder_output_probs, _ = model(
src=src, tgt=tgt_input, src_lengths=src_lens, tgt_lengths=tgt_lens)
decoder_output_probs_T = decoder_output_probs.permute(0, 2, 1)
out_seqs = torch.argmax(decoder_output_probs, dim=2)
# loss
loss = criterion(decoder_output_probs_T, tgt_gold) / ACCUMULATION
loss.backward()
total_loss += loss.item()
# calculate metrics
bleu_score += bleu_metirc(tgt_gold, out_seqs, tgt_lens)
distinct_1_score += distinct_1(out_seqs, tgt_lens)
distinct_2_score += distinct_2(out_seqs, tgt_lens)
# summary writer
global_train_step += 1
writer.log_loss(loss.item()*ACCUMULATION, mode='train')
if (i+1) % ACCUMULATION == 0:
# clip_grad_norm_(model.parameters(), max_norm=5)
optimizer.step()
optimizer.zero_grad()
scheduler.step()
if (i+1) % opt.logstep == 0:
avg_loss = (total_loss / opt.logstep) * ACCUMULATION
avg_bleu = bleu_score / opt.logstep
avg_distinct_1 = distinct_1_score / opt.logstep
avg_distinct_2 = distinct_2_score / opt.logstep
mylogger.log(
i, epoch, model, value=avg_loss, is_train=True,
info=f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}')
total_loss = 0.
bleu_score = 0.
distinct_1_score, distinct_2_score = 0., 0.
show_gen_seq(src[:2], out_seqs[:2], tgt_lens[:2], tgt_gold[:2], vocab_bulider, global_train_step, mode='train')
def eval(epoch, model, dataloader, criterion, beam_size=2):
global global_valid_step
model.eval()
criterion.eval()
total_loss = 0.
bleu_score = 0.
distinct_1_score, distinct_2_score = 0., 0.
fout = open(os.path.join('./save/' + model_name + '/', model_name + '_' + str(epoch)), 'w', encoding='utf-8')
with torch.no_grad():
for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0), desc='eval', total=len(imsdb_dataset)):
tgt_begin = torch.LongTensor([[vocab_bulider['<bos>']]]).to(device)
tgt_gold = tgt[:, 1:]
if beam_size > 1:
output_seqs, output_probs = model.beam_search(
src=src, tgt_begin=tgt_begin, src_length=src_lens,
eos_token_id=vocab_bulider['<eos>'], beam_size=beam_size, max_length=tgt_lens.item())
else:
output_seqs, output_probs = model.greedy(
src=src, tgt_begin=tgt_begin, src_length=src_lens,
eos_token_id=vocab_bulider['<eos>'], max_length=tgt_lens.item())
min_len = min(tgt_gold.shape[1], output_seqs.shape[1])
# loss
loss = criterion(output_probs[:, :min_len, :].permute(0, 2, 1), tgt_gold[:, :min_len])
total_loss += loss.item()
# calculate metrics
out_lens = [min_len]
bleu_score += bleu_metirc(tgt_gold, output_seqs, out_lens)
distinct_1_score += distinct_1(output_seqs, out_lens)
distinct_2_score += distinct_2(output_seqs, out_lens)
# show sequence
global_valid_step += 1
fout.write(' '.join(convert_ids_to_seq(output_seqs[0], vocab_bulider)) + '\n')
if (i+1) % opt.logstep == 0:
show_gen_seq(src, output_seqs, out_lens, tgt_gold, vocab_bulider, global_valid_step, mode='valid')
# summary
avg_loss = total_loss / i
avg_bleu = bleu_score / i
avg_distinct_1 = distinct_1_score / i
avg_distinct_2 = distinct_2_score / i
writer.log_loss(avg_loss, mode='valid')
mylogger.log(
i, epoch, model, value=avg_bleu, is_train=False,
info=f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}')
fout.close()
def run_model(model, train_loader, eval_loader, niter, criterion, optimizer, scheduler):
mylogger.log_info('Running Model')
for i in range(niter):
mylogger.log_info(f'EPOCH: {i}, lr: {optimizer.state_dict()["param_groups"][0]["lr"]}')
train(i, model, train_loader, criterion, optimizer, scheduler)
eval(i, model, eval_loader, criterion, beam_size=opt.beam)
def convert_ids_to_seq(id_seq, vocab_bulider):
return [vocab_bulider.id_to_word(idx) for idx in id_seq]
def show_gen_seq(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth, vocab_bulider, step, mode='train'):
for in_id, out_id, out_len, gold_id in zip(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth):
in_seq = convert_ids_to_seq(in_id, vocab_bulider)
out_seq = convert_ids_to_seq(out_id[:out_len] if out_len > 0 else out_id, vocab_bulider)
gold_seq = convert_ids_to_seq(gold_id, vocab_bulider)
writer.add_text(tag=mode + '_post', sentence=' '.join(in_seq[:get_index(in_seq, '<pad>')]), global_step=step)
writer.add_text(tag=mode + '_pred', sentence=' '.join(out_seq), global_step=step)
writer.add_text(tag=mode + '_reps', sentence=' '.join(gold_seq[:get_index(in_seq, '<pad>')]), global_step=step)
if __name__ == '__main__':
begin_time = time.strftime("%H%M%S", time.localtime())
model_name = 'transformer' + begin_time
opt = parse_args()
device = "cuda" if torch.cuda.is_available() else "cpu"
torch.cuda.set_device(opt.gpuid)
init_seed(opt.manualSeed)
ACCUMULATION = opt.batchsize // opt.realbatch
mylogger = LogManager(checkpoint_step=10,
save_dir='./save',
model_name=model_name,
log_file_name=model_name + '.log',
mode='max', device=device)
mylogger.save_args(opt)
writer = SummaryHelper(save_dir='./save', model_name=model_name)
train_data_dir = './data/opensubtitles'
# train_data_dir = './data/wmt15en-de'
vocab_file_list = ['dialogue_length3_6.post']
# vocab_file_list = ['all_de-en.bpe.post', 'all_de-en.bpe.response']
vocab_bulider = VocabBulider(
train_data_dir, src_files=vocab_file_list, ignore_unk_error=True,
vocab_file='vocab.txt', min_count=opt.mincount, update=opt.update)
print('most common 50:', vocab_bulider.most_common(50))
mylogger.log_info('vocab size: %d' % len(vocab_bulider))
# metircs
bleu_metirc = BLEUMetric(vocab_bulider.id2vocab, ignore_smoothing_error=True)
distinct_1 = DistinctNGram(ngram=1)
distinct_2 = DistinctNGram(ngram=2)
# train dataset and dataloader
if opt.cotk: # use dataset in paper 'cotk'
# opensub_file_name_list = ['all_de-en.bpe']
opensub_file_name_list = ['opensub_pair_dev', 'opensub_pair_test', 'opensub_pair_train']
unk_token = None
else: # use dataset in paper 'Non-Autoregressive Neural Dialogue Generation'
opensub_file_name_list = ['dialogue_length3_6']
unk_token = 'UNknown'
opensub_dataset = OpenSubDataset(
data_dir=train_data_dir, vocab_bulider=vocab_bulider,
file_name_list=opensub_file_name_list, unk_token='UNknown',
save_process=False, samples=opt.trainsamples, add_bos=True, add_eos=True)
print(opensub_dataset.sample())
opensub_dataloader = DataLoader(
opensub_dataset, batch_size=opt.realbatch,
collate_fn=PadCollate(dim=0, pad_id=vocab_bulider.padid, device=device),
shuffle=True, num_workers=opt.workers, drop_last=True)
# dev set
dev_data_dir = './data/imsdb'
imsdb_file_name_list = ['imsdb_lower']
# dev_data_dir = './data/wmt15en-de'
# imsdb_file_name_list = ['newstest']
imsdb_dataset = IMSDBDataset(
data_dir=dev_data_dir, vocab_bulider=vocab_bulider,
file_name_list=imsdb_file_name_list, save_process=False,
samples=opt.validsamples, add_bos=True, add_eos=True)
print(imsdb_dataset.sample())
imsdb_dataloader = DataLoader(
imsdb_dataset, batch_size=1,
collate_fn=PadCollate(dim=0, pad_id=vocab_bulider.padid, device=device),
shuffle=False, num_workers=opt.workers, drop_last=True)
# model definition
if opt.mine:
model = Transformer(
ntoken=len(vocab_bulider), d_model=opt.embedsize, nhead=opt.nhead,
num_encoder_layers=opt.encoderlayer, num_decoder_layers=opt.decoderlayer,
dim_feedforward=opt.feedforward, postnorm=True, dropout=opt.dropout, gumbels=opt.gumbels,
use_src_mask=False, use_tgt_mask=True, use_memory_mask=False,
activation='relu', use_vocab_attn=False, use_pos_attn=False,
relative_clip=0, highway=False, device=device, max_sent_length=32,
share_input_output_embedding=False, share_encoder_decoder_embedding=True,
share_vocab_embedding=True, fix_pos_encoding=opt.fix).to(device)
else:
model = TransformerTorch(
ntoken=len(vocab_bulider), d_model=opt.embedsize, nhead=opt.nhead,
num_encoder_layers=opt.encoderlayer, num_decoder_layers=opt.decoderlayer,
dim_feedforward=opt.feedforward, postnorm=True, dropout=opt.dropout, gumbels=opt.gumbels,
use_src_mask=False, use_tgt_mask=False, use_memory_mask=False,
activation='relu', use_vocab_attn=False, use_pos_attn=False,
relative_clip=0, highway=False, device=device, max_sent_length=32,
share_input_output_embedding=False, share_encoder_decoder_embedding=True,
share_vocab_embedding=True, fix_pos_encoding=opt.fix).to(device)
model.show_graph()
if opt.half:
model = model.half()
if opt.ft:
model = restore_best_state(model, opt.ckpt, save_dir='./save', device=model.device)
# optimizer and scheduler
if opt.warmup:
optimizer = RAdam(
filter(lambda p: p.requires_grad, model.parameters()),
lr=1., betas=(opt.beta1, opt.beta2), eps=opt.eps)
rate_ratio = 1. / math.sqrt(opt.embedsize)
# top_lr = 1 / sqrt(d_model * warmup_step) at step == warmup_step
scheduler = optim.lr_scheduler.LambdaLR(
optimizer,
lr_lambda=lambda step: rate_ratio * min(1. / math.sqrt(step+1), step*(opt.warmup_step**(-1.5))))
else:
optimizer = RAdam(
filter(lambda p: p.requires_grad, model.parameters()),
lr=opt.lr, betas=(opt.beta1, opt.beta2), eps=opt.eps,
weight_decay=opt.weight_decay)
scheduler = optim.lr_scheduler.StepLR(
optimizer, step_size=opt.schedulerstep, gamma=opt.gamma)
# loss function
# criterion = nn.CrossEntropyLoss(ignore_index=vocab_bulider.padid) # for Transformer
criterion = LabelSmoothedCrossEntropyLoss(eps=0.1, ignore_index=vocab_bulider.padid)
# run model
global_train_step, global_valid_step = 0, 0
run_model(
| model, opensub_dataloader, imsdb_dataloader,
opt.niter, criterion, optimizer, scheduler)
writer.close() | random_line_split | |
train_transformer.py | import os
import math
import time
from tqdm import tqdm
import torch
from torch import nn
import torch.optim as optim
from torch.nn import functional as F
from torch.nn.utils import clip_grad_norm_
from torch.utils.data import DataLoader
from nag.modules import Transformer, TransformerTorch
from nag.logger import LogManager, SummaryHelper
from nag.metric import BLEUMetric, DistinctNGram
from nag.vocab_helper import VocabBulider
from nag.utils import PadCollate, get_index, restore_best_state, init_seed
from nag.dataset import OpenSubDataset, IMSDBDataset
from nag.optimizer import RAdam
from nag.options import parse_args
from nag.criterion import similarity_regularization, LabelSmoothedCrossEntropyLoss
def train(epoch, model, dataloader, criterion, optimizer, scheduler):
|
def eval(epoch, model, dataloader, criterion, beam_size=2):
global global_valid_step
model.eval()
criterion.eval()
total_loss = 0.
bleu_score = 0.
distinct_1_score, distinct_2_score = 0., 0.
fout = open(os.path.join('./save/' + model_name + '/', model_name + '_' + str(epoch)), 'w', encoding='utf-8')
with torch.no_grad():
for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0), desc='eval', total=len(imsdb_dataset)):
tgt_begin = torch.LongTensor([[vocab_bulider['<bos>']]]).to(device)
tgt_gold = tgt[:, 1:]
if beam_size > 1:
output_seqs, output_probs = model.beam_search(
src=src, tgt_begin=tgt_begin, src_length=src_lens,
eos_token_id=vocab_bulider['<eos>'], beam_size=beam_size, max_length=tgt_lens.item())
else:
output_seqs, output_probs = model.greedy(
src=src, tgt_begin=tgt_begin, src_length=src_lens,
eos_token_id=vocab_bulider['<eos>'], max_length=tgt_lens.item())
min_len = min(tgt_gold.shape[1], output_seqs.shape[1])
# loss
loss = criterion(output_probs[:, :min_len, :].permute(0, 2, 1), tgt_gold[:, :min_len])
total_loss += loss.item()
# calculate metrics
out_lens = [min_len]
bleu_score += bleu_metirc(tgt_gold, output_seqs, out_lens)
distinct_1_score += distinct_1(output_seqs, out_lens)
distinct_2_score += distinct_2(output_seqs, out_lens)
# show sequence
global_valid_step += 1
fout.write(' '.join(convert_ids_to_seq(output_seqs[0], vocab_bulider)) + '\n')
if (i+1) % opt.logstep == 0:
show_gen_seq(src, output_seqs, out_lens, tgt_gold, vocab_bulider, global_valid_step, mode='valid')
# summary
avg_loss = total_loss / i
avg_bleu = bleu_score / i
avg_distinct_1 = distinct_1_score / i
avg_distinct_2 = distinct_2_score / i
writer.log_loss(avg_loss, mode='valid')
mylogger.log(
i, epoch, model, value=avg_bleu, is_train=False,
info=f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}')
fout.close()
def run_model(model, train_loader, eval_loader, niter, criterion, optimizer, scheduler):
mylogger.log_info('Running Model')
for i in range(niter):
mylogger.log_info(f'EPOCH: {i}, lr: {optimizer.state_dict()["param_groups"][0]["lr"]}')
train(i, model, train_loader, criterion, optimizer, scheduler)
eval(i, model, eval_loader, criterion, beam_size=opt.beam)
def convert_ids_to_seq(id_seq, vocab_bulider):
return [vocab_bulider.id_to_word(idx) for idx in id_seq]
def show_gen_seq(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth, vocab_bulider, step, mode='train'):
for in_id, out_id, out_len, gold_id in zip(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth):
in_seq = convert_ids_to_seq(in_id, vocab_bulider)
out_seq = convert_ids_to_seq(out_id[:out_len] if out_len > 0 else out_id, vocab_bulider)
gold_seq = convert_ids_to_seq(gold_id, vocab_bulider)
writer.add_text(tag=mode + '_post', sentence=' '.join(in_seq[:get_index(in_seq, '<pad>')]), global_step=step)
writer.add_text(tag=mode + '_pred', sentence=' '.join(out_seq), global_step=step)
writer.add_text(tag=mode + '_reps', sentence=' '.join(gold_seq[:get_index(in_seq, '<pad>')]), global_step=step)
if __name__ == '__main__':
begin_time = time.strftime("%H%M%S", time.localtime())
model_name = 'transformer' + begin_time
opt = parse_args()
device = "cuda" if torch.cuda.is_available() else "cpu"
torch.cuda.set_device(opt.gpuid)
init_seed(opt.manualSeed)
ACCUMULATION = opt.batchsize // opt.realbatch
mylogger = LogManager(checkpoint_step=10,
save_dir='./save',
model_name=model_name,
log_file_name=model_name + '.log',
mode='max', device=device)
mylogger.save_args(opt)
writer = SummaryHelper(save_dir='./save', model_name=model_name)
train_data_dir = './data/opensubtitles'
# train_data_dir = './data/wmt15en-de'
vocab_file_list = ['dialogue_length3_6.post']
# vocab_file_list = ['all_de-en.bpe.post', 'all_de-en.bpe.response']
vocab_bulider = VocabBulider(
train_data_dir, src_files=vocab_file_list, ignore_unk_error=True,
vocab_file='vocab.txt', min_count=opt.mincount, update=opt.update)
print('most common 50:', vocab_bulider.most_common(50))
mylogger.log_info('vocab size: %d' % len(vocab_bulider))
# metircs
bleu_metirc = BLEUMetric(vocab_bulider.id2vocab, ignore_smoothing_error=True)
distinct_1 = DistinctNGram(ngram=1)
distinct_2 = DistinctNGram(ngram=2)
# train dataset and dataloader
if opt.cotk: # use dataset in paper 'cotk'
# opensub_file_name_list = ['all_de-en.bpe']
opensub_file_name_list = ['opensub_pair_dev', 'opensub_pair_test', 'opensub_pair_train']
unk_token = None
else: # use dataset in paper 'Non-Autoregressive Neural Dialogue Generation'
opensub_file_name_list = ['dialogue_length3_6']
unk_token = 'UNknown'
opensub_dataset = OpenSubDataset(
data_dir=train_data_dir, vocab_bulider=vocab_bulider,
file_name_list=opensub_file_name_list, unk_token='UNknown',
save_process=False, samples=opt.trainsamples, add_bos=True, add_eos=True)
print(opensub_dataset.sample())
opensub_dataloader = DataLoader(
opensub_dataset, batch_size=opt.realbatch,
collate_fn=PadCollate(dim=0, pad_id=vocab_bulider.padid, device=device),
shuffle=True, num_workers=opt.workers, drop_last=True)
# dev set
dev_data_dir = './data/imsdb'
imsdb_file_name_list = ['imsdb_lower']
# dev_data_dir = './data/wmt15en-de'
# imsdb_file_name_list = ['newstest']
imsdb_dataset = IMSDBDataset(
data_dir=dev_data_dir, vocab_bulider=vocab_bulider,
file_name_list=imsdb_file_name_list, save_process=False,
samples=opt.validsamples, add_bos=True, add_eos=True)
print(imsdb_dataset.sample())
imsdb_dataloader = DataLoader(
imsdb_dataset, batch_size=1,
collate_fn=PadCollate(dim=0, pad_id=vocab_bulider.padid, device=device),
shuffle=False, num_workers=opt.workers, drop_last=True)
# model definition
if opt.mine:
model = Transformer(
ntoken=len(vocab_bulider), d_model=opt.embedsize, nhead=opt.nhead,
num_encoder_layers=opt.encoderlayer, num_decoder_layers=opt.decoderlayer,
dim_feedforward=opt.feedforward, postnorm=True, dropout=opt.dropout, gumbels=opt.gumbels,
use_src_mask=False, use_tgt_mask=True, use_memory_mask=False,
activation='relu', use_vocab_attn=False, use_pos_attn=False,
relative_clip=0, highway=False, device=device, max_sent_length=32,
share_input_output_embedding=False, share_encoder_decoder_embedding=True,
share_vocab_embedding=True, fix_pos_encoding=opt.fix).to(device)
else:
model = TransformerTorch(
ntoken=len(vocab_bulider), d_model=opt.embedsize, nhead=opt.nhead,
num_encoder_layers=opt.encoderlayer, num_decoder_layers=opt.decoderlayer,
dim_feedforward=opt.feedforward, postnorm=True, dropout=opt.dropout, gumbels=opt.gumbels,
use_src_mask=False, use_tgt_mask=False, use_memory_mask=False,
activation='relu', use_vocab_attn=False, use_pos_attn=False,
relative_clip=0, highway=False, device=device, max_sent_length=32,
share_input_output_embedding=False, share_encoder_decoder_embedding=True,
share_vocab_embedding=True, fix_pos_encoding=opt.fix).to(device)
model.show_graph()
if opt.half:
model = model.half()
if opt.ft:
model = restore_best_state(model, opt.ckpt, save_dir='./save', device=model.device)
# optimizer and scheduler
if opt.warmup:
optimizer = RAdam(
filter(lambda p: p.requires_grad, model.parameters()),
lr=1., betas=(opt.beta1, opt.beta2), eps=opt.eps)
rate_ratio = 1. / math.sqrt(opt.embedsize)
# top_lr = 1 / sqrt(d_model * warmup_step) at step == warmup_step
scheduler = optim.lr_scheduler.LambdaLR(
optimizer,
lr_lambda=lambda step: rate_ratio * min(1. / math.sqrt(step+1), step*(opt.warmup_step**(-1.5))))
else:
optimizer = RAdam(
filter(lambda p: p.requires_grad, model.parameters()),
lr=opt.lr, betas=(opt.beta1, opt.beta2), eps=opt.eps,
weight_decay=opt.weight_decay)
scheduler = optim.lr_scheduler.StepLR(
optimizer, step_size=opt.schedulerstep, gamma=opt.gamma)
# loss function
# criterion = nn.CrossEntropyLoss(ignore_index=vocab_bulider.padid) # for Transformer
criterion = LabelSmoothedCrossEntropyLoss(eps=0.1, ignore_index=vocab_bulider.padid)
# run model
global_train_step, global_valid_step = 0, 0
run_model(
model, opensub_dataloader, imsdb_dataloader,
opt.niter, criterion, optimizer, scheduler)
writer.close()
| global global_train_step
model.train()
total_loss = 0.
bleu_score = 0.
distinct_1_score, distinct_2_score = 0., 0.
for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0), desc='train', total=len(opensub_dataset)//opt.realbatch):
tgt_input = tgt[:, :-1]
tgt_gold = tgt[:, 1:]
tgt_lens = tgt_lens - 1
decoder_output_probs, _ = model(
src=src, tgt=tgt_input, src_lengths=src_lens, tgt_lengths=tgt_lens)
decoder_output_probs_T = decoder_output_probs.permute(0, 2, 1)
out_seqs = torch.argmax(decoder_output_probs, dim=2)
# loss
loss = criterion(decoder_output_probs_T, tgt_gold) / ACCUMULATION
loss.backward()
total_loss += loss.item()
# calculate metrics
bleu_score += bleu_metirc(tgt_gold, out_seqs, tgt_lens)
distinct_1_score += distinct_1(out_seqs, tgt_lens)
distinct_2_score += distinct_2(out_seqs, tgt_lens)
# summary writer
global_train_step += 1
writer.log_loss(loss.item()*ACCUMULATION, mode='train')
if (i+1) % ACCUMULATION == 0:
# clip_grad_norm_(model.parameters(), max_norm=5)
optimizer.step()
optimizer.zero_grad()
scheduler.step()
if (i+1) % opt.logstep == 0:
avg_loss = (total_loss / opt.logstep) * ACCUMULATION
avg_bleu = bleu_score / opt.logstep
avg_distinct_1 = distinct_1_score / opt.logstep
avg_distinct_2 = distinct_2_score / opt.logstep
mylogger.log(
i, epoch, model, value=avg_loss, is_train=True,
info=f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}')
total_loss = 0.
bleu_score = 0.
distinct_1_score, distinct_2_score = 0., 0.
show_gen_seq(src[:2], out_seqs[:2], tgt_lens[:2], tgt_gold[:2], vocab_bulider, global_train_step, mode='train') | identifier_body |
train_transformer.py | import os
import math
import time
from tqdm import tqdm
import torch
from torch import nn
import torch.optim as optim
from torch.nn import functional as F
from torch.nn.utils import clip_grad_norm_
from torch.utils.data import DataLoader
from nag.modules import Transformer, TransformerTorch
from nag.logger import LogManager, SummaryHelper
from nag.metric import BLEUMetric, DistinctNGram
from nag.vocab_helper import VocabBulider
from nag.utils import PadCollate, get_index, restore_best_state, init_seed
from nag.dataset import OpenSubDataset, IMSDBDataset
from nag.optimizer import RAdam
from nag.options import parse_args
from nag.criterion import similarity_regularization, LabelSmoothedCrossEntropyLoss
def train(epoch, model, dataloader, criterion, optimizer, scheduler):
global global_train_step
model.train()
total_loss = 0.
bleu_score = 0.
distinct_1_score, distinct_2_score = 0., 0.
for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0), desc='train', total=len(opensub_dataset)//opt.realbatch):
tgt_input = tgt[:, :-1]
tgt_gold = tgt[:, 1:]
tgt_lens = tgt_lens - 1
decoder_output_probs, _ = model(
src=src, tgt=tgt_input, src_lengths=src_lens, tgt_lengths=tgt_lens)
decoder_output_probs_T = decoder_output_probs.permute(0, 2, 1)
out_seqs = torch.argmax(decoder_output_probs, dim=2)
# loss
loss = criterion(decoder_output_probs_T, tgt_gold) / ACCUMULATION
loss.backward()
total_loss += loss.item()
# calculate metrics
bleu_score += bleu_metirc(tgt_gold, out_seqs, tgt_lens)
distinct_1_score += distinct_1(out_seqs, tgt_lens)
distinct_2_score += distinct_2(out_seqs, tgt_lens)
# summary writer
global_train_step += 1
writer.log_loss(loss.item()*ACCUMULATION, mode='train')
if (i+1) % ACCUMULATION == 0:
# clip_grad_norm_(model.parameters(), max_norm=5)
optimizer.step()
optimizer.zero_grad()
scheduler.step()
if (i+1) % opt.logstep == 0:
avg_loss = (total_loss / opt.logstep) * ACCUMULATION
avg_bleu = bleu_score / opt.logstep
avg_distinct_1 = distinct_1_score / opt.logstep
avg_distinct_2 = distinct_2_score / opt.logstep
mylogger.log(
i, epoch, model, value=avg_loss, is_train=True,
info=f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}')
total_loss = 0.
bleu_score = 0.
distinct_1_score, distinct_2_score = 0., 0.
show_gen_seq(src[:2], out_seqs[:2], tgt_lens[:2], tgt_gold[:2], vocab_bulider, global_train_step, mode='train')
def eval(epoch, model, dataloader, criterion, beam_size=2):
global global_valid_step
model.eval()
criterion.eval()
total_loss = 0.
bleu_score = 0.
distinct_1_score, distinct_2_score = 0., 0.
fout = open(os.path.join('./save/' + model_name + '/', model_name + '_' + str(epoch)), 'w', encoding='utf-8')
with torch.no_grad():
for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0), desc='eval', total=len(imsdb_dataset)):
tgt_begin = torch.LongTensor([[vocab_bulider['<bos>']]]).to(device)
tgt_gold = tgt[:, 1:]
if beam_size > 1:
output_seqs, output_probs = model.beam_search(
src=src, tgt_begin=tgt_begin, src_length=src_lens,
eos_token_id=vocab_bulider['<eos>'], beam_size=beam_size, max_length=tgt_lens.item())
else:
output_seqs, output_probs = model.greedy(
src=src, tgt_begin=tgt_begin, src_length=src_lens,
eos_token_id=vocab_bulider['<eos>'], max_length=tgt_lens.item())
min_len = min(tgt_gold.shape[1], output_seqs.shape[1])
# loss
loss = criterion(output_probs[:, :min_len, :].permute(0, 2, 1), tgt_gold[:, :min_len])
total_loss += loss.item()
# calculate metrics
out_lens = [min_len]
bleu_score += bleu_metirc(tgt_gold, output_seqs, out_lens)
distinct_1_score += distinct_1(output_seqs, out_lens)
distinct_2_score += distinct_2(output_seqs, out_lens)
# show sequence
global_valid_step += 1
fout.write(' '.join(convert_ids_to_seq(output_seqs[0], vocab_bulider)) + '\n')
if (i+1) % opt.logstep == 0:
show_gen_seq(src, output_seqs, out_lens, tgt_gold, vocab_bulider, global_valid_step, mode='valid')
# summary
avg_loss = total_loss / i
avg_bleu = bleu_score / i
avg_distinct_1 = distinct_1_score / i
avg_distinct_2 = distinct_2_score / i
writer.log_loss(avg_loss, mode='valid')
mylogger.log(
i, epoch, model, value=avg_bleu, is_train=False,
info=f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}')
fout.close()
def | (model, train_loader, eval_loader, niter, criterion, optimizer, scheduler):
mylogger.log_info('Running Model')
for i in range(niter):
mylogger.log_info(f'EPOCH: {i}, lr: {optimizer.state_dict()["param_groups"][0]["lr"]}')
train(i, model, train_loader, criterion, optimizer, scheduler)
eval(i, model, eval_loader, criterion, beam_size=opt.beam)
def convert_ids_to_seq(id_seq, vocab_bulider):
return [vocab_bulider.id_to_word(idx) for idx in id_seq]
def show_gen_seq(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth, vocab_bulider, step, mode='train'):
for in_id, out_id, out_len, gold_id in zip(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth):
in_seq = convert_ids_to_seq(in_id, vocab_bulider)
out_seq = convert_ids_to_seq(out_id[:out_len] if out_len > 0 else out_id, vocab_bulider)
gold_seq = convert_ids_to_seq(gold_id, vocab_bulider)
writer.add_text(tag=mode + '_post', sentence=' '.join(in_seq[:get_index(in_seq, '<pad>')]), global_step=step)
writer.add_text(tag=mode + '_pred', sentence=' '.join(out_seq), global_step=step)
writer.add_text(tag=mode + '_reps', sentence=' '.join(gold_seq[:get_index(in_seq, '<pad>')]), global_step=step)
if __name__ == '__main__':
begin_time = time.strftime("%H%M%S", time.localtime())
model_name = 'transformer' + begin_time
opt = parse_args()
device = "cuda" if torch.cuda.is_available() else "cpu"
torch.cuda.set_device(opt.gpuid)
init_seed(opt.manualSeed)
ACCUMULATION = opt.batchsize // opt.realbatch
mylogger = LogManager(checkpoint_step=10,
save_dir='./save',
model_name=model_name,
log_file_name=model_name + '.log',
mode='max', device=device)
mylogger.save_args(opt)
writer = SummaryHelper(save_dir='./save', model_name=model_name)
train_data_dir = './data/opensubtitles'
# train_data_dir = './data/wmt15en-de'
vocab_file_list = ['dialogue_length3_6.post']
# vocab_file_list = ['all_de-en.bpe.post', 'all_de-en.bpe.response']
vocab_bulider = VocabBulider(
train_data_dir, src_files=vocab_file_list, ignore_unk_error=True,
vocab_file='vocab.txt', min_count=opt.mincount, update=opt.update)
print('most common 50:', vocab_bulider.most_common(50))
mylogger.log_info('vocab size: %d' % len(vocab_bulider))
# metircs
bleu_metirc = BLEUMetric(vocab_bulider.id2vocab, ignore_smoothing_error=True)
distinct_1 = DistinctNGram(ngram=1)
distinct_2 = DistinctNGram(ngram=2)
# train dataset and dataloader
if opt.cotk: # use dataset in paper 'cotk'
# opensub_file_name_list = ['all_de-en.bpe']
opensub_file_name_list = ['opensub_pair_dev', 'opensub_pair_test', 'opensub_pair_train']
unk_token = None
else: # use dataset in paper 'Non-Autoregressive Neural Dialogue Generation'
opensub_file_name_list = ['dialogue_length3_6']
unk_token = 'UNknown'
opensub_dataset = OpenSubDataset(
data_dir=train_data_dir, vocab_bulider=vocab_bulider,
file_name_list=opensub_file_name_list, unk_token='UNknown',
save_process=False, samples=opt.trainsamples, add_bos=True, add_eos=True)
print(opensub_dataset.sample())
opensub_dataloader = DataLoader(
opensub_dataset, batch_size=opt.realbatch,
collate_fn=PadCollate(dim=0, pad_id=vocab_bulider.padid, device=device),
shuffle=True, num_workers=opt.workers, drop_last=True)
# dev set
dev_data_dir = './data/imsdb'
imsdb_file_name_list = ['imsdb_lower']
# dev_data_dir = './data/wmt15en-de'
# imsdb_file_name_list = ['newstest']
imsdb_dataset = IMSDBDataset(
data_dir=dev_data_dir, vocab_bulider=vocab_bulider,
file_name_list=imsdb_file_name_list, save_process=False,
samples=opt.validsamples, add_bos=True, add_eos=True)
print(imsdb_dataset.sample())
imsdb_dataloader = DataLoader(
imsdb_dataset, batch_size=1,
collate_fn=PadCollate(dim=0, pad_id=vocab_bulider.padid, device=device),
shuffle=False, num_workers=opt.workers, drop_last=True)
# model definition
if opt.mine:
model = Transformer(
ntoken=len(vocab_bulider), d_model=opt.embedsize, nhead=opt.nhead,
num_encoder_layers=opt.encoderlayer, num_decoder_layers=opt.decoderlayer,
dim_feedforward=opt.feedforward, postnorm=True, dropout=opt.dropout, gumbels=opt.gumbels,
use_src_mask=False, use_tgt_mask=True, use_memory_mask=False,
activation='relu', use_vocab_attn=False, use_pos_attn=False,
relative_clip=0, highway=False, device=device, max_sent_length=32,
share_input_output_embedding=False, share_encoder_decoder_embedding=True,
share_vocab_embedding=True, fix_pos_encoding=opt.fix).to(device)
else:
model = TransformerTorch(
ntoken=len(vocab_bulider), d_model=opt.embedsize, nhead=opt.nhead,
num_encoder_layers=opt.encoderlayer, num_decoder_layers=opt.decoderlayer,
dim_feedforward=opt.feedforward, postnorm=True, dropout=opt.dropout, gumbels=opt.gumbels,
use_src_mask=False, use_tgt_mask=False, use_memory_mask=False,
activation='relu', use_vocab_attn=False, use_pos_attn=False,
relative_clip=0, highway=False, device=device, max_sent_length=32,
share_input_output_embedding=False, share_encoder_decoder_embedding=True,
share_vocab_embedding=True, fix_pos_encoding=opt.fix).to(device)
model.show_graph()
if opt.half:
model = model.half()
if opt.ft:
model = restore_best_state(model, opt.ckpt, save_dir='./save', device=model.device)
# optimizer and scheduler
if opt.warmup:
optimizer = RAdam(
filter(lambda p: p.requires_grad, model.parameters()),
lr=1., betas=(opt.beta1, opt.beta2), eps=opt.eps)
rate_ratio = 1. / math.sqrt(opt.embedsize)
# top_lr = 1 / sqrt(d_model * warmup_step) at step == warmup_step
scheduler = optim.lr_scheduler.LambdaLR(
optimizer,
lr_lambda=lambda step: rate_ratio * min(1. / math.sqrt(step+1), step*(opt.warmup_step**(-1.5))))
else:
optimizer = RAdam(
filter(lambda p: p.requires_grad, model.parameters()),
lr=opt.lr, betas=(opt.beta1, opt.beta2), eps=opt.eps,
weight_decay=opt.weight_decay)
scheduler = optim.lr_scheduler.StepLR(
optimizer, step_size=opt.schedulerstep, gamma=opt.gamma)
# loss function
# criterion = nn.CrossEntropyLoss(ignore_index=vocab_bulider.padid) # for Transformer
criterion = LabelSmoothedCrossEntropyLoss(eps=0.1, ignore_index=vocab_bulider.padid)
# run model
global_train_step, global_valid_step = 0, 0
run_model(
model, opensub_dataloader, imsdb_dataloader,
opt.niter, criterion, optimizer, scheduler)
writer.close()
| run_model | identifier_name |
train_transformer.py | import os
import math
import time
from tqdm import tqdm
import torch
from torch import nn
import torch.optim as optim
from torch.nn import functional as F
from torch.nn.utils import clip_grad_norm_
from torch.utils.data import DataLoader
from nag.modules import Transformer, TransformerTorch
from nag.logger import LogManager, SummaryHelper
from nag.metric import BLEUMetric, DistinctNGram
from nag.vocab_helper import VocabBulider
from nag.utils import PadCollate, get_index, restore_best_state, init_seed
from nag.dataset import OpenSubDataset, IMSDBDataset
from nag.optimizer import RAdam
from nag.options import parse_args
from nag.criterion import similarity_regularization, LabelSmoothedCrossEntropyLoss
def train(epoch, model, dataloader, criterion, optimizer, scheduler):
global global_train_step
model.train()
total_loss = 0.
bleu_score = 0.
distinct_1_score, distinct_2_score = 0., 0.
for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0), desc='train', total=len(opensub_dataset)//opt.realbatch):
tgt_input = tgt[:, :-1]
tgt_gold = tgt[:, 1:]
tgt_lens = tgt_lens - 1
decoder_output_probs, _ = model(
src=src, tgt=tgt_input, src_lengths=src_lens, tgt_lengths=tgt_lens)
decoder_output_probs_T = decoder_output_probs.permute(0, 2, 1)
out_seqs = torch.argmax(decoder_output_probs, dim=2)
# loss
loss = criterion(decoder_output_probs_T, tgt_gold) / ACCUMULATION
loss.backward()
total_loss += loss.item()
# calculate metrics
bleu_score += bleu_metirc(tgt_gold, out_seqs, tgt_lens)
distinct_1_score += distinct_1(out_seqs, tgt_lens)
distinct_2_score += distinct_2(out_seqs, tgt_lens)
# summary writer
global_train_step += 1
writer.log_loss(loss.item()*ACCUMULATION, mode='train')
if (i+1) % ACCUMULATION == 0:
# clip_grad_norm_(model.parameters(), max_norm=5)
optimizer.step()
optimizer.zero_grad()
scheduler.step()
if (i+1) % opt.logstep == 0:
|
def eval(epoch, model, dataloader, criterion, beam_size=2):
global global_valid_step
model.eval()
criterion.eval()
total_loss = 0.
bleu_score = 0.
distinct_1_score, distinct_2_score = 0., 0.
fout = open(os.path.join('./save/' + model_name + '/', model_name + '_' + str(epoch)), 'w', encoding='utf-8')
with torch.no_grad():
for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0), desc='eval', total=len(imsdb_dataset)):
tgt_begin = torch.LongTensor([[vocab_bulider['<bos>']]]).to(device)
tgt_gold = tgt[:, 1:]
if beam_size > 1:
output_seqs, output_probs = model.beam_search(
src=src, tgt_begin=tgt_begin, src_length=src_lens,
eos_token_id=vocab_bulider['<eos>'], beam_size=beam_size, max_length=tgt_lens.item())
else:
output_seqs, output_probs = model.greedy(
src=src, tgt_begin=tgt_begin, src_length=src_lens,
eos_token_id=vocab_bulider['<eos>'], max_length=tgt_lens.item())
min_len = min(tgt_gold.shape[1], output_seqs.shape[1])
# loss
loss = criterion(output_probs[:, :min_len, :].permute(0, 2, 1), tgt_gold[:, :min_len])
total_loss += loss.item()
# calculate metrics
out_lens = [min_len]
bleu_score += bleu_metirc(tgt_gold, output_seqs, out_lens)
distinct_1_score += distinct_1(output_seqs, out_lens)
distinct_2_score += distinct_2(output_seqs, out_lens)
# show sequence
global_valid_step += 1
fout.write(' '.join(convert_ids_to_seq(output_seqs[0], vocab_bulider)) + '\n')
if (i+1) % opt.logstep == 0:
show_gen_seq(src, output_seqs, out_lens, tgt_gold, vocab_bulider, global_valid_step, mode='valid')
# summary
avg_loss = total_loss / i
avg_bleu = bleu_score / i
avg_distinct_1 = distinct_1_score / i
avg_distinct_2 = distinct_2_score / i
writer.log_loss(avg_loss, mode='valid')
mylogger.log(
i, epoch, model, value=avg_bleu, is_train=False,
info=f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}')
fout.close()
def run_model(model, train_loader, eval_loader, niter, criterion, optimizer, scheduler):
mylogger.log_info('Running Model')
for i in range(niter):
mylogger.log_info(f'EPOCH: {i}, lr: {optimizer.state_dict()["param_groups"][0]["lr"]}')
train(i, model, train_loader, criterion, optimizer, scheduler)
eval(i, model, eval_loader, criterion, beam_size=opt.beam)
def convert_ids_to_seq(id_seq, vocab_bulider):
return [vocab_bulider.id_to_word(idx) for idx in id_seq]
def show_gen_seq(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth, vocab_bulider, step, mode='train'):
for in_id, out_id, out_len, gold_id in zip(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth):
in_seq = convert_ids_to_seq(in_id, vocab_bulider)
out_seq = convert_ids_to_seq(out_id[:out_len] if out_len > 0 else out_id, vocab_bulider)
gold_seq = convert_ids_to_seq(gold_id, vocab_bulider)
writer.add_text(tag=mode + '_post', sentence=' '.join(in_seq[:get_index(in_seq, '<pad>')]), global_step=step)
writer.add_text(tag=mode + '_pred', sentence=' '.join(out_seq), global_step=step)
writer.add_text(tag=mode + '_reps', sentence=' '.join(gold_seq[:get_index(in_seq, '<pad>')]), global_step=step)
if __name__ == '__main__':
begin_time = time.strftime("%H%M%S", time.localtime())
model_name = 'transformer' + begin_time
opt = parse_args()
device = "cuda" if torch.cuda.is_available() else "cpu"
torch.cuda.set_device(opt.gpuid)
init_seed(opt.manualSeed)
ACCUMULATION = opt.batchsize // opt.realbatch
mylogger = LogManager(checkpoint_step=10,
save_dir='./save',
model_name=model_name,
log_file_name=model_name + '.log',
mode='max', device=device)
mylogger.save_args(opt)
writer = SummaryHelper(save_dir='./save', model_name=model_name)
train_data_dir = './data/opensubtitles'
# train_data_dir = './data/wmt15en-de'
vocab_file_list = ['dialogue_length3_6.post']
# vocab_file_list = ['all_de-en.bpe.post', 'all_de-en.bpe.response']
vocab_bulider = VocabBulider(
train_data_dir, src_files=vocab_file_list, ignore_unk_error=True,
vocab_file='vocab.txt', min_count=opt.mincount, update=opt.update)
print('most common 50:', vocab_bulider.most_common(50))
mylogger.log_info('vocab size: %d' % len(vocab_bulider))
# metircs
bleu_metirc = BLEUMetric(vocab_bulider.id2vocab, ignore_smoothing_error=True)
distinct_1 = DistinctNGram(ngram=1)
distinct_2 = DistinctNGram(ngram=2)
# train dataset and dataloader
if opt.cotk: # use dataset in paper 'cotk'
# opensub_file_name_list = ['all_de-en.bpe']
opensub_file_name_list = ['opensub_pair_dev', 'opensub_pair_test', 'opensub_pair_train']
unk_token = None
else: # use dataset in paper 'Non-Autoregressive Neural Dialogue Generation'
opensub_file_name_list = ['dialogue_length3_6']
unk_token = 'UNknown'
opensub_dataset = OpenSubDataset(
data_dir=train_data_dir, vocab_bulider=vocab_bulider,
file_name_list=opensub_file_name_list, unk_token='UNknown',
save_process=False, samples=opt.trainsamples, add_bos=True, add_eos=True)
print(opensub_dataset.sample())
opensub_dataloader = DataLoader(
opensub_dataset, batch_size=opt.realbatch,
collate_fn=PadCollate(dim=0, pad_id=vocab_bulider.padid, device=device),
shuffle=True, num_workers=opt.workers, drop_last=True)
# dev set
dev_data_dir = './data/imsdb'
imsdb_file_name_list = ['imsdb_lower']
# dev_data_dir = './data/wmt15en-de'
# imsdb_file_name_list = ['newstest']
imsdb_dataset = IMSDBDataset(
data_dir=dev_data_dir, vocab_bulider=vocab_bulider,
file_name_list=imsdb_file_name_list, save_process=False,
samples=opt.validsamples, add_bos=True, add_eos=True)
print(imsdb_dataset.sample())
imsdb_dataloader = DataLoader(
imsdb_dataset, batch_size=1,
collate_fn=PadCollate(dim=0, pad_id=vocab_bulider.padid, device=device),
shuffle=False, num_workers=opt.workers, drop_last=True)
# model definition
if opt.mine:
model = Transformer(
ntoken=len(vocab_bulider), d_model=opt.embedsize, nhead=opt.nhead,
num_encoder_layers=opt.encoderlayer, num_decoder_layers=opt.decoderlayer,
dim_feedforward=opt.feedforward, postnorm=True, dropout=opt.dropout, gumbels=opt.gumbels,
use_src_mask=False, use_tgt_mask=True, use_memory_mask=False,
activation='relu', use_vocab_attn=False, use_pos_attn=False,
relative_clip=0, highway=False, device=device, max_sent_length=32,
share_input_output_embedding=False, share_encoder_decoder_embedding=True,
share_vocab_embedding=True, fix_pos_encoding=opt.fix).to(device)
else:
model = TransformerTorch(
ntoken=len(vocab_bulider), d_model=opt.embedsize, nhead=opt.nhead,
num_encoder_layers=opt.encoderlayer, num_decoder_layers=opt.decoderlayer,
dim_feedforward=opt.feedforward, postnorm=True, dropout=opt.dropout, gumbels=opt.gumbels,
use_src_mask=False, use_tgt_mask=False, use_memory_mask=False,
activation='relu', use_vocab_attn=False, use_pos_attn=False,
relative_clip=0, highway=False, device=device, max_sent_length=32,
share_input_output_embedding=False, share_encoder_decoder_embedding=True,
share_vocab_embedding=True, fix_pos_encoding=opt.fix).to(device)
model.show_graph()
if opt.half:
model = model.half()
if opt.ft:
model = restore_best_state(model, opt.ckpt, save_dir='./save', device=model.device)
# optimizer and scheduler
if opt.warmup:
optimizer = RAdam(
filter(lambda p: p.requires_grad, model.parameters()),
lr=1., betas=(opt.beta1, opt.beta2), eps=opt.eps)
rate_ratio = 1. / math.sqrt(opt.embedsize)
# top_lr = 1 / sqrt(d_model * warmup_step) at step == warmup_step
scheduler = optim.lr_scheduler.LambdaLR(
optimizer,
lr_lambda=lambda step: rate_ratio * min(1. / math.sqrt(step+1), step*(opt.warmup_step**(-1.5))))
else:
optimizer = RAdam(
filter(lambda p: p.requires_grad, model.parameters()),
lr=opt.lr, betas=(opt.beta1, opt.beta2), eps=opt.eps,
weight_decay=opt.weight_decay)
scheduler = optim.lr_scheduler.StepLR(
optimizer, step_size=opt.schedulerstep, gamma=opt.gamma)
# loss function
# criterion = nn.CrossEntropyLoss(ignore_index=vocab_bulider.padid) # for Transformer
criterion = LabelSmoothedCrossEntropyLoss(eps=0.1, ignore_index=vocab_bulider.padid)
# run model
global_train_step, global_valid_step = 0, 0
run_model(
model, opensub_dataloader, imsdb_dataloader,
opt.niter, criterion, optimizer, scheduler)
writer.close()
| avg_loss = (total_loss / opt.logstep) * ACCUMULATION
avg_bleu = bleu_score / opt.logstep
avg_distinct_1 = distinct_1_score / opt.logstep
avg_distinct_2 = distinct_2_score / opt.logstep
mylogger.log(
i, epoch, model, value=avg_loss, is_train=True,
info=f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}')
total_loss = 0.
bleu_score = 0.
distinct_1_score, distinct_2_score = 0., 0.
show_gen_seq(src[:2], out_seqs[:2], tgt_lens[:2], tgt_gold[:2], vocab_bulider, global_train_step, mode='train') | conditional_block |
p21_sim.py | # Copyright (c) 2015-2016 Derrick Sund
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from scipy.integrate import odeint
from numpy import arange
import time
import datetime
import os
import sys
infiles = []
dirname = ""
if len(sys.argv) > 1:
infiles = sys.argv[1:]
for infile in infiles:
if not os.path.isfile(infile):
sys.stderr.write(infile + ": No such file found.\n")
sys.exit(1)
#Constants. Do not add constants directly to the derivative function; violators
#will have rabid weasels set upon them.
alpha_p14 = 1 #DUMMY
alpha_Ink4 = 5 #DUMMY
alpha_p21 = 4 #DUMMY
alpha_p53 = 0.9 #DUMMY
alpha_mdm2r = 9.425 #DUMMY
alpha_MDM2 = 1 #DUMMY
alpha_CD = 1 #DUMMY
alpha_CE = 1 #DUMMY
alpha_CB = 1 #DUMMY
omega_p14 = 0.116 #DUMMY
omega_Ink4 = 0.139 #DUMMY
omega_p21 = 1 #DUMMY
omega_p21CE = 1 #DUMMY
omega_p53 = 2.079 #DUMMY
omega_p53MDM2 = 1 #DUMMY
omega_p53E6 = 1 #DUMMY
omega_mdm2r = 0.693 #DUMMY
omega_MDM2 = 8.318 #DUMMY
omega_CD = 0.023 #DUMMY
omega_CDInk4 = 1 #DUMMY
omega_CE = 1 #DUMMY
omega_CA = 1 #DUMMY
omega_CACDC20 = 1 #DUMMY
omega_CB = 1 #DUMMY
omega_CBCDC20 = 1 #DUMMY
beta_E2FRb = 2 #DUMMY
beta_E2FRbMP = beta_E2FRb * 0.3 #DUMMY, but should be smaller than beta_E2FRb
beta_mdm2p14 = 1 #DUMMY
beta_cp21 = 1 #DUMMY
beta_E7p21 = 1 #DUMMY
beta_E7pRB = 1 #DUMMY
beta_E7pRBP = 1 #DUMMY
beta_E7pRBPP = 1 #DUMMY
delta_E7p21 = 1 #DUMMY
delta_E7pRB = 1 #DUMMY
epsilon_RbCD = 0.4 #DUMMY
epsilon_RbCE = 0.7 #DUMMY
epsilon_E2F = 20 #DUMMY
epsilon_CDC20 = 1 #DUMMY
sigma_Rb = 0.3 #DUMMY
sigma_RbMP = 0.1 #DUMMY
sigma_E2F = 0.7 #DUMMY
sigma_CDC20 = 1 #DUMMY
kappa_CECA = 1 #DUMMY
kappa_CBCA = 1 #DUMMY
#Knockdown terms. These should all be equal to 1 unless we're testing the
#effects of a knockdown.
theta_E2F = 1
theta_p53 = 1
theta_CE = 1
theta_MDM2 = 1
theta_CD = 1
theta_CA = 1
theta_Ink4 = 1
theta_CB = 1
theta_CDC20 = 1
k_p21 = 1
k_p53 = 1
k_RbMP = 1
k_RbPP = 1
k_RbCD = 1
k_RbCE = 1
k_E2FCA = 1
k_E2F = 1
k_CD = 1
k_CA = 1
k_CB = 1
k_CDC20CB = 1
k_CDC20 = 1
E2F_tot = 10 #DUMMY
CDC20_tot = 0.285
Rb_tot = 10 #DUMMY
#Potentially override parameters
for infile in infiles:
reader = open(infile)
for line in reader.readlines():
exec(line)
#Dummy initial conditions
y0 = [0.1,0.1,0.1,0.1,Rb_tot,0.0,0.0,0.1,0.1,0.1,0.1,0.1,0.1,0.1,0.1,0,0]
#Abort early if the output directory already exists.
if dirname != "":
if os.path.exists(dirname):
sys.stderr.write("Output dir " + dirname + " already exists. Aborting.\n")
sys.exit(2)
#Functions to be called from the derivative functions.
def | (t):
return 0 #dummy
def E7(t):
return 0 #dummy
#Fractions for inhibition/inactive complex formation.
#Each has a sanity check in case chief input is near zero.
sanity_threshold = 0.00001
def f(e2f, rb, rbmp):
if e2f < sanity_threshold:
return 0
return e2f**2 / (e2f + beta_E2FRb * rb + beta_E2FRbMP * rbmp)
def g(mdm2, p14):
if mdm2 < sanity_threshold:
return 0
return mdm2**2 / (mdm2 + beta_mdm2p14 * p14)
def h(c, p21, cd, ce, ca, cb):
if c < sanity_threshold:
return 0
if ca+cb+cd+ce < sanity_threshold:
return 0
return c**2 / (c + beta_cp21 * p21 * c / (ca+cb+cd+ce))
def j(e7, e7prb, e7p21):
return e7 - e7prb - e7p21
#Variable key
#y[0] = p14
#y[1] = Ink4
#y[2] = p21
#y[3] = p53
#y[4] = Rb
#y[5] = pRb-P
#y[6] = pRb-PP
#y[7] = E2F
#y[8] = mdm2
#y[9] = pMDM2
#y[10] = CD (Cyclin D/CDK4-6 complex)
#y[11] = CE (Cyclin E/CDK2 complex)
#y[12] = CA (Cyclin A/CDK2 complex)
#y[13] = CB (Cyclin B/CDK1 complex)
#y[14] = CDC20
#y[15] = E7-pRB
#y[16] = E7-p21
names = []
names.append("p14")
names.append("Ink4")
names.append("p21")
names.append("p53")
names.append("pRb")
names.append("pRb-P")
names.append("pRb-PP")
names.append("E2F")
names.append("mdm2")
names.append("pMDM2")
names.append("CD")
names.append("CE")
names.append("CA")
names.append("CB")
names.append("CDC20")
names.append("E7-pRB")
names.append("E7-p21")
#The derivative function for the differential equation system.
def func(y,t):
return [
#We have p14 being produced by E2F after inhibition from Rb
#is accounted for, and degraded at a constant rate.
alpha_p14 * theta_E2F * f(y[7], y[4], y[5]) - omega_p14 * y[0],
#It's just like the p14 equation, but with Ink4 instead!
alpha_Ink4 * theta_E2F * f(y[7], y[4], y[5]) - omega_Ink4 * y[1],
#Form p21 at a rate proportional to p53 presence; degrade it
#"naturally" or with help from Cyclin E/CDK2. E7 sequesters p21.
alpha_p21 * theta_p53 * y[3] - omega_p21 * y[2] - omega_p21CE * theta_CE * y[11] * y[2]/(y[2]+k_p21) - beta_E7p21 * j(E7(t),y[15],y[16]) * y[2] + delta_E7p21 * y[16],
#P53 is generated naturally at a constant rate, and degrades
#both on its own and with help from MDM2.
alpha_p53 - omega_p53 * y[3] - (omega_p53MDM2 * theta_MDM2 * g(y[9], y[0]) + omega_p53E6 * E6(t)) * y[3]/(y[3]+k_p53),
#Rb gets monophosphorylated by Cyclin D/CDK4-6. Rb-monophosphate
#gets its phosphate cleaved at a constant rate. Rb of all sorts
#gets sequestered by E7.
-epsilon_RbCD * theta_CD * y[4]/(y[4]+k_RbCD) * y[10] + sigma_Rb * y[5]/(y[5]+k_RbMP) - beta_E7pRB * j(E7(t),y[15],y[16]) * y[4] + delta_E7pRB * y[15] * y[4]/Rb_tot,
#Rb-monophosphate can be formed by phosphorylation of Rb or cleavage
#of Rb-polyphosphate. It can be lost by Cyclin E/CDK2 or
#phosphatase activity. Rb of all sorts gets sequestered by E7.
epsilon_RbCD * theta_CD * y[4]/(y[4]+k_RbCD) * y[10] - sigma_Rb * y[5]/(y[5]+k_RbMP) - epsilon_RbCE * theta_CE * y[5]/(y[5]+k_RbCE) * y[11] + sigma_RbMP * y[6]/(y[6]+k_RbPP) - beta_E7pRBP * j(E7(t),y[15],y[16]) * y[5] + delta_E7pRB * y[15] * y[5]/Rb_tot,
#Rb-polyphosphate arises from Cyclin E/CDK2 activity on
#Rb-monophosphate, and is lost by phosphatase activity. Rb of all
#sorts gets sequestered by E7.
epsilon_RbCE * theta_CE * y[5]/(y[5]+k_RbCE) * y[11] - sigma_RbMP * y[6]/(y[6]+k_RbPP) - beta_E7pRBPP * j(E7(t),y[15],y[16]) * y[6] + delta_E7pRB * y[15] * y[6]/Rb_tot,
#E2F is inactivated by Cyclin A/CDK2. It is reactivated at a
#constant rate, or so this equation proposes.
-epsilon_E2F * theta_CA * y[7] * y[12]/(y[7] + k_E2FCA) + sigma_E2F * (E2F_tot - y[7])/(k_E2F + E2F_tot - y[7]),
#mdm2 mRNA is promoted by p53 and degrades rapidly.
alpha_mdm2r * theta_p53 * y[3] - omega_mdm2r * y[8],
#MDM2 protein is translated from mdm2 mRNA, and is degraded at a
#constant rate.
alpha_MDM2 * y[8] - omega_MDM2 * y[9],
#Cyclin D/CDK4-6 is promoted by growth factor, and can degrade either on its
#own or under the influence of Ink4.
alpha_CD - omega_CD * y[10] - y[10]/(y[10] + k_CD) * omega_CDInk4 * theta_Ink4 * y[1],
#Cyclin E/CDK2 is also promoted by E2F, and degrades on its own.
#When not inhibited by p21, it becomes Cyclin A/CDK2.
alpha_CE * theta_E2F * f(y[7], y[4], y[5]) - omega_CE * y[11] - kappa_CECA * theta_CA * h(y[11], y[2], y[10], y[11], y[12], y[13]) * y[12],
#Cyclin A/CDK2 forms from Cyclin E/CDK2. It degrades over time, and
#degrades faster under the influence of active CDC20.
kappa_CECA * theta_CA * h(y[11], y[2], y[10], y[11], y[12], y[13]) * y[12] - omega_CA * y[12] - y[12]/(y[12] + k_CA) * omega_CACDC20 * theta_CDC20 * y[14],
#Cyclin B/CDK1 is constantly produced, but normally gets degraded
#quickly; active Cyclin A/CDK2 slows down the degradation. Active
#CDC20 also degrades it, however.
alpha_CB - omega_CB * y[13] /(kappa_CBCA + theta_CA * h(y[12], y[2], y[10], y[11], y[12], y[13])) - y[13]/(y[13] + k_CB) * omega_CBCDC20 * theta_CDC20 * y[14],
#CDC20 is activated by Cyclin B/CDK1. It is inactivated gradually
#over time.
sigma_CDC20 * theta_CB * y[13] * (CDC20_tot - y[14])/(k_CDC20CB + CDC20_tot - y[14]) - epsilon_CDC20 * y[14]/(k_CDC20 + y[14]),
#E7 viral protein will associate with retinoblastoma protein,
#sequestering it into an inactive form.
beta_E7pRB * j(E7(t),y[15],y[16]) * y[4] + beta_E7pRBP * j(E7(t),y[15],y[16]) * y[5] + beta_E7pRBPP * j(E7(t),y[15],y[16]) * y[6] - delta_E7pRB * y[15],
#E7 viral protein will also associate with and sequester p21.
beta_E7p21 * j(E7(t),y[15],y[16]) * y[2] - delta_E7p21 * y[16],
]
t = arange(0, 2000.0, 0.1)
y = odeint(func, y0, t, ixpr=False, mxstep=5000)
if dirname == "":
dirname = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d-%H:%M:%S')
os.makedirs(dirname)
os.chdir(dirname)
for i in range(len(y0)):
writer = open(names[i]+".txt", 'w')
for j in xrange((len(t) * 9) / 10, len(t)):
writer.write(str(t[j]) + " " + str(y[j][i]) + "\n")
writer.close()
| E6 | identifier_name |
p21_sim.py | # Copyright (c) 2015-2016 Derrick Sund
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from scipy.integrate import odeint
from numpy import arange
import time
import datetime
import os
import sys
infiles = []
dirname = ""
if len(sys.argv) > 1:
infiles = sys.argv[1:]
for infile in infiles:
if not os.path.isfile(infile):
sys.stderr.write(infile + ": No such file found.\n")
sys.exit(1)
#Constants. Do not add constants directly to the derivative function; violators
#will have rabid weasels set upon them.
alpha_p14 = 1 #DUMMY
alpha_Ink4 = 5 #DUMMY
alpha_p21 = 4 #DUMMY
alpha_p53 = 0.9 #DUMMY
alpha_mdm2r = 9.425 #DUMMY
alpha_MDM2 = 1 #DUMMY
alpha_CD = 1 #DUMMY
alpha_CE = 1 #DUMMY
alpha_CB = 1 #DUMMY
omega_p14 = 0.116 #DUMMY
omega_Ink4 = 0.139 #DUMMY
omega_p21 = 1 #DUMMY
omega_p21CE = 1 #DUMMY
omega_p53 = 2.079 #DUMMY
omega_p53MDM2 = 1 #DUMMY
omega_p53E6 = 1 #DUMMY
omega_mdm2r = 0.693 #DUMMY
omega_MDM2 = 8.318 #DUMMY
omega_CD = 0.023 #DUMMY
omega_CDInk4 = 1 #DUMMY
omega_CE = 1 #DUMMY
omega_CA = 1 #DUMMY
omega_CACDC20 = 1 #DUMMY
omega_CB = 1 #DUMMY
omega_CBCDC20 = 1 #DUMMY
beta_E2FRb = 2 #DUMMY
beta_E2FRbMP = beta_E2FRb * 0.3 #DUMMY, but should be smaller than beta_E2FRb
beta_mdm2p14 = 1 #DUMMY
beta_cp21 = 1 #DUMMY
beta_E7p21 = 1 #DUMMY
beta_E7pRB = 1 #DUMMY
beta_E7pRBP = 1 #DUMMY
beta_E7pRBPP = 1 #DUMMY
delta_E7p21 = 1 #DUMMY
delta_E7pRB = 1 #DUMMY
epsilon_RbCD = 0.4 #DUMMY
epsilon_RbCE = 0.7 #DUMMY
epsilon_E2F = 20 #DUMMY
epsilon_CDC20 = 1 #DUMMY
sigma_Rb = 0.3 #DUMMY
sigma_RbMP = 0.1 #DUMMY
sigma_E2F = 0.7 #DUMMY
sigma_CDC20 = 1 #DUMMY
kappa_CECA = 1 #DUMMY
kappa_CBCA = 1 #DUMMY
#Knockdown terms. These should all be equal to 1 unless we're testing the
#effects of a knockdown.
theta_E2F = 1
theta_p53 = 1
theta_CE = 1
theta_MDM2 = 1
theta_CD = 1
theta_CA = 1
theta_Ink4 = 1
theta_CB = 1
theta_CDC20 = 1
k_p21 = 1
k_p53 = 1
k_RbMP = 1
k_RbPP = 1
k_RbCD = 1
k_RbCE = 1
k_E2FCA = 1
k_E2F = 1
k_CD = 1
k_CA = 1
k_CB = 1
k_CDC20CB = 1
k_CDC20 = 1
E2F_tot = 10 #DUMMY
CDC20_tot = 0.285
Rb_tot = 10 #DUMMY
#Potentially override parameters
for infile in infiles:
reader = open(infile)
for line in reader.readlines():
exec(line)
#Dummy initial conditions
y0 = [0.1,0.1,0.1,0.1,Rb_tot,0.0,0.0,0.1,0.1,0.1,0.1,0.1,0.1,0.1,0.1,0,0]
#Abort early if the output directory already exists.
if dirname != "":
if os.path.exists(dirname):
sys.stderr.write("Output dir " + dirname + " already exists. Aborting.\n")
sys.exit(2)
#Functions to be called from the derivative functions.
def E6(t):
return 0 #dummy
def E7(t):
return 0 #dummy
#Fractions for inhibition/inactive complex formation.
#Each has a sanity check in case chief input is near zero.
sanity_threshold = 0.00001
def f(e2f, rb, rbmp):
if e2f < sanity_threshold:
return 0
return e2f**2 / (e2f + beta_E2FRb * rb + beta_E2FRbMP * rbmp)
def g(mdm2, p14):
if mdm2 < sanity_threshold:
return 0
return mdm2**2 / (mdm2 + beta_mdm2p14 * p14)
def h(c, p21, cd, ce, ca, cb):
if c < sanity_threshold:
return 0
if ca+cb+cd+ce < sanity_threshold:
return 0
return c**2 / (c + beta_cp21 * p21 * c / (ca+cb+cd+ce)) |
def j(e7, e7prb, e7p21):
return e7 - e7prb - e7p21
#Variable key
#y[0] = p14
#y[1] = Ink4
#y[2] = p21
#y[3] = p53
#y[4] = Rb
#y[5] = pRb-P
#y[6] = pRb-PP
#y[7] = E2F
#y[8] = mdm2
#y[9] = pMDM2
#y[10] = CD (Cyclin D/CDK4-6 complex)
#y[11] = CE (Cyclin E/CDK2 complex)
#y[12] = CA (Cyclin A/CDK2 complex)
#y[13] = CB (Cyclin B/CDK1 complex)
#y[14] = CDC20
#y[15] = E7-pRB
#y[16] = E7-p21
names = []
names.append("p14")
names.append("Ink4")
names.append("p21")
names.append("p53")
names.append("pRb")
names.append("pRb-P")
names.append("pRb-PP")
names.append("E2F")
names.append("mdm2")
names.append("pMDM2")
names.append("CD")
names.append("CE")
names.append("CA")
names.append("CB")
names.append("CDC20")
names.append("E7-pRB")
names.append("E7-p21")
#The derivative function for the differential equation system.
def func(y,t):
return [
#We have p14 being produced by E2F after inhibition from Rb
#is accounted for, and degraded at a constant rate.
alpha_p14 * theta_E2F * f(y[7], y[4], y[5]) - omega_p14 * y[0],
#It's just like the p14 equation, but with Ink4 instead!
alpha_Ink4 * theta_E2F * f(y[7], y[4], y[5]) - omega_Ink4 * y[1],
#Form p21 at a rate proportional to p53 presence; degrade it
#"naturally" or with help from Cyclin E/CDK2. E7 sequesters p21.
alpha_p21 * theta_p53 * y[3] - omega_p21 * y[2] - omega_p21CE * theta_CE * y[11] * y[2]/(y[2]+k_p21) - beta_E7p21 * j(E7(t),y[15],y[16]) * y[2] + delta_E7p21 * y[16],
#P53 is generated naturally at a constant rate, and degrades
#both on its own and with help from MDM2.
alpha_p53 - omega_p53 * y[3] - (omega_p53MDM2 * theta_MDM2 * g(y[9], y[0]) + omega_p53E6 * E6(t)) * y[3]/(y[3]+k_p53),
#Rb gets monophosphorylated by Cyclin D/CDK4-6. Rb-monophosphate
#gets its phosphate cleaved at a constant rate. Rb of all sorts
#gets sequestered by E7.
-epsilon_RbCD * theta_CD * y[4]/(y[4]+k_RbCD) * y[10] + sigma_Rb * y[5]/(y[5]+k_RbMP) - beta_E7pRB * j(E7(t),y[15],y[16]) * y[4] + delta_E7pRB * y[15] * y[4]/Rb_tot,
#Rb-monophosphate can be formed by phosphorylation of Rb or cleavage
#of Rb-polyphosphate. It can be lost by Cyclin E/CDK2 or
#phosphatase activity. Rb of all sorts gets sequestered by E7.
epsilon_RbCD * theta_CD * y[4]/(y[4]+k_RbCD) * y[10] - sigma_Rb * y[5]/(y[5]+k_RbMP) - epsilon_RbCE * theta_CE * y[5]/(y[5]+k_RbCE) * y[11] + sigma_RbMP * y[6]/(y[6]+k_RbPP) - beta_E7pRBP * j(E7(t),y[15],y[16]) * y[5] + delta_E7pRB * y[15] * y[5]/Rb_tot,
#Rb-polyphosphate arises from Cyclin E/CDK2 activity on
#Rb-monophosphate, and is lost by phosphatase activity. Rb of all
#sorts gets sequestered by E7.
epsilon_RbCE * theta_CE * y[5]/(y[5]+k_RbCE) * y[11] - sigma_RbMP * y[6]/(y[6]+k_RbPP) - beta_E7pRBPP * j(E7(t),y[15],y[16]) * y[6] + delta_E7pRB * y[15] * y[6]/Rb_tot,
#E2F is inactivated by Cyclin A/CDK2. It is reactivated at a
#constant rate, or so this equation proposes.
-epsilon_E2F * theta_CA * y[7] * y[12]/(y[7] + k_E2FCA) + sigma_E2F * (E2F_tot - y[7])/(k_E2F + E2F_tot - y[7]),
#mdm2 mRNA is promoted by p53 and degrades rapidly.
alpha_mdm2r * theta_p53 * y[3] - omega_mdm2r * y[8],
#MDM2 protein is translated from mdm2 mRNA, and is degraded at a
#constant rate.
alpha_MDM2 * y[8] - omega_MDM2 * y[9],
#Cyclin D/CDK4-6 is promoted by growth factor, and can degrade either on its
#own or under the influence of Ink4.
alpha_CD - omega_CD * y[10] - y[10]/(y[10] + k_CD) * omega_CDInk4 * theta_Ink4 * y[1],
#Cyclin E/CDK2 is also promoted by E2F, and degrades on its own.
#When not inhibited by p21, it becomes Cyclin A/CDK2.
alpha_CE * theta_E2F * f(y[7], y[4], y[5]) - omega_CE * y[11] - kappa_CECA * theta_CA * h(y[11], y[2], y[10], y[11], y[12], y[13]) * y[12],
#Cyclin A/CDK2 forms from Cyclin E/CDK2. It degrades over time, and
#degrades faster under the influence of active CDC20.
kappa_CECA * theta_CA * h(y[11], y[2], y[10], y[11], y[12], y[13]) * y[12] - omega_CA * y[12] - y[12]/(y[12] + k_CA) * omega_CACDC20 * theta_CDC20 * y[14],
#Cyclin B/CDK1 is constantly produced, but normally gets degraded
#quickly; active Cyclin A/CDK2 slows down the degradation. Active
#CDC20 also degrades it, however.
alpha_CB - omega_CB * y[13] /(kappa_CBCA + theta_CA * h(y[12], y[2], y[10], y[11], y[12], y[13])) - y[13]/(y[13] + k_CB) * omega_CBCDC20 * theta_CDC20 * y[14],
#CDC20 is activated by Cyclin B/CDK1. It is inactivated gradually
#over time.
sigma_CDC20 * theta_CB * y[13] * (CDC20_tot - y[14])/(k_CDC20CB + CDC20_tot - y[14]) - epsilon_CDC20 * y[14]/(k_CDC20 + y[14]),
#E7 viral protein will associate with retinoblastoma protein,
#sequestering it into an inactive form.
beta_E7pRB * j(E7(t),y[15],y[16]) * y[4] + beta_E7pRBP * j(E7(t),y[15],y[16]) * y[5] + beta_E7pRBPP * j(E7(t),y[15],y[16]) * y[6] - delta_E7pRB * y[15],
#E7 viral protein will also associate with and sequester p21.
beta_E7p21 * j(E7(t),y[15],y[16]) * y[2] - delta_E7p21 * y[16],
]
t = arange(0, 2000.0, 0.1)
y = odeint(func, y0, t, ixpr=False, mxstep=5000)
if dirname == "":
dirname = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d-%H:%M:%S')
os.makedirs(dirname)
os.chdir(dirname)
for i in range(len(y0)):
writer = open(names[i]+".txt", 'w')
for j in xrange((len(t) * 9) / 10, len(t)):
writer.write(str(t[j]) + " " + str(y[j][i]) + "\n")
writer.close() | random_line_split | |
p21_sim.py | # Copyright (c) 2015-2016 Derrick Sund
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from scipy.integrate import odeint
from numpy import arange
import time
import datetime
import os
import sys
infiles = []
dirname = ""
if len(sys.argv) > 1:
infiles = sys.argv[1:]
for infile in infiles:
if not os.path.isfile(infile):
sys.stderr.write(infile + ": No such file found.\n")
sys.exit(1)
#Constants. Do not add constants directly to the derivative function; violators
#will have rabid weasels set upon them.
alpha_p14 = 1 #DUMMY
alpha_Ink4 = 5 #DUMMY
alpha_p21 = 4 #DUMMY
alpha_p53 = 0.9 #DUMMY
alpha_mdm2r = 9.425 #DUMMY
alpha_MDM2 = 1 #DUMMY
alpha_CD = 1 #DUMMY
alpha_CE = 1 #DUMMY
alpha_CB = 1 #DUMMY
omega_p14 = 0.116 #DUMMY
omega_Ink4 = 0.139 #DUMMY
omega_p21 = 1 #DUMMY
omega_p21CE = 1 #DUMMY
omega_p53 = 2.079 #DUMMY
omega_p53MDM2 = 1 #DUMMY
omega_p53E6 = 1 #DUMMY
omega_mdm2r = 0.693 #DUMMY
omega_MDM2 = 8.318 #DUMMY
omega_CD = 0.023 #DUMMY
omega_CDInk4 = 1 #DUMMY
omega_CE = 1 #DUMMY
omega_CA = 1 #DUMMY
omega_CACDC20 = 1 #DUMMY
omega_CB = 1 #DUMMY
omega_CBCDC20 = 1 #DUMMY
beta_E2FRb = 2 #DUMMY
beta_E2FRbMP = beta_E2FRb * 0.3 #DUMMY, but should be smaller than beta_E2FRb
beta_mdm2p14 = 1 #DUMMY
beta_cp21 = 1 #DUMMY
beta_E7p21 = 1 #DUMMY
beta_E7pRB = 1 #DUMMY
beta_E7pRBP = 1 #DUMMY
beta_E7pRBPP = 1 #DUMMY
delta_E7p21 = 1 #DUMMY
delta_E7pRB = 1 #DUMMY
epsilon_RbCD = 0.4 #DUMMY
epsilon_RbCE = 0.7 #DUMMY
epsilon_E2F = 20 #DUMMY
epsilon_CDC20 = 1 #DUMMY
sigma_Rb = 0.3 #DUMMY
sigma_RbMP = 0.1 #DUMMY
sigma_E2F = 0.7 #DUMMY
sigma_CDC20 = 1 #DUMMY
kappa_CECA = 1 #DUMMY
kappa_CBCA = 1 #DUMMY
#Knockdown terms. These should all be equal to 1 unless we're testing the
#effects of a knockdown.
theta_E2F = 1
theta_p53 = 1
theta_CE = 1
theta_MDM2 = 1
theta_CD = 1
theta_CA = 1
theta_Ink4 = 1
theta_CB = 1
theta_CDC20 = 1
k_p21 = 1
k_p53 = 1
k_RbMP = 1
k_RbPP = 1
k_RbCD = 1
k_RbCE = 1
k_E2FCA = 1
k_E2F = 1
k_CD = 1
k_CA = 1
k_CB = 1
k_CDC20CB = 1
k_CDC20 = 1
E2F_tot = 10 #DUMMY
CDC20_tot = 0.285
Rb_tot = 10 #DUMMY
#Potentially override parameters
for infile in infiles:
reader = open(infile)
for line in reader.readlines():
exec(line)
#Dummy initial conditions
y0 = [0.1,0.1,0.1,0.1,Rb_tot,0.0,0.0,0.1,0.1,0.1,0.1,0.1,0.1,0.1,0.1,0,0]
#Abort early if the output directory already exists.
if dirname != "":
if os.path.exists(dirname):
sys.stderr.write("Output dir " + dirname + " already exists. Aborting.\n")
sys.exit(2)
#Functions to be called from the derivative functions.
def E6(t):
return 0 #dummy
def E7(t):
return 0 #dummy
#Fractions for inhibition/inactive complex formation.
#Each has a sanity check in case chief input is near zero.
sanity_threshold = 0.00001
def f(e2f, rb, rbmp):
if e2f < sanity_threshold:
|
return e2f**2 / (e2f + beta_E2FRb * rb + beta_E2FRbMP * rbmp)
def g(mdm2, p14):
if mdm2 < sanity_threshold:
return 0
return mdm2**2 / (mdm2 + beta_mdm2p14 * p14)
def h(c, p21, cd, ce, ca, cb):
if c < sanity_threshold:
return 0
if ca+cb+cd+ce < sanity_threshold:
return 0
return c**2 / (c + beta_cp21 * p21 * c / (ca+cb+cd+ce))
def j(e7, e7prb, e7p21):
return e7 - e7prb - e7p21
#Variable key
#y[0] = p14
#y[1] = Ink4
#y[2] = p21
#y[3] = p53
#y[4] = Rb
#y[5] = pRb-P
#y[6] = pRb-PP
#y[7] = E2F
#y[8] = mdm2
#y[9] = pMDM2
#y[10] = CD (Cyclin D/CDK4-6 complex)
#y[11] = CE (Cyclin E/CDK2 complex)
#y[12] = CA (Cyclin A/CDK2 complex)
#y[13] = CB (Cyclin B/CDK1 complex)
#y[14] = CDC20
#y[15] = E7-pRB
#y[16] = E7-p21
names = []
names.append("p14")
names.append("Ink4")
names.append("p21")
names.append("p53")
names.append("pRb")
names.append("pRb-P")
names.append("pRb-PP")
names.append("E2F")
names.append("mdm2")
names.append("pMDM2")
names.append("CD")
names.append("CE")
names.append("CA")
names.append("CB")
names.append("CDC20")
names.append("E7-pRB")
names.append("E7-p21")
#The derivative function for the differential equation system.
def func(y,t):
return [
#We have p14 being produced by E2F after inhibition from Rb
#is accounted for, and degraded at a constant rate.
alpha_p14 * theta_E2F * f(y[7], y[4], y[5]) - omega_p14 * y[0],
#It's just like the p14 equation, but with Ink4 instead!
alpha_Ink4 * theta_E2F * f(y[7], y[4], y[5]) - omega_Ink4 * y[1],
#Form p21 at a rate proportional to p53 presence; degrade it
#"naturally" or with help from Cyclin E/CDK2. E7 sequesters p21.
alpha_p21 * theta_p53 * y[3] - omega_p21 * y[2] - omega_p21CE * theta_CE * y[11] * y[2]/(y[2]+k_p21) - beta_E7p21 * j(E7(t),y[15],y[16]) * y[2] + delta_E7p21 * y[16],
#P53 is generated naturally at a constant rate, and degrades
#both on its own and with help from MDM2.
alpha_p53 - omega_p53 * y[3] - (omega_p53MDM2 * theta_MDM2 * g(y[9], y[0]) + omega_p53E6 * E6(t)) * y[3]/(y[3]+k_p53),
#Rb gets monophosphorylated by Cyclin D/CDK4-6. Rb-monophosphate
#gets its phosphate cleaved at a constant rate. Rb of all sorts
#gets sequestered by E7.
-epsilon_RbCD * theta_CD * y[4]/(y[4]+k_RbCD) * y[10] + sigma_Rb * y[5]/(y[5]+k_RbMP) - beta_E7pRB * j(E7(t),y[15],y[16]) * y[4] + delta_E7pRB * y[15] * y[4]/Rb_tot,
#Rb-monophosphate can be formed by phosphorylation of Rb or cleavage
#of Rb-polyphosphate. It can be lost by Cyclin E/CDK2 or
#phosphatase activity. Rb of all sorts gets sequestered by E7.
epsilon_RbCD * theta_CD * y[4]/(y[4]+k_RbCD) * y[10] - sigma_Rb * y[5]/(y[5]+k_RbMP) - epsilon_RbCE * theta_CE * y[5]/(y[5]+k_RbCE) * y[11] + sigma_RbMP * y[6]/(y[6]+k_RbPP) - beta_E7pRBP * j(E7(t),y[15],y[16]) * y[5] + delta_E7pRB * y[15] * y[5]/Rb_tot,
#Rb-polyphosphate arises from Cyclin E/CDK2 activity on
#Rb-monophosphate, and is lost by phosphatase activity. Rb of all
#sorts gets sequestered by E7.
epsilon_RbCE * theta_CE * y[5]/(y[5]+k_RbCE) * y[11] - sigma_RbMP * y[6]/(y[6]+k_RbPP) - beta_E7pRBPP * j(E7(t),y[15],y[16]) * y[6] + delta_E7pRB * y[15] * y[6]/Rb_tot,
#E2F is inactivated by Cyclin A/CDK2. It is reactivated at a
#constant rate, or so this equation proposes.
-epsilon_E2F * theta_CA * y[7] * y[12]/(y[7] + k_E2FCA) + sigma_E2F * (E2F_tot - y[7])/(k_E2F + E2F_tot - y[7]),
#mdm2 mRNA is promoted by p53 and degrades rapidly.
alpha_mdm2r * theta_p53 * y[3] - omega_mdm2r * y[8],
#MDM2 protein is translated from mdm2 mRNA, and is degraded at a
#constant rate.
alpha_MDM2 * y[8] - omega_MDM2 * y[9],
#Cyclin D/CDK4-6 is promoted by growth factor, and can degrade either on its
#own or under the influence of Ink4.
alpha_CD - omega_CD * y[10] - y[10]/(y[10] + k_CD) * omega_CDInk4 * theta_Ink4 * y[1],
#Cyclin E/CDK2 is also promoted by E2F, and degrades on its own.
#When not inhibited by p21, it becomes Cyclin A/CDK2.
alpha_CE * theta_E2F * f(y[7], y[4], y[5]) - omega_CE * y[11] - kappa_CECA * theta_CA * h(y[11], y[2], y[10], y[11], y[12], y[13]) * y[12],
#Cyclin A/CDK2 forms from Cyclin E/CDK2. It degrades over time, and
#degrades faster under the influence of active CDC20.
kappa_CECA * theta_CA * h(y[11], y[2], y[10], y[11], y[12], y[13]) * y[12] - omega_CA * y[12] - y[12]/(y[12] + k_CA) * omega_CACDC20 * theta_CDC20 * y[14],
#Cyclin B/CDK1 is constantly produced, but normally gets degraded
#quickly; active Cyclin A/CDK2 slows down the degradation. Active
#CDC20 also degrades it, however.
alpha_CB - omega_CB * y[13] /(kappa_CBCA + theta_CA * h(y[12], y[2], y[10], y[11], y[12], y[13])) - y[13]/(y[13] + k_CB) * omega_CBCDC20 * theta_CDC20 * y[14],
#CDC20 is activated by Cyclin B/CDK1. It is inactivated gradually
#over time.
sigma_CDC20 * theta_CB * y[13] * (CDC20_tot - y[14])/(k_CDC20CB + CDC20_tot - y[14]) - epsilon_CDC20 * y[14]/(k_CDC20 + y[14]),
#E7 viral protein will associate with retinoblastoma protein,
#sequestering it into an inactive form.
beta_E7pRB * j(E7(t),y[15],y[16]) * y[4] + beta_E7pRBP * j(E7(t),y[15],y[16]) * y[5] + beta_E7pRBPP * j(E7(t),y[15],y[16]) * y[6] - delta_E7pRB * y[15],
#E7 viral protein will also associate with and sequester p21.
beta_E7p21 * j(E7(t),y[15],y[16]) * y[2] - delta_E7p21 * y[16],
]
t = arange(0, 2000.0, 0.1)
y = odeint(func, y0, t, ixpr=False, mxstep=5000)
if dirname == "":
dirname = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d-%H:%M:%S')
os.makedirs(dirname)
os.chdir(dirname)
for i in range(len(y0)):
writer = open(names[i]+".txt", 'w')
for j in xrange((len(t) * 9) / 10, len(t)):
writer.write(str(t[j]) + " " + str(y[j][i]) + "\n")
writer.close()
| return 0 | conditional_block |
p21_sim.py | # Copyright (c) 2015-2016 Derrick Sund
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from scipy.integrate import odeint
from numpy import arange
import time
import datetime
import os
import sys
infiles = []
dirname = ""
if len(sys.argv) > 1:
infiles = sys.argv[1:]
for infile in infiles:
if not os.path.isfile(infile):
sys.stderr.write(infile + ": No such file found.\n")
sys.exit(1)
#Constants. Do not add constants directly to the derivative function; violators
#will have rabid weasels set upon them.
alpha_p14 = 1 #DUMMY
alpha_Ink4 = 5 #DUMMY
alpha_p21 = 4 #DUMMY
alpha_p53 = 0.9 #DUMMY
alpha_mdm2r = 9.425 #DUMMY
alpha_MDM2 = 1 #DUMMY
alpha_CD = 1 #DUMMY
alpha_CE = 1 #DUMMY
alpha_CB = 1 #DUMMY
omega_p14 = 0.116 #DUMMY
omega_Ink4 = 0.139 #DUMMY
omega_p21 = 1 #DUMMY
omega_p21CE = 1 #DUMMY
omega_p53 = 2.079 #DUMMY
omega_p53MDM2 = 1 #DUMMY
omega_p53E6 = 1 #DUMMY
omega_mdm2r = 0.693 #DUMMY
omega_MDM2 = 8.318 #DUMMY
omega_CD = 0.023 #DUMMY
omega_CDInk4 = 1 #DUMMY
omega_CE = 1 #DUMMY
omega_CA = 1 #DUMMY
omega_CACDC20 = 1 #DUMMY
omega_CB = 1 #DUMMY
omega_CBCDC20 = 1 #DUMMY
beta_E2FRb = 2 #DUMMY
beta_E2FRbMP = beta_E2FRb * 0.3 #DUMMY, but should be smaller than beta_E2FRb
beta_mdm2p14 = 1 #DUMMY
beta_cp21 = 1 #DUMMY
beta_E7p21 = 1 #DUMMY
beta_E7pRB = 1 #DUMMY
beta_E7pRBP = 1 #DUMMY
beta_E7pRBPP = 1 #DUMMY
delta_E7p21 = 1 #DUMMY
delta_E7pRB = 1 #DUMMY
epsilon_RbCD = 0.4 #DUMMY
epsilon_RbCE = 0.7 #DUMMY
epsilon_E2F = 20 #DUMMY
epsilon_CDC20 = 1 #DUMMY
sigma_Rb = 0.3 #DUMMY
sigma_RbMP = 0.1 #DUMMY
sigma_E2F = 0.7 #DUMMY
sigma_CDC20 = 1 #DUMMY
kappa_CECA = 1 #DUMMY
kappa_CBCA = 1 #DUMMY
#Knockdown terms. These should all be equal to 1 unless we're testing the
#effects of a knockdown.
theta_E2F = 1
theta_p53 = 1
theta_CE = 1
theta_MDM2 = 1
theta_CD = 1
theta_CA = 1
theta_Ink4 = 1
theta_CB = 1
theta_CDC20 = 1
k_p21 = 1
k_p53 = 1
k_RbMP = 1
k_RbPP = 1
k_RbCD = 1
k_RbCE = 1
k_E2FCA = 1
k_E2F = 1
k_CD = 1
k_CA = 1
k_CB = 1
k_CDC20CB = 1
k_CDC20 = 1
E2F_tot = 10 #DUMMY
CDC20_tot = 0.285
Rb_tot = 10 #DUMMY
#Potentially override parameters
for infile in infiles:
reader = open(infile)
for line in reader.readlines():
exec(line)
#Dummy initial conditions
y0 = [0.1,0.1,0.1,0.1,Rb_tot,0.0,0.0,0.1,0.1,0.1,0.1,0.1,0.1,0.1,0.1,0,0]
#Abort early if the output directory already exists.
if dirname != "":
if os.path.exists(dirname):
sys.stderr.write("Output dir " + dirname + " already exists. Aborting.\n")
sys.exit(2)
#Functions to be called from the derivative functions.
def E6(t):
return 0 #dummy
def E7(t):
return 0 #dummy
#Fractions for inhibition/inactive complex formation.
#Each has a sanity check in case chief input is near zero.
sanity_threshold = 0.00001
def f(e2f, rb, rbmp):
if e2f < sanity_threshold:
return 0
return e2f**2 / (e2f + beta_E2FRb * rb + beta_E2FRbMP * rbmp)
def g(mdm2, p14):
if mdm2 < sanity_threshold:
return 0
return mdm2**2 / (mdm2 + beta_mdm2p14 * p14)
def h(c, p21, cd, ce, ca, cb):
|
def j(e7, e7prb, e7p21):
return e7 - e7prb - e7p21
#Variable key
#y[0] = p14
#y[1] = Ink4
#y[2] = p21
#y[3] = p53
#y[4] = Rb
#y[5] = pRb-P
#y[6] = pRb-PP
#y[7] = E2F
#y[8] = mdm2
#y[9] = pMDM2
#y[10] = CD (Cyclin D/CDK4-6 complex)
#y[11] = CE (Cyclin E/CDK2 complex)
#y[12] = CA (Cyclin A/CDK2 complex)
#y[13] = CB (Cyclin B/CDK1 complex)
#y[14] = CDC20
#y[15] = E7-pRB
#y[16] = E7-p21
names = []
names.append("p14")
names.append("Ink4")
names.append("p21")
names.append("p53")
names.append("pRb")
names.append("pRb-P")
names.append("pRb-PP")
names.append("E2F")
names.append("mdm2")
names.append("pMDM2")
names.append("CD")
names.append("CE")
names.append("CA")
names.append("CB")
names.append("CDC20")
names.append("E7-pRB")
names.append("E7-p21")
#The derivative function for the differential equation system.
def func(y,t):
return [
#We have p14 being produced by E2F after inhibition from Rb
#is accounted for, and degraded at a constant rate.
alpha_p14 * theta_E2F * f(y[7], y[4], y[5]) - omega_p14 * y[0],
#It's just like the p14 equation, but with Ink4 instead!
alpha_Ink4 * theta_E2F * f(y[7], y[4], y[5]) - omega_Ink4 * y[1],
#Form p21 at a rate proportional to p53 presence; degrade it
#"naturally" or with help from Cyclin E/CDK2. E7 sequesters p21.
alpha_p21 * theta_p53 * y[3] - omega_p21 * y[2] - omega_p21CE * theta_CE * y[11] * y[2]/(y[2]+k_p21) - beta_E7p21 * j(E7(t),y[15],y[16]) * y[2] + delta_E7p21 * y[16],
#P53 is generated naturally at a constant rate, and degrades
#both on its own and with help from MDM2.
alpha_p53 - omega_p53 * y[3] - (omega_p53MDM2 * theta_MDM2 * g(y[9], y[0]) + omega_p53E6 * E6(t)) * y[3]/(y[3]+k_p53),
#Rb gets monophosphorylated by Cyclin D/CDK4-6. Rb-monophosphate
#gets its phosphate cleaved at a constant rate. Rb of all sorts
#gets sequestered by E7.
-epsilon_RbCD * theta_CD * y[4]/(y[4]+k_RbCD) * y[10] + sigma_Rb * y[5]/(y[5]+k_RbMP) - beta_E7pRB * j(E7(t),y[15],y[16]) * y[4] + delta_E7pRB * y[15] * y[4]/Rb_tot,
#Rb-monophosphate can be formed by phosphorylation of Rb or cleavage
#of Rb-polyphosphate. It can be lost by Cyclin E/CDK2 or
#phosphatase activity. Rb of all sorts gets sequestered by E7.
epsilon_RbCD * theta_CD * y[4]/(y[4]+k_RbCD) * y[10] - sigma_Rb * y[5]/(y[5]+k_RbMP) - epsilon_RbCE * theta_CE * y[5]/(y[5]+k_RbCE) * y[11] + sigma_RbMP * y[6]/(y[6]+k_RbPP) - beta_E7pRBP * j(E7(t),y[15],y[16]) * y[5] + delta_E7pRB * y[15] * y[5]/Rb_tot,
#Rb-polyphosphate arises from Cyclin E/CDK2 activity on
#Rb-monophosphate, and is lost by phosphatase activity. Rb of all
#sorts gets sequestered by E7.
epsilon_RbCE * theta_CE * y[5]/(y[5]+k_RbCE) * y[11] - sigma_RbMP * y[6]/(y[6]+k_RbPP) - beta_E7pRBPP * j(E7(t),y[15],y[16]) * y[6] + delta_E7pRB * y[15] * y[6]/Rb_tot,
#E2F is inactivated by Cyclin A/CDK2. It is reactivated at a
#constant rate, or so this equation proposes.
-epsilon_E2F * theta_CA * y[7] * y[12]/(y[7] + k_E2FCA) + sigma_E2F * (E2F_tot - y[7])/(k_E2F + E2F_tot - y[7]),
#mdm2 mRNA is promoted by p53 and degrades rapidly.
alpha_mdm2r * theta_p53 * y[3] - omega_mdm2r * y[8],
#MDM2 protein is translated from mdm2 mRNA, and is degraded at a
#constant rate.
alpha_MDM2 * y[8] - omega_MDM2 * y[9],
#Cyclin D/CDK4-6 is promoted by growth factor, and can degrade either on its
#own or under the influence of Ink4.
alpha_CD - omega_CD * y[10] - y[10]/(y[10] + k_CD) * omega_CDInk4 * theta_Ink4 * y[1],
#Cyclin E/CDK2 is also promoted by E2F, and degrades on its own.
#When not inhibited by p21, it becomes Cyclin A/CDK2.
alpha_CE * theta_E2F * f(y[7], y[4], y[5]) - omega_CE * y[11] - kappa_CECA * theta_CA * h(y[11], y[2], y[10], y[11], y[12], y[13]) * y[12],
#Cyclin A/CDK2 forms from Cyclin E/CDK2. It degrades over time, and
#degrades faster under the influence of active CDC20.
kappa_CECA * theta_CA * h(y[11], y[2], y[10], y[11], y[12], y[13]) * y[12] - omega_CA * y[12] - y[12]/(y[12] + k_CA) * omega_CACDC20 * theta_CDC20 * y[14],
#Cyclin B/CDK1 is constantly produced, but normally gets degraded
#quickly; active Cyclin A/CDK2 slows down the degradation. Active
#CDC20 also degrades it, however.
alpha_CB - omega_CB * y[13] /(kappa_CBCA + theta_CA * h(y[12], y[2], y[10], y[11], y[12], y[13])) - y[13]/(y[13] + k_CB) * omega_CBCDC20 * theta_CDC20 * y[14],
#CDC20 is activated by Cyclin B/CDK1. It is inactivated gradually
#over time.
sigma_CDC20 * theta_CB * y[13] * (CDC20_tot - y[14])/(k_CDC20CB + CDC20_tot - y[14]) - epsilon_CDC20 * y[14]/(k_CDC20 + y[14]),
#E7 viral protein will associate with retinoblastoma protein,
#sequestering it into an inactive form.
beta_E7pRB * j(E7(t),y[15],y[16]) * y[4] + beta_E7pRBP * j(E7(t),y[15],y[16]) * y[5] + beta_E7pRBPP * j(E7(t),y[15],y[16]) * y[6] - delta_E7pRB * y[15],
#E7 viral protein will also associate with and sequester p21.
beta_E7p21 * j(E7(t),y[15],y[16]) * y[2] - delta_E7p21 * y[16],
]
t = arange(0, 2000.0, 0.1)
y = odeint(func, y0, t, ixpr=False, mxstep=5000)
if dirname == "":
dirname = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d-%H:%M:%S')
os.makedirs(dirname)
os.chdir(dirname)
for i in range(len(y0)):
writer = open(names[i]+".txt", 'w')
for j in xrange((len(t) * 9) / 10, len(t)):
writer.write(str(t[j]) + " " + str(y[j][i]) + "\n")
writer.close()
| if c < sanity_threshold:
return 0
if ca+cb+cd+ce < sanity_threshold:
return 0
return c**2 / (c + beta_cp21 * p21 * c / (ca+cb+cd+ce)) | identifier_body |
yaml.ts | import * as _ from 'lodash';
import { Position, TextDocument } from 'vscode-languageserver-textdocument';
import { Document, Options, parseCST } from 'yaml';
import { Node, Pair, Scalar, YAMLMap, YAMLSeq } from 'yaml/types';
import { IModuleMetadata } from '../interfaces/module';
import { DocsLibrary } from '../services/docsLibrary';
import { isTaskKeyword, playExclusiveKeywords } from './ansible';
/**
* A helper class used for building YAML path assertions and retrieving parent
* nodes. The assertions are built up from the most nested (last in array)
* element.
*/
export class AncestryBuilder<N extends Node | Pair = Node> {
private _path: Node[];
private _index: number;
constructor(path: Node[] | null, index?: number) {
this._path = path || [];
this._index = index || this._path.length - 1;
}
/**
* Move up the path, optionally asserting the type of the parent.
*
* Unless Pair is explicitly asserted, it is ignored/skipped over when moving
* up.
*/
parent<X extends Node | Pair>(
type?: new (...args: unknown[]) => X
): AncestryBuilder<X> {
this._index--;
if (this.get() instanceof Pair) {
if (!type || !(type === Pair.prototype.constructor)) {
this._index--;
}
}
if (type) {
if (!(this.get() instanceof type)) {
this._index = Number.MIN_SAFE_INTEGER;
}
}
return this as unknown as AncestryBuilder<X>;
}
/**
* Move up the path, asserting that the current node was a key of a mapping
* pair. The builder skips over the Pair to the parent YAMLMap.
*/
parentOfKey(): AncestryBuilder<YAMLMap> {
const node = this.get();
this.parent(Pair);
const pairNode = this.get();
if (pairNode instanceof Pair && pairNode.key === node) {
this.parent(YAMLMap);
} else {
this._index = Number.MIN_SAFE_INTEGER;
}
return this as unknown as AncestryBuilder<YAMLMap>;
}
/**
* Get node up to which the assertions have led.
*/
get(): N | null {
return (this._path[this._index] as N) || null;
}
/**
* Get the key of the Pair one level down the path.
*
* The key is returned only if it indeed is a string Scalar.
*/
// The `this` argument is for generics restriction of this method.
getStringKey(this: AncestryBuilder<YAMLMap>): string | null {
const node = this._path[this._index + 1];
if (
node instanceof Pair &&
node.key instanceof Scalar &&
typeof node.key.value === 'string'
) {
return node.key.value;
}
return null;
}
/**
* Get the value of the Pair one level down the path.
*/
// The `this` argument is for generics restriction of this method.
getValue(this: AncestryBuilder<YAMLMap>): Node | null {
const node = this._path[this._index + 1];
if (node instanceof Pair) {
return node.value;
}
return null;
}
/**
* Get the path to which the assertions have led.
*
* The path will be a subpath of the original path.
*/
getPath(): Node[] | null {
if (this._index < 0) return null;
const path = this._path.slice(0, this._index + 1);
return path;
}
/**
* Get the path to the key of the Pair one level down the path to which the
* assertions have led.
*
* The path will be a subpath of the original path.
*/
// The `this` argument is for generics restriction of this method.
getKeyPath(this: AncestryBuilder<YAMLMap>): Node[] | null {
if (this._index < 0) return null;
const path = this._path.slice(0, this._index + 1);
const node = this._path[this._index + 1];
if (node instanceof Pair) {
path.push(node);
path.push(node.key);
return path;
}
return null;
}
}
export function getPathAt(
document: TextDocument,
position: Position,
docs: Document[],
inclusive = false
): Node[] | null {
const offset = document.offsetAt(position);
const doc = _.find(docs, (d) => contains(d.contents, offset, inclusive));
if (doc && doc.contents) {
return getPathAtOffset([doc.contents], offset, inclusive);
}
return null;
}
export function contains(
node: Node | null,
offset: number,
inclusive: boolean
): boolean {
const range = getOrigRange(node);
return !!(
range &&
range[0] <= offset &&
(range[1] > offset || (inclusive && range[1] >= offset))
);
}
export function getPathAtOffset(
path: Node[],
offset: number,
inclusive: boolean
): Node[] | null {
if (path) {
const currentNode = path[path.length - 1];
if (currentNode instanceof YAMLMap) {
let pair = _.find(currentNode.items, (p) =>
contains(p.key, offset, inclusive)
);
if (pair) {
return getPathAtOffset(path.concat(pair, pair.key), offset, inclusive);
}
pair = _.find(currentNode.items, (p) =>
contains(p.value, offset, inclusive)
);
if (pair) {
return getPathAtOffset(
path.concat(pair, pair.value),
offset,
inclusive
);
}
pair = _.find(currentNode.items, (p) => {
const inBetweenNode = new Node();
const start = getOrigRange(p.key as Node)?.[1];
const end = getOrigRange(p.value as Node)?.[0];
if (start && end) {
inBetweenNode.range = [start, end - 1];
return contains(inBetweenNode, offset, inclusive);
} else return false;
});
if (pair) {
return path.concat(pair, new Node());
}
} else if (currentNode instanceof YAMLSeq) {
const item = _.find(currentNode.items, (n) =>
contains(n, offset, inclusive)
);
if (item) {
return getPathAtOffset(path.concat(item), offset, inclusive);
}
} else if (contains(currentNode, offset, inclusive)) {
return path;
}
return path.concat(new Node()); // empty node as indentation marker
}
return null;
}
export const tasksKey = /^(tasks|pre_tasks|post_tasks|block|rescue|always)$/;
/**
* Determines whether the path points at a parameter key of an Ansible task.
*/
export function isTaskParam(path: Node[]): boolean {
const taskListPath = new AncestryBuilder(path)
.parentOfKey()
.parent(YAMLSeq)
.getPath();
if (taskListPath) {
// basic shape of the task list has been found
if (isPlayParam(path) || isBlockParam(path) || isRoleParam(path))
return false;
if (taskListPath.length === 1) {
// case when the task list is at the top level of the document
return true;
}
const taskListKey = new AncestryBuilder(taskListPath)
.parent(YAMLMap)
.getStringKey();
if (taskListKey && tasksKey.test(taskListKey)) {
// case when a task list is defined explicitly by a keyword
return true;
}
}
return false;
}
/**
* Tries to find the list of collections declared at the Ansible play/block/task level.
*/
export function getDeclaredCollections(modulePath: Node[] | null): string[] {
const declaredCollections: string[] = [];
const taskParamsNode = new AncestryBuilder(modulePath).parent(YAMLMap).get();
declaredCollections.push(...getDeclaredCollectionsForMap(taskParamsNode));
let path: Node[] | null = new AncestryBuilder(modulePath)
.parent(YAMLMap)
.getPath();
while (true) {
// traverse the YAML up through the Ansible blocks
const builder = new AncestryBuilder(path).parent(YAMLSeq).parent(YAMLMap);
const key = builder.getStringKey();
if (key && /^block|rescue|always$/.test(key)) {
declaredCollections.push(...getDeclaredCollectionsForMap(builder.get()));
path = builder.getPath();
} else {
break;
}
}
// now we should be at the tasks/pre_tasks/post_tasks level
const playParamsNode = new AncestryBuilder(path)
.parent(YAMLSeq)
.parent(YAMLMap)
.get();
declaredCollections.push(...getDeclaredCollectionsForMap(playParamsNode));
return [...new Set(declaredCollections)]; // deduplicate
}
function getDeclaredCollectionsForMap(playNode: YAMLMap | null): string[] {
const declaredCollections: string[] = [];
const collectionsPair = _.find(
playNode?.items,
(pair) => pair.key instanceof Scalar && pair.key.value === 'collections'
);
if (collectionsPair) {
// we've found the collections declaration
const collectionsNode = collectionsPair.value;
if (collectionsNode instanceof YAMLSeq) {
for (const collectionNode of collectionsNode.items) {
if (collectionNode instanceof Scalar) {
declaredCollections.push(collectionNode.value);
}
}
}
}
return declaredCollections;
}
/**
* Heuristically determines whether the path points at an Ansible play. The
* `fileUri` helps guessing in case the YAML tree doesn't give any clues.
*
* Returns `undefined` if highly uncertain.
*/
export function isPlayParam(
path: Node[],
fileUri?: string
): boolean | undefined {
const isAtRoot =
new AncestryBuilder(path).parentOfKey().parent(YAMLSeq).getPath()
?.length === 1;
if (isAtRoot) {
const mapNode = new AncestryBuilder(path).parentOfKey().get() as YAMLMap;
const providedKeys = getYamlMapKeys(mapNode);
const containsPlayKeyword = providedKeys.some((p) =>
playExclusiveKeywords.has(p)
);
if (containsPlayKeyword) {
return true;
}
if (fileUri) {
const isInRole = /\/roles\/[^/]+\/tasks\//.test(fileUri);
if (isInRole) {
return false;
}
}
} else |
}
/**
* Determines whether the path points at one of Ansible block parameter keys.
*/
export function isBlockParam(path: Node[]): boolean {
const builder = new AncestryBuilder(path).parentOfKey();
const mapNode = builder.get();
// the block must have a list as parent
const isInYAMLSeq = !!builder.parent(YAMLSeq).get();
if (mapNode && isInYAMLSeq) {
const providedKeys = getYamlMapKeys(mapNode);
return providedKeys.includes('block');
}
return false;
}
/**
* Determines whether the path points at one of Ansible role parameter keys.
*/
export function isRoleParam(path: Node[]): boolean {
const rolesKey = new AncestryBuilder(path)
.parentOfKey()
.parent(YAMLSeq)
.parent(YAMLMap)
.getStringKey();
return rolesKey === 'roles';
}
/**
* For a given Ansible task parameter path, find the module if it has been
* provided for the task.
*/
export async function findProvidedModule(
taskParamPath: Node[],
document: TextDocument,
docsLibrary: DocsLibrary
): Promise<IModuleMetadata | undefined> {
const taskParameterMap = new AncestryBuilder(taskParamPath)
.parent(YAMLMap)
.get();
if (taskParameterMap) {
// find task parameters that have been provided by the user
const providedParameters = new Set(getYamlMapKeys(taskParameterMap));
// should usually be 0 or 1
const providedModuleNames = [...providedParameters].filter(
(x) => !x || !isTaskKeyword(x)
);
// find the module if it has been provided
for (const m of providedModuleNames) {
const [module] = await docsLibrary.findModule(
m,
taskParamPath,
document.uri
);
if (module) {
return module;
}
}
}
}
export function getYamlMapKeys(mapNode: YAMLMap): Array<string> {
return mapNode.items.map((pair) => {
if (pair.key && pair.key instanceof Scalar) {
return pair.key.value;
}
});
}
export function getOrigRange(
node: Node | null | undefined
): [number, number] | null | undefined {
if (node?.cstNode?.range) {
const range = node.cstNode.range;
return [
range.origStart !== undefined ? range.origStart : range.start,
range.origEnd !== undefined ? range.origEnd : range.end,
];
} else {
return node?.range;
}
}
/** Parsing with the YAML library tailored to the needs of this extension */
export function parseAllDocuments(str: string, options?: Options): Document[] {
const cst = parseCST(str);
cst.setOrigRanges();
const parsedDocuments: Document[] = [];
for (const cstDoc of cst) {
const parsedDocument = new Document(
Object.assign({ keepCstNodes: true }, options)
);
parsedDocument.parse(cstDoc);
parsedDocuments.push(parsedDocument);
}
return parsedDocuments;
}
| {
return false;
} | conditional_block |
yaml.ts | import * as _ from 'lodash';
import { Position, TextDocument } from 'vscode-languageserver-textdocument';
import { Document, Options, parseCST } from 'yaml';
import { Node, Pair, Scalar, YAMLMap, YAMLSeq } from 'yaml/types';
import { IModuleMetadata } from '../interfaces/module';
import { DocsLibrary } from '../services/docsLibrary';
import { isTaskKeyword, playExclusiveKeywords } from './ansible';
/**
* A helper class used for building YAML path assertions and retrieving parent
* nodes. The assertions are built up from the most nested (last in array)
* element.
*/
export class AncestryBuilder<N extends Node | Pair = Node> {
private _path: Node[];
private _index: number;
constructor(path: Node[] | null, index?: number) {
this._path = path || [];
this._index = index || this._path.length - 1;
}
/**
* Move up the path, optionally asserting the type of the parent.
*
* Unless Pair is explicitly asserted, it is ignored/skipped over when moving
* up.
*/
parent<X extends Node | Pair>(
type?: new (...args: unknown[]) => X
): AncestryBuilder<X> {
this._index--;
if (this.get() instanceof Pair) {
if (!type || !(type === Pair.prototype.constructor)) {
this._index--;
}
}
if (type) {
if (!(this.get() instanceof type)) {
this._index = Number.MIN_SAFE_INTEGER;
}
}
return this as unknown as AncestryBuilder<X>;
}
/**
* Move up the path, asserting that the current node was a key of a mapping
* pair. The builder skips over the Pair to the parent YAMLMap.
*/
parentOfKey(): AncestryBuilder<YAMLMap> {
const node = this.get();
this.parent(Pair);
const pairNode = this.get();
if (pairNode instanceof Pair && pairNode.key === node) {
this.parent(YAMLMap);
} else {
this._index = Number.MIN_SAFE_INTEGER;
}
return this as unknown as AncestryBuilder<YAMLMap>;
}
/**
* Get node up to which the assertions have led.
*/
get(): N | null {
return (this._path[this._index] as N) || null;
}
/**
* Get the key of the Pair one level down the path.
*
* The key is returned only if it indeed is a string Scalar.
*/
// The `this` argument is for generics restriction of this method.
getStringKey(this: AncestryBuilder<YAMLMap>): string | null {
const node = this._path[this._index + 1];
if (
node instanceof Pair &&
node.key instanceof Scalar &&
typeof node.key.value === 'string'
) {
return node.key.value;
}
return null;
}
/**
* Get the value of the Pair one level down the path.
*/
// The `this` argument is for generics restriction of this method.
getValue(this: AncestryBuilder<YAMLMap>): Node | null {
const node = this._path[this._index + 1];
if (node instanceof Pair) {
return node.value;
}
return null;
}
/**
* Get the path to which the assertions have led.
*
* The path will be a subpath of the original path.
*/
getPath(): Node[] | null |
/**
* Get the path to the key of the Pair one level down the path to which the
* assertions have led.
*
* The path will be a subpath of the original path.
*/
// The `this` argument is for generics restriction of this method.
getKeyPath(this: AncestryBuilder<YAMLMap>): Node[] | null {
if (this._index < 0) return null;
const path = this._path.slice(0, this._index + 1);
const node = this._path[this._index + 1];
if (node instanceof Pair) {
path.push(node);
path.push(node.key);
return path;
}
return null;
}
}
export function getPathAt(
document: TextDocument,
position: Position,
docs: Document[],
inclusive = false
): Node[] | null {
const offset = document.offsetAt(position);
const doc = _.find(docs, (d) => contains(d.contents, offset, inclusive));
if (doc && doc.contents) {
return getPathAtOffset([doc.contents], offset, inclusive);
}
return null;
}
export function contains(
node: Node | null,
offset: number,
inclusive: boolean
): boolean {
const range = getOrigRange(node);
return !!(
range &&
range[0] <= offset &&
(range[1] > offset || (inclusive && range[1] >= offset))
);
}
export function getPathAtOffset(
path: Node[],
offset: number,
inclusive: boolean
): Node[] | null {
if (path) {
const currentNode = path[path.length - 1];
if (currentNode instanceof YAMLMap) {
let pair = _.find(currentNode.items, (p) =>
contains(p.key, offset, inclusive)
);
if (pair) {
return getPathAtOffset(path.concat(pair, pair.key), offset, inclusive);
}
pair = _.find(currentNode.items, (p) =>
contains(p.value, offset, inclusive)
);
if (pair) {
return getPathAtOffset(
path.concat(pair, pair.value),
offset,
inclusive
);
}
pair = _.find(currentNode.items, (p) => {
const inBetweenNode = new Node();
const start = getOrigRange(p.key as Node)?.[1];
const end = getOrigRange(p.value as Node)?.[0];
if (start && end) {
inBetweenNode.range = [start, end - 1];
return contains(inBetweenNode, offset, inclusive);
} else return false;
});
if (pair) {
return path.concat(pair, new Node());
}
} else if (currentNode instanceof YAMLSeq) {
const item = _.find(currentNode.items, (n) =>
contains(n, offset, inclusive)
);
if (item) {
return getPathAtOffset(path.concat(item), offset, inclusive);
}
} else if (contains(currentNode, offset, inclusive)) {
return path;
}
return path.concat(new Node()); // empty node as indentation marker
}
return null;
}
export const tasksKey = /^(tasks|pre_tasks|post_tasks|block|rescue|always)$/;
/**
* Determines whether the path points at a parameter key of an Ansible task.
*/
export function isTaskParam(path: Node[]): boolean {
const taskListPath = new AncestryBuilder(path)
.parentOfKey()
.parent(YAMLSeq)
.getPath();
if (taskListPath) {
// basic shape of the task list has been found
if (isPlayParam(path) || isBlockParam(path) || isRoleParam(path))
return false;
if (taskListPath.length === 1) {
// case when the task list is at the top level of the document
return true;
}
const taskListKey = new AncestryBuilder(taskListPath)
.parent(YAMLMap)
.getStringKey();
if (taskListKey && tasksKey.test(taskListKey)) {
// case when a task list is defined explicitly by a keyword
return true;
}
}
return false;
}
/**
* Tries to find the list of collections declared at the Ansible play/block/task level.
*/
export function getDeclaredCollections(modulePath: Node[] | null): string[] {
const declaredCollections: string[] = [];
const taskParamsNode = new AncestryBuilder(modulePath).parent(YAMLMap).get();
declaredCollections.push(...getDeclaredCollectionsForMap(taskParamsNode));
let path: Node[] | null = new AncestryBuilder(modulePath)
.parent(YAMLMap)
.getPath();
while (true) {
// traverse the YAML up through the Ansible blocks
const builder = new AncestryBuilder(path).parent(YAMLSeq).parent(YAMLMap);
const key = builder.getStringKey();
if (key && /^block|rescue|always$/.test(key)) {
declaredCollections.push(...getDeclaredCollectionsForMap(builder.get()));
path = builder.getPath();
} else {
break;
}
}
// now we should be at the tasks/pre_tasks/post_tasks level
const playParamsNode = new AncestryBuilder(path)
.parent(YAMLSeq)
.parent(YAMLMap)
.get();
declaredCollections.push(...getDeclaredCollectionsForMap(playParamsNode));
return [...new Set(declaredCollections)]; // deduplicate
}
function getDeclaredCollectionsForMap(playNode: YAMLMap | null): string[] {
const declaredCollections: string[] = [];
const collectionsPair = _.find(
playNode?.items,
(pair) => pair.key instanceof Scalar && pair.key.value === 'collections'
);
if (collectionsPair) {
// we've found the collections declaration
const collectionsNode = collectionsPair.value;
if (collectionsNode instanceof YAMLSeq) {
for (const collectionNode of collectionsNode.items) {
if (collectionNode instanceof Scalar) {
declaredCollections.push(collectionNode.value);
}
}
}
}
return declaredCollections;
}
/**
* Heuristically determines whether the path points at an Ansible play. The
* `fileUri` helps guessing in case the YAML tree doesn't give any clues.
*
* Returns `undefined` if highly uncertain.
*/
export function isPlayParam(
path: Node[],
fileUri?: string
): boolean | undefined {
const isAtRoot =
new AncestryBuilder(path).parentOfKey().parent(YAMLSeq).getPath()
?.length === 1;
if (isAtRoot) {
const mapNode = new AncestryBuilder(path).parentOfKey().get() as YAMLMap;
const providedKeys = getYamlMapKeys(mapNode);
const containsPlayKeyword = providedKeys.some((p) =>
playExclusiveKeywords.has(p)
);
if (containsPlayKeyword) {
return true;
}
if (fileUri) {
const isInRole = /\/roles\/[^/]+\/tasks\//.test(fileUri);
if (isInRole) {
return false;
}
}
} else {
return false;
}
}
/**
* Determines whether the path points at one of Ansible block parameter keys.
*/
export function isBlockParam(path: Node[]): boolean {
const builder = new AncestryBuilder(path).parentOfKey();
const mapNode = builder.get();
// the block must have a list as parent
const isInYAMLSeq = !!builder.parent(YAMLSeq).get();
if (mapNode && isInYAMLSeq) {
const providedKeys = getYamlMapKeys(mapNode);
return providedKeys.includes('block');
}
return false;
}
/**
* Determines whether the path points at one of Ansible role parameter keys.
*/
export function isRoleParam(path: Node[]): boolean {
const rolesKey = new AncestryBuilder(path)
.parentOfKey()
.parent(YAMLSeq)
.parent(YAMLMap)
.getStringKey();
return rolesKey === 'roles';
}
/**
* For a given Ansible task parameter path, find the module if it has been
* provided for the task.
*/
export async function findProvidedModule(
taskParamPath: Node[],
document: TextDocument,
docsLibrary: DocsLibrary
): Promise<IModuleMetadata | undefined> {
const taskParameterMap = new AncestryBuilder(taskParamPath)
.parent(YAMLMap)
.get();
if (taskParameterMap) {
// find task parameters that have been provided by the user
const providedParameters = new Set(getYamlMapKeys(taskParameterMap));
// should usually be 0 or 1
const providedModuleNames = [...providedParameters].filter(
(x) => !x || !isTaskKeyword(x)
);
// find the module if it has been provided
for (const m of providedModuleNames) {
const [module] = await docsLibrary.findModule(
m,
taskParamPath,
document.uri
);
if (module) {
return module;
}
}
}
}
export function getYamlMapKeys(mapNode: YAMLMap): Array<string> {
return mapNode.items.map((pair) => {
if (pair.key && pair.key instanceof Scalar) {
return pair.key.value;
}
});
}
export function getOrigRange(
node: Node | null | undefined
): [number, number] | null | undefined {
if (node?.cstNode?.range) {
const range = node.cstNode.range;
return [
range.origStart !== undefined ? range.origStart : range.start,
range.origEnd !== undefined ? range.origEnd : range.end,
];
} else {
return node?.range;
}
}
/** Parsing with the YAML library tailored to the needs of this extension */
export function parseAllDocuments(str: string, options?: Options): Document[] {
const cst = parseCST(str);
cst.setOrigRanges();
const parsedDocuments: Document[] = [];
for (const cstDoc of cst) {
const parsedDocument = new Document(
Object.assign({ keepCstNodes: true }, options)
);
parsedDocument.parse(cstDoc);
parsedDocuments.push(parsedDocument);
}
return parsedDocuments;
}
| {
if (this._index < 0) return null;
const path = this._path.slice(0, this._index + 1);
return path;
} | identifier_body |
yaml.ts | import * as _ from 'lodash';
import { Position, TextDocument } from 'vscode-languageserver-textdocument';
import { Document, Options, parseCST } from 'yaml';
import { Node, Pair, Scalar, YAMLMap, YAMLSeq } from 'yaml/types';
import { IModuleMetadata } from '../interfaces/module';
import { DocsLibrary } from '../services/docsLibrary';
import { isTaskKeyword, playExclusiveKeywords } from './ansible';
/**
* A helper class used for building YAML path assertions and retrieving parent
* nodes. The assertions are built up from the most nested (last in array)
* element.
*/
export class AncestryBuilder<N extends Node | Pair = Node> {
private _path: Node[];
private _index: number;
constructor(path: Node[] | null, index?: number) {
this._path = path || [];
this._index = index || this._path.length - 1;
}
/**
* Move up the path, optionally asserting the type of the parent.
*
* Unless Pair is explicitly asserted, it is ignored/skipped over when moving
* up.
*/
parent<X extends Node | Pair>(
type?: new (...args: unknown[]) => X
): AncestryBuilder<X> {
this._index--;
if (this.get() instanceof Pair) {
if (!type || !(type === Pair.prototype.constructor)) {
this._index--;
}
}
if (type) {
if (!(this.get() instanceof type)) {
this._index = Number.MIN_SAFE_INTEGER;
}
}
return this as unknown as AncestryBuilder<X>;
}
/**
* Move up the path, asserting that the current node was a key of a mapping
* pair. The builder skips over the Pair to the parent YAMLMap.
*/
parentOfKey(): AncestryBuilder<YAMLMap> {
const node = this.get();
this.parent(Pair);
const pairNode = this.get();
if (pairNode instanceof Pair && pairNode.key === node) {
this.parent(YAMLMap);
} else {
this._index = Number.MIN_SAFE_INTEGER;
}
return this as unknown as AncestryBuilder<YAMLMap>;
}
/**
* Get node up to which the assertions have led.
*/
get(): N | null {
return (this._path[this._index] as N) || null;
}
/**
* Get the key of the Pair one level down the path.
*
* The key is returned only if it indeed is a string Scalar.
*/
// The `this` argument is for generics restriction of this method.
getStringKey(this: AncestryBuilder<YAMLMap>): string | null {
const node = this._path[this._index + 1];
if (
node instanceof Pair &&
node.key instanceof Scalar &&
typeof node.key.value === 'string'
) {
return node.key.value;
}
return null;
}
/**
* Get the value of the Pair one level down the path.
*/
// The `this` argument is for generics restriction of this method.
getValue(this: AncestryBuilder<YAMLMap>): Node | null {
const node = this._path[this._index + 1];
if (node instanceof Pair) {
return node.value;
} |
/**
* Get the path to which the assertions have led.
*
* The path will be a subpath of the original path.
*/
getPath(): Node[] | null {
if (this._index < 0) return null;
const path = this._path.slice(0, this._index + 1);
return path;
}
/**
* Get the path to the key of the Pair one level down the path to which the
* assertions have led.
*
* The path will be a subpath of the original path.
*/
// The `this` argument is for generics restriction of this method.
getKeyPath(this: AncestryBuilder<YAMLMap>): Node[] | null {
if (this._index < 0) return null;
const path = this._path.slice(0, this._index + 1);
const node = this._path[this._index + 1];
if (node instanceof Pair) {
path.push(node);
path.push(node.key);
return path;
}
return null;
}
}
export function getPathAt(
document: TextDocument,
position: Position,
docs: Document[],
inclusive = false
): Node[] | null {
const offset = document.offsetAt(position);
const doc = _.find(docs, (d) => contains(d.contents, offset, inclusive));
if (doc && doc.contents) {
return getPathAtOffset([doc.contents], offset, inclusive);
}
return null;
}
export function contains(
node: Node | null,
offset: number,
inclusive: boolean
): boolean {
const range = getOrigRange(node);
return !!(
range &&
range[0] <= offset &&
(range[1] > offset || (inclusive && range[1] >= offset))
);
}
export function getPathAtOffset(
path: Node[],
offset: number,
inclusive: boolean
): Node[] | null {
if (path) {
const currentNode = path[path.length - 1];
if (currentNode instanceof YAMLMap) {
let pair = _.find(currentNode.items, (p) =>
contains(p.key, offset, inclusive)
);
if (pair) {
return getPathAtOffset(path.concat(pair, pair.key), offset, inclusive);
}
pair = _.find(currentNode.items, (p) =>
contains(p.value, offset, inclusive)
);
if (pair) {
return getPathAtOffset(
path.concat(pair, pair.value),
offset,
inclusive
);
}
pair = _.find(currentNode.items, (p) => {
const inBetweenNode = new Node();
const start = getOrigRange(p.key as Node)?.[1];
const end = getOrigRange(p.value as Node)?.[0];
if (start && end) {
inBetweenNode.range = [start, end - 1];
return contains(inBetweenNode, offset, inclusive);
} else return false;
});
if (pair) {
return path.concat(pair, new Node());
}
} else if (currentNode instanceof YAMLSeq) {
const item = _.find(currentNode.items, (n) =>
contains(n, offset, inclusive)
);
if (item) {
return getPathAtOffset(path.concat(item), offset, inclusive);
}
} else if (contains(currentNode, offset, inclusive)) {
return path;
}
return path.concat(new Node()); // empty node as indentation marker
}
return null;
}
export const tasksKey = /^(tasks|pre_tasks|post_tasks|block|rescue|always)$/;
/**
* Determines whether the path points at a parameter key of an Ansible task.
*/
export function isTaskParam(path: Node[]): boolean {
const taskListPath = new AncestryBuilder(path)
.parentOfKey()
.parent(YAMLSeq)
.getPath();
if (taskListPath) {
// basic shape of the task list has been found
if (isPlayParam(path) || isBlockParam(path) || isRoleParam(path))
return false;
if (taskListPath.length === 1) {
// case when the task list is at the top level of the document
return true;
}
const taskListKey = new AncestryBuilder(taskListPath)
.parent(YAMLMap)
.getStringKey();
if (taskListKey && tasksKey.test(taskListKey)) {
// case when a task list is defined explicitly by a keyword
return true;
}
}
return false;
}
/**
* Tries to find the list of collections declared at the Ansible play/block/task level.
*/
export function getDeclaredCollections(modulePath: Node[] | null): string[] {
const declaredCollections: string[] = [];
const taskParamsNode = new AncestryBuilder(modulePath).parent(YAMLMap).get();
declaredCollections.push(...getDeclaredCollectionsForMap(taskParamsNode));
let path: Node[] | null = new AncestryBuilder(modulePath)
.parent(YAMLMap)
.getPath();
while (true) {
// traverse the YAML up through the Ansible blocks
const builder = new AncestryBuilder(path).parent(YAMLSeq).parent(YAMLMap);
const key = builder.getStringKey();
if (key && /^block|rescue|always$/.test(key)) {
declaredCollections.push(...getDeclaredCollectionsForMap(builder.get()));
path = builder.getPath();
} else {
break;
}
}
// now we should be at the tasks/pre_tasks/post_tasks level
const playParamsNode = new AncestryBuilder(path)
.parent(YAMLSeq)
.parent(YAMLMap)
.get();
declaredCollections.push(...getDeclaredCollectionsForMap(playParamsNode));
return [...new Set(declaredCollections)]; // deduplicate
}
function getDeclaredCollectionsForMap(playNode: YAMLMap | null): string[] {
const declaredCollections: string[] = [];
const collectionsPair = _.find(
playNode?.items,
(pair) => pair.key instanceof Scalar && pair.key.value === 'collections'
);
if (collectionsPair) {
// we've found the collections declaration
const collectionsNode = collectionsPair.value;
if (collectionsNode instanceof YAMLSeq) {
for (const collectionNode of collectionsNode.items) {
if (collectionNode instanceof Scalar) {
declaredCollections.push(collectionNode.value);
}
}
}
}
return declaredCollections;
}
/**
* Heuristically determines whether the path points at an Ansible play. The
* `fileUri` helps guessing in case the YAML tree doesn't give any clues.
*
* Returns `undefined` if highly uncertain.
*/
export function isPlayParam(
path: Node[],
fileUri?: string
): boolean | undefined {
const isAtRoot =
new AncestryBuilder(path).parentOfKey().parent(YAMLSeq).getPath()
?.length === 1;
if (isAtRoot) {
const mapNode = new AncestryBuilder(path).parentOfKey().get() as YAMLMap;
const providedKeys = getYamlMapKeys(mapNode);
const containsPlayKeyword = providedKeys.some((p) =>
playExclusiveKeywords.has(p)
);
if (containsPlayKeyword) {
return true;
}
if (fileUri) {
const isInRole = /\/roles\/[^/]+\/tasks\//.test(fileUri);
if (isInRole) {
return false;
}
}
} else {
return false;
}
}
/**
* Determines whether the path points at one of Ansible block parameter keys.
*/
export function isBlockParam(path: Node[]): boolean {
const builder = new AncestryBuilder(path).parentOfKey();
const mapNode = builder.get();
// the block must have a list as parent
const isInYAMLSeq = !!builder.parent(YAMLSeq).get();
if (mapNode && isInYAMLSeq) {
const providedKeys = getYamlMapKeys(mapNode);
return providedKeys.includes('block');
}
return false;
}
/**
* Determines whether the path points at one of Ansible role parameter keys.
*/
export function isRoleParam(path: Node[]): boolean {
const rolesKey = new AncestryBuilder(path)
.parentOfKey()
.parent(YAMLSeq)
.parent(YAMLMap)
.getStringKey();
return rolesKey === 'roles';
}
/**
* For a given Ansible task parameter path, find the module if it has been
* provided for the task.
*/
export async function findProvidedModule(
taskParamPath: Node[],
document: TextDocument,
docsLibrary: DocsLibrary
): Promise<IModuleMetadata | undefined> {
const taskParameterMap = new AncestryBuilder(taskParamPath)
.parent(YAMLMap)
.get();
if (taskParameterMap) {
// find task parameters that have been provided by the user
const providedParameters = new Set(getYamlMapKeys(taskParameterMap));
// should usually be 0 or 1
const providedModuleNames = [...providedParameters].filter(
(x) => !x || !isTaskKeyword(x)
);
// find the module if it has been provided
for (const m of providedModuleNames) {
const [module] = await docsLibrary.findModule(
m,
taskParamPath,
document.uri
);
if (module) {
return module;
}
}
}
}
export function getYamlMapKeys(mapNode: YAMLMap): Array<string> {
return mapNode.items.map((pair) => {
if (pair.key && pair.key instanceof Scalar) {
return pair.key.value;
}
});
}
export function getOrigRange(
node: Node | null | undefined
): [number, number] | null | undefined {
if (node?.cstNode?.range) {
const range = node.cstNode.range;
return [
range.origStart !== undefined ? range.origStart : range.start,
range.origEnd !== undefined ? range.origEnd : range.end,
];
} else {
return node?.range;
}
}
/** Parsing with the YAML library tailored to the needs of this extension */
export function parseAllDocuments(str: string, options?: Options): Document[] {
const cst = parseCST(str);
cst.setOrigRanges();
const parsedDocuments: Document[] = [];
for (const cstDoc of cst) {
const parsedDocument = new Document(
Object.assign({ keepCstNodes: true }, options)
);
parsedDocument.parse(cstDoc);
parsedDocuments.push(parsedDocument);
}
return parsedDocuments;
} | return null;
} | random_line_split |
yaml.ts | import * as _ from 'lodash';
import { Position, TextDocument } from 'vscode-languageserver-textdocument';
import { Document, Options, parseCST } from 'yaml';
import { Node, Pair, Scalar, YAMLMap, YAMLSeq } from 'yaml/types';
import { IModuleMetadata } from '../interfaces/module';
import { DocsLibrary } from '../services/docsLibrary';
import { isTaskKeyword, playExclusiveKeywords } from './ansible';
/**
* A helper class used for building YAML path assertions and retrieving parent
* nodes. The assertions are built up from the most nested (last in array)
* element.
*/
export class AncestryBuilder<N extends Node | Pair = Node> {
private _path: Node[];
private _index: number;
constructor(path: Node[] | null, index?: number) {
this._path = path || [];
this._index = index || this._path.length - 1;
}
/**
* Move up the path, optionally asserting the type of the parent.
*
* Unless Pair is explicitly asserted, it is ignored/skipped over when moving
* up.
*/
parent<X extends Node | Pair>(
type?: new (...args: unknown[]) => X
): AncestryBuilder<X> {
this._index--;
if (this.get() instanceof Pair) {
if (!type || !(type === Pair.prototype.constructor)) {
this._index--;
}
}
if (type) {
if (!(this.get() instanceof type)) {
this._index = Number.MIN_SAFE_INTEGER;
}
}
return this as unknown as AncestryBuilder<X>;
}
/**
* Move up the path, asserting that the current node was a key of a mapping
* pair. The builder skips over the Pair to the parent YAMLMap.
*/
parentOfKey(): AncestryBuilder<YAMLMap> {
const node = this.get();
this.parent(Pair);
const pairNode = this.get();
if (pairNode instanceof Pair && pairNode.key === node) {
this.parent(YAMLMap);
} else {
this._index = Number.MIN_SAFE_INTEGER;
}
return this as unknown as AncestryBuilder<YAMLMap>;
}
/**
* Get node up to which the assertions have led.
*/
| (): N | null {
return (this._path[this._index] as N) || null;
}
/**
* Get the key of the Pair one level down the path.
*
* The key is returned only if it indeed is a string Scalar.
*/
// The `this` argument is for generics restriction of this method.
getStringKey(this: AncestryBuilder<YAMLMap>): string | null {
const node = this._path[this._index + 1];
if (
node instanceof Pair &&
node.key instanceof Scalar &&
typeof node.key.value === 'string'
) {
return node.key.value;
}
return null;
}
/**
* Get the value of the Pair one level down the path.
*/
// The `this` argument is for generics restriction of this method.
getValue(this: AncestryBuilder<YAMLMap>): Node | null {
const node = this._path[this._index + 1];
if (node instanceof Pair) {
return node.value;
}
return null;
}
/**
* Get the path to which the assertions have led.
*
* The path will be a subpath of the original path.
*/
getPath(): Node[] | null {
if (this._index < 0) return null;
const path = this._path.slice(0, this._index + 1);
return path;
}
/**
* Get the path to the key of the Pair one level down the path to which the
* assertions have led.
*
* The path will be a subpath of the original path.
*/
// The `this` argument is for generics restriction of this method.
getKeyPath(this: AncestryBuilder<YAMLMap>): Node[] | null {
if (this._index < 0) return null;
const path = this._path.slice(0, this._index + 1);
const node = this._path[this._index + 1];
if (node instanceof Pair) {
path.push(node);
path.push(node.key);
return path;
}
return null;
}
}
export function getPathAt(
document: TextDocument,
position: Position,
docs: Document[],
inclusive = false
): Node[] | null {
const offset = document.offsetAt(position);
const doc = _.find(docs, (d) => contains(d.contents, offset, inclusive));
if (doc && doc.contents) {
return getPathAtOffset([doc.contents], offset, inclusive);
}
return null;
}
export function contains(
node: Node | null,
offset: number,
inclusive: boolean
): boolean {
const range = getOrigRange(node);
return !!(
range &&
range[0] <= offset &&
(range[1] > offset || (inclusive && range[1] >= offset))
);
}
export function getPathAtOffset(
path: Node[],
offset: number,
inclusive: boolean
): Node[] | null {
if (path) {
const currentNode = path[path.length - 1];
if (currentNode instanceof YAMLMap) {
let pair = _.find(currentNode.items, (p) =>
contains(p.key, offset, inclusive)
);
if (pair) {
return getPathAtOffset(path.concat(pair, pair.key), offset, inclusive);
}
pair = _.find(currentNode.items, (p) =>
contains(p.value, offset, inclusive)
);
if (pair) {
return getPathAtOffset(
path.concat(pair, pair.value),
offset,
inclusive
);
}
pair = _.find(currentNode.items, (p) => {
const inBetweenNode = new Node();
const start = getOrigRange(p.key as Node)?.[1];
const end = getOrigRange(p.value as Node)?.[0];
if (start && end) {
inBetweenNode.range = [start, end - 1];
return contains(inBetweenNode, offset, inclusive);
} else return false;
});
if (pair) {
return path.concat(pair, new Node());
}
} else if (currentNode instanceof YAMLSeq) {
const item = _.find(currentNode.items, (n) =>
contains(n, offset, inclusive)
);
if (item) {
return getPathAtOffset(path.concat(item), offset, inclusive);
}
} else if (contains(currentNode, offset, inclusive)) {
return path;
}
return path.concat(new Node()); // empty node as indentation marker
}
return null;
}
export const tasksKey = /^(tasks|pre_tasks|post_tasks|block|rescue|always)$/;
/**
* Determines whether the path points at a parameter key of an Ansible task.
*/
export function isTaskParam(path: Node[]): boolean {
const taskListPath = new AncestryBuilder(path)
.parentOfKey()
.parent(YAMLSeq)
.getPath();
if (taskListPath) {
// basic shape of the task list has been found
if (isPlayParam(path) || isBlockParam(path) || isRoleParam(path))
return false;
if (taskListPath.length === 1) {
// case when the task list is at the top level of the document
return true;
}
const taskListKey = new AncestryBuilder(taskListPath)
.parent(YAMLMap)
.getStringKey();
if (taskListKey && tasksKey.test(taskListKey)) {
// case when a task list is defined explicitly by a keyword
return true;
}
}
return false;
}
/**
* Tries to find the list of collections declared at the Ansible play/block/task level.
*/
export function getDeclaredCollections(modulePath: Node[] | null): string[] {
const declaredCollections: string[] = [];
const taskParamsNode = new AncestryBuilder(modulePath).parent(YAMLMap).get();
declaredCollections.push(...getDeclaredCollectionsForMap(taskParamsNode));
let path: Node[] | null = new AncestryBuilder(modulePath)
.parent(YAMLMap)
.getPath();
while (true) {
// traverse the YAML up through the Ansible blocks
const builder = new AncestryBuilder(path).parent(YAMLSeq).parent(YAMLMap);
const key = builder.getStringKey();
if (key && /^block|rescue|always$/.test(key)) {
declaredCollections.push(...getDeclaredCollectionsForMap(builder.get()));
path = builder.getPath();
} else {
break;
}
}
// now we should be at the tasks/pre_tasks/post_tasks level
const playParamsNode = new AncestryBuilder(path)
.parent(YAMLSeq)
.parent(YAMLMap)
.get();
declaredCollections.push(...getDeclaredCollectionsForMap(playParamsNode));
return [...new Set(declaredCollections)]; // deduplicate
}
function getDeclaredCollectionsForMap(playNode: YAMLMap | null): string[] {
const declaredCollections: string[] = [];
const collectionsPair = _.find(
playNode?.items,
(pair) => pair.key instanceof Scalar && pair.key.value === 'collections'
);
if (collectionsPair) {
// we've found the collections declaration
const collectionsNode = collectionsPair.value;
if (collectionsNode instanceof YAMLSeq) {
for (const collectionNode of collectionsNode.items) {
if (collectionNode instanceof Scalar) {
declaredCollections.push(collectionNode.value);
}
}
}
}
return declaredCollections;
}
/**
* Heuristically determines whether the path points at an Ansible play. The
* `fileUri` helps guessing in case the YAML tree doesn't give any clues.
*
* Returns `undefined` if highly uncertain.
*/
export function isPlayParam(
path: Node[],
fileUri?: string
): boolean | undefined {
const isAtRoot =
new AncestryBuilder(path).parentOfKey().parent(YAMLSeq).getPath()
?.length === 1;
if (isAtRoot) {
const mapNode = new AncestryBuilder(path).parentOfKey().get() as YAMLMap;
const providedKeys = getYamlMapKeys(mapNode);
const containsPlayKeyword = providedKeys.some((p) =>
playExclusiveKeywords.has(p)
);
if (containsPlayKeyword) {
return true;
}
if (fileUri) {
const isInRole = /\/roles\/[^/]+\/tasks\//.test(fileUri);
if (isInRole) {
return false;
}
}
} else {
return false;
}
}
/**
* Determines whether the path points at one of Ansible block parameter keys.
*/
export function isBlockParam(path: Node[]): boolean {
const builder = new AncestryBuilder(path).parentOfKey();
const mapNode = builder.get();
// the block must have a list as parent
const isInYAMLSeq = !!builder.parent(YAMLSeq).get();
if (mapNode && isInYAMLSeq) {
const providedKeys = getYamlMapKeys(mapNode);
return providedKeys.includes('block');
}
return false;
}
/**
* Determines whether the path points at one of Ansible role parameter keys.
*/
export function isRoleParam(path: Node[]): boolean {
const rolesKey = new AncestryBuilder(path)
.parentOfKey()
.parent(YAMLSeq)
.parent(YAMLMap)
.getStringKey();
return rolesKey === 'roles';
}
/**
* For a given Ansible task parameter path, find the module if it has been
* provided for the task.
*/
export async function findProvidedModule(
taskParamPath: Node[],
document: TextDocument,
docsLibrary: DocsLibrary
): Promise<IModuleMetadata | undefined> {
const taskParameterMap = new AncestryBuilder(taskParamPath)
.parent(YAMLMap)
.get();
if (taskParameterMap) {
// find task parameters that have been provided by the user
const providedParameters = new Set(getYamlMapKeys(taskParameterMap));
// should usually be 0 or 1
const providedModuleNames = [...providedParameters].filter(
(x) => !x || !isTaskKeyword(x)
);
// find the module if it has been provided
for (const m of providedModuleNames) {
const [module] = await docsLibrary.findModule(
m,
taskParamPath,
document.uri
);
if (module) {
return module;
}
}
}
}
export function getYamlMapKeys(mapNode: YAMLMap): Array<string> {
return mapNode.items.map((pair) => {
if (pair.key && pair.key instanceof Scalar) {
return pair.key.value;
}
});
}
export function getOrigRange(
node: Node | null | undefined
): [number, number] | null | undefined {
if (node?.cstNode?.range) {
const range = node.cstNode.range;
return [
range.origStart !== undefined ? range.origStart : range.start,
range.origEnd !== undefined ? range.origEnd : range.end,
];
} else {
return node?.range;
}
}
/** Parsing with the YAML library tailored to the needs of this extension */
export function parseAllDocuments(str: string, options?: Options): Document[] {
const cst = parseCST(str);
cst.setOrigRanges();
const parsedDocuments: Document[] = [];
for (const cstDoc of cst) {
const parsedDocument = new Document(
Object.assign({ keepCstNodes: true }, options)
);
parsedDocument.parse(cstDoc);
parsedDocuments.push(parsedDocument);
}
return parsedDocuments;
}
| get | identifier_name |
main.rs | use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::fmt::{Display, Formatter, Result as FmtResult};
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
use std::str::FromStr;
use anyhow::{anyhow, Error as AnyErr, Result as AnyResult};
use clap::{App, Arg};
use log::debug;
use slotmap::SlotMap;
use aoc::grid::{Compass, Position, Turn};
use aoc::intcomp::{IntComp, OutputVec, Stopped};
#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct Room {
pub name: String,
pub message: String,
pub items: BTreeSet<String>,
pub directions: Vec<Compass>,
}
impl Display for Room {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
write!(f, "Room[{}, doors=", self.name)?;
if !self.directions.is_empty() {
write!(f, "doors=")?;
for &d in &self.directions {
write!(f, "{}", d)?;
}
write!(f, ", ")?;
}
for (ix, item) in self.items.iter().enumerate() {
if ix == 0 {
write!(f, "items={}", item)?;
} else {
write!(f, ",{}", item)?;
}
if ix == self.items.len() - 1 {
write!(f, ", ")?;
}
}
write!(f, "message='{}']", self.message)?;
Ok(())
}
}
fn str_to_compass(s: &str) -> AnyResult<Compass> {
Ok(match s {
"north" => Compass::North,
"south" => Compass::South,
"east" => Compass::East,
"west" => Compass::West,
_ => return Err(anyhow!("'{s}' is not a compoass direction")),
})
}
#[derive(Debug, thiserror::Error)]
#[error("Ejection: {}", _0)]
struct Ejection(String);
impl FromStr for Room {
type Err = AnyErr;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut lines = s.lines();
let mut first = lines.next().ok_or_else(|| anyhow!("No first line"))?.trim();
while first.is_empty() {
first = lines
.next()
.ok_or_else(|| anyhow!("No non-empty first line"))?
.trim();
}
assert!(first.starts_with("== "));
assert!(first.ends_with(" =="));
let name = first
.trim_end_matches(" ==")
.trim_start_matches("== ")
.to_owned();
let message = lines
.next()
.ok_or_else(|| anyhow!("No second line"))?
.trim()
.to_owned();
assert!(!message.is_empty(), "Expected non-empty message");
let next = lines.next().ok_or_else(|| anyhow!("No third line"))?.trim();
assert!(next.is_empty(), "Expected third line to be empty");
let next = lines
.next()
.ok_or_else(|| anyhow!("No fourth line"))?
.trim();
assert!(
next == "Doors here lead:",
"Expected third line to be 'Doors here lead:'"
);
let mut directions = Vec::new();
let mut next = lines
.next()
.ok_or_else(|| anyhow!("No line after doors"))?
.trim();
while next.starts_with("- ") {
let dir = next.trim_start_matches("- ");
let dir = str_to_compass(dir)?;
directions.push(dir);
next = lines
.next()
.ok_or_else(|| anyhow!("No line after directions"))?
.trim();
}
assert!(
next.is_empty(),
"Expected line after directions to be empty"
);
next = lines
.next()
.ok_or_else(|| anyhow!("No line after directions + empty"))?
.trim();
let mut items = BTreeSet::new();
if next == "Items here:" {
next = lines
.next()
.ok_or_else(|| anyhow!("No line after items"))?
.trim();
while next.starts_with("- ") {
let item = next.trim_start_matches("- ");
items.insert(item.to_owned());
next = lines
.next() | .ok_or_else(|| anyhow!("No line after items"))?
.trim();
}
assert!(
next.is_empty(),
"Expected line after items to be empty, got '{}'",
next
);
next = lines
.next()
.ok_or_else(|| anyhow!("No line after items + empty"))?
.trim();
}
if next.contains("Alert!") {
return Err(Ejection(next.into()).into());
}
assert_eq!(
next, "Command?",
"Expected line after items to be 'Command?'"
);
assert!(lines.next().is_none());
Ok(Room {
name,
message,
items,
directions,
})
}
}
#[derive(Debug, Clone)]
pub struct Explorer {
comp: IntComp,
room: Key,
direction: Compass,
carrying: BTreeSet<String>,
map: Map,
}
impl Explorer {
fn new(mut comp: IntComp) -> AnyResult<Self> {
let mut output = OutputVec::new();
comp.run_to_input(&mut output)?;
let out = output.as_string()?;
let room = Room::from_str(&out)?;
let mut map: Map = Default::default();
let key = map.add_room(room);
let exp = Explorer {
comp,
room: key,
direction: Compass::North,
carrying: Default::default(),
map,
};
Ok(exp)
}
fn see_room(&self) -> &Room {
self.map.get(self.room)
}
fn process_input_str(&mut self, output: &mut OutputVec, input: &str) -> anyhow::Result<String> {
log::debug!("Process 1: '{}'", input);
self.comp
.process_ascii(input, output)?
.expect(Stopped::Input)?;
log::debug!("Process 2: '\\n'");
self.comp
.process_ascii("\n", output)?
.expect(Stopped::Input)?;
log::debug!("Processed: '\\n'");
Ok(output.as_string()?)
}
fn process_str(&mut self, input: &str) -> anyhow::Result<String> {
let mut out = OutputVec::new();
match self.process_input_str(&mut out, input) {
Ok(v) => Ok(v),
Err(e) => {
let output = out.as_string()?;
log::warn!("process_str failure on input {}, output: {}", input, output);
Err(e)
}
}
}
// fn north(&mut self) -> anyhow::Result<String> {
// self.process_str("north")
// }
// fn south(&mut self) -> anyhow::Result<String> {
// self.process_str("south")
// }
// fn east(&mut self) -> anyhow::Result<String> {
// self.process_str("east")
// }
// fn west(&mut self) -> anyhow::Result<String> {
// self.process_str("west")
// }
pub fn step(&mut self, direction: Compass) -> anyhow::Result<()> {
let input = match direction {
Compass::East => "east",
Compass::North => "north",
Compass::South => "south",
Compass::West => "west",
};
log::debug!("Taking step {}", input);
let output = self.process_str(input)?;
log::debug!("Took step:\n{}\n", output);
let room = Room::from_str(&output)?;
let new = self.map.add_room(room);
self.map.add_door(self.room, direction, new);
self.room = new;
self.direction = direction;
Ok(())
}
pub fn take(&mut self, item: &str) -> anyhow::Result<String> {
log::debug!("Taking {}", item);
let mut s = String::from("take ");
s.push_str(item);
let result = self.process_str(&s)?;
let new = self.carrying.insert(item.to_string());
let room = self.map.rooms.get_mut(self.room).unwrap();
room.items.remove(item);
assert!(new, "Expected to add {}", item);
log::debug!(" took {}", item);
Ok(result)
}
pub fn drop(&mut self, item: &str) -> anyhow::Result<String> {
let found = self.carrying.remove(item);
assert!(!found, "Expected to drop {}", item);
let mut s = String::from("drop ");
s.push_str(item);
self.process_str(&s)
}
pub fn inventory(&mut self) -> anyhow::Result<String> {
self.process_str("inv")
}
fn left_wall_step(&mut self) -> AnyResult<()> {
let mut dir = self.direction + Turn::Left;
for _ in 0..4 {
log::debug!("Checking {} -> {}", self.direction, dir);
if self.see_room().directions.contains(&dir) {
break;
}
dir = dir + Turn::Right;
}
assert!(self.see_room().directions.contains(&dir));
self.step(dir)?;
log::debug!("Stepped {}, {}", dir, self.see_room().name);
Ok(())
}
fn explore_and_take(&mut self, items: &BTreeSet<String>) -> AnyResult<()> {
let start = self.room;
let mut start_directions = self.see_room().directions.clone();
start_directions.reverse();
loop {
let overlap: BTreeSet<String> = items
.intersection(&self.see_room().items)
.map(|s| s.to_owned())
.collect();
for item in overlap {
let _output = self.take(&item)?;
// println!("Took {}, output: {}", item, output.trim());
}
if self.see_room().name == "Security Checkpoint" {
// println!("inv: {}", self.inventory()?);
log::info!("Turning around at security checkpoint");
self.step(self.direction + Turn::Reverse)?;
continue;
}
if self.room == start {
let dir = match start_directions.pop() {
None => return Ok(()),
Some(d) => d,
};
self.step(dir)?;
continue;
}
self.left_wall_step()?;
}
}
pub fn goto(&mut self, room: &str) -> AnyResult<()> {
loop {
if self.see_room().name == room {
return Ok(());
}
self.left_wall_step()?;
}
}
}
type Key = slotmap::DefaultKey;
#[derive(Default, Debug, Clone)]
pub struct Map {
rooms_by_name: HashMap<String, Key>,
rooms: SlotMap<Key, Room>,
doors: HashMap<Key, BTreeMap<Compass, Key>>,
unvisited: HashMap<Key, BTreeSet<Compass>>,
}
impl Map {
fn add_room(&mut self, room: Room) -> Key {
if let Some(&key) = self.rooms_by_name.get(&room.name) {
return key;
}
let name = room.name.clone();
let directions = room.directions.clone();
let key = self.rooms.insert(room);
self.rooms_by_name.insert(name, key);
let unvisited = self.unvisited.insert(key, Default::default());
assert!(unvisited.is_none());
let unvisited = self.unvisited.get_mut(&key).unwrap();
for dir in directions {
unvisited.insert(dir);
}
key
}
fn visit(&mut self, room: Key, direction: Compass) {
if let Occupied(mut o) = self.unvisited.entry(room) {
o.get_mut().remove(&direction);
if o.get().is_empty() {
o.remove();
}
}
}
fn add_door(&mut self, first: Key, direction: Compass, second: Key) {
self.doors
.entry(first)
.or_default()
.insert(direction, second);
self.doors
.entry(second)
.or_default()
.insert(direction + Turn::Reverse, first);
self.visit(first, direction);
self.visit(second, direction + Turn::Reverse);
}
pub fn len(&self) -> usize {
self.rooms.len()
}
pub fn is_empty(&self) -> bool {
self.rooms.is_empty()
}
pub fn contains(&self, room: &Room) -> bool {
self.rooms_by_name.contains_key(&room.name)
}
fn get(&self, key: Key) -> &Room {
self.rooms.get(key).unwrap()
}
#[allow(dead_code)]
fn to_coords(&self, origin: Option<Key>) -> HashMap<Position, Key> {
let start = match (origin, self.rooms.iter().next()) {
(Some(k), _) => k,
(None, None) => return Default::default(),
(None, Some((k, _r))) => k,
};
let mut queue = vec![(Position(0, 0), start)];
let mut seen = HashSet::new();
let mut coords = HashMap::new();
while let Some((pos, r)) = queue.pop() {
match coords.entry(pos) {
Occupied(o) => {
assert!(seen.contains(&r));
assert!(*o.get() == r);
}
Vacant(v) => {
assert!(!seen.contains(&r));
seen.insert(r);
v.insert(r);
let neighbors = self.doors.get(&r).unwrap();
for (&d, &r) in neighbors {
queue.push((pos + d, r));
}
}
}
}
coords
}
}
/*
NV
||
SB KT=GW=PS
|| ||
CQ=HD
||
|| OB=ST
|| ||
SG HB=EG=WD=AR=SL
|| ||
HW=HC=CO
||
SC
AR: Arcade
CO: Corridor
CQ: Crew Quarters
EG: Engineering
GW: Gift Wrapping Center
HB: Hull Breach
HC: Hot Chocolate Fountain
HD: Holodeck
HW: Hallway
KT: Kitchen
NV: Navigation
OB: Observatory
SB: Sick Bay
SC: Security Checkpoint
SG: Storage
SL: Science Lab
ST: Stables
WD: Warp Drive Maintenance
*/
fn try_item_combos(initial_explorer: Explorer, items: Vec<String>) -> AnyResult<Explorer> {
let total = 1 << items.len();
for n in 0..total {
let mut explorer = initial_explorer.clone();
let cur_items: BTreeSet<String> = items
.iter()
.enumerate()
.filter_map(|(i, item)| {
if (n & (1 << i)) == 0 {
None
} else {
Some(item.clone())
}
})
.collect();
log::info!("Items: {:?}", cur_items);
explorer.explore_and_take(&cur_items)?;
assert_eq!(explorer.carrying, cur_items);
explorer.goto("Security Checkpoint")?;
let err = match explorer.left_wall_step() {
Ok(()) => return Ok(explorer),
Err(e) => e,
};
match err.downcast::<Ejection>() {
Ok(e) => log::info!(" {}", e),
Err(e) => return Err(e),
}
}
Err(anyhow::anyhow!("Got to end, found nothing!"))
}
#[allow(dead_code)]
fn explore_around(explorer: &mut Explorer) -> AnyResult<()> {
explorer.explore_and_take(&Default::default())?;
println!(
"Visited, back to start. Unvisited: {} Visited {} rooms with {} doors",
explorer.map.unvisited.len(),
explorer.map.rooms.len(),
explorer.map.doors.len()
);
println!("Items:");
for (_, room) in &explorer.map.rooms {
for item in &room.items {
println!(" - {}: {}", room.name, item);
}
}
println!("\nDoors:");
for (&ra, doors) in &explorer.map.doors {
for (dir, &rb) in doors {
let ra = explorer.map.rooms.get(ra).unwrap();
let rb = explorer.map.rooms.get(rb).unwrap();
println!(" {}: {} -> {}", dir, ra.name, rb.name);
}
}
Ok(())
}
fn main() -> anyhow::Result<()> {
env_logger::init();
let matches = App::new("Day 25")
.arg(
Arg::with_name("input")
.short("i")
.long("input")
.value_name("INPUT")
.takes_value(true),
)
.get_matches();
let input_path = matches.value_of("INPUT").unwrap_or("inputs/day25.txt");
debug!("Using input {}", input_path);
let file = File::open(input_path)?;
let buf_reader = BufReader::new(file);
let line: String = buf_reader
.lines()
.next()
.ok_or_else(|| anyhow::format_err!("No line found"))??;
let cp: IntComp = str::parse(&line)?;
let initial_explorer = Explorer::new(cp)?;
let all_items = vec![
// "food ration".to_owned(),
"candy cane".to_owned(),
"mouse".to_owned(),
// "mug".to_owned(),
"coin".to_owned(),
// "ornament".to_owned(),
"semiconductor".to_owned(),
// "mutex".to_owned(),
];
try_item_combos(initial_explorer, all_items)?;
Ok(())
}
#[cfg(test)]
mod tests {
use test_log::test;
#[allow(unused_imports)]
use super::*;
#[test]
fn test_thing() -> anyhow::Result<()> {
Ok(())
}
} | random_line_split | |
main.rs | use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::fmt::{Display, Formatter, Result as FmtResult};
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
use std::str::FromStr;
use anyhow::{anyhow, Error as AnyErr, Result as AnyResult};
use clap::{App, Arg};
use log::debug;
use slotmap::SlotMap;
use aoc::grid::{Compass, Position, Turn};
use aoc::intcomp::{IntComp, OutputVec, Stopped};
#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct Room {
pub name: String,
pub message: String,
pub items: BTreeSet<String>,
pub directions: Vec<Compass>,
}
impl Display for Room {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
write!(f, "Room[{}, doors=", self.name)?;
if !self.directions.is_empty() {
write!(f, "doors=")?;
for &d in &self.directions {
write!(f, "{}", d)?;
}
write!(f, ", ")?;
}
for (ix, item) in self.items.iter().enumerate() {
if ix == 0 {
write!(f, "items={}", item)?;
} else {
write!(f, ",{}", item)?;
}
if ix == self.items.len() - 1 {
write!(f, ", ")?;
}
}
write!(f, "message='{}']", self.message)?;
Ok(())
}
}
fn str_to_compass(s: &str) -> AnyResult<Compass> {
Ok(match s {
"north" => Compass::North,
"south" => Compass::South,
"east" => Compass::East,
"west" => Compass::West,
_ => return Err(anyhow!("'{s}' is not a compoass direction")),
})
}
#[derive(Debug, thiserror::Error)]
#[error("Ejection: {}", _0)]
struct Ejection(String);
impl FromStr for Room {
type Err = AnyErr;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut lines = s.lines();
let mut first = lines.next().ok_or_else(|| anyhow!("No first line"))?.trim();
while first.is_empty() {
first = lines
.next()
.ok_or_else(|| anyhow!("No non-empty first line"))?
.trim();
}
assert!(first.starts_with("== "));
assert!(first.ends_with(" =="));
let name = first
.trim_end_matches(" ==")
.trim_start_matches("== ")
.to_owned();
let message = lines
.next()
.ok_or_else(|| anyhow!("No second line"))?
.trim()
.to_owned();
assert!(!message.is_empty(), "Expected non-empty message");
let next = lines.next().ok_or_else(|| anyhow!("No third line"))?.trim();
assert!(next.is_empty(), "Expected third line to be empty");
let next = lines
.next()
.ok_or_else(|| anyhow!("No fourth line"))?
.trim();
assert!(
next == "Doors here lead:",
"Expected third line to be 'Doors here lead:'"
);
let mut directions = Vec::new();
let mut next = lines
.next()
.ok_or_else(|| anyhow!("No line after doors"))?
.trim();
while next.starts_with("- ") {
let dir = next.trim_start_matches("- ");
let dir = str_to_compass(dir)?;
directions.push(dir);
next = lines
.next()
.ok_or_else(|| anyhow!("No line after directions"))?
.trim();
}
assert!(
next.is_empty(),
"Expected line after directions to be empty"
);
next = lines
.next()
.ok_or_else(|| anyhow!("No line after directions + empty"))?
.trim();
let mut items = BTreeSet::new();
if next == "Items here:" {
next = lines
.next()
.ok_or_else(|| anyhow!("No line after items"))?
.trim();
while next.starts_with("- ") {
let item = next.trim_start_matches("- ");
items.insert(item.to_owned());
next = lines
.next()
.ok_or_else(|| anyhow!("No line after items"))?
.trim();
}
assert!(
next.is_empty(),
"Expected line after items to be empty, got '{}'",
next
);
next = lines
.next()
.ok_or_else(|| anyhow!("No line after items + empty"))?
.trim();
}
if next.contains("Alert!") {
return Err(Ejection(next.into()).into());
}
assert_eq!(
next, "Command?",
"Expected line after items to be 'Command?'"
);
assert!(lines.next().is_none());
Ok(Room {
name,
message,
items,
directions,
})
}
}
#[derive(Debug, Clone)]
pub struct Explorer {
comp: IntComp,
room: Key,
direction: Compass,
carrying: BTreeSet<String>,
map: Map,
}
impl Explorer {
fn new(mut comp: IntComp) -> AnyResult<Self> {
let mut output = OutputVec::new();
comp.run_to_input(&mut output)?;
let out = output.as_string()?;
let room = Room::from_str(&out)?;
let mut map: Map = Default::default();
let key = map.add_room(room);
let exp = Explorer {
comp,
room: key,
direction: Compass::North,
carrying: Default::default(),
map,
};
Ok(exp)
}
fn see_room(&self) -> &Room {
self.map.get(self.room)
}
fn process_input_str(&mut self, output: &mut OutputVec, input: &str) -> anyhow::Result<String> {
log::debug!("Process 1: '{}'", input);
self.comp
.process_ascii(input, output)?
.expect(Stopped::Input)?;
log::debug!("Process 2: '\\n'");
self.comp
.process_ascii("\n", output)?
.expect(Stopped::Input)?;
log::debug!("Processed: '\\n'");
Ok(output.as_string()?)
}
fn process_str(&mut self, input: &str) -> anyhow::Result<String> {
let mut out = OutputVec::new();
match self.process_input_str(&mut out, input) {
Ok(v) => Ok(v),
Err(e) => {
let output = out.as_string()?;
log::warn!("process_str failure on input {}, output: {}", input, output);
Err(e)
}
}
}
// fn north(&mut self) -> anyhow::Result<String> {
// self.process_str("north")
// }
// fn south(&mut self) -> anyhow::Result<String> {
// self.process_str("south")
// }
// fn east(&mut self) -> anyhow::Result<String> {
// self.process_str("east")
// }
// fn west(&mut self) -> anyhow::Result<String> {
// self.process_str("west")
// }
pub fn step(&mut self, direction: Compass) -> anyhow::Result<()> {
let input = match direction {
Compass::East => "east",
Compass::North => "north",
Compass::South => "south",
Compass::West => "west",
};
log::debug!("Taking step {}", input);
let output = self.process_str(input)?;
log::debug!("Took step:\n{}\n", output);
let room = Room::from_str(&output)?;
let new = self.map.add_room(room);
self.map.add_door(self.room, direction, new);
self.room = new;
self.direction = direction;
Ok(())
}
pub fn take(&mut self, item: &str) -> anyhow::Result<String> {
log::debug!("Taking {}", item);
let mut s = String::from("take ");
s.push_str(item);
let result = self.process_str(&s)?;
let new = self.carrying.insert(item.to_string());
let room = self.map.rooms.get_mut(self.room).unwrap();
room.items.remove(item);
assert!(new, "Expected to add {}", item);
log::debug!(" took {}", item);
Ok(result)
}
pub fn drop(&mut self, item: &str) -> anyhow::Result<String> {
let found = self.carrying.remove(item);
assert!(!found, "Expected to drop {}", item);
let mut s = String::from("drop ");
s.push_str(item);
self.process_str(&s)
}
pub fn inventory(&mut self) -> anyhow::Result<String> {
self.process_str("inv")
}
fn left_wall_step(&mut self) -> AnyResult<()> {
let mut dir = self.direction + Turn::Left;
for _ in 0..4 {
log::debug!("Checking {} -> {}", self.direction, dir);
if self.see_room().directions.contains(&dir) {
break;
}
dir = dir + Turn::Right;
}
assert!(self.see_room().directions.contains(&dir));
self.step(dir)?;
log::debug!("Stepped {}, {}", dir, self.see_room().name);
Ok(())
}
fn explore_and_take(&mut self, items: &BTreeSet<String>) -> AnyResult<()> {
let start = self.room;
let mut start_directions = self.see_room().directions.clone();
start_directions.reverse();
loop {
let overlap: BTreeSet<String> = items
.intersection(&self.see_room().items)
.map(|s| s.to_owned())
.collect();
for item in overlap {
let _output = self.take(&item)?;
// println!("Took {}, output: {}", item, output.trim());
}
if self.see_room().name == "Security Checkpoint" {
// println!("inv: {}", self.inventory()?);
log::info!("Turning around at security checkpoint");
self.step(self.direction + Turn::Reverse)?;
continue;
}
if self.room == start {
let dir = match start_directions.pop() {
None => return Ok(()),
Some(d) => d,
};
self.step(dir)?;
continue;
}
self.left_wall_step()?;
}
}
pub fn goto(&mut self, room: &str) -> AnyResult<()> {
loop {
if self.see_room().name == room {
return Ok(());
}
self.left_wall_step()?;
}
}
}
type Key = slotmap::DefaultKey;
#[derive(Default, Debug, Clone)]
pub struct Map {
rooms_by_name: HashMap<String, Key>,
rooms: SlotMap<Key, Room>,
doors: HashMap<Key, BTreeMap<Compass, Key>>,
unvisited: HashMap<Key, BTreeSet<Compass>>,
}
impl Map {
fn add_room(&mut self, room: Room) -> Key {
if let Some(&key) = self.rooms_by_name.get(&room.name) {
return key;
}
let name = room.name.clone();
let directions = room.directions.clone();
let key = self.rooms.insert(room);
self.rooms_by_name.insert(name, key);
let unvisited = self.unvisited.insert(key, Default::default());
assert!(unvisited.is_none());
let unvisited = self.unvisited.get_mut(&key).unwrap();
for dir in directions {
unvisited.insert(dir);
}
key
}
fn visit(&mut self, room: Key, direction: Compass) {
if let Occupied(mut o) = self.unvisited.entry(room) {
o.get_mut().remove(&direction);
if o.get().is_empty() {
o.remove();
}
}
}
fn add_door(&mut self, first: Key, direction: Compass, second: Key) {
self.doors
.entry(first)
.or_default()
.insert(direction, second);
self.doors
.entry(second)
.or_default()
.insert(direction + Turn::Reverse, first);
self.visit(first, direction);
self.visit(second, direction + Turn::Reverse);
}
pub fn len(&self) -> usize {
self.rooms.len()
}
pub fn is_empty(&self) -> bool {
self.rooms.is_empty()
}
pub fn contains(&self, room: &Room) -> bool {
self.rooms_by_name.contains_key(&room.name)
}
fn get(&self, key: Key) -> &Room {
self.rooms.get(key).unwrap()
}
#[allow(dead_code)]
fn to_coords(&self, origin: Option<Key>) -> HashMap<Position, Key> {
let start = match (origin, self.rooms.iter().next()) {
(Some(k), _) => k,
(None, None) => return Default::default(),
(None, Some((k, _r))) => k,
};
let mut queue = vec![(Position(0, 0), start)];
let mut seen = HashSet::new();
let mut coords = HashMap::new();
while let Some((pos, r)) = queue.pop() {
match coords.entry(pos) {
Occupied(o) => {
assert!(seen.contains(&r));
assert!(*o.get() == r);
}
Vacant(v) => {
assert!(!seen.contains(&r));
seen.insert(r);
v.insert(r);
let neighbors = self.doors.get(&r).unwrap();
for (&d, &r) in neighbors {
queue.push((pos + d, r));
}
}
}
}
coords
}
}
/*
NV
||
SB KT=GW=PS
|| ||
CQ=HD
||
|| OB=ST
|| ||
SG HB=EG=WD=AR=SL
|| ||
HW=HC=CO
||
SC
AR: Arcade
CO: Corridor
CQ: Crew Quarters
EG: Engineering
GW: Gift Wrapping Center
HB: Hull Breach
HC: Hot Chocolate Fountain
HD: Holodeck
HW: Hallway
KT: Kitchen
NV: Navigation
OB: Observatory
SB: Sick Bay
SC: Security Checkpoint
SG: Storage
SL: Science Lab
ST: Stables
WD: Warp Drive Maintenance
*/
fn try_item_combos(initial_explorer: Explorer, items: Vec<String>) -> AnyResult<Explorer> |
#[allow(dead_code)]
fn explore_around(explorer: &mut Explorer) -> AnyResult<()> {
explorer.explore_and_take(&Default::default())?;
println!(
"Visited, back to start. Unvisited: {} Visited {} rooms with {} doors",
explorer.map.unvisited.len(),
explorer.map.rooms.len(),
explorer.map.doors.len()
);
println!("Items:");
for (_, room) in &explorer.map.rooms {
for item in &room.items {
println!(" - {}: {}", room.name, item);
}
}
println!("\nDoors:");
for (&ra, doors) in &explorer.map.doors {
for (dir, &rb) in doors {
let ra = explorer.map.rooms.get(ra).unwrap();
let rb = explorer.map.rooms.get(rb).unwrap();
println!(" {}: {} -> {}", dir, ra.name, rb.name);
}
}
Ok(())
}
fn main() -> anyhow::Result<()> {
env_logger::init();
let matches = App::new("Day 25")
.arg(
Arg::with_name("input")
.short("i")
.long("input")
.value_name("INPUT")
.takes_value(true),
)
.get_matches();
let input_path = matches.value_of("INPUT").unwrap_or("inputs/day25.txt");
debug!("Using input {}", input_path);
let file = File::open(input_path)?;
let buf_reader = BufReader::new(file);
let line: String = buf_reader
.lines()
.next()
.ok_or_else(|| anyhow::format_err!("No line found"))??;
let cp: IntComp = str::parse(&line)?;
let initial_explorer = Explorer::new(cp)?;
let all_items = vec![
// "food ration".to_owned(),
"candy cane".to_owned(),
"mouse".to_owned(),
// "mug".to_owned(),
"coin".to_owned(),
// "ornament".to_owned(),
"semiconductor".to_owned(),
// "mutex".to_owned(),
];
try_item_combos(initial_explorer, all_items)?;
Ok(())
}
#[cfg(test)]
mod tests {
use test_log::test;
#[allow(unused_imports)]
use super::*;
#[test]
fn test_thing() -> anyhow::Result<()> {
Ok(())
}
}
| {
let total = 1 << items.len();
for n in 0..total {
let mut explorer = initial_explorer.clone();
let cur_items: BTreeSet<String> = items
.iter()
.enumerate()
.filter_map(|(i, item)| {
if (n & (1 << i)) == 0 {
None
} else {
Some(item.clone())
}
})
.collect();
log::info!("Items: {:?}", cur_items);
explorer.explore_and_take(&cur_items)?;
assert_eq!(explorer.carrying, cur_items);
explorer.goto("Security Checkpoint")?;
let err = match explorer.left_wall_step() {
Ok(()) => return Ok(explorer),
Err(e) => e,
};
match err.downcast::<Ejection>() {
Ok(e) => log::info!(" {}", e),
Err(e) => return Err(e),
}
}
Err(anyhow::anyhow!("Got to end, found nothing!"))
} | identifier_body |
main.rs | use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::fmt::{Display, Formatter, Result as FmtResult};
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
use std::str::FromStr;
use anyhow::{anyhow, Error as AnyErr, Result as AnyResult};
use clap::{App, Arg};
use log::debug;
use slotmap::SlotMap;
use aoc::grid::{Compass, Position, Turn};
use aoc::intcomp::{IntComp, OutputVec, Stopped};
#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct Room {
pub name: String,
pub message: String,
pub items: BTreeSet<String>,
pub directions: Vec<Compass>,
}
impl Display for Room {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
write!(f, "Room[{}, doors=", self.name)?;
if !self.directions.is_empty() {
write!(f, "doors=")?;
for &d in &self.directions {
write!(f, "{}", d)?;
}
write!(f, ", ")?;
}
for (ix, item) in self.items.iter().enumerate() {
if ix == 0 {
write!(f, "items={}", item)?;
} else {
write!(f, ",{}", item)?;
}
if ix == self.items.len() - 1 {
write!(f, ", ")?;
}
}
write!(f, "message='{}']", self.message)?;
Ok(())
}
}
fn str_to_compass(s: &str) -> AnyResult<Compass> {
Ok(match s {
"north" => Compass::North,
"south" => Compass::South,
"east" => Compass::East,
"west" => Compass::West,
_ => return Err(anyhow!("'{s}' is not a compoass direction")),
})
}
#[derive(Debug, thiserror::Error)]
#[error("Ejection: {}", _0)]
struct Ejection(String);
impl FromStr for Room {
type Err = AnyErr;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut lines = s.lines();
let mut first = lines.next().ok_or_else(|| anyhow!("No first line"))?.trim();
while first.is_empty() {
first = lines
.next()
.ok_or_else(|| anyhow!("No non-empty first line"))?
.trim();
}
assert!(first.starts_with("== "));
assert!(first.ends_with(" =="));
let name = first
.trim_end_matches(" ==")
.trim_start_matches("== ")
.to_owned();
let message = lines
.next()
.ok_or_else(|| anyhow!("No second line"))?
.trim()
.to_owned();
assert!(!message.is_empty(), "Expected non-empty message");
let next = lines.next().ok_or_else(|| anyhow!("No third line"))?.trim();
assert!(next.is_empty(), "Expected third line to be empty");
let next = lines
.next()
.ok_or_else(|| anyhow!("No fourth line"))?
.trim();
assert!(
next == "Doors here lead:",
"Expected third line to be 'Doors here lead:'"
);
let mut directions = Vec::new();
let mut next = lines
.next()
.ok_or_else(|| anyhow!("No line after doors"))?
.trim();
while next.starts_with("- ") {
let dir = next.trim_start_matches("- ");
let dir = str_to_compass(dir)?;
directions.push(dir);
next = lines
.next()
.ok_or_else(|| anyhow!("No line after directions"))?
.trim();
}
assert!(
next.is_empty(),
"Expected line after directions to be empty"
);
next = lines
.next()
.ok_or_else(|| anyhow!("No line after directions + empty"))?
.trim();
let mut items = BTreeSet::new();
if next == "Items here:" {
next = lines
.next()
.ok_or_else(|| anyhow!("No line after items"))?
.trim();
while next.starts_with("- ") {
let item = next.trim_start_matches("- ");
items.insert(item.to_owned());
next = lines
.next()
.ok_or_else(|| anyhow!("No line after items"))?
.trim();
}
assert!(
next.is_empty(),
"Expected line after items to be empty, got '{}'",
next
);
next = lines
.next()
.ok_or_else(|| anyhow!("No line after items + empty"))?
.trim();
}
if next.contains("Alert!") {
return Err(Ejection(next.into()).into());
}
assert_eq!(
next, "Command?",
"Expected line after items to be 'Command?'"
);
assert!(lines.next().is_none());
Ok(Room {
name,
message,
items,
directions,
})
}
}
#[derive(Debug, Clone)]
pub struct Explorer {
comp: IntComp,
room: Key,
direction: Compass,
carrying: BTreeSet<String>,
map: Map,
}
impl Explorer {
fn new(mut comp: IntComp) -> AnyResult<Self> {
let mut output = OutputVec::new();
comp.run_to_input(&mut output)?;
let out = output.as_string()?;
let room = Room::from_str(&out)?;
let mut map: Map = Default::default();
let key = map.add_room(room);
let exp = Explorer {
comp,
room: key,
direction: Compass::North,
carrying: Default::default(),
map,
};
Ok(exp)
}
fn see_room(&self) -> &Room {
self.map.get(self.room)
}
fn process_input_str(&mut self, output: &mut OutputVec, input: &str) -> anyhow::Result<String> {
log::debug!("Process 1: '{}'", input);
self.comp
.process_ascii(input, output)?
.expect(Stopped::Input)?;
log::debug!("Process 2: '\\n'");
self.comp
.process_ascii("\n", output)?
.expect(Stopped::Input)?;
log::debug!("Processed: '\\n'");
Ok(output.as_string()?)
}
fn process_str(&mut self, input: &str) -> anyhow::Result<String> {
let mut out = OutputVec::new();
match self.process_input_str(&mut out, input) {
Ok(v) => Ok(v),
Err(e) => {
let output = out.as_string()?;
log::warn!("process_str failure on input {}, output: {}", input, output);
Err(e)
}
}
}
// fn north(&mut self) -> anyhow::Result<String> {
// self.process_str("north")
// }
// fn south(&mut self) -> anyhow::Result<String> {
// self.process_str("south")
// }
// fn east(&mut self) -> anyhow::Result<String> {
// self.process_str("east")
// }
// fn west(&mut self) -> anyhow::Result<String> {
// self.process_str("west")
// }
pub fn step(&mut self, direction: Compass) -> anyhow::Result<()> {
let input = match direction {
Compass::East => "east",
Compass::North => "north",
Compass::South => "south",
Compass::West => "west",
};
log::debug!("Taking step {}", input);
let output = self.process_str(input)?;
log::debug!("Took step:\n{}\n", output);
let room = Room::from_str(&output)?;
let new = self.map.add_room(room);
self.map.add_door(self.room, direction, new);
self.room = new;
self.direction = direction;
Ok(())
}
pub fn take(&mut self, item: &str) -> anyhow::Result<String> {
log::debug!("Taking {}", item);
let mut s = String::from("take ");
s.push_str(item);
let result = self.process_str(&s)?;
let new = self.carrying.insert(item.to_string());
let room = self.map.rooms.get_mut(self.room).unwrap();
room.items.remove(item);
assert!(new, "Expected to add {}", item);
log::debug!(" took {}", item);
Ok(result)
}
pub fn drop(&mut self, item: &str) -> anyhow::Result<String> {
let found = self.carrying.remove(item);
assert!(!found, "Expected to drop {}", item);
let mut s = String::from("drop ");
s.push_str(item);
self.process_str(&s)
}
pub fn inventory(&mut self) -> anyhow::Result<String> {
self.process_str("inv")
}
fn left_wall_step(&mut self) -> AnyResult<()> {
let mut dir = self.direction + Turn::Left;
for _ in 0..4 {
log::debug!("Checking {} -> {}", self.direction, dir);
if self.see_room().directions.contains(&dir) {
break;
}
dir = dir + Turn::Right;
}
assert!(self.see_room().directions.contains(&dir));
self.step(dir)?;
log::debug!("Stepped {}, {}", dir, self.see_room().name);
Ok(())
}
fn explore_and_take(&mut self, items: &BTreeSet<String>) -> AnyResult<()> {
let start = self.room;
let mut start_directions = self.see_room().directions.clone();
start_directions.reverse();
loop {
let overlap: BTreeSet<String> = items
.intersection(&self.see_room().items)
.map(|s| s.to_owned())
.collect();
for item in overlap {
let _output = self.take(&item)?;
// println!("Took {}, output: {}", item, output.trim());
}
if self.see_room().name == "Security Checkpoint" {
// println!("inv: {}", self.inventory()?);
log::info!("Turning around at security checkpoint");
self.step(self.direction + Turn::Reverse)?;
continue;
}
if self.room == start {
let dir = match start_directions.pop() {
None => return Ok(()),
Some(d) => d,
};
self.step(dir)?;
continue;
}
self.left_wall_step()?;
}
}
pub fn goto(&mut self, room: &str) -> AnyResult<()> {
loop {
if self.see_room().name == room {
return Ok(());
}
self.left_wall_step()?;
}
}
}
type Key = slotmap::DefaultKey;
#[derive(Default, Debug, Clone)]
pub struct Map {
rooms_by_name: HashMap<String, Key>,
rooms: SlotMap<Key, Room>,
doors: HashMap<Key, BTreeMap<Compass, Key>>,
unvisited: HashMap<Key, BTreeSet<Compass>>,
}
impl Map {
fn add_room(&mut self, room: Room) -> Key {
if let Some(&key) = self.rooms_by_name.get(&room.name) {
return key;
}
let name = room.name.clone();
let directions = room.directions.clone();
let key = self.rooms.insert(room);
self.rooms_by_name.insert(name, key);
let unvisited = self.unvisited.insert(key, Default::default());
assert!(unvisited.is_none());
let unvisited = self.unvisited.get_mut(&key).unwrap();
for dir in directions {
unvisited.insert(dir);
}
key
}
fn visit(&mut self, room: Key, direction: Compass) {
if let Occupied(mut o) = self.unvisited.entry(room) {
o.get_mut().remove(&direction);
if o.get().is_empty() {
o.remove();
}
}
}
fn add_door(&mut self, first: Key, direction: Compass, second: Key) {
self.doors
.entry(first)
.or_default()
.insert(direction, second);
self.doors
.entry(second)
.or_default()
.insert(direction + Turn::Reverse, first);
self.visit(first, direction);
self.visit(second, direction + Turn::Reverse);
}
pub fn | (&self) -> usize {
self.rooms.len()
}
pub fn is_empty(&self) -> bool {
self.rooms.is_empty()
}
pub fn contains(&self, room: &Room) -> bool {
self.rooms_by_name.contains_key(&room.name)
}
fn get(&self, key: Key) -> &Room {
self.rooms.get(key).unwrap()
}
#[allow(dead_code)]
fn to_coords(&self, origin: Option<Key>) -> HashMap<Position, Key> {
let start = match (origin, self.rooms.iter().next()) {
(Some(k), _) => k,
(None, None) => return Default::default(),
(None, Some((k, _r))) => k,
};
let mut queue = vec![(Position(0, 0), start)];
let mut seen = HashSet::new();
let mut coords = HashMap::new();
while let Some((pos, r)) = queue.pop() {
match coords.entry(pos) {
Occupied(o) => {
assert!(seen.contains(&r));
assert!(*o.get() == r);
}
Vacant(v) => {
assert!(!seen.contains(&r));
seen.insert(r);
v.insert(r);
let neighbors = self.doors.get(&r).unwrap();
for (&d, &r) in neighbors {
queue.push((pos + d, r));
}
}
}
}
coords
}
}
/*
NV
||
SB KT=GW=PS
|| ||
CQ=HD
||
|| OB=ST
|| ||
SG HB=EG=WD=AR=SL
|| ||
HW=HC=CO
||
SC
AR: Arcade
CO: Corridor
CQ: Crew Quarters
EG: Engineering
GW: Gift Wrapping Center
HB: Hull Breach
HC: Hot Chocolate Fountain
HD: Holodeck
HW: Hallway
KT: Kitchen
NV: Navigation
OB: Observatory
SB: Sick Bay
SC: Security Checkpoint
SG: Storage
SL: Science Lab
ST: Stables
WD: Warp Drive Maintenance
*/
fn try_item_combos(initial_explorer: Explorer, items: Vec<String>) -> AnyResult<Explorer> {
let total = 1 << items.len();
for n in 0..total {
let mut explorer = initial_explorer.clone();
let cur_items: BTreeSet<String> = items
.iter()
.enumerate()
.filter_map(|(i, item)| {
if (n & (1 << i)) == 0 {
None
} else {
Some(item.clone())
}
})
.collect();
log::info!("Items: {:?}", cur_items);
explorer.explore_and_take(&cur_items)?;
assert_eq!(explorer.carrying, cur_items);
explorer.goto("Security Checkpoint")?;
let err = match explorer.left_wall_step() {
Ok(()) => return Ok(explorer),
Err(e) => e,
};
match err.downcast::<Ejection>() {
Ok(e) => log::info!(" {}", e),
Err(e) => return Err(e),
}
}
Err(anyhow::anyhow!("Got to end, found nothing!"))
}
#[allow(dead_code)]
fn explore_around(explorer: &mut Explorer) -> AnyResult<()> {
explorer.explore_and_take(&Default::default())?;
println!(
"Visited, back to start. Unvisited: {} Visited {} rooms with {} doors",
explorer.map.unvisited.len(),
explorer.map.rooms.len(),
explorer.map.doors.len()
);
println!("Items:");
for (_, room) in &explorer.map.rooms {
for item in &room.items {
println!(" - {}: {}", room.name, item);
}
}
println!("\nDoors:");
for (&ra, doors) in &explorer.map.doors {
for (dir, &rb) in doors {
let ra = explorer.map.rooms.get(ra).unwrap();
let rb = explorer.map.rooms.get(rb).unwrap();
println!(" {}: {} -> {}", dir, ra.name, rb.name);
}
}
Ok(())
}
fn main() -> anyhow::Result<()> {
env_logger::init();
let matches = App::new("Day 25")
.arg(
Arg::with_name("input")
.short("i")
.long("input")
.value_name("INPUT")
.takes_value(true),
)
.get_matches();
let input_path = matches.value_of("INPUT").unwrap_or("inputs/day25.txt");
debug!("Using input {}", input_path);
let file = File::open(input_path)?;
let buf_reader = BufReader::new(file);
let line: String = buf_reader
.lines()
.next()
.ok_or_else(|| anyhow::format_err!("No line found"))??;
let cp: IntComp = str::parse(&line)?;
let initial_explorer = Explorer::new(cp)?;
let all_items = vec![
// "food ration".to_owned(),
"candy cane".to_owned(),
"mouse".to_owned(),
// "mug".to_owned(),
"coin".to_owned(),
// "ornament".to_owned(),
"semiconductor".to_owned(),
// "mutex".to_owned(),
];
try_item_combos(initial_explorer, all_items)?;
Ok(())
}
#[cfg(test)]
mod tests {
use test_log::test;
#[allow(unused_imports)]
use super::*;
#[test]
fn test_thing() -> anyhow::Result<()> {
Ok(())
}
}
| len | identifier_name |
main.rs | use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::fmt::{Display, Formatter, Result as FmtResult};
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
use std::str::FromStr;
use anyhow::{anyhow, Error as AnyErr, Result as AnyResult};
use clap::{App, Arg};
use log::debug;
use slotmap::SlotMap;
use aoc::grid::{Compass, Position, Turn};
use aoc::intcomp::{IntComp, OutputVec, Stopped};
#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct Room {
pub name: String,
pub message: String,
pub items: BTreeSet<String>,
pub directions: Vec<Compass>,
}
impl Display for Room {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
write!(f, "Room[{}, doors=", self.name)?;
if !self.directions.is_empty() {
write!(f, "doors=")?;
for &d in &self.directions {
write!(f, "{}", d)?;
}
write!(f, ", ")?;
}
for (ix, item) in self.items.iter().enumerate() {
if ix == 0 {
write!(f, "items={}", item)?;
} else {
write!(f, ",{}", item)?;
}
if ix == self.items.len() - 1 {
write!(f, ", ")?;
}
}
write!(f, "message='{}']", self.message)?;
Ok(())
}
}
fn str_to_compass(s: &str) -> AnyResult<Compass> {
Ok(match s {
"north" => Compass::North,
"south" => Compass::South,
"east" => Compass::East,
"west" => Compass::West,
_ => return Err(anyhow!("'{s}' is not a compoass direction")),
})
}
#[derive(Debug, thiserror::Error)]
#[error("Ejection: {}", _0)]
struct Ejection(String);
impl FromStr for Room {
type Err = AnyErr;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut lines = s.lines();
let mut first = lines.next().ok_or_else(|| anyhow!("No first line"))?.trim();
while first.is_empty() {
first = lines
.next()
.ok_or_else(|| anyhow!("No non-empty first line"))?
.trim();
}
assert!(first.starts_with("== "));
assert!(first.ends_with(" =="));
let name = first
.trim_end_matches(" ==")
.trim_start_matches("== ")
.to_owned();
let message = lines
.next()
.ok_or_else(|| anyhow!("No second line"))?
.trim()
.to_owned();
assert!(!message.is_empty(), "Expected non-empty message");
let next = lines.next().ok_or_else(|| anyhow!("No third line"))?.trim();
assert!(next.is_empty(), "Expected third line to be empty");
let next = lines
.next()
.ok_or_else(|| anyhow!("No fourth line"))?
.trim();
assert!(
next == "Doors here lead:",
"Expected third line to be 'Doors here lead:'"
);
let mut directions = Vec::new();
let mut next = lines
.next()
.ok_or_else(|| anyhow!("No line after doors"))?
.trim();
while next.starts_with("- ") {
let dir = next.trim_start_matches("- ");
let dir = str_to_compass(dir)?;
directions.push(dir);
next = lines
.next()
.ok_or_else(|| anyhow!("No line after directions"))?
.trim();
}
assert!(
next.is_empty(),
"Expected line after directions to be empty"
);
next = lines
.next()
.ok_or_else(|| anyhow!("No line after directions + empty"))?
.trim();
let mut items = BTreeSet::new();
if next == "Items here:" {
next = lines
.next()
.ok_or_else(|| anyhow!("No line after items"))?
.trim();
while next.starts_with("- ") {
let item = next.trim_start_matches("- ");
items.insert(item.to_owned());
next = lines
.next()
.ok_or_else(|| anyhow!("No line after items"))?
.trim();
}
assert!(
next.is_empty(),
"Expected line after items to be empty, got '{}'",
next
);
next = lines
.next()
.ok_or_else(|| anyhow!("No line after items + empty"))?
.trim();
}
if next.contains("Alert!") {
return Err(Ejection(next.into()).into());
}
assert_eq!(
next, "Command?",
"Expected line after items to be 'Command?'"
);
assert!(lines.next().is_none());
Ok(Room {
name,
message,
items,
directions,
})
}
}
#[derive(Debug, Clone)]
pub struct Explorer {
comp: IntComp,
room: Key,
direction: Compass,
carrying: BTreeSet<String>,
map: Map,
}
impl Explorer {
fn new(mut comp: IntComp) -> AnyResult<Self> {
let mut output = OutputVec::new();
comp.run_to_input(&mut output)?;
let out = output.as_string()?;
let room = Room::from_str(&out)?;
let mut map: Map = Default::default();
let key = map.add_room(room);
let exp = Explorer {
comp,
room: key,
direction: Compass::North,
carrying: Default::default(),
map,
};
Ok(exp)
}
fn see_room(&self) -> &Room {
self.map.get(self.room)
}
fn process_input_str(&mut self, output: &mut OutputVec, input: &str) -> anyhow::Result<String> {
log::debug!("Process 1: '{}'", input);
self.comp
.process_ascii(input, output)?
.expect(Stopped::Input)?;
log::debug!("Process 2: '\\n'");
self.comp
.process_ascii("\n", output)?
.expect(Stopped::Input)?;
log::debug!("Processed: '\\n'");
Ok(output.as_string()?)
}
fn process_str(&mut self, input: &str) -> anyhow::Result<String> {
let mut out = OutputVec::new();
match self.process_input_str(&mut out, input) {
Ok(v) => Ok(v),
Err(e) => |
}
}
// fn north(&mut self) -> anyhow::Result<String> {
// self.process_str("north")
// }
// fn south(&mut self) -> anyhow::Result<String> {
// self.process_str("south")
// }
// fn east(&mut self) -> anyhow::Result<String> {
// self.process_str("east")
// }
// fn west(&mut self) -> anyhow::Result<String> {
// self.process_str("west")
// }
pub fn step(&mut self, direction: Compass) -> anyhow::Result<()> {
let input = match direction {
Compass::East => "east",
Compass::North => "north",
Compass::South => "south",
Compass::West => "west",
};
log::debug!("Taking step {}", input);
let output = self.process_str(input)?;
log::debug!("Took step:\n{}\n", output);
let room = Room::from_str(&output)?;
let new = self.map.add_room(room);
self.map.add_door(self.room, direction, new);
self.room = new;
self.direction = direction;
Ok(())
}
pub fn take(&mut self, item: &str) -> anyhow::Result<String> {
log::debug!("Taking {}", item);
let mut s = String::from("take ");
s.push_str(item);
let result = self.process_str(&s)?;
let new = self.carrying.insert(item.to_string());
let room = self.map.rooms.get_mut(self.room).unwrap();
room.items.remove(item);
assert!(new, "Expected to add {}", item);
log::debug!(" took {}", item);
Ok(result)
}
pub fn drop(&mut self, item: &str) -> anyhow::Result<String> {
let found = self.carrying.remove(item);
assert!(!found, "Expected to drop {}", item);
let mut s = String::from("drop ");
s.push_str(item);
self.process_str(&s)
}
pub fn inventory(&mut self) -> anyhow::Result<String> {
self.process_str("inv")
}
fn left_wall_step(&mut self) -> AnyResult<()> {
let mut dir = self.direction + Turn::Left;
for _ in 0..4 {
log::debug!("Checking {} -> {}", self.direction, dir);
if self.see_room().directions.contains(&dir) {
break;
}
dir = dir + Turn::Right;
}
assert!(self.see_room().directions.contains(&dir));
self.step(dir)?;
log::debug!("Stepped {}, {}", dir, self.see_room().name);
Ok(())
}
fn explore_and_take(&mut self, items: &BTreeSet<String>) -> AnyResult<()> {
let start = self.room;
let mut start_directions = self.see_room().directions.clone();
start_directions.reverse();
loop {
let overlap: BTreeSet<String> = items
.intersection(&self.see_room().items)
.map(|s| s.to_owned())
.collect();
for item in overlap {
let _output = self.take(&item)?;
// println!("Took {}, output: {}", item, output.trim());
}
if self.see_room().name == "Security Checkpoint" {
// println!("inv: {}", self.inventory()?);
log::info!("Turning around at security checkpoint");
self.step(self.direction + Turn::Reverse)?;
continue;
}
if self.room == start {
let dir = match start_directions.pop() {
None => return Ok(()),
Some(d) => d,
};
self.step(dir)?;
continue;
}
self.left_wall_step()?;
}
}
pub fn goto(&mut self, room: &str) -> AnyResult<()> {
loop {
if self.see_room().name == room {
return Ok(());
}
self.left_wall_step()?;
}
}
}
type Key = slotmap::DefaultKey;
#[derive(Default, Debug, Clone)]
pub struct Map {
rooms_by_name: HashMap<String, Key>,
rooms: SlotMap<Key, Room>,
doors: HashMap<Key, BTreeMap<Compass, Key>>,
unvisited: HashMap<Key, BTreeSet<Compass>>,
}
impl Map {
fn add_room(&mut self, room: Room) -> Key {
if let Some(&key) = self.rooms_by_name.get(&room.name) {
return key;
}
let name = room.name.clone();
let directions = room.directions.clone();
let key = self.rooms.insert(room);
self.rooms_by_name.insert(name, key);
let unvisited = self.unvisited.insert(key, Default::default());
assert!(unvisited.is_none());
let unvisited = self.unvisited.get_mut(&key).unwrap();
for dir in directions {
unvisited.insert(dir);
}
key
}
fn visit(&mut self, room: Key, direction: Compass) {
if let Occupied(mut o) = self.unvisited.entry(room) {
o.get_mut().remove(&direction);
if o.get().is_empty() {
o.remove();
}
}
}
fn add_door(&mut self, first: Key, direction: Compass, second: Key) {
self.doors
.entry(first)
.or_default()
.insert(direction, second);
self.doors
.entry(second)
.or_default()
.insert(direction + Turn::Reverse, first);
self.visit(first, direction);
self.visit(second, direction + Turn::Reverse);
}
pub fn len(&self) -> usize {
self.rooms.len()
}
pub fn is_empty(&self) -> bool {
self.rooms.is_empty()
}
pub fn contains(&self, room: &Room) -> bool {
self.rooms_by_name.contains_key(&room.name)
}
fn get(&self, key: Key) -> &Room {
self.rooms.get(key).unwrap()
}
#[allow(dead_code)]
fn to_coords(&self, origin: Option<Key>) -> HashMap<Position, Key> {
let start = match (origin, self.rooms.iter().next()) {
(Some(k), _) => k,
(None, None) => return Default::default(),
(None, Some((k, _r))) => k,
};
let mut queue = vec![(Position(0, 0), start)];
let mut seen = HashSet::new();
let mut coords = HashMap::new();
while let Some((pos, r)) = queue.pop() {
match coords.entry(pos) {
Occupied(o) => {
assert!(seen.contains(&r));
assert!(*o.get() == r);
}
Vacant(v) => {
assert!(!seen.contains(&r));
seen.insert(r);
v.insert(r);
let neighbors = self.doors.get(&r).unwrap();
for (&d, &r) in neighbors {
queue.push((pos + d, r));
}
}
}
}
coords
}
}
/*
NV
||
SB KT=GW=PS
|| ||
CQ=HD
||
|| OB=ST
|| ||
SG HB=EG=WD=AR=SL
|| ||
HW=HC=CO
||
SC
AR: Arcade
CO: Corridor
CQ: Crew Quarters
EG: Engineering
GW: Gift Wrapping Center
HB: Hull Breach
HC: Hot Chocolate Fountain
HD: Holodeck
HW: Hallway
KT: Kitchen
NV: Navigation
OB: Observatory
SB: Sick Bay
SC: Security Checkpoint
SG: Storage
SL: Science Lab
ST: Stables
WD: Warp Drive Maintenance
*/
fn try_item_combos(initial_explorer: Explorer, items: Vec<String>) -> AnyResult<Explorer> {
let total = 1 << items.len();
for n in 0..total {
let mut explorer = initial_explorer.clone();
let cur_items: BTreeSet<String> = items
.iter()
.enumerate()
.filter_map(|(i, item)| {
if (n & (1 << i)) == 0 {
None
} else {
Some(item.clone())
}
})
.collect();
log::info!("Items: {:?}", cur_items);
explorer.explore_and_take(&cur_items)?;
assert_eq!(explorer.carrying, cur_items);
explorer.goto("Security Checkpoint")?;
let err = match explorer.left_wall_step() {
Ok(()) => return Ok(explorer),
Err(e) => e,
};
match err.downcast::<Ejection>() {
Ok(e) => log::info!(" {}", e),
Err(e) => return Err(e),
}
}
Err(anyhow::anyhow!("Got to end, found nothing!"))
}
#[allow(dead_code)]
fn explore_around(explorer: &mut Explorer) -> AnyResult<()> {
explorer.explore_and_take(&Default::default())?;
println!(
"Visited, back to start. Unvisited: {} Visited {} rooms with {} doors",
explorer.map.unvisited.len(),
explorer.map.rooms.len(),
explorer.map.doors.len()
);
println!("Items:");
for (_, room) in &explorer.map.rooms {
for item in &room.items {
println!(" - {}: {}", room.name, item);
}
}
println!("\nDoors:");
for (&ra, doors) in &explorer.map.doors {
for (dir, &rb) in doors {
let ra = explorer.map.rooms.get(ra).unwrap();
let rb = explorer.map.rooms.get(rb).unwrap();
println!(" {}: {} -> {}", dir, ra.name, rb.name);
}
}
Ok(())
}
fn main() -> anyhow::Result<()> {
env_logger::init();
let matches = App::new("Day 25")
.arg(
Arg::with_name("input")
.short("i")
.long("input")
.value_name("INPUT")
.takes_value(true),
)
.get_matches();
let input_path = matches.value_of("INPUT").unwrap_or("inputs/day25.txt");
debug!("Using input {}", input_path);
let file = File::open(input_path)?;
let buf_reader = BufReader::new(file);
let line: String = buf_reader
.lines()
.next()
.ok_or_else(|| anyhow::format_err!("No line found"))??;
let cp: IntComp = str::parse(&line)?;
let initial_explorer = Explorer::new(cp)?;
let all_items = vec![
// "food ration".to_owned(),
"candy cane".to_owned(),
"mouse".to_owned(),
// "mug".to_owned(),
"coin".to_owned(),
// "ornament".to_owned(),
"semiconductor".to_owned(),
// "mutex".to_owned(),
];
try_item_combos(initial_explorer, all_items)?;
Ok(())
}
#[cfg(test)]
mod tests {
use test_log::test;
#[allow(unused_imports)]
use super::*;
#[test]
fn test_thing() -> anyhow::Result<()> {
Ok(())
}
}
| {
let output = out.as_string()?;
log::warn!("process_str failure on input {}, output: {}", input, output);
Err(e)
} | conditional_block |
client.go | package conntrack
import (
"encoding/binary"
"errors"
"fmt"
"net"
"strconv"
"syscall"
"unsafe"
"golang.org/x/sys/unix"
)
const (
sizeofGenmsg = uint32(unsafe.Sizeof(unix.Nfgenmsg{})) // TODO
)
type ConntrackListReq struct {
Header syscall.NlMsghdr
Body unix.Nfgenmsg
}
func (c *ConntrackListReq) toWireFormat() []byte {
// adapted from syscall/NetlinkRouteRequest.toWireFormat
b := make([]byte, c.Header.Len)
*(*uint32)(unsafe.Pointer(&b[0:4][0])) = c.Header.Len
*(*uint16)(unsafe.Pointer(&b[4:6][0])) = c.Header.Type
*(*uint16)(unsafe.Pointer(&b[6:8][0])) = c.Header.Flags
*(*uint32)(unsafe.Pointer(&b[8:12][0])) = c.Header.Seq
*(*uint32)(unsafe.Pointer(&b[12:16][0])) = c.Header.Pid
b[16] = byte(c.Body.Nfgen_family)
b[17] = byte(c.Body.Version)
*(*uint16)(unsafe.Pointer(&b[18:20][0])) = c.Body.Res_id
return b
}
func connectNetfilter(bufferSize int, groups uint32) (int, *syscall.SockaddrNetlink, error) {
s, err := syscall.Socket(syscall.AF_NETLINK, syscall.SOCK_RAW, syscall.NETLINK_NETFILTER)
if err != nil {
return 0, nil, err
}
lsa := &syscall.SockaddrNetlink{
Family: syscall.AF_NETLINK,
Groups: groups,
}
if err := syscall.Bind(s, lsa); err != nil {
return 0, nil, err
}
if bufferSize > 0 {
// Speculatively try SO_RCVBUFFORCE which needs CAP_NET_ADMIN
if err := syscall.SetsockoptInt(s, syscall.SOL_SOCKET, syscall.SO_RCVBUFFORCE, bufferSize); err != nil {
// and if that doesn't work fall back to the ordinary SO_RCVBUF
if err := syscall.SetsockoptInt(s, syscall.SOL_SOCKET, syscall.SO_RCVBUF, bufferSize); err != nil {
return 0, nil, err
}
}
}
return s, lsa, nil
}
// Make syscall asking for all connections. Invoke 'cb' for each connection.
func queryAllConnections(bufferSize int, cb func(Conn), inetFamily uint8) error {
s, lsa, err := connectNetfilter(bufferSize, 0)
if err != nil {
return err
}
defer syscall.Close(s)
msg := ConntrackListReq{
Header: syscall.NlMsghdr{
Len: syscall.NLMSG_HDRLEN + sizeofGenmsg,
Type: (NFNL_SUBSYS_CTNETLINK << 8) | uint16(IpctnlMsgCtGet),
Flags: syscall.NLM_F_REQUEST | syscall.NLM_F_DUMP,
Pid: 0,
Seq: 0,
},
Body: unix.Nfgenmsg{
Nfgen_family: inetFamily,
Version: NFNETLINK_V0,
Res_id: 0,
},
}
wb := msg.toWireFormat()
// fmt.Printf("msg bytes: %q\n", wb)
if err := syscall.Sendto(s, wb, 0, lsa); err != nil {
return err
}
return readMsgs(s, cb)
}
// Stream all connections instead of query for all of them at once.
func StreamAllConnections(inetFamily uint8) chan Conn {
ch := make(chan Conn, 1)
go func() {
queryAllConnections(0, func(c Conn) {
ch <- c
}, inetFamily)
close(ch)
}()
return ch
}
// Lists all the connections that conntrack is tracking.
func Connections(inetFamily uint8) ([]Conn, error) {
return ConnectionsSize(0, inetFamily)
}
// Lists all the connections that conntrack is tracking, using specified netlink buffer size.
func ConnectionsSize(bufferSize int, inetFamily uint8) ([]Conn, error) {
var conns []Conn
queryAllConnections(bufferSize, func(c Conn) {
conns = append(conns, c)
}, inetFamily)
return conns, nil
}
// Established lists all established TCP connections.
func Established(inetFamily uint8) ([]ConnTCP, error) {
var conns []ConnTCP
local := localIPs()
err := queryAllConnections(0, func(c Conn) {
if c.MsgType != NfctMsgUpdate {
fmt.Printf("msg isn't an update: %d\n", c.MsgType)
return
}
if c.TCPState != "ESTABLISHED" {
// fmt.Printf("state isn't ESTABLISHED: %s\n", c.TCPState)
return
}
if tc := c.ConnTCP(local); tc != nil {
conns = append(conns, *tc)
}
}, inetFamily)
if err != nil {
return nil, err
}
return conns, nil
}
// Follow gives a channel with all changes.
func Follow(flags uint32) (<-chan Conn, func(), error) {
return FollowSize(0, flags)
}
// Follow gives a channel with all changes, , using specified netlink buffer size.
func FollowSize(bufferSize int, flags uint32) (<-chan Conn, func(), error) {
var closing bool
s, _, err := connectNetfilter(bufferSize, flags)
stop := func() {
closing = true
syscall.Close(s)
}
if err != nil {
return nil, stop, err
}
res := make(chan Conn, 1)
go func() {
defer syscall.Close(s)
if err := readMsgs(s, func(c Conn) {
// if conn.TCPState != 3 {
// // 3 is TCP established.
// continue
// }
res <- c
}); err != nil && !closing {
panic(err)
}
}()
return res, stop, nil
}
func readMsgs(s int, cb func(Conn)) error {
rb := make([]byte, 2*syscall.Getpagesize())
loop:
for {
nr, _, err := syscall.Recvfrom(s, rb, 0)
if err == syscall.ENOBUFS {
// ENOBUF means we miss some events here. No way around it. That's life.
cb(Conn{Err: syscall.ENOBUFS})
continue
} else if err != nil {
return err
}
msgs, err := syscall.ParseNetlinkMessage(rb[:nr])
if err != nil {
return err
}
for _, msg := range msgs {
if msg.Header.Type == unix.NLMSG_ERROR {
return errors.New("NLMSG_ERROR")
}
if msg.Header.Type == unix.NLMSG_DONE {
break loop
}
if nflnSubsysID(msg.Header.Type) != NFNL_SUBSYS_CTNETLINK {
return fmt.Errorf(
"unexpected subsys_id: %d\n",
nflnSubsysID(msg.Header.Type),
)
}
conn, err := parsePayload(msg.Data[sizeofGenmsg:])
if err != nil {
return err
}
// Taken from conntrack/parse.c:__parse_message_type
switch CntlMsgTypes(nflnMsgType(msg.Header.Type)) {
case IpctnlMsgCtNew:
conn.MsgType = NfctMsgUpdate
if msg.Header.Flags&(syscall.NLM_F_CREATE|syscall.NLM_F_EXCL) > 0 {
conn.MsgType = NfctMsgNew
}
case IpctnlMsgCtDelete:
conn.MsgType = NfctMsgDestroy
}
cb(*conn)
}
}
return nil
}
type Tuple struct {
Proto uint8
Src net.IP
SrcPort uint16
Dst net.IP
DstPort uint16
// Flow stats.
Bytes uint64
Packets uint64
// ICMP stuff.
IcmpId uint16
IcmpType uint8
IcmpCode uint8
}
func (t Tuple) String() string {
return fmt.Sprintf("src=%v dst=%v sport=%d dport=%d packets=%d size=%d",t.Src,t.Dst,t.SrcPort,t.DstPort,t.Packets,t.Bytes)
}
type Conn struct {
MsgType NfConntrackMsg
TCPState string
Status CtStatus
Orig Tuple
Reply Tuple
// ct.mark, used to set permission type of the flow.
CtMark uint32
// ct.id, used to identify connections.
CtId uint32
// For multitenancy.
Zone uint16
// Error, if any.
Err error
}
func (c Conn) String() string{
return fmt.Sprintf("%d %s %v %v mark=%d\n",c.Orig.Proto,c.TCPState,c.Orig,c.Reply,c.CtMark)
}
// ConnTCP decides which way this connection is going and makes a ConnTCP.
func (c Conn) ConnTCP(local map[string]struct{}) *ConnTCP {
// conntrack gives us all connections, even things passing through, but it
// doesn't tell us what the local IP is. So we use `local` as a guide
// what's local.
src := c.Orig.Src.String()
dst := c.Orig.Dst.String()
_, srcLocal := local[src]
_, dstLocal := local[dst]
// If both are local we must just order things predictably.
if srcLocal && dstLocal {
srcLocal = c.Orig.SrcPort < c.Orig.DstPort
}
if srcLocal {
return &ConnTCP{
Local: src,
LocalPort: strconv.Itoa(int(c.Orig.SrcPort)),
Remote: dst,
RemotePort: strconv.Itoa(int(c.Orig.DstPort)),
}
}
if dstLocal {
return &ConnTCP{
Local: dst,
LocalPort: strconv.Itoa(int(c.Orig.DstPort)),
Remote: src,
RemotePort: strconv.Itoa(int(c.Orig.SrcPort)),
}
}
// Neither is local. conntrack also reports NAT connections.
return nil
}
func parsePayload(b []byte) (*Conn, error) {
// Most of this comes from libnetfilter_conntrack/src/conntrack/parse_mnl.c
conn := &Conn{}
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil |
for _, attr := range attrs {
switch CtattrType(attr.Typ) {
case CtaTupleOrig:
parseTuple(attr.Msg, &conn.Orig)
case CtaTupleReply:
parseTuple(attr.Msg, &conn.Reply)
case CtaCountersOrig:
conn.Orig.Packets, conn.Orig.Bytes, _ = parseCounters(attr.Msg)
case CtaCountersReply:
conn.Reply.Packets, conn.Reply.Bytes, _ = parseCounters(attr.Msg)
case CtaStatus:
conn.Status = CtStatus(binary.BigEndian.Uint32(attr.Msg))
case CtaProtoinfo:
parseProtoinfo(attr.Msg, conn)
case CtaMark:
conn.CtMark = binary.BigEndian.Uint32(attr.Msg)
case CtaZone:
conn.Zone = binary.BigEndian.Uint16(attr.Msg)
case CtaId:
conn.CtId = binary.BigEndian.Uint32(attr.Msg)
}
}
return conn, nil
}
func parseTuple(b []byte, tuple *Tuple) error {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return fmt.Errorf("invalid tuple attr: %s", err)
}
for _, attr := range attrs {
// fmt.Printf("pl: %d, type: %d, multi: %t, bigend: %t\n", len(attr.Msg), attr.Typ, attr.IsNested, attr.IsNetByteorder)
switch CtattrTuple(attr.Typ) {
case CtaTupleUnspec:
// fmt.Printf("It's a tuple unspec\n")
case CtaTupleIp:
// fmt.Printf("It's a tuple IP\n")
if err := parseIP(attr.Msg, tuple); err != nil {
return err
}
case CtaTupleProto:
// fmt.Printf("It's a tuple proto\n")
parseProto(attr.Msg, tuple)
}
}
return nil
}
func parseCounters(b []byte) (uint64, uint64, error) {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return 0, 0, fmt.Errorf("invalid tuple attr: %s", err)
}
packets := uint64(0)
bytes := uint64(0)
for _, attr := range attrs {
switch CtattrCounters(attr.Typ) {
case CtaCountersPackets:
packets = binary.BigEndian.Uint64(attr.Msg)
case CtaCountersBytes:
bytes = binary.BigEndian.Uint64(attr.Msg)
}
}
return packets, bytes, nil
}
func parseIP(b []byte, tuple *Tuple) error {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return fmt.Errorf("invalid tuple attr: %s", err)
}
for _, attr := range attrs {
switch CtattrIp(attr.Typ) {
case CtaIpV4Src:
tuple.Src = make(net.IP, len(attr.Msg))
copy(tuple.Src, attr.Msg)
case CtaIpV4Dst:
tuple.Dst = make(net.IP, len(attr.Msg))
copy(tuple.Dst, attr.Msg)
case CtaIpV6Src:
// TODO
case CtaIpV6Dst:
// TODO
}
}
return nil
}
func parseProto(b []byte, tuple *Tuple) error {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return fmt.Errorf("invalid tuple attr: %s", err)
}
for _, attr := range attrs {
switch CtattrL4proto(attr.Typ) {
// Protocol number.
case CtaProtoNum:
tuple.Proto = uint8(attr.Msg[0])
// TCP stuff.
case CtaProtoSrcPort:
tuple.SrcPort = binary.BigEndian.Uint16(attr.Msg)
case CtaProtoDstPort:
tuple.DstPort = binary.BigEndian.Uint16(attr.Msg)
// ICMP stuff.
case CtaProtoIcmpId:
tuple.IcmpId = binary.BigEndian.Uint16(attr.Msg)
case CtaProtoIcmpType:
tuple.IcmpType = attr.Msg[0]
case CtaProtoIcmpCode:
tuple.IcmpCode = attr.Msg[0]
}
}
return nil
}
func parseProtoinfo(b []byte, conn *Conn) error {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return fmt.Errorf("invalid tuple attr: %s", err)
}
for _, attr := range attrs {
switch CtattrProtoinfo(attr.Typ) {
case CtaProtoinfoTcp:
if err := parseProtoinfoTCP(attr.Msg, conn); err != nil {
return err
}
default:
// we're not interested in other protocols
}
}
return nil
}
func parseProtoinfoTCP(b []byte, conn *Conn) error {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return fmt.Errorf("invalid tuple attr: %s", err)
}
for _, attr := range attrs {
switch CtattrProtoinfoTcp(attr.Typ) {
case CtaProtoinfoTcpState:
conn.TCPState = tcpState[uint8(attr.Msg[0])]
default:
// not interested
}
}
return nil
}
| {
return conn, err
} | conditional_block |
client.go | package conntrack
import (
"encoding/binary"
"errors"
"fmt"
"net"
"strconv"
"syscall"
"unsafe"
"golang.org/x/sys/unix"
)
const (
sizeofGenmsg = uint32(unsafe.Sizeof(unix.Nfgenmsg{})) // TODO
)
type ConntrackListReq struct {
Header syscall.NlMsghdr
Body unix.Nfgenmsg
}
func (c *ConntrackListReq) toWireFormat() []byte {
// adapted from syscall/NetlinkRouteRequest.toWireFormat
b := make([]byte, c.Header.Len)
*(*uint32)(unsafe.Pointer(&b[0:4][0])) = c.Header.Len
*(*uint16)(unsafe.Pointer(&b[4:6][0])) = c.Header.Type
*(*uint16)(unsafe.Pointer(&b[6:8][0])) = c.Header.Flags
*(*uint32)(unsafe.Pointer(&b[8:12][0])) = c.Header.Seq
*(*uint32)(unsafe.Pointer(&b[12:16][0])) = c.Header.Pid
b[16] = byte(c.Body.Nfgen_family)
b[17] = byte(c.Body.Version)
*(*uint16)(unsafe.Pointer(&b[18:20][0])) = c.Body.Res_id
return b
}
// connectNetfilter opens a raw NETLINK_NETFILTER socket and binds it to
// the given multicast groups (0 for plain request/response use). If
// bufferSize > 0 it tries to grow the receive buffer. The caller owns the
// returned fd and must close it.
func connectNetfilter(bufferSize int, groups uint32) (int, *syscall.SockaddrNetlink, error) {
	s, err := syscall.Socket(syscall.AF_NETLINK, syscall.SOCK_RAW, syscall.NETLINK_NETFILTER)
	if err != nil {
		return 0, nil, err
	}
	lsa := &syscall.SockaddrNetlink{
		Family: syscall.AF_NETLINK,
		Groups: groups,
	}
	if err := syscall.Bind(s, lsa); err != nil {
		// NOTE(review): the socket fd is not closed on bind/setsockopt
		// failure — callers cannot close it since it isn't returned.
		return 0, nil, err
	}
	if bufferSize > 0 {
		// Speculatively try SO_RCVBUFFORCE which needs CAP_NET_ADMIN
		if err := syscall.SetsockoptInt(s, syscall.SOL_SOCKET, syscall.SO_RCVBUFFORCE, bufferSize); err != nil {
			// and if that doesn't work fall back to the ordinary SO_RCVBUF
			if err := syscall.SetsockoptInt(s, syscall.SOL_SOCKET, syscall.SO_RCVBUF, bufferSize); err != nil {
				return 0, nil, err
			}
		}
	}
	return s, lsa, nil
}
// queryAllConnections sends a CTNETLINK "get" dump request for the given
// address family and invokes cb once per connection message received.
// bufferSize > 0 requests a larger netlink receive buffer. The socket is
// closed before returning.
func queryAllConnections(bufferSize int, cb func(Conn), inetFamily uint8) error {
	s, lsa, err := connectNetfilter(bufferSize, 0)
	if err != nil {
		return err
	}
	defer syscall.Close(s)
	// Dump request: NLM_F_DUMP asks the kernel to stream the whole table,
	// terminated by NLMSG_DONE (handled in readMsgs).
	msg := ConntrackListReq{
		Header: syscall.NlMsghdr{
			Len:   syscall.NLMSG_HDRLEN + sizeofGenmsg,
			Type:  (NFNL_SUBSYS_CTNETLINK << 8) | uint16(IpctnlMsgCtGet),
			Flags: syscall.NLM_F_REQUEST | syscall.NLM_F_DUMP,
			Pid:   0,
			Seq:   0,
		},
		Body: unix.Nfgenmsg{
			Nfgen_family: inetFamily,
			Version:      NFNETLINK_V0,
			Res_id:       0,
		},
	}
	wb := msg.toWireFormat()
	// fmt.Printf("msg bytes: %q\n", wb)
	if err := syscall.Sendto(s, wb, 0, lsa); err != nil {
		return err
	}
	return readMsgs(s, cb)
}
// StreamAllConnections streams all tracked connections over a channel
// instead of collecting them into a slice; the channel is closed once the
// dump finishes. A dump failure is delivered as a final Conn with Err set
// — following the same convention readMsgs uses for ENOBUFS — instead of
// being silently discarded as before.
func StreamAllConnections(inetFamily uint8) chan Conn {
	ch := make(chan Conn, 1)
	go func() {
		err := queryAllConnections(0, func(c Conn) {
			ch <- c
		}, inetFamily)
		if err != nil {
			// Surface the error to the consumer; previously it was dropped.
			ch <- Conn{Err: err}
		}
		close(ch)
	}()
	return ch
}
// Connections lists all the connections that conntrack is tracking for
// the given address family, using the kernel's default netlink buffer
// size (equivalent to ConnectionsSize(0, inetFamily)).
func Connections(inetFamily uint8) ([]Conn, error) {
	return ConnectionsSize(0, inetFamily)
}
// ConnectionsSize lists all the connections that conntrack is tracking,
// using the specified netlink receive buffer size (0 for the kernel
// default). inetFamily selects the address family (e.g. AF_INET).
func ConnectionsSize(bufferSize int, inetFamily uint8) ([]Conn, error) {
	var conns []Conn
	// Propagate the dump error; the original ignored it and always
	// returned nil, making the error return value dead.
	err := queryAllConnections(bufferSize, func(c Conn) {
		conns = append(conns, c)
	}, inetFamily)
	if err != nil {
		return nil, err
	}
	return conns, nil
}
// Established lists all established TCP connections for the given address
// family, oriented local-to-remote via the host's local IPs (localIPs).
// Connections where neither endpoint is local (e.g. NATed pass-through)
// are skipped by ConnTCP.
func Established(inetFamily uint8) ([]ConnTCP, error) {
	var conns []ConnTCP
	local := localIPs()
	err := queryAllConnections(0, func(c Conn) {
		if c.MsgType != NfctMsgUpdate {
			// NOTE(review): library code printing to stdout — consider
			// routing this through a logger or dropping it.
			fmt.Printf("msg isn't an update: %d\n", c.MsgType)
			return
		}
		if c.TCPState != "ESTABLISHED" {
			// fmt.Printf("state isn't ESTABLISHED: %s\n", c.TCPState)
			return
		}
		if tc := c.ConnTCP(local); tc != nil {
			conns = append(conns, *tc)
		}
	}, inetFamily)
	if err != nil {
		return nil, err
	}
	return conns, nil
}
// Follow gives a channel with all conntrack changes for the netlink
// multicast groups selected by flags, plus a stop function. It uses the
// kernel's default buffer size (equivalent to FollowSize(0, flags)).
func Follow(flags uint32) (<-chan Conn, func(), error) {
	return FollowSize(0, flags)
}
// FollowSize gives a channel with all conntrack changes, using the
// specified netlink buffer size. The returned stop function closes the
// socket, which unblocks the reader goroutine.
func FollowSize(bufferSize int, flags uint32) (<-chan Conn, func(), error) {
	// closing distinguishes a deliberate stop() from a real read error.
	// NOTE(review): it is written by stop() and read by the goroutine
	// without synchronization — technically a data race; confirm whether
	// an atomic/bool channel is warranted.
	var closing bool
	s, _, err := connectNetfilter(bufferSize, flags)
	stop := func() {
		closing = true
		syscall.Close(s)
	}
	if err != nil {
		// stop is still returned so callers can unconditionally defer it.
		return nil, stop, err
	}
	res := make(chan Conn, 1)
	go func() {
		defer syscall.Close(s)
		if err := readMsgs(s, func(c Conn) {
			// if conn.TCPState != 3 {
			// 	// 3 is TCP established.
			// 	continue
			// }
			res <- c
		}); err != nil && !closing {
			panic(err)
		}
	}()
	return res, stop, nil
}
// readMsgs reads netlink messages from socket s until NLMSG_DONE (end of
// a dump) or an error, parsing each conntrack payload into a Conn and
// passing it to cb. ENOBUFS (dropped events) is reported to cb as a Conn
// with Err set and reading continues.
func readMsgs(s int, cb func(Conn)) error {
	// Two pages is the conventional receive buffer for netlink dumps.
	rb := make([]byte, 2*syscall.Getpagesize())
loop:
	for {
		nr, _, err := syscall.Recvfrom(s, rb, 0)
		if err == syscall.ENOBUFS {
			// ENOBUF means we miss some events here. No way around it. That's life.
			cb(Conn{Err: syscall.ENOBUFS})
			continue
		} else if err != nil {
			return err
		}
		msgs, err := syscall.ParseNetlinkMessage(rb[:nr])
		if err != nil {
			return err
		}
		for _, msg := range msgs {
			if msg.Header.Type == unix.NLMSG_ERROR {
				return errors.New("NLMSG_ERROR")
			}
			if msg.Header.Type == unix.NLMSG_DONE {
				break loop
			}
			if nflnSubsysID(msg.Header.Type) != NFNL_SUBSYS_CTNETLINK {
				// NOTE(review): trailing \n in an error string trips go vet.
				return fmt.Errorf(
					"unexpected subsys_id: %d\n",
					nflnSubsysID(msg.Header.Type),
				)
			}
			// Skip the nfgenmsg header; the rest is the attribute payload.
			conn, err := parsePayload(msg.Data[sizeofGenmsg:])
			if err != nil {
				return err
			}
			// Taken from conntrack/parse.c:__parse_message_type
			switch CntlMsgTypes(nflnMsgType(msg.Header.Type)) {
			case IpctnlMsgCtNew:
				conn.MsgType = NfctMsgUpdate
				if msg.Header.Flags&(syscall.NLM_F_CREATE|syscall.NLM_F_EXCL) > 0 {
					conn.MsgType = NfctMsgNew
				}
			case IpctnlMsgCtDelete:
				conn.MsgType = NfctMsgDestroy
			}
			cb(*conn)
		}
	}
	return nil
}
// Tuple is one direction of a tracked flow: L4 protocol, endpoints, and
// the per-direction counters/ICMP fields parsed from conntrack attributes.
type Tuple struct {
	Proto   uint8  // L4 protocol number (set from CTA_PROTO_NUM).
	Src     net.IP // source address (IPv4 only; IPv6 is TODO in parseIP).
	SrcPort uint16
	Dst     net.IP
	DstPort uint16
	// Flow stats.
	Bytes   uint64
	Packets uint64
	// ICMP stuff.
	IcmpId   uint16
	IcmpType uint8
	IcmpCode uint8
}
// String renders the tuple in conntrack's key=value style, including the
// per-direction packet and byte counters.
func (t Tuple) String() string {
	return fmt.Sprintf(
		"src=%v dst=%v sport=%d dport=%d packets=%d size=%d",
		t.Src, t.Dst,
		t.SrcPort, t.DstPort,
		t.Packets, t.Bytes,
	)
}
// Conn is one parsed conntrack entry or event: the message kind, both
// flow directions, TCP state name, and auxiliary conntrack metadata.
type Conn struct {
	MsgType  NfConntrackMsg // new / update / destroy (set in readMsgs).
	TCPState string         // e.g. "ESTABLISHED"; empty for non-TCP flows.
	Status   CtStatus
	Orig     Tuple // original direction of the flow.
	Reply    Tuple // reply direction of the flow.
	// ct.mark, used to set permission type of the flow.
	CtMark uint32
	// ct.id, used to identify connections.
	CtId uint32
	// For multitenancy.
	Zone uint16
	// Error, if any.
	Err error
}
func (c Conn) | () string{
return fmt.Sprintf("%d %s %v %v mark=%d\n",c.Orig.Proto,c.TCPState,c.Orig,c.Reply,c.CtMark)
}
// ConnTCP decides which way this connection is going and makes a ConnTCP.
// `local` is a set of this host's IP strings; the local endpoint becomes
// Local/LocalPort. Returns nil when neither endpoint is local.
func (c Conn) ConnTCP(local map[string]struct{}) *ConnTCP {
	// conntrack gives us all connections, even things passing through, but it
	// doesn't tell us what the local IP is. So we use `local` as a guide
	// what's local.
	src := c.Orig.Src.String()
	dst := c.Orig.Dst.String()
	_, srcLocal := local[src]
	_, dstLocal := local[dst]
	// If both are local we must just order things predictably.
	if srcLocal && dstLocal {
		srcLocal = c.Orig.SrcPort < c.Orig.DstPort
	}
	if srcLocal {
		return &ConnTCP{
			Local:      src,
			LocalPort:  strconv.Itoa(int(c.Orig.SrcPort)),
			Remote:     dst,
			RemotePort: strconv.Itoa(int(c.Orig.DstPort)),
		}
	}
	if dstLocal {
		return &ConnTCP{
			Local:      dst,
			LocalPort:  strconv.Itoa(int(c.Orig.DstPort)),
			Remote:     src,
			RemotePort: strconv.Itoa(int(c.Orig.SrcPort)),
		}
	}
	// Neither is local. conntrack also reports NAT connections.
	return nil
}
// parsePayload parses a conntrack message's attribute payload (everything
// after nfgenmsg) into a Conn. Network-byte-order scalars are decoded
// big-endian.
// NOTE(review): errors from parseTuple/parseProtoinfo are ignored here —
// a malformed nested attribute leaves that field zero-valued silently.
func parsePayload(b []byte) (*Conn, error) {
	// Most of this comes from libnetfilter_conntrack/src/conntrack/parse_mnl.c
	conn := &Conn{}
	var attrSpace [16]Attr
	attrs, err := parseAttrs(b, attrSpace[0:0])
	if err != nil {
		return conn, err
	}
	for _, attr := range attrs {
		switch CtattrType(attr.Typ) {
		case CtaTupleOrig:
			parseTuple(attr.Msg, &conn.Orig)
		case CtaTupleReply:
			parseTuple(attr.Msg, &conn.Reply)
		case CtaCountersOrig:
			conn.Orig.Packets, conn.Orig.Bytes, _ = parseCounters(attr.Msg)
		case CtaCountersReply:
			conn.Reply.Packets, conn.Reply.Bytes, _ = parseCounters(attr.Msg)
		case CtaStatus:
			conn.Status = CtStatus(binary.BigEndian.Uint32(attr.Msg))
		case CtaProtoinfo:
			parseProtoinfo(attr.Msg, conn)
		case CtaMark:
			conn.CtMark = binary.BigEndian.Uint32(attr.Msg)
		case CtaZone:
			conn.Zone = binary.BigEndian.Uint16(attr.Msg)
		case CtaId:
			conn.CtId = binary.BigEndian.Uint32(attr.Msg)
		}
	}
	return conn, nil
}
// parseTuple fills tuple from a nested CTA_TUPLE_* attribute block,
// dispatching the IP and L4-proto sub-attributes to parseIP/parseProto.
func parseTuple(b []byte, tuple *Tuple) error {
	var attrSpace [16]Attr
	attrs, err := parseAttrs(b, attrSpace[0:0])
	if err != nil {
		return fmt.Errorf("invalid tuple attr: %s", err)
	}
	for _, attr := range attrs {
		// fmt.Printf("pl: %d, type: %d, multi: %t, bigend: %t\n", len(attr.Msg), attr.Typ, attr.IsNested, attr.IsNetByteorder)
		switch CtattrTuple(attr.Typ) {
		case CtaTupleUnspec:
			// fmt.Printf("It's a tuple unspec\n")
		case CtaTupleIp:
			// fmt.Printf("It's a tuple IP\n")
			if err := parseIP(attr.Msg, tuple); err != nil {
				return err
			}
		case CtaTupleProto:
			// fmt.Printf("It's a tuple proto\n")
			parseProto(attr.Msg, tuple)
		}
	}
	return nil
}
// parseCounters decodes a CTA_COUNTERS_* attribute block and returns the
// (packets, bytes) pair; attributes absent from the payload stay zero.
func parseCounters(b []byte) (uint64, uint64, error) {
	var attrSpace [16]Attr
	attrs, err := parseAttrs(b, attrSpace[0:0])
	if err != nil {
		return 0, 0, fmt.Errorf("invalid tuple attr: %s", err)
	}
	packets := uint64(0)
	bytes := uint64(0)
	for _, attr := range attrs {
		switch CtattrCounters(attr.Typ) {
		case CtaCountersPackets:
			packets = binary.BigEndian.Uint64(attr.Msg)
		case CtaCountersBytes:
			bytes = binary.BigEndian.Uint64(attr.Msg)
		}
	}
	return packets, bytes, nil
}
// parseIP copies the IPv4 source/destination addresses out of a
// CTA_IP_* attribute block into tuple. IPv6 attributes are recognized
// but not yet handled (TODO below), so v6 tuples keep nil addresses.
func parseIP(b []byte, tuple *Tuple) error {
	var attrSpace [16]Attr
	attrs, err := parseAttrs(b, attrSpace[0:0])
	if err != nil {
		return fmt.Errorf("invalid tuple attr: %s", err)
	}
	for _, attr := range attrs {
		switch CtattrIp(attr.Typ) {
		case CtaIpV4Src:
			// Copy out of the receive buffer: attr.Msg aliases rb, which
			// is reused on the next Recvfrom.
			tuple.Src = make(net.IP, len(attr.Msg))
			copy(tuple.Src, attr.Msg)
		case CtaIpV4Dst:
			tuple.Dst = make(net.IP, len(attr.Msg))
			copy(tuple.Dst, attr.Msg)
		case CtaIpV6Src:
			// TODO
		case CtaIpV6Dst:
			// TODO
		}
	}
	return nil
}
// parseProto fills the L4 fields of tuple from a CTA_PROTO_* attribute
// block: protocol number, TCP/UDP ports, and ICMP id/type/code.
func parseProto(b []byte, tuple *Tuple) error {
	var attrSpace [16]Attr
	attrs, err := parseAttrs(b, attrSpace[0:0])
	if err != nil {
		return fmt.Errorf("invalid tuple attr: %s", err)
	}
	for _, attr := range attrs {
		switch CtattrL4proto(attr.Typ) {
		// Protocol number.
		case CtaProtoNum:
			tuple.Proto = uint8(attr.Msg[0])
		// TCP stuff.
		case CtaProtoSrcPort:
			tuple.SrcPort = binary.BigEndian.Uint16(attr.Msg)
		case CtaProtoDstPort:
			tuple.DstPort = binary.BigEndian.Uint16(attr.Msg)
		// ICMP stuff.
		case CtaProtoIcmpId:
			tuple.IcmpId = binary.BigEndian.Uint16(attr.Msg)
		case CtaProtoIcmpType:
			tuple.IcmpType = attr.Msg[0]
		case CtaProtoIcmpCode:
			tuple.IcmpCode = attr.Msg[0]
		}
	}
	return nil
}
// parseProtoinfo walks the nested CTA_PROTOINFO attributes, delegating
// the TCP branch to parseProtoinfoTCP; other protocols are skipped.
func parseProtoinfo(b []byte, conn *Conn) error {
	var attrSpace [16]Attr
	attrs, err := parseAttrs(b, attrSpace[0:0])
	if err != nil {
		return fmt.Errorf("invalid tuple attr: %s", err)
	}
	for _, attr := range attrs {
		switch CtattrProtoinfo(attr.Typ) {
		case CtaProtoinfoTcp:
			if err := parseProtoinfoTCP(attr.Msg, conn); err != nil {
				return err
			}
		default:
			// we're not interested in other protocols
		}
	}
	return nil
}
// parseProtoinfoTCP extracts the TCP conntrack state byte and records its
// name (via the tcpState lookup table) on conn.TCPState.
func parseProtoinfoTCP(b []byte, conn *Conn) error {
	var attrSpace [16]Attr
	attrs, err := parseAttrs(b, attrSpace[0:0])
	if err != nil {
		return fmt.Errorf("invalid tuple attr: %s", err)
	}
	for _, attr := range attrs {
		switch CtattrProtoinfoTcp(attr.Typ) {
		case CtaProtoinfoTcpState:
			conn.TCPState = tcpState[uint8(attr.Msg[0])]
		default:
			// not interested
		}
	}
	return nil
}
| String | identifier_name |
client.go | package conntrack
import (
"encoding/binary"
"errors"
"fmt"
"net"
"strconv"
"syscall"
"unsafe"
"golang.org/x/sys/unix"
)
const (
sizeofGenmsg = uint32(unsafe.Sizeof(unix.Nfgenmsg{})) // TODO
)
type ConntrackListReq struct {
Header syscall.NlMsghdr
Body unix.Nfgenmsg
}
func (c *ConntrackListReq) toWireFormat() []byte {
// adapted from syscall/NetlinkRouteRequest.toWireFormat
b := make([]byte, c.Header.Len)
*(*uint32)(unsafe.Pointer(&b[0:4][0])) = c.Header.Len
*(*uint16)(unsafe.Pointer(&b[4:6][0])) = c.Header.Type
*(*uint16)(unsafe.Pointer(&b[6:8][0])) = c.Header.Flags
*(*uint32)(unsafe.Pointer(&b[8:12][0])) = c.Header.Seq
*(*uint32)(unsafe.Pointer(&b[12:16][0])) = c.Header.Pid
b[16] = byte(c.Body.Nfgen_family)
b[17] = byte(c.Body.Version)
*(*uint16)(unsafe.Pointer(&b[18:20][0])) = c.Body.Res_id
return b
}
func connectNetfilter(bufferSize int, groups uint32) (int, *syscall.SockaddrNetlink, error) {
s, err := syscall.Socket(syscall.AF_NETLINK, syscall.SOCK_RAW, syscall.NETLINK_NETFILTER)
if err != nil {
return 0, nil, err
}
lsa := &syscall.SockaddrNetlink{
Family: syscall.AF_NETLINK,
Groups: groups,
}
if err := syscall.Bind(s, lsa); err != nil {
return 0, nil, err
}
if bufferSize > 0 {
// Speculatively try SO_RCVBUFFORCE which needs CAP_NET_ADMIN
if err := syscall.SetsockoptInt(s, syscall.SOL_SOCKET, syscall.SO_RCVBUFFORCE, bufferSize); err != nil {
// and if that doesn't work fall back to the ordinary SO_RCVBUF
if err := syscall.SetsockoptInt(s, syscall.SOL_SOCKET, syscall.SO_RCVBUF, bufferSize); err != nil {
return 0, nil, err
}
}
}
return s, lsa, nil
}
// Make syscall asking for all connections. Invoke 'cb' for each connection.
func queryAllConnections(bufferSize int, cb func(Conn), inetFamily uint8) error {
s, lsa, err := connectNetfilter(bufferSize, 0)
if err != nil {
return err
}
defer syscall.Close(s)
msg := ConntrackListReq{
Header: syscall.NlMsghdr{
Len: syscall.NLMSG_HDRLEN + sizeofGenmsg,
Type: (NFNL_SUBSYS_CTNETLINK << 8) | uint16(IpctnlMsgCtGet),
Flags: syscall.NLM_F_REQUEST | syscall.NLM_F_DUMP,
Pid: 0,
Seq: 0,
},
Body: unix.Nfgenmsg{
Nfgen_family: inetFamily,
Version: NFNETLINK_V0,
Res_id: 0,
},
}
wb := msg.toWireFormat()
// fmt.Printf("msg bytes: %q\n", wb)
if err := syscall.Sendto(s, wb, 0, lsa); err != nil {
return err
}
return readMsgs(s, cb)
}
// Stream all connections instead of query for all of them at once.
func StreamAllConnections(inetFamily uint8) chan Conn {
ch := make(chan Conn, 1)
go func() {
queryAllConnections(0, func(c Conn) {
ch <- c
}, inetFamily)
close(ch)
}()
return ch
}
// Lists all the connections that conntrack is tracking.
func Connections(inetFamily uint8) ([]Conn, error) {
return ConnectionsSize(0, inetFamily)
}
// Lists all the connections that conntrack is tracking, using specified netlink buffer size.
func ConnectionsSize(bufferSize int, inetFamily uint8) ([]Conn, error) {
var conns []Conn
queryAllConnections(bufferSize, func(c Conn) {
conns = append(conns, c)
}, inetFamily)
return conns, nil
}
// Established lists all established TCP connections.
func Established(inetFamily uint8) ([]ConnTCP, error) {
var conns []ConnTCP
local := localIPs()
err := queryAllConnections(0, func(c Conn) {
if c.MsgType != NfctMsgUpdate {
fmt.Printf("msg isn't an update: %d\n", c.MsgType)
return
}
if c.TCPState != "ESTABLISHED" {
// fmt.Printf("state isn't ESTABLISHED: %s\n", c.TCPState)
return
}
if tc := c.ConnTCP(local); tc != nil {
conns = append(conns, *tc)
}
}, inetFamily)
if err != nil {
return nil, err
}
return conns, nil
}
// Follow gives a channel with all changes.
func Follow(flags uint32) (<-chan Conn, func(), error) {
return FollowSize(0, flags)
}
// Follow gives a channel with all changes, , using specified netlink buffer size.
func FollowSize(bufferSize int, flags uint32) (<-chan Conn, func(), error) {
var closing bool
s, _, err := connectNetfilter(bufferSize, flags)
stop := func() {
closing = true
syscall.Close(s)
}
if err != nil {
return nil, stop, err
}
res := make(chan Conn, 1)
go func() {
defer syscall.Close(s)
if err := readMsgs(s, func(c Conn) {
// if conn.TCPState != 3 {
// // 3 is TCP established.
// continue
// }
res <- c
}); err != nil && !closing {
panic(err)
}
}()
return res, stop, nil
}
func readMsgs(s int, cb func(Conn)) error {
rb := make([]byte, 2*syscall.Getpagesize())
loop:
for {
nr, _, err := syscall.Recvfrom(s, rb, 0)
if err == syscall.ENOBUFS {
// ENOBUF means we miss some events here. No way around it. That's life.
cb(Conn{Err: syscall.ENOBUFS})
continue
} else if err != nil {
return err
}
msgs, err := syscall.ParseNetlinkMessage(rb[:nr])
if err != nil {
return err
}
for _, msg := range msgs {
if msg.Header.Type == unix.NLMSG_ERROR {
return errors.New("NLMSG_ERROR")
}
if msg.Header.Type == unix.NLMSG_DONE {
break loop
}
if nflnSubsysID(msg.Header.Type) != NFNL_SUBSYS_CTNETLINK {
return fmt.Errorf(
"unexpected subsys_id: %d\n",
nflnSubsysID(msg.Header.Type),
)
}
conn, err := parsePayload(msg.Data[sizeofGenmsg:])
if err != nil {
return err
} | if msg.Header.Flags&(syscall.NLM_F_CREATE|syscall.NLM_F_EXCL) > 0 {
conn.MsgType = NfctMsgNew
}
case IpctnlMsgCtDelete:
conn.MsgType = NfctMsgDestroy
}
cb(*conn)
}
}
return nil
}
type Tuple struct {
Proto uint8
Src net.IP
SrcPort uint16
Dst net.IP
DstPort uint16
// Flow stats.
Bytes uint64
Packets uint64
// ICMP stuff.
IcmpId uint16
IcmpType uint8
IcmpCode uint8
}
func (t Tuple) String() string {
return fmt.Sprintf("src=%v dst=%v sport=%d dport=%d packets=%d size=%d",t.Src,t.Dst,t.SrcPort,t.DstPort,t.Packets,t.Bytes)
}
type Conn struct {
MsgType NfConntrackMsg
TCPState string
Status CtStatus
Orig Tuple
Reply Tuple
// ct.mark, used to set permission type of the flow.
CtMark uint32
// ct.id, used to identify connections.
CtId uint32
// For multitenancy.
Zone uint16
// Error, if any.
Err error
}
// String renders the connection as
// "<proto> <tcp-state> <orig-tuple> <reply-tuple> mark=<mark>\n".
func (c Conn) String() string {
	line := fmt.Sprintf(
		"%d %s %v %v mark=%d\n",
		c.Orig.Proto, c.TCPState, c.Orig, c.Reply, c.CtMark,
	)
	return line
}
// ConnTCP decides which way this connection is going and makes a ConnTCP.
func (c Conn) ConnTCP(local map[string]struct{}) *ConnTCP {
// conntrack gives us all connections, even things passing through, but it
// doesn't tell us what the local IP is. So we use `local` as a guide
// what's local.
src := c.Orig.Src.String()
dst := c.Orig.Dst.String()
_, srcLocal := local[src]
_, dstLocal := local[dst]
// If both are local we must just order things predictably.
if srcLocal && dstLocal {
srcLocal = c.Orig.SrcPort < c.Orig.DstPort
}
if srcLocal {
return &ConnTCP{
Local: src,
LocalPort: strconv.Itoa(int(c.Orig.SrcPort)),
Remote: dst,
RemotePort: strconv.Itoa(int(c.Orig.DstPort)),
}
}
if dstLocal {
return &ConnTCP{
Local: dst,
LocalPort: strconv.Itoa(int(c.Orig.DstPort)),
Remote: src,
RemotePort: strconv.Itoa(int(c.Orig.SrcPort)),
}
}
// Neither is local. conntrack also reports NAT connections.
return nil
}
func parsePayload(b []byte) (*Conn, error) {
// Most of this comes from libnetfilter_conntrack/src/conntrack/parse_mnl.c
conn := &Conn{}
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return conn, err
}
for _, attr := range attrs {
switch CtattrType(attr.Typ) {
case CtaTupleOrig:
parseTuple(attr.Msg, &conn.Orig)
case CtaTupleReply:
parseTuple(attr.Msg, &conn.Reply)
case CtaCountersOrig:
conn.Orig.Packets, conn.Orig.Bytes, _ = parseCounters(attr.Msg)
case CtaCountersReply:
conn.Reply.Packets, conn.Reply.Bytes, _ = parseCounters(attr.Msg)
case CtaStatus:
conn.Status = CtStatus(binary.BigEndian.Uint32(attr.Msg))
case CtaProtoinfo:
parseProtoinfo(attr.Msg, conn)
case CtaMark:
conn.CtMark = binary.BigEndian.Uint32(attr.Msg)
case CtaZone:
conn.Zone = binary.BigEndian.Uint16(attr.Msg)
case CtaId:
conn.CtId = binary.BigEndian.Uint32(attr.Msg)
}
}
return conn, nil
}
func parseTuple(b []byte, tuple *Tuple) error {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return fmt.Errorf("invalid tuple attr: %s", err)
}
for _, attr := range attrs {
// fmt.Printf("pl: %d, type: %d, multi: %t, bigend: %t\n", len(attr.Msg), attr.Typ, attr.IsNested, attr.IsNetByteorder)
switch CtattrTuple(attr.Typ) {
case CtaTupleUnspec:
// fmt.Printf("It's a tuple unspec\n")
case CtaTupleIp:
// fmt.Printf("It's a tuple IP\n")
if err := parseIP(attr.Msg, tuple); err != nil {
return err
}
case CtaTupleProto:
// fmt.Printf("It's a tuple proto\n")
parseProto(attr.Msg, tuple)
}
}
return nil
}
func parseCounters(b []byte) (uint64, uint64, error) {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return 0, 0, fmt.Errorf("invalid tuple attr: %s", err)
}
packets := uint64(0)
bytes := uint64(0)
for _, attr := range attrs {
switch CtattrCounters(attr.Typ) {
case CtaCountersPackets:
packets = binary.BigEndian.Uint64(attr.Msg)
case CtaCountersBytes:
bytes = binary.BigEndian.Uint64(attr.Msg)
}
}
return packets, bytes, nil
}
func parseIP(b []byte, tuple *Tuple) error {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return fmt.Errorf("invalid tuple attr: %s", err)
}
for _, attr := range attrs {
switch CtattrIp(attr.Typ) {
case CtaIpV4Src:
tuple.Src = make(net.IP, len(attr.Msg))
copy(tuple.Src, attr.Msg)
case CtaIpV4Dst:
tuple.Dst = make(net.IP, len(attr.Msg))
copy(tuple.Dst, attr.Msg)
case CtaIpV6Src:
// TODO
case CtaIpV6Dst:
// TODO
}
}
return nil
}
func parseProto(b []byte, tuple *Tuple) error {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return fmt.Errorf("invalid tuple attr: %s", err)
}
for _, attr := range attrs {
switch CtattrL4proto(attr.Typ) {
// Protocol number.
case CtaProtoNum:
tuple.Proto = uint8(attr.Msg[0])
// TCP stuff.
case CtaProtoSrcPort:
tuple.SrcPort = binary.BigEndian.Uint16(attr.Msg)
case CtaProtoDstPort:
tuple.DstPort = binary.BigEndian.Uint16(attr.Msg)
// ICMP stuff.
case CtaProtoIcmpId:
tuple.IcmpId = binary.BigEndian.Uint16(attr.Msg)
case CtaProtoIcmpType:
tuple.IcmpType = attr.Msg[0]
case CtaProtoIcmpCode:
tuple.IcmpCode = attr.Msg[0]
}
}
return nil
}
func parseProtoinfo(b []byte, conn *Conn) error {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return fmt.Errorf("invalid tuple attr: %s", err)
}
for _, attr := range attrs {
switch CtattrProtoinfo(attr.Typ) {
case CtaProtoinfoTcp:
if err := parseProtoinfoTCP(attr.Msg, conn); err != nil {
return err
}
default:
// we're not interested in other protocols
}
}
return nil
}
func parseProtoinfoTCP(b []byte, conn *Conn) error {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return fmt.Errorf("invalid tuple attr: %s", err)
}
for _, attr := range attrs {
switch CtattrProtoinfoTcp(attr.Typ) {
case CtaProtoinfoTcpState:
conn.TCPState = tcpState[uint8(attr.Msg[0])]
default:
// not interested
}
}
return nil
} |
// Taken from conntrack/parse.c:__parse_message_type
switch CntlMsgTypes(nflnMsgType(msg.Header.Type)) {
case IpctnlMsgCtNew:
conn.MsgType = NfctMsgUpdate | random_line_split |
client.go | package conntrack
import (
"encoding/binary"
"errors"
"fmt"
"net"
"strconv"
"syscall"
"unsafe"
"golang.org/x/sys/unix"
)
const (
sizeofGenmsg = uint32(unsafe.Sizeof(unix.Nfgenmsg{})) // TODO
)
type ConntrackListReq struct {
Header syscall.NlMsghdr
Body unix.Nfgenmsg
}
func (c *ConntrackListReq) toWireFormat() []byte {
// adapted from syscall/NetlinkRouteRequest.toWireFormat
b := make([]byte, c.Header.Len)
*(*uint32)(unsafe.Pointer(&b[0:4][0])) = c.Header.Len
*(*uint16)(unsafe.Pointer(&b[4:6][0])) = c.Header.Type
*(*uint16)(unsafe.Pointer(&b[6:8][0])) = c.Header.Flags
*(*uint32)(unsafe.Pointer(&b[8:12][0])) = c.Header.Seq
*(*uint32)(unsafe.Pointer(&b[12:16][0])) = c.Header.Pid
b[16] = byte(c.Body.Nfgen_family)
b[17] = byte(c.Body.Version)
*(*uint16)(unsafe.Pointer(&b[18:20][0])) = c.Body.Res_id
return b
}
func connectNetfilter(bufferSize int, groups uint32) (int, *syscall.SockaddrNetlink, error) {
s, err := syscall.Socket(syscall.AF_NETLINK, syscall.SOCK_RAW, syscall.NETLINK_NETFILTER)
if err != nil {
return 0, nil, err
}
lsa := &syscall.SockaddrNetlink{
Family: syscall.AF_NETLINK,
Groups: groups,
}
if err := syscall.Bind(s, lsa); err != nil {
return 0, nil, err
}
if bufferSize > 0 {
// Speculatively try SO_RCVBUFFORCE which needs CAP_NET_ADMIN
if err := syscall.SetsockoptInt(s, syscall.SOL_SOCKET, syscall.SO_RCVBUFFORCE, bufferSize); err != nil {
// and if that doesn't work fall back to the ordinary SO_RCVBUF
if err := syscall.SetsockoptInt(s, syscall.SOL_SOCKET, syscall.SO_RCVBUF, bufferSize); err != nil {
return 0, nil, err
}
}
}
return s, lsa, nil
}
// Make syscall asking for all connections. Invoke 'cb' for each connection.
func queryAllConnections(bufferSize int, cb func(Conn), inetFamily uint8) error {
s, lsa, err := connectNetfilter(bufferSize, 0)
if err != nil {
return err
}
defer syscall.Close(s)
msg := ConntrackListReq{
Header: syscall.NlMsghdr{
Len: syscall.NLMSG_HDRLEN + sizeofGenmsg,
Type: (NFNL_SUBSYS_CTNETLINK << 8) | uint16(IpctnlMsgCtGet),
Flags: syscall.NLM_F_REQUEST | syscall.NLM_F_DUMP,
Pid: 0,
Seq: 0,
},
Body: unix.Nfgenmsg{
Nfgen_family: inetFamily,
Version: NFNETLINK_V0,
Res_id: 0,
},
}
wb := msg.toWireFormat()
// fmt.Printf("msg bytes: %q\n", wb)
if err := syscall.Sendto(s, wb, 0, lsa); err != nil {
return err
}
return readMsgs(s, cb)
}
// Stream all connections instead of query for all of them at once.
func StreamAllConnections(inetFamily uint8) chan Conn {
ch := make(chan Conn, 1)
go func() {
queryAllConnections(0, func(c Conn) {
ch <- c
}, inetFamily)
close(ch)
}()
return ch
}
// Lists all the connections that conntrack is tracking.
func Connections(inetFamily uint8) ([]Conn, error) {
return ConnectionsSize(0, inetFamily)
}
// Lists all the connections that conntrack is tracking, using specified netlink buffer size.
func ConnectionsSize(bufferSize int, inetFamily uint8) ([]Conn, error) {
var conns []Conn
queryAllConnections(bufferSize, func(c Conn) {
conns = append(conns, c)
}, inetFamily)
return conns, nil
}
// Established lists all established TCP connections.
func Established(inetFamily uint8) ([]ConnTCP, error) {
var conns []ConnTCP
local := localIPs()
err := queryAllConnections(0, func(c Conn) {
if c.MsgType != NfctMsgUpdate {
fmt.Printf("msg isn't an update: %d\n", c.MsgType)
return
}
if c.TCPState != "ESTABLISHED" {
// fmt.Printf("state isn't ESTABLISHED: %s\n", c.TCPState)
return
}
if tc := c.ConnTCP(local); tc != nil {
conns = append(conns, *tc)
}
}, inetFamily)
if err != nil {
return nil, err
}
return conns, nil
}
// Follow gives a channel with all changes.
func Follow(flags uint32) (<-chan Conn, func(), error) {
return FollowSize(0, flags)
}
// Follow gives a channel with all changes, , using specified netlink buffer size.
func FollowSize(bufferSize int, flags uint32) (<-chan Conn, func(), error) {
var closing bool
s, _, err := connectNetfilter(bufferSize, flags)
stop := func() {
closing = true
syscall.Close(s)
}
if err != nil {
return nil, stop, err
}
res := make(chan Conn, 1)
go func() {
defer syscall.Close(s)
if err := readMsgs(s, func(c Conn) {
// if conn.TCPState != 3 {
// // 3 is TCP established.
// continue
// }
res <- c
}); err != nil && !closing {
panic(err)
}
}()
return res, stop, nil
}
func readMsgs(s int, cb func(Conn)) error {
rb := make([]byte, 2*syscall.Getpagesize())
loop:
for {
nr, _, err := syscall.Recvfrom(s, rb, 0)
if err == syscall.ENOBUFS {
// ENOBUF means we miss some events here. No way around it. That's life.
cb(Conn{Err: syscall.ENOBUFS})
continue
} else if err != nil {
return err
}
msgs, err := syscall.ParseNetlinkMessage(rb[:nr])
if err != nil {
return err
}
for _, msg := range msgs {
if msg.Header.Type == unix.NLMSG_ERROR {
return errors.New("NLMSG_ERROR")
}
if msg.Header.Type == unix.NLMSG_DONE {
break loop
}
if nflnSubsysID(msg.Header.Type) != NFNL_SUBSYS_CTNETLINK {
return fmt.Errorf(
"unexpected subsys_id: %d\n",
nflnSubsysID(msg.Header.Type),
)
}
conn, err := parsePayload(msg.Data[sizeofGenmsg:])
if err != nil {
return err
}
// Taken from conntrack/parse.c:__parse_message_type
switch CntlMsgTypes(nflnMsgType(msg.Header.Type)) {
case IpctnlMsgCtNew:
conn.MsgType = NfctMsgUpdate
if msg.Header.Flags&(syscall.NLM_F_CREATE|syscall.NLM_F_EXCL) > 0 {
conn.MsgType = NfctMsgNew
}
case IpctnlMsgCtDelete:
conn.MsgType = NfctMsgDestroy
}
cb(*conn)
}
}
return nil
}
type Tuple struct {
Proto uint8
Src net.IP
SrcPort uint16
Dst net.IP
DstPort uint16
// Flow stats.
Bytes uint64
Packets uint64
// ICMP stuff.
IcmpId uint16
IcmpType uint8
IcmpCode uint8
}
func (t Tuple) String() string {
return fmt.Sprintf("src=%v dst=%v sport=%d dport=%d packets=%d size=%d",t.Src,t.Dst,t.SrcPort,t.DstPort,t.Packets,t.Bytes)
}
type Conn struct {
MsgType NfConntrackMsg
TCPState string
Status CtStatus
Orig Tuple
Reply Tuple
// ct.mark, used to set permission type of the flow.
CtMark uint32
// ct.id, used to identify connections.
CtId uint32
// For multitenancy.
Zone uint16
// Error, if any.
Err error
}
func (c Conn) String() string |
// ConnTCP decides which way this connection is going and makes a ConnTCP.
func (c Conn) ConnTCP(local map[string]struct{}) *ConnTCP {
// conntrack gives us all connections, even things passing through, but it
// doesn't tell us what the local IP is. So we use `local` as a guide
// what's local.
src := c.Orig.Src.String()
dst := c.Orig.Dst.String()
_, srcLocal := local[src]
_, dstLocal := local[dst]
// If both are local we must just order things predictably.
if srcLocal && dstLocal {
srcLocal = c.Orig.SrcPort < c.Orig.DstPort
}
if srcLocal {
return &ConnTCP{
Local: src,
LocalPort: strconv.Itoa(int(c.Orig.SrcPort)),
Remote: dst,
RemotePort: strconv.Itoa(int(c.Orig.DstPort)),
}
}
if dstLocal {
return &ConnTCP{
Local: dst,
LocalPort: strconv.Itoa(int(c.Orig.DstPort)),
Remote: src,
RemotePort: strconv.Itoa(int(c.Orig.SrcPort)),
}
}
// Neither is local. conntrack also reports NAT connections.
return nil
}
func parsePayload(b []byte) (*Conn, error) {
// Most of this comes from libnetfilter_conntrack/src/conntrack/parse_mnl.c
conn := &Conn{}
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return conn, err
}
for _, attr := range attrs {
switch CtattrType(attr.Typ) {
case CtaTupleOrig:
parseTuple(attr.Msg, &conn.Orig)
case CtaTupleReply:
parseTuple(attr.Msg, &conn.Reply)
case CtaCountersOrig:
conn.Orig.Packets, conn.Orig.Bytes, _ = parseCounters(attr.Msg)
case CtaCountersReply:
conn.Reply.Packets, conn.Reply.Bytes, _ = parseCounters(attr.Msg)
case CtaStatus:
conn.Status = CtStatus(binary.BigEndian.Uint32(attr.Msg))
case CtaProtoinfo:
parseProtoinfo(attr.Msg, conn)
case CtaMark:
conn.CtMark = binary.BigEndian.Uint32(attr.Msg)
case CtaZone:
conn.Zone = binary.BigEndian.Uint16(attr.Msg)
case CtaId:
conn.CtId = binary.BigEndian.Uint32(attr.Msg)
}
}
return conn, nil
}
func parseTuple(b []byte, tuple *Tuple) error {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return fmt.Errorf("invalid tuple attr: %s", err)
}
for _, attr := range attrs {
// fmt.Printf("pl: %d, type: %d, multi: %t, bigend: %t\n", len(attr.Msg), attr.Typ, attr.IsNested, attr.IsNetByteorder)
switch CtattrTuple(attr.Typ) {
case CtaTupleUnspec:
// fmt.Printf("It's a tuple unspec\n")
case CtaTupleIp:
// fmt.Printf("It's a tuple IP\n")
if err := parseIP(attr.Msg, tuple); err != nil {
return err
}
case CtaTupleProto:
// fmt.Printf("It's a tuple proto\n")
parseProto(attr.Msg, tuple)
}
}
return nil
}
func parseCounters(b []byte) (uint64, uint64, error) {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return 0, 0, fmt.Errorf("invalid tuple attr: %s", err)
}
packets := uint64(0)
bytes := uint64(0)
for _, attr := range attrs {
switch CtattrCounters(attr.Typ) {
case CtaCountersPackets:
packets = binary.BigEndian.Uint64(attr.Msg)
case CtaCountersBytes:
bytes = binary.BigEndian.Uint64(attr.Msg)
}
}
return packets, bytes, nil
}
func parseIP(b []byte, tuple *Tuple) error {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return fmt.Errorf("invalid tuple attr: %s", err)
}
for _, attr := range attrs {
switch CtattrIp(attr.Typ) {
case CtaIpV4Src:
tuple.Src = make(net.IP, len(attr.Msg))
copy(tuple.Src, attr.Msg)
case CtaIpV4Dst:
tuple.Dst = make(net.IP, len(attr.Msg))
copy(tuple.Dst, attr.Msg)
case CtaIpV6Src:
// TODO
case CtaIpV6Dst:
// TODO
}
}
return nil
}
func parseProto(b []byte, tuple *Tuple) error {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return fmt.Errorf("invalid tuple attr: %s", err)
}
for _, attr := range attrs {
switch CtattrL4proto(attr.Typ) {
// Protocol number.
case CtaProtoNum:
tuple.Proto = uint8(attr.Msg[0])
// TCP stuff.
case CtaProtoSrcPort:
tuple.SrcPort = binary.BigEndian.Uint16(attr.Msg)
case CtaProtoDstPort:
tuple.DstPort = binary.BigEndian.Uint16(attr.Msg)
// ICMP stuff.
case CtaProtoIcmpId:
tuple.IcmpId = binary.BigEndian.Uint16(attr.Msg)
case CtaProtoIcmpType:
tuple.IcmpType = attr.Msg[0]
case CtaProtoIcmpCode:
tuple.IcmpCode = attr.Msg[0]
}
}
return nil
}
func parseProtoinfo(b []byte, conn *Conn) error {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return fmt.Errorf("invalid tuple attr: %s", err)
}
for _, attr := range attrs {
switch CtattrProtoinfo(attr.Typ) {
case CtaProtoinfoTcp:
if err := parseProtoinfoTCP(attr.Msg, conn); err != nil {
return err
}
default:
// we're not interested in other protocols
}
}
return nil
}
func parseProtoinfoTCP(b []byte, conn *Conn) error {
var attrSpace [16]Attr
attrs, err := parseAttrs(b, attrSpace[0:0])
if err != nil {
return fmt.Errorf("invalid tuple attr: %s", err)
}
for _, attr := range attrs {
switch CtattrProtoinfoTcp(attr.Typ) {
case CtaProtoinfoTcpState:
conn.TCPState = tcpState[uint8(attr.Msg[0])]
default:
// not interested
}
}
return nil
}
| {
return fmt.Sprintf("%d %s %v %v mark=%d\n",c.Orig.Proto,c.TCPState,c.Orig,c.Reply,c.CtMark)
} | identifier_body |
mongos.go | package psmdb
import (
"fmt"
"sort"
"strconv"
"strings"
"github.com/go-logr/logr"
api "github.com/percona/percona-server-mongodb-operator/pkg/apis/psmdb/v1"
"github.com/percona/percona-server-mongodb-operator/version"
appsv1 "k8s.io/api/apps/v1"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/util/intstr"
)
// MongosDeployment returns a skeleton mongos Deployment carrying only type
// and object metadata; the spec is filled in separately by
// MongosDeploymentSpec.
func MongosDeployment(cr *api.PerconaServerMongoDB) *appsv1.Deployment {
	nn := cr.MongosNamespacedName()
	dep := appsv1.Deployment{
		TypeMeta: metav1.TypeMeta{
			APIVersion: "apps/v1",
			Kind:       "Deployment",
		},
		ObjectMeta: metav1.ObjectMeta{
			Name:      nn.Name,
			Namespace: nn.Namespace,
		},
	}
	return &dep
}
// MongosDeploymentSpec assembles the full DeploymentSpec for the mongos
// Deployment: selector/template labels, the mongos container plus any
// sidecars, init containers mirroring the mongos resource limits, volumes,
// and a rolling-update strategy with MaxSurge=0.
func MongosDeploymentSpec(cr *api.PerconaServerMongoDB, operatorPod corev1.Pod, log logr.Logger, customConf CustomConfig, cfgInstances []string) (appsv1.DeploymentSpec, error) {
	// Base labels used both as the selector and on the pod template.
	ls := map[string]string{
		"app.kubernetes.io/name":       "percona-server-mongodb",
		"app.kubernetes.io/instance":   cr.Name,
		"app.kubernetes.io/component":  "mongos",
		"app.kubernetes.io/managed-by": "percona-server-mongodb-operator",
		"app.kubernetes.io/part-of":    "percona-server-mongodb",
	}
	// User-supplied mongos labels are merged in and may override the base set.
	if cr.Spec.Sharding.Mongos.Labels != nil {
		for k, v := range cr.Spec.Sharding.Mongos.Labels {
			ls[k] = v
		}
	}
	c, err := mongosContainer(cr, customConf.Type.IsUsable(), cfgInstances)
	if err != nil {
		return appsv1.DeploymentSpec{}, fmt.Errorf("failed to create container %v", err)
	}
	// Init containers inherit the mongos container's resource settings.
	initContainers := InitContainers(cr, operatorPod)
	for i := range initContainers {
		initContainers[i].Resources.Limits = c.Resources.Limits
		initContainers[i].Resources.Requests = c.Resources.Requests
	}
	// WithSidecars appends user sidecars; ok is false when a sidecar clashes
	// with the mongos container name, in which case it is skipped with a log.
	containers, ok := cr.Spec.Sharding.Mongos.MultiAZ.WithSidecars(c)
	if !ok {
		log.Info(fmt.Sprintf("Sidecar container name cannot be %s. It's skipped", c.Name))
	}
	annotations := cr.Spec.Sharding.Mongos.MultiAZ.Annotations
	if annotations == nil {
		annotations = make(map[string]string)
	}
	// Stamp the custom-config hash so pods roll when the config changes.
	if customConf.Type.IsUsable() {
		annotations["percona.com/configuration-hash"] = customConf.HashHex
	}
	// MaxSurge=0 keeps the replica count from exceeding Size during updates.
	zero := intstr.FromInt(0)
	return appsv1.DeploymentSpec{
		Replicas: &cr.Spec.Sharding.Mongos.Size,
		Selector: &metav1.LabelSelector{
			MatchLabels: ls,
		},
		Template: corev1.PodTemplateSpec{
			ObjectMeta: metav1.ObjectMeta{
				Labels:      ls,
				Annotations: annotations,
			},
			Spec: corev1.PodSpec{
				SecurityContext:   cr.Spec.Sharding.Mongos.PodSecurityContext,
				Affinity:          PodAffinity(cr, cr.Spec.Sharding.Mongos.MultiAZ.Affinity, ls),
				NodeSelector:      cr.Spec.Sharding.Mongos.MultiAZ.NodeSelector,
				Tolerations:       cr.Spec.Sharding.Mongos.MultiAZ.Tolerations,
				PriorityClassName: cr.Spec.Sharding.Mongos.MultiAZ.PriorityClassName,
				RestartPolicy:     corev1.RestartPolicyAlways,
				ImagePullSecrets:  cr.Spec.ImagePullSecrets,
				Containers:        containers,
				InitContainers:    initContainers,
				Volumes:           volumes(cr, customConf.Type),
				SchedulerName:     cr.Spec.SchedulerName,
				RuntimeClassName:  cr.Spec.Sharding.Mongos.MultiAZ.RuntimeClassName,
			},
		},
		Strategy: appsv1.DeploymentStrategy{
			Type: appsv1.RollingUpdateDeploymentStrategyType,
			RollingUpdate: &appsv1.RollingUpdateDeployment{
				MaxSurge: &zero,
			},
		},
	}, nil
}
// InitContainers returns the entrypoint init container list for the mongos
// pod. The init image defaults to the operator's own image; when the CR
// version differs from the operator version, the operator image repository is
// retagged with the CR's version.
func InitContainers(cr *api.PerconaServerMongoDB, operatorPod corev1.Pod) []corev1.Container {
	image := cr.Spec.InitImage
	if image == "" {
		operatorImage := operatorPod.Spec.Containers[0].Image
		if cr.CompareVersion(version.Version) == 0 {
			image = operatorImage
		} else {
			repo := strings.Split(operatorImage, ":")[0]
			image = repo + ":" + cr.Spec.CRVersion
		}
	}
	return []corev1.Container{EntrypointInitContainer(image, cr.Spec.ImagePullPolicy)}
}
func | (cr *api.PerconaServerMongoDB, useConfigFile bool, cfgInstances []string) (corev1.Container, error) {
fvar := false
resources, err := CreateResources(cr.Spec.Sharding.Mongos.ResourcesSpec)
if err != nil {
return corev1.Container{}, fmt.Errorf("resource creation: %v", err)
}
volumes := []corev1.VolumeMount{
{
Name: MongodDataVolClaimName,
MountPath: MongodContainerDataDir,
},
{
Name: InternalKey(cr),
MountPath: mongodSecretsDir,
ReadOnly: true,
},
{
Name: "ssl",
MountPath: sslDir,
ReadOnly: true,
},
{
Name: "ssl-internal",
MountPath: sslInternalDir,
ReadOnly: true,
},
}
if useConfigFile {
volumes = append(volumes, corev1.VolumeMount{
Name: "config",
MountPath: mongosConfigDir,
})
}
if cr.CompareVersion("1.8.0") >= 0 {
volumes = append(volumes, corev1.VolumeMount{
Name: "users-secret-file",
MountPath: "/etc/users-secret",
ReadOnly: true,
})
}
container := corev1.Container{
Name: "mongos",
Image: cr.Spec.Image,
ImagePullPolicy: cr.Spec.ImagePullPolicy,
Args: mongosContainerArgs(cr, resources, useConfigFile, cfgInstances),
Ports: []corev1.ContainerPort{
{
Name: mongosPortName,
HostPort: cr.Spec.Sharding.Mongos.HostPort,
ContainerPort: cr.Spec.Sharding.Mongos.Port,
},
},
Env: []corev1.EnvVar{
{
Name: "MONGODB_PORT",
Value: strconv.Itoa(int(cr.Spec.Sharding.Mongos.Port)),
},
},
EnvFrom: []corev1.EnvFromSource{
{
SecretRef: &corev1.SecretEnvSource{
LocalObjectReference: corev1.LocalObjectReference{
Name: cr.Spec.Secrets.Users,
},
Optional: &fvar,
},
},
{
SecretRef: &corev1.SecretEnvSource{
LocalObjectReference: corev1.LocalObjectReference{
Name: api.UserSecretName(cr),
},
Optional: &fvar,
},
},
},
WorkingDir: MongodContainerDataDir,
LivenessProbe: &cr.Spec.Sharding.Mongos.LivenessProbe.Probe,
ReadinessProbe: cr.Spec.Sharding.Mongos.ReadinessProbe,
SecurityContext: cr.Spec.Sharding.Mongos.ContainerSecurityContext,
Resources: resources,
VolumeMounts: volumes,
Command: []string{"/data/db/ps-entry.sh"},
}
return container, nil
}
// mongosContainerArgs builds the command-line arguments for the mongos
// container: bind/port/configdb basics, TLS/cluster-auth mode, optional
// log redaction, cursor timeout, audit logging and custom config file.
// The resources parameter is currently unused but kept for interface
// stability with callers.
func mongosContainerArgs(cr *api.PerconaServerMongoDB, resources corev1.ResourceRequirements, useConfigFile bool, cfgInstances []string) []string {
	mdSpec := cr.Spec.Mongod
	msSpec := cr.Spec.Sharding.Mongos
	cfgRs := cr.Spec.Sharding.ConfigsvrReplSet
	// sort config instances to prevent unnecessary updates
	sort.Strings(cfgInstances)
	configDB := fmt.Sprintf("%s/%s", cfgRs.Name, strings.Join(cfgInstances, ","))
	args := []string{
		"mongos",
		"--bind_ip_all",
		"--port=" + strconv.Itoa(int(msSpec.Port)),
		"--sslAllowInvalidCertificates",
		"--configdb",
		configDB,
	}
	if cr.CompareVersion("1.7.0") >= 0 {
		args = append(args,
			"--relaxPermChecks",
		)
	}
	if cr.Spec.UnsafeConf {
		args = append(args,
			"--clusterAuthMode=keyFile",
			"--keyFile="+mongodSecretsDir+"/mongodb-key",
		)
	} else {
		args = append(args,
			"--sslMode=preferSSL",
			"--clusterAuthMode=x509",
		)
	}
	if mdSpec.Security != nil && mdSpec.Security.RedactClientLogData {
		args = append(args, "--redactClientLogData")
	}
	if msSpec.SetParameter != nil {
		if msSpec.SetParameter.CursorTimeoutMillis > 0 {
			args = append(args,
				"--setParameter",
				"cursorTimeoutMillis="+strconv.Itoa(msSpec.SetParameter.CursorTimeoutMillis),
			)
		}
	}
	if msSpec.AuditLog != nil && msSpec.AuditLog.Destination == api.AuditLogDestinationFile {
		// Fixed: the original wrote the "{}" default back into
		// msSpec.AuditLog.Filter, mutating the shared CR spec as a side
		// effect of building args. Use a local so the CR stays untouched;
		// the generated arguments are identical.
		filter := msSpec.AuditLog.Filter
		if filter == "" {
			filter = "{}"
		}
		args = append(args,
			"--auditDestination=file",
			"--auditFilter="+filter,
			"--auditFormat="+string(msSpec.AuditLog.Format),
		)
		// BSON format gets a .bson audit file; everything else (including
		// the default) is written as JSON.
		switch msSpec.AuditLog.Format {
		case api.AuditLogFormatBSON:
			args = append(args, "--auditPath="+MongodContainerDataDir+"/auditLog.bson")
		default:
			args = append(args, "--auditPath="+MongodContainerDataDir+"/auditLog.json")
		}
	}
	if useConfigFile {
		args = append(args, fmt.Sprintf("--config=%s/mongos.conf", mongosConfigDir))
	}
	return args
}
// volumes returns the pod volumes for the mongos Deployment: the internal
// auth key secret, SSL and internal-SSL cert secrets, an emptyDir data
// volume, plus (version/config dependent) the users secret file and the
// custom mongos config volume.
func volumes(cr *api.PerconaServerMongoDB, configSource VolumeSourceType) []corev1.Volume {
	fvar, tvar := false, true
	volumes := []corev1.Volume{
		{
			Name: InternalKey(cr),
			VolumeSource: corev1.VolumeSource{
				Secret: &corev1.SecretVolumeSource{
					DefaultMode: &secretFileMode,
					SecretName:  InternalKey(cr),
					Optional:    &fvar, // required: keyfile auth depends on it
				},
			},
		},
		{
			Name: "ssl",
			VolumeSource: corev1.VolumeSource{
				Secret: &corev1.SecretVolumeSource{
					SecretName: cr.Spec.Secrets.SSL,
					// Optional only when TLS is disabled via unsafe config.
					Optional:    &cr.Spec.UnsafeConf,
					DefaultMode: &secretFileMode,
				},
			},
		},
		{
			Name: "ssl-internal",
			VolumeSource: corev1.VolumeSource{
				Secret: &corev1.SecretVolumeSource{
					SecretName:  cr.Spec.Secrets.SSLInternal,
					Optional:    &tvar, // always optional
					DefaultMode: &secretFileMode,
				},
			},
		},
		{
			// mongos keeps no data; an emptyDir backs its working directory.
			Name: MongodDataVolClaimName,
			VolumeSource: corev1.VolumeSource{
				EmptyDir: &corev1.EmptyDirVolumeSource{},
			},
		},
	}
	// Users secret file was introduced with CR schema 1.8.0.
	if cr.CompareVersion("1.8.0") >= 0 {
		volumes = append(volumes, corev1.Volume{
			Name: "users-secret-file",
			VolumeSource: corev1.VolumeSource{
				Secret: &corev1.SecretVolumeSource{
					SecretName: api.InternalUserSecretName(cr),
				},
			},
		})
	}
	// Mount the custom mongos configuration only when a usable source
	// (ConfigMap/Secret) exists.
	if configSource.IsUsable() {
		volumes = append(volumes, corev1.Volume{
			Name:         "config",
			VolumeSource: configSource.VolumeSource(MongosCustomConfigName(cr.Name)),
		})
	}
	return volumes
}
// MongosService returns a skeleton Service object named "<cr>-mongos" in the
// CR's namespace, carrying any user-defined expose annotations; the spec is
// filled in separately by MongosServiceSpec.
func MongosService(cr *api.PerconaServerMongoDB) corev1.Service {
	meta := metav1.ObjectMeta{
		Name:      cr.Name + "-" + "mongos",
		Namespace: cr.Namespace,
	}
	svc := corev1.Service{
		TypeMeta: metav1.TypeMeta{
			APIVersion: "v1",
			Kind:       "Service",
		},
		ObjectMeta: meta,
	}
	if mongos := cr.Spec.Sharding.Mongos; mongos != nil {
		svc.Annotations = mongos.Expose.ServiceAnnotations
	}
	return svc
}
// MongosServiceSpec builds the ServiceSpec for the mongos Service: one port,
// the standard mongos selector labels, and a service type derived from the
// configured expose type (NodePort/Local, LoadBalancer/Cluster, else
// ClusterIP).
func MongosServiceSpec(cr *api.PerconaServerMongoDB) corev1.ServiceSpec {
	selector := map[string]string{
		"app.kubernetes.io/name":       "percona-server-mongodb",
		"app.kubernetes.io/instance":   cr.Name,
		"app.kubernetes.io/managed-by": "percona-server-mongodb-operator",
		"app.kubernetes.io/part-of":    "percona-server-mongodb",
		"app.kubernetes.io/component":  "mongos",
	}
	mongos := cr.Spec.Sharding.Mongos
	port := corev1.ServicePort{
		Name:       mongosPortName,
		Port:       mongos.Port,
		TargetPort: intstr.FromInt(int(mongos.Port)),
	}
	spec := corev1.ServiceSpec{
		Ports:                    []corev1.ServicePort{port},
		Selector:                 selector,
		LoadBalancerSourceRanges: mongos.Expose.LoadBalancerSourceRanges,
	}
	exposeType := mongos.Expose.ExposeType
	if exposeType == corev1.ServiceTypeNodePort {
		spec.Type = corev1.ServiceTypeNodePort
		spec.ExternalTrafficPolicy = "Local"
	} else if exposeType == corev1.ServiceTypeLoadBalancer {
		spec.Type = corev1.ServiceTypeLoadBalancer
		spec.ExternalTrafficPolicy = "Cluster"
	} else {
		spec.Type = corev1.ServiceTypeClusterIP
	}
	return spec
}
| mongosContainer | identifier_name |
mongos.go | package psmdb
import (
"fmt"
"sort"
"strconv"
"strings"
"github.com/go-logr/logr"
api "github.com/percona/percona-server-mongodb-operator/pkg/apis/psmdb/v1"
"github.com/percona/percona-server-mongodb-operator/version"
appsv1 "k8s.io/api/apps/v1"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/util/intstr"
)
func MongosDeployment(cr *api.PerconaServerMongoDB) *appsv1.Deployment {
return &appsv1.Deployment{
TypeMeta: metav1.TypeMeta{
APIVersion: "apps/v1",
Kind: "Deployment",
},
ObjectMeta: metav1.ObjectMeta{
Name: cr.MongosNamespacedName().Name,
Namespace: cr.MongosNamespacedName().Namespace,
},
}
}
func MongosDeploymentSpec(cr *api.PerconaServerMongoDB, operatorPod corev1.Pod, log logr.Logger, customConf CustomConfig, cfgInstances []string) (appsv1.DeploymentSpec, error) {
ls := map[string]string{
"app.kubernetes.io/name": "percona-server-mongodb",
"app.kubernetes.io/instance": cr.Name,
"app.kubernetes.io/component": "mongos",
"app.kubernetes.io/managed-by": "percona-server-mongodb-operator",
"app.kubernetes.io/part-of": "percona-server-mongodb",
}
if cr.Spec.Sharding.Mongos.Labels != nil {
for k, v := range cr.Spec.Sharding.Mongos.Labels {
ls[k] = v
}
}
c, err := mongosContainer(cr, customConf.Type.IsUsable(), cfgInstances)
if err != nil {
return appsv1.DeploymentSpec{}, fmt.Errorf("failed to create container %v", err)
}
initContainers := InitContainers(cr, operatorPod)
for i := range initContainers {
initContainers[i].Resources.Limits = c.Resources.Limits
initContainers[i].Resources.Requests = c.Resources.Requests
}
containers, ok := cr.Spec.Sharding.Mongos.MultiAZ.WithSidecars(c)
if !ok {
log.Info(fmt.Sprintf("Sidecar container name cannot be %s. It's skipped", c.Name))
}
annotations := cr.Spec.Sharding.Mongos.MultiAZ.Annotations
if annotations == nil {
annotations = make(map[string]string)
}
if customConf.Type.IsUsable() {
annotations["percona.com/configuration-hash"] = customConf.HashHex
}
zero := intstr.FromInt(0)
return appsv1.DeploymentSpec{
Replicas: &cr.Spec.Sharding.Mongos.Size,
Selector: &metav1.LabelSelector{
MatchLabels: ls,
},
Template: corev1.PodTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Labels: ls,
Annotations: annotations,
},
Spec: corev1.PodSpec{
SecurityContext: cr.Spec.Sharding.Mongos.PodSecurityContext,
Affinity: PodAffinity(cr, cr.Spec.Sharding.Mongos.MultiAZ.Affinity, ls),
NodeSelector: cr.Spec.Sharding.Mongos.MultiAZ.NodeSelector,
Tolerations: cr.Spec.Sharding.Mongos.MultiAZ.Tolerations,
PriorityClassName: cr.Spec.Sharding.Mongos.MultiAZ.PriorityClassName,
RestartPolicy: corev1.RestartPolicyAlways,
ImagePullSecrets: cr.Spec.ImagePullSecrets,
Containers: containers,
InitContainers: initContainers,
Volumes: volumes(cr, customConf.Type),
SchedulerName: cr.Spec.SchedulerName,
RuntimeClassName: cr.Spec.Sharding.Mongos.MultiAZ.RuntimeClassName,
},
},
Strategy: appsv1.DeploymentStrategy{
Type: appsv1.RollingUpdateDeploymentStrategyType,
RollingUpdate: &appsv1.RollingUpdateDeployment{
MaxSurge: &zero,
},
},
}, nil
}
func InitContainers(cr *api.PerconaServerMongoDB, operatorPod corev1.Pod) []corev1.Container {
image := cr.Spec.InitImage
if len(image) == 0 {
if cr.CompareVersion(version.Version) != 0 {
image = strings.Split(operatorPod.Spec.Containers[0].Image, ":")[0] + ":" + cr.Spec.CRVersion
} else {
image = operatorPod.Spec.Containers[0].Image
}
}
return []corev1.Container{EntrypointInitContainer(image, cr.Spec.ImagePullPolicy)}
}
// mongosContainer builds the mongos container for the Deployment pod
// template: resource requirements, secret/ssl/data volume mounts, args,
// port, credential env vars, probes and the ps-entry.sh entrypoint.
func mongosContainer(cr *api.PerconaServerMongoDB, useConfigFile bool, cfgInstances []string) (corev1.Container, error) {
	fvar := false // shared "not optional" flag for the secret env sources
	resources, err := CreateResources(cr.Spec.Sharding.Mongos.ResourcesSpec)
	if err != nil {
		return corev1.Container{}, fmt.Errorf("resource creation: %v", err)
	}
	// Base mounts: data dir, internal auth key, external and internal TLS certs.
	volumes := []corev1.VolumeMount{
		{
			Name:      MongodDataVolClaimName,
			MountPath: MongodContainerDataDir,
		},
		{
			Name:      InternalKey(cr),
			MountPath: mongodSecretsDir,
			ReadOnly:  true,
		},
		{
			Name:      "ssl",
			MountPath: sslDir,
			ReadOnly:  true,
		},
		{
			Name:      "ssl-internal",
			MountPath: sslInternalDir,
			ReadOnly:  true,
		},
	}
	// Mount the custom mongos config only when a usable source exists;
	// must stay in sync with the "config" volume added by volumes().
	if useConfigFile {
		volumes = append(volumes, corev1.VolumeMount{
			Name:      "config",
			MountPath: mongosConfigDir,
		})
	}
	// Users secret file was introduced with CR schema 1.8.0.
	if cr.CompareVersion("1.8.0") >= 0 {
		volumes = append(volumes, corev1.VolumeMount{
			Name:      "users-secret-file",
			MountPath: "/etc/users-secret",
			ReadOnly:  true,
		})
	}
	container := corev1.Container{
		Name:            "mongos",
		Image:           cr.Spec.Image,
		ImagePullPolicy: cr.Spec.ImagePullPolicy,
		Args:            mongosContainerArgs(cr, resources, useConfigFile, cfgInstances),
		Ports: []corev1.ContainerPort{
			{
				Name:          mongosPortName,
				HostPort:      cr.Spec.Sharding.Mongos.HostPort,
				ContainerPort: cr.Spec.Sharding.Mongos.Port,
			},
		},
		Env: []corev1.EnvVar{
			{
				Name:  "MONGODB_PORT",
				Value: strconv.Itoa(int(cr.Spec.Sharding.Mongos.Port)),
			},
		},
		// Credentials come from two required secrets: the user-facing one
		// and the operator-internal one.
		EnvFrom: []corev1.EnvFromSource{
			{
				SecretRef: &corev1.SecretEnvSource{
					LocalObjectReference: corev1.LocalObjectReference{
						Name: cr.Spec.Secrets.Users,
					},
					Optional: &fvar,
				},
			},
			{
				SecretRef: &corev1.SecretEnvSource{
					LocalObjectReference: corev1.LocalObjectReference{
						Name: api.UserSecretName(cr),
					},
					Optional: &fvar,
				},
			},
		},
		WorkingDir:      MongodContainerDataDir,
		LivenessProbe:   &cr.Spec.Sharding.Mongos.LivenessProbe.Probe,
		ReadinessProbe:  cr.Spec.Sharding.Mongos.ReadinessProbe,
		SecurityContext: cr.Spec.Sharding.Mongos.ContainerSecurityContext,
		Resources:       resources,
		VolumeMounts:    volumes,
		Command:         []string{"/data/db/ps-entry.sh"},
	}
	return container, nil
}
func mongosContainerArgs(cr *api.PerconaServerMongoDB, resources corev1.ResourceRequirements, useConfigFile bool, cfgInstances []string) []string {
mdSpec := cr.Spec.Mongod
msSpec := cr.Spec.Sharding.Mongos
cfgRs := cr.Spec.Sharding.ConfigsvrReplSet
// sort config instances to prevent unnecessary updates
sort.Strings(cfgInstances)
configDB := fmt.Sprintf("%s/%s", cfgRs.Name, strings.Join(cfgInstances, ","))
args := []string{
"mongos",
"--bind_ip_all",
"--port=" + strconv.Itoa(int(msSpec.Port)),
"--sslAllowInvalidCertificates",
"--configdb",
configDB,
}
if cr.CompareVersion("1.7.0") >= 0 {
args = append(args,
"--relaxPermChecks",
)
}
if cr.Spec.UnsafeConf {
args = append(args,
"--clusterAuthMode=keyFile",
"--keyFile="+mongodSecretsDir+"/mongodb-key",
)
} else {
args = append(args,
"--sslMode=preferSSL",
"--clusterAuthMode=x509",
)
}
if mdSpec.Security != nil && mdSpec.Security.RedactClientLogData {
args = append(args, "--redactClientLogData")
}
if msSpec.SetParameter != nil {
if msSpec.SetParameter.CursorTimeoutMillis > 0 {
args = append(args,
"--setParameter",
"cursorTimeoutMillis="+strconv.Itoa(msSpec.SetParameter.CursorTimeoutMillis),
)
}
}
if msSpec.AuditLog != nil && msSpec.AuditLog.Destination == api.AuditLogDestinationFile {
if msSpec.AuditLog.Filter == "" {
msSpec.AuditLog.Filter = "{}"
}
args = append(args,
"--auditDestination=file",
"--auditFilter="+msSpec.AuditLog.Filter,
"--auditFormat="+string(msSpec.AuditLog.Format),
)
switch msSpec.AuditLog.Format {
case api.AuditLogFormatBSON:
args = append(args, "--auditPath="+MongodContainerDataDir+"/auditLog.bson")
default:
args = append(args, "--auditPath="+MongodContainerDataDir+"/auditLog.json")
}
}
if useConfigFile |
return args
}
func volumes(cr *api.PerconaServerMongoDB, configSource VolumeSourceType) []corev1.Volume {
fvar, tvar := false, true
volumes := []corev1.Volume{
{
Name: InternalKey(cr),
VolumeSource: corev1.VolumeSource{
Secret: &corev1.SecretVolumeSource{
DefaultMode: &secretFileMode,
SecretName: InternalKey(cr),
Optional: &fvar,
},
},
},
{
Name: "ssl",
VolumeSource: corev1.VolumeSource{
Secret: &corev1.SecretVolumeSource{
SecretName: cr.Spec.Secrets.SSL,
Optional: &cr.Spec.UnsafeConf,
DefaultMode: &secretFileMode,
},
},
},
{
Name: "ssl-internal",
VolumeSource: corev1.VolumeSource{
Secret: &corev1.SecretVolumeSource{
SecretName: cr.Spec.Secrets.SSLInternal,
Optional: &tvar,
DefaultMode: &secretFileMode,
},
},
},
{
Name: MongodDataVolClaimName,
VolumeSource: corev1.VolumeSource{
EmptyDir: &corev1.EmptyDirVolumeSource{},
},
},
}
if cr.CompareVersion("1.8.0") >= 0 {
volumes = append(volumes, corev1.Volume{
Name: "users-secret-file",
VolumeSource: corev1.VolumeSource{
Secret: &corev1.SecretVolumeSource{
SecretName: api.InternalUserSecretName(cr),
},
},
})
}
if configSource.IsUsable() {
volumes = append(volumes, corev1.Volume{
Name: "config",
VolumeSource: configSource.VolumeSource(MongosCustomConfigName(cr.Name)),
})
}
return volumes
}
func MongosService(cr *api.PerconaServerMongoDB) corev1.Service {
svc := corev1.Service{
TypeMeta: metav1.TypeMeta{
APIVersion: "v1",
Kind: "Service",
},
ObjectMeta: metav1.ObjectMeta{
Name: cr.Name + "-" + "mongos",
Namespace: cr.Namespace,
},
}
if cr.Spec.Sharding.Mongos != nil {
svc.Annotations = cr.Spec.Sharding.Mongos.Expose.ServiceAnnotations
}
return svc
}
func MongosServiceSpec(cr *api.PerconaServerMongoDB) corev1.ServiceSpec {
ls := map[string]string{
"app.kubernetes.io/name": "percona-server-mongodb",
"app.kubernetes.io/instance": cr.Name,
"app.kubernetes.io/managed-by": "percona-server-mongodb-operator",
"app.kubernetes.io/part-of": "percona-server-mongodb",
"app.kubernetes.io/component": "mongos",
}
spec := corev1.ServiceSpec{
Ports: []corev1.ServicePort{
{
Name: mongosPortName,
Port: cr.Spec.Sharding.Mongos.Port,
TargetPort: intstr.FromInt(int(cr.Spec.Sharding.Mongos.Port)),
},
},
Selector: ls,
LoadBalancerSourceRanges: cr.Spec.Sharding.Mongos.Expose.LoadBalancerSourceRanges,
}
switch cr.Spec.Sharding.Mongos.Expose.ExposeType {
case corev1.ServiceTypeNodePort:
spec.Type = corev1.ServiceTypeNodePort
spec.ExternalTrafficPolicy = "Local"
case corev1.ServiceTypeLoadBalancer:
spec.Type = corev1.ServiceTypeLoadBalancer
spec.ExternalTrafficPolicy = "Cluster"
default:
spec.Type = corev1.ServiceTypeClusterIP
}
return spec
}
| {
args = append(args, fmt.Sprintf("--config=%s/mongos.conf", mongosConfigDir))
} | conditional_block |
mongos.go | package psmdb
import (
"fmt"
"sort"
"strconv"
"strings"
"github.com/go-logr/logr"
api "github.com/percona/percona-server-mongodb-operator/pkg/apis/psmdb/v1"
"github.com/percona/percona-server-mongodb-operator/version"
appsv1 "k8s.io/api/apps/v1"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/util/intstr"
)
func MongosDeployment(cr *api.PerconaServerMongoDB) *appsv1.Deployment {
return &appsv1.Deployment{
TypeMeta: metav1.TypeMeta{
APIVersion: "apps/v1",
Kind: "Deployment",
},
ObjectMeta: metav1.ObjectMeta{
Name: cr.MongosNamespacedName().Name,
Namespace: cr.MongosNamespacedName().Namespace,
},
}
}
func MongosDeploymentSpec(cr *api.PerconaServerMongoDB, operatorPod corev1.Pod, log logr.Logger, customConf CustomConfig, cfgInstances []string) (appsv1.DeploymentSpec, error) {
ls := map[string]string{
"app.kubernetes.io/name": "percona-server-mongodb",
"app.kubernetes.io/instance": cr.Name,
"app.kubernetes.io/component": "mongos",
"app.kubernetes.io/managed-by": "percona-server-mongodb-operator",
"app.kubernetes.io/part-of": "percona-server-mongodb",
}
if cr.Spec.Sharding.Mongos.Labels != nil {
for k, v := range cr.Spec.Sharding.Mongos.Labels {
ls[k] = v
}
}
c, err := mongosContainer(cr, customConf.Type.IsUsable(), cfgInstances)
if err != nil {
return appsv1.DeploymentSpec{}, fmt.Errorf("failed to create container %v", err)
}
initContainers := InitContainers(cr, operatorPod)
for i := range initContainers {
initContainers[i].Resources.Limits = c.Resources.Limits
initContainers[i].Resources.Requests = c.Resources.Requests
}
containers, ok := cr.Spec.Sharding.Mongos.MultiAZ.WithSidecars(c)
if !ok {
log.Info(fmt.Sprintf("Sidecar container name cannot be %s. It's skipped", c.Name))
}
annotations := cr.Spec.Sharding.Mongos.MultiAZ.Annotations
if annotations == nil {
annotations = make(map[string]string)
}
if customConf.Type.IsUsable() {
annotations["percona.com/configuration-hash"] = customConf.HashHex
}
zero := intstr.FromInt(0)
return appsv1.DeploymentSpec{
Replicas: &cr.Spec.Sharding.Mongos.Size,
Selector: &metav1.LabelSelector{
MatchLabels: ls,
},
Template: corev1.PodTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Labels: ls,
Annotations: annotations,
},
Spec: corev1.PodSpec{
SecurityContext: cr.Spec.Sharding.Mongos.PodSecurityContext,
Affinity: PodAffinity(cr, cr.Spec.Sharding.Mongos.MultiAZ.Affinity, ls),
NodeSelector: cr.Spec.Sharding.Mongos.MultiAZ.NodeSelector,
Tolerations: cr.Spec.Sharding.Mongos.MultiAZ.Tolerations,
PriorityClassName: cr.Spec.Sharding.Mongos.MultiAZ.PriorityClassName,
RestartPolicy: corev1.RestartPolicyAlways,
ImagePullSecrets: cr.Spec.ImagePullSecrets,
Containers: containers,
InitContainers: initContainers,
Volumes: volumes(cr, customConf.Type),
SchedulerName: cr.Spec.SchedulerName,
RuntimeClassName: cr.Spec.Sharding.Mongos.MultiAZ.RuntimeClassName,
},
},
Strategy: appsv1.DeploymentStrategy{
Type: appsv1.RollingUpdateDeploymentStrategyType,
RollingUpdate: &appsv1.RollingUpdateDeployment{
MaxSurge: &zero,
},
},
}, nil
}
func InitContainers(cr *api.PerconaServerMongoDB, operatorPod corev1.Pod) []corev1.Container {
image := cr.Spec.InitImage
if len(image) == 0 {
if cr.CompareVersion(version.Version) != 0 {
image = strings.Split(operatorPod.Spec.Containers[0].Image, ":")[0] + ":" + cr.Spec.CRVersion
} else {
image = operatorPod.Spec.Containers[0].Image
}
}
return []corev1.Container{EntrypointInitContainer(image, cr.Spec.ImagePullPolicy)}
}
func mongosContainer(cr *api.PerconaServerMongoDB, useConfigFile bool, cfgInstances []string) (corev1.Container, error) {
fvar := false
resources, err := CreateResources(cr.Spec.Sharding.Mongos.ResourcesSpec)
if err != nil {
return corev1.Container{}, fmt.Errorf("resource creation: %v", err)
}
volumes := []corev1.VolumeMount{
{
Name: MongodDataVolClaimName,
MountPath: MongodContainerDataDir,
},
{
Name: InternalKey(cr),
MountPath: mongodSecretsDir,
ReadOnly: true,
},
{
Name: "ssl",
MountPath: sslDir,
ReadOnly: true,
},
{
Name: "ssl-internal",
MountPath: sslInternalDir,
ReadOnly: true,
},
}
if useConfigFile {
volumes = append(volumes, corev1.VolumeMount{
Name: "config",
MountPath: mongosConfigDir,
})
}
if cr.CompareVersion("1.8.0") >= 0 {
volumes = append(volumes, corev1.VolumeMount{
Name: "users-secret-file",
MountPath: "/etc/users-secret",
ReadOnly: true,
})
}
container := corev1.Container{
Name: "mongos",
Image: cr.Spec.Image,
ImagePullPolicy: cr.Spec.ImagePullPolicy,
Args: mongosContainerArgs(cr, resources, useConfigFile, cfgInstances),
Ports: []corev1.ContainerPort{
{
Name: mongosPortName,
HostPort: cr.Spec.Sharding.Mongos.HostPort,
ContainerPort: cr.Spec.Sharding.Mongos.Port,
},
},
Env: []corev1.EnvVar{
{
Name: "MONGODB_PORT",
Value: strconv.Itoa(int(cr.Spec.Sharding.Mongos.Port)),
},
},
EnvFrom: []corev1.EnvFromSource{
{
SecretRef: &corev1.SecretEnvSource{
LocalObjectReference: corev1.LocalObjectReference{
Name: cr.Spec.Secrets.Users,
},
Optional: &fvar,
},
},
{
SecretRef: &corev1.SecretEnvSource{
LocalObjectReference: corev1.LocalObjectReference{
Name: api.UserSecretName(cr),
},
Optional: &fvar,
},
},
},
WorkingDir: MongodContainerDataDir,
LivenessProbe: &cr.Spec.Sharding.Mongos.LivenessProbe.Probe,
ReadinessProbe: cr.Spec.Sharding.Mongos.ReadinessProbe,
SecurityContext: cr.Spec.Sharding.Mongos.ContainerSecurityContext,
Resources: resources,
VolumeMounts: volumes,
Command: []string{"/data/db/ps-entry.sh"},
}
return container, nil
}
func mongosContainerArgs(cr *api.PerconaServerMongoDB, resources corev1.ResourceRequirements, useConfigFile bool, cfgInstances []string) []string {
mdSpec := cr.Spec.Mongod
msSpec := cr.Spec.Sharding.Mongos
cfgRs := cr.Spec.Sharding.ConfigsvrReplSet
// sort config instances to prevent unnecessary updates
sort.Strings(cfgInstances)
configDB := fmt.Sprintf("%s/%s", cfgRs.Name, strings.Join(cfgInstances, ","))
args := []string{
"mongos",
"--bind_ip_all",
"--port=" + strconv.Itoa(int(msSpec.Port)),
"--sslAllowInvalidCertificates",
"--configdb",
configDB,
}
if cr.CompareVersion("1.7.0") >= 0 {
args = append(args,
"--relaxPermChecks",
)
}
if cr.Spec.UnsafeConf {
args = append(args,
"--clusterAuthMode=keyFile",
"--keyFile="+mongodSecretsDir+"/mongodb-key",
)
} else {
args = append(args,
"--sslMode=preferSSL",
"--clusterAuthMode=x509",
)
}
if mdSpec.Security != nil && mdSpec.Security.RedactClientLogData {
args = append(args, "--redactClientLogData")
}
if msSpec.SetParameter != nil {
if msSpec.SetParameter.CursorTimeoutMillis > 0 {
args = append(args,
"--setParameter",
"cursorTimeoutMillis="+strconv.Itoa(msSpec.SetParameter.CursorTimeoutMillis),
)
}
}
if msSpec.AuditLog != nil && msSpec.AuditLog.Destination == api.AuditLogDestinationFile {
if msSpec.AuditLog.Filter == "" {
msSpec.AuditLog.Filter = "{}"
}
args = append(args,
"--auditDestination=file",
"--auditFilter="+msSpec.AuditLog.Filter,
"--auditFormat="+string(msSpec.AuditLog.Format),
)
switch msSpec.AuditLog.Format {
case api.AuditLogFormatBSON:
args = append(args, "--auditPath="+MongodContainerDataDir+"/auditLog.bson")
default:
args = append(args, "--auditPath="+MongodContainerDataDir+"/auditLog.json")
}
}
if useConfigFile {
args = append(args, fmt.Sprintf("--config=%s/mongos.conf", mongosConfigDir))
}
return args
}
func volumes(cr *api.PerconaServerMongoDB, configSource VolumeSourceType) []corev1.Volume {
fvar, tvar := false, true
volumes := []corev1.Volume{
{
Name: InternalKey(cr),
VolumeSource: corev1.VolumeSource{
Secret: &corev1.SecretVolumeSource{
DefaultMode: &secretFileMode,
SecretName: InternalKey(cr),
Optional: &fvar,
},
},
},
{
Name: "ssl",
VolumeSource: corev1.VolumeSource{
Secret: &corev1.SecretVolumeSource{
SecretName: cr.Spec.Secrets.SSL,
Optional: &cr.Spec.UnsafeConf,
DefaultMode: &secretFileMode,
},
},
},
{
Name: "ssl-internal",
VolumeSource: corev1.VolumeSource{
Secret: &corev1.SecretVolumeSource{
SecretName: cr.Spec.Secrets.SSLInternal,
Optional: &tvar,
DefaultMode: &secretFileMode,
},
},
},
{
Name: MongodDataVolClaimName,
VolumeSource: corev1.VolumeSource{
EmptyDir: &corev1.EmptyDirVolumeSource{},
},
},
}
if cr.CompareVersion("1.8.0") >= 0 {
volumes = append(volumes, corev1.Volume{
Name: "users-secret-file",
VolumeSource: corev1.VolumeSource{
Secret: &corev1.SecretVolumeSource{
SecretName: api.InternalUserSecretName(cr),
},
},
})
}
if configSource.IsUsable() {
volumes = append(volumes, corev1.Volume{
Name: "config",
VolumeSource: configSource.VolumeSource(MongosCustomConfigName(cr.Name)),
})
}
return volumes
}
func MongosService(cr *api.PerconaServerMongoDB) corev1.Service {
svc := corev1.Service{
TypeMeta: metav1.TypeMeta{
APIVersion: "v1",
Kind: "Service",
},
ObjectMeta: metav1.ObjectMeta{
Name: cr.Name + "-" + "mongos",
Namespace: cr.Namespace,
},
}
if cr.Spec.Sharding.Mongos != nil {
svc.Annotations = cr.Spec.Sharding.Mongos.Expose.ServiceAnnotations
}
return svc
}
func MongosServiceSpec(cr *api.PerconaServerMongoDB) corev1.ServiceSpec {
ls := map[string]string{ | "app.kubernetes.io/instance": cr.Name,
"app.kubernetes.io/managed-by": "percona-server-mongodb-operator",
"app.kubernetes.io/part-of": "percona-server-mongodb",
"app.kubernetes.io/component": "mongos",
}
spec := corev1.ServiceSpec{
Ports: []corev1.ServicePort{
{
Name: mongosPortName,
Port: cr.Spec.Sharding.Mongos.Port,
TargetPort: intstr.FromInt(int(cr.Spec.Sharding.Mongos.Port)),
},
},
Selector: ls,
LoadBalancerSourceRanges: cr.Spec.Sharding.Mongos.Expose.LoadBalancerSourceRanges,
}
switch cr.Spec.Sharding.Mongos.Expose.ExposeType {
case corev1.ServiceTypeNodePort:
spec.Type = corev1.ServiceTypeNodePort
spec.ExternalTrafficPolicy = "Local"
case corev1.ServiceTypeLoadBalancer:
spec.Type = corev1.ServiceTypeLoadBalancer
spec.ExternalTrafficPolicy = "Cluster"
default:
spec.Type = corev1.ServiceTypeClusterIP
}
return spec
} | "app.kubernetes.io/name": "percona-server-mongodb", | random_line_split |
mongos.go | package psmdb
import (
"fmt"
"sort"
"strconv"
"strings"
"github.com/go-logr/logr"
api "github.com/percona/percona-server-mongodb-operator/pkg/apis/psmdb/v1"
"github.com/percona/percona-server-mongodb-operator/version"
appsv1 "k8s.io/api/apps/v1"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/util/intstr"
)
func MongosDeployment(cr *api.PerconaServerMongoDB) *appsv1.Deployment {
return &appsv1.Deployment{
TypeMeta: metav1.TypeMeta{
APIVersion: "apps/v1",
Kind: "Deployment",
},
ObjectMeta: metav1.ObjectMeta{
Name: cr.MongosNamespacedName().Name,
Namespace: cr.MongosNamespacedName().Namespace,
},
}
}
func MongosDeploymentSpec(cr *api.PerconaServerMongoDB, operatorPod corev1.Pod, log logr.Logger, customConf CustomConfig, cfgInstances []string) (appsv1.DeploymentSpec, error) {
ls := map[string]string{
"app.kubernetes.io/name": "percona-server-mongodb",
"app.kubernetes.io/instance": cr.Name,
"app.kubernetes.io/component": "mongos",
"app.kubernetes.io/managed-by": "percona-server-mongodb-operator",
"app.kubernetes.io/part-of": "percona-server-mongodb",
}
if cr.Spec.Sharding.Mongos.Labels != nil {
for k, v := range cr.Spec.Sharding.Mongos.Labels {
ls[k] = v
}
}
c, err := mongosContainer(cr, customConf.Type.IsUsable(), cfgInstances)
if err != nil {
return appsv1.DeploymentSpec{}, fmt.Errorf("failed to create container %v", err)
}
initContainers := InitContainers(cr, operatorPod)
for i := range initContainers {
initContainers[i].Resources.Limits = c.Resources.Limits
initContainers[i].Resources.Requests = c.Resources.Requests
}
containers, ok := cr.Spec.Sharding.Mongos.MultiAZ.WithSidecars(c)
if !ok {
log.Info(fmt.Sprintf("Sidecar container name cannot be %s. It's skipped", c.Name))
}
annotations := cr.Spec.Sharding.Mongos.MultiAZ.Annotations
if annotations == nil {
annotations = make(map[string]string)
}
if customConf.Type.IsUsable() {
annotations["percona.com/configuration-hash"] = customConf.HashHex
}
zero := intstr.FromInt(0)
return appsv1.DeploymentSpec{
Replicas: &cr.Spec.Sharding.Mongos.Size,
Selector: &metav1.LabelSelector{
MatchLabels: ls,
},
Template: corev1.PodTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Labels: ls,
Annotations: annotations,
},
Spec: corev1.PodSpec{
SecurityContext: cr.Spec.Sharding.Mongos.PodSecurityContext,
Affinity: PodAffinity(cr, cr.Spec.Sharding.Mongos.MultiAZ.Affinity, ls),
NodeSelector: cr.Spec.Sharding.Mongos.MultiAZ.NodeSelector,
Tolerations: cr.Spec.Sharding.Mongos.MultiAZ.Tolerations,
PriorityClassName: cr.Spec.Sharding.Mongos.MultiAZ.PriorityClassName,
RestartPolicy: corev1.RestartPolicyAlways,
ImagePullSecrets: cr.Spec.ImagePullSecrets,
Containers: containers,
InitContainers: initContainers,
Volumes: volumes(cr, customConf.Type),
SchedulerName: cr.Spec.SchedulerName,
RuntimeClassName: cr.Spec.Sharding.Mongos.MultiAZ.RuntimeClassName,
},
},
Strategy: appsv1.DeploymentStrategy{
Type: appsv1.RollingUpdateDeploymentStrategyType,
RollingUpdate: &appsv1.RollingUpdateDeployment{
MaxSurge: &zero,
},
},
}, nil
}
func InitContainers(cr *api.PerconaServerMongoDB, operatorPod corev1.Pod) []corev1.Container {
image := cr.Spec.InitImage
if len(image) == 0 {
if cr.CompareVersion(version.Version) != 0 {
image = strings.Split(operatorPod.Spec.Containers[0].Image, ":")[0] + ":" + cr.Spec.CRVersion
} else {
image = operatorPod.Spec.Containers[0].Image
}
}
return []corev1.Container{EntrypointInitContainer(image, cr.Spec.ImagePullPolicy)}
}
func mongosContainer(cr *api.PerconaServerMongoDB, useConfigFile bool, cfgInstances []string) (corev1.Container, error) {
fvar := false
resources, err := CreateResources(cr.Spec.Sharding.Mongos.ResourcesSpec)
if err != nil {
return corev1.Container{}, fmt.Errorf("resource creation: %v", err)
}
volumes := []corev1.VolumeMount{
{
Name: MongodDataVolClaimName,
MountPath: MongodContainerDataDir,
},
{
Name: InternalKey(cr),
MountPath: mongodSecretsDir,
ReadOnly: true,
},
{
Name: "ssl",
MountPath: sslDir,
ReadOnly: true,
},
{
Name: "ssl-internal",
MountPath: sslInternalDir,
ReadOnly: true,
},
}
if useConfigFile {
volumes = append(volumes, corev1.VolumeMount{
Name: "config",
MountPath: mongosConfigDir,
})
}
if cr.CompareVersion("1.8.0") >= 0 {
volumes = append(volumes, corev1.VolumeMount{
Name: "users-secret-file",
MountPath: "/etc/users-secret",
ReadOnly: true,
})
}
container := corev1.Container{
Name: "mongos",
Image: cr.Spec.Image,
ImagePullPolicy: cr.Spec.ImagePullPolicy,
Args: mongosContainerArgs(cr, resources, useConfigFile, cfgInstances),
Ports: []corev1.ContainerPort{
{
Name: mongosPortName,
HostPort: cr.Spec.Sharding.Mongos.HostPort,
ContainerPort: cr.Spec.Sharding.Mongos.Port,
},
},
Env: []corev1.EnvVar{
{
Name: "MONGODB_PORT",
Value: strconv.Itoa(int(cr.Spec.Sharding.Mongos.Port)),
},
},
EnvFrom: []corev1.EnvFromSource{
{
SecretRef: &corev1.SecretEnvSource{
LocalObjectReference: corev1.LocalObjectReference{
Name: cr.Spec.Secrets.Users,
},
Optional: &fvar,
},
},
{
SecretRef: &corev1.SecretEnvSource{
LocalObjectReference: corev1.LocalObjectReference{
Name: api.UserSecretName(cr),
},
Optional: &fvar,
},
},
},
WorkingDir: MongodContainerDataDir,
LivenessProbe: &cr.Spec.Sharding.Mongos.LivenessProbe.Probe,
ReadinessProbe: cr.Spec.Sharding.Mongos.ReadinessProbe,
SecurityContext: cr.Spec.Sharding.Mongos.ContainerSecurityContext,
Resources: resources,
VolumeMounts: volumes,
Command: []string{"/data/db/ps-entry.sh"},
}
return container, nil
}
func mongosContainerArgs(cr *api.PerconaServerMongoDB, resources corev1.ResourceRequirements, useConfigFile bool, cfgInstances []string) []string {
mdSpec := cr.Spec.Mongod
msSpec := cr.Spec.Sharding.Mongos
cfgRs := cr.Spec.Sharding.ConfigsvrReplSet
// sort config instances to prevent unnecessary updates
sort.Strings(cfgInstances)
configDB := fmt.Sprintf("%s/%s", cfgRs.Name, strings.Join(cfgInstances, ","))
args := []string{
"mongos",
"--bind_ip_all",
"--port=" + strconv.Itoa(int(msSpec.Port)),
"--sslAllowInvalidCertificates",
"--configdb",
configDB,
}
if cr.CompareVersion("1.7.0") >= 0 {
args = append(args,
"--relaxPermChecks",
)
}
if cr.Spec.UnsafeConf {
args = append(args,
"--clusterAuthMode=keyFile",
"--keyFile="+mongodSecretsDir+"/mongodb-key",
)
} else {
args = append(args,
"--sslMode=preferSSL",
"--clusterAuthMode=x509",
)
}
if mdSpec.Security != nil && mdSpec.Security.RedactClientLogData {
args = append(args, "--redactClientLogData")
}
if msSpec.SetParameter != nil {
if msSpec.SetParameter.CursorTimeoutMillis > 0 {
args = append(args,
"--setParameter",
"cursorTimeoutMillis="+strconv.Itoa(msSpec.SetParameter.CursorTimeoutMillis),
)
}
}
if msSpec.AuditLog != nil && msSpec.AuditLog.Destination == api.AuditLogDestinationFile {
if msSpec.AuditLog.Filter == "" {
msSpec.AuditLog.Filter = "{}"
}
args = append(args,
"--auditDestination=file",
"--auditFilter="+msSpec.AuditLog.Filter,
"--auditFormat="+string(msSpec.AuditLog.Format),
)
switch msSpec.AuditLog.Format {
case api.AuditLogFormatBSON:
args = append(args, "--auditPath="+MongodContainerDataDir+"/auditLog.bson")
default:
args = append(args, "--auditPath="+MongodContainerDataDir+"/auditLog.json")
}
}
if useConfigFile {
args = append(args, fmt.Sprintf("--config=%s/mongos.conf", mongosConfigDir))
}
return args
}
func volumes(cr *api.PerconaServerMongoDB, configSource VolumeSourceType) []corev1.Volume |
func MongosService(cr *api.PerconaServerMongoDB) corev1.Service {
svc := corev1.Service{
TypeMeta: metav1.TypeMeta{
APIVersion: "v1",
Kind: "Service",
},
ObjectMeta: metav1.ObjectMeta{
Name: cr.Name + "-" + "mongos",
Namespace: cr.Namespace,
},
}
if cr.Spec.Sharding.Mongos != nil {
svc.Annotations = cr.Spec.Sharding.Mongos.Expose.ServiceAnnotations
}
return svc
}
func MongosServiceSpec(cr *api.PerconaServerMongoDB) corev1.ServiceSpec {
ls := map[string]string{
"app.kubernetes.io/name": "percona-server-mongodb",
"app.kubernetes.io/instance": cr.Name,
"app.kubernetes.io/managed-by": "percona-server-mongodb-operator",
"app.kubernetes.io/part-of": "percona-server-mongodb",
"app.kubernetes.io/component": "mongos",
}
spec := corev1.ServiceSpec{
Ports: []corev1.ServicePort{
{
Name: mongosPortName,
Port: cr.Spec.Sharding.Mongos.Port,
TargetPort: intstr.FromInt(int(cr.Spec.Sharding.Mongos.Port)),
},
},
Selector: ls,
LoadBalancerSourceRanges: cr.Spec.Sharding.Mongos.Expose.LoadBalancerSourceRanges,
}
switch cr.Spec.Sharding.Mongos.Expose.ExposeType {
case corev1.ServiceTypeNodePort:
spec.Type = corev1.ServiceTypeNodePort
spec.ExternalTrafficPolicy = "Local"
case corev1.ServiceTypeLoadBalancer:
spec.Type = corev1.ServiceTypeLoadBalancer
spec.ExternalTrafficPolicy = "Cluster"
default:
spec.Type = corev1.ServiceTypeClusterIP
}
return spec
}
| {
fvar, tvar := false, true
volumes := []corev1.Volume{
{
Name: InternalKey(cr),
VolumeSource: corev1.VolumeSource{
Secret: &corev1.SecretVolumeSource{
DefaultMode: &secretFileMode,
SecretName: InternalKey(cr),
Optional: &fvar,
},
},
},
{
Name: "ssl",
VolumeSource: corev1.VolumeSource{
Secret: &corev1.SecretVolumeSource{
SecretName: cr.Spec.Secrets.SSL,
Optional: &cr.Spec.UnsafeConf,
DefaultMode: &secretFileMode,
},
},
},
{
Name: "ssl-internal",
VolumeSource: corev1.VolumeSource{
Secret: &corev1.SecretVolumeSource{
SecretName: cr.Spec.Secrets.SSLInternal,
Optional: &tvar,
DefaultMode: &secretFileMode,
},
},
},
{
Name: MongodDataVolClaimName,
VolumeSource: corev1.VolumeSource{
EmptyDir: &corev1.EmptyDirVolumeSource{},
},
},
}
if cr.CompareVersion("1.8.0") >= 0 {
volumes = append(volumes, corev1.Volume{
Name: "users-secret-file",
VolumeSource: corev1.VolumeSource{
Secret: &corev1.SecretVolumeSource{
SecretName: api.InternalUserSecretName(cr),
},
},
})
}
if configSource.IsUsable() {
volumes = append(volumes, corev1.Volume{
Name: "config",
VolumeSource: configSource.VolumeSource(MongosCustomConfigName(cr.Name)),
})
}
return volumes
} | identifier_body |
gridview.rs | use rand::Rng;
use std::collections::VecDeque;
use pathfinding::kuhn_munkres::kuhn_munkres_min;
use pathfinding::matrix::Matrix;
use command::Command;
use grid::droplet::{Blob, SimpleBlob};
use grid::Electrode;
use plan::Path;
use process::ProcessId;
use util::collections::{Map, Set};
#[cfg(feature = "pi")]
use pi::RaspberryPi;
use super::{Droplet, DropletId, DropletInfo, Grid, Location};
pub struct GridView {
pub grid: Grid,
completed: Vec<Snapshot>,
planned: VecDeque<Snapshot>,
pub done: bool,
pub bad_edges: Set<(Location, Location)>,
#[cfg(feature = "pi")]
pub pi: Option<RaspberryPi>,
}
#[must_use]
#[derive(Debug, Default)]
pub struct Snapshot {
pub droplets: Map<DropletId, Droplet>,
pub commands_to_finalize: Vec<Box<dyn Command>>,
}
#[derive(Debug, PartialEq)]
pub enum DropletDiff {
Disappeared,
DidNotMove,
Moved { from: Location, to: Location },
}
impl Snapshot {
pub fn new_with_same_droplets(&self) -> Snapshot {
let mut new_snapshot = Snapshot::default();
new_snapshot.droplets = self.droplets.clone();
// clear out the destination because we're doing to replan
for d in new_snapshot.droplets.values_mut() {
d.destination = None;
}
new_snapshot
}
#[cfg(not(feature = "pi"))]
fn finalize(&mut self) {
// we need to drain this so we can mutate the command without mutating
// self, as we need to pass self into cmd.finalize
// this feels pretty ugly....
let mut x: Vec<_> = self.commands_to_finalize.drain(..).collect();
for cmd in &mut x {
debug!("Finalizing command: {:#?}", cmd);
cmd.finalize(self)
}
self.commands_to_finalize = x;
}
#[cfg(feature = "pi")]
fn finalize(&mut self, pi: Option<&mut RaspberryPi>) {
// we need to drain this so we can mutate the command without mutating
// self, as we need to pass self into cmd.finalize
// this feels pretty ugly....
let mut x: Vec<_> = self.commands_to_finalize.drain(..).collect();
if let Some(pi) = pi {
for cmd in &mut x {
debug!("Finalizing command: {:#?}", cmd);
cmd.finalize(self, Some(pi))
}
} else {
for cmd in &mut x {
debug!("Finalizing command: {:#?}", cmd);
cmd.finalize(self, None)
}
}
self.commands_to_finalize = x;
}
pub fn abort(mut self, gridview: &mut GridView) {
for mut cmd in self.commands_to_finalize.drain(..) {
debug!("Sending command back for replanning: {:#?}", cmd);
if let Err((mut cmd, err)) = gridview.plan(cmd) {
cmd.abort(err);
}
}
}
pub fn droplet_info(&self, pid_option: Option<ProcessId>) -> Vec<DropletInfo> {
self.droplets
.values()
.filter(|&d| pid_option.map_or(true, |pid| d.id.process_id == pid))
.map(|d| d.info())
.collect()
}
/// Returns an invalid droplet, if any.
fn get_collision(&self) -> Option<(i32, Droplet, Droplet)> {
for (id1, droplet1) in &self.droplets {
for (id2, droplet2) in &self.droplets {
if id1 == id2 {
continue;
}
if droplet1.collision_group == droplet2.collision_group {
continue;
}
let distance = droplet1.collision_distance(droplet2);
if distance <= 0 {
return Some((distance, droplet1.clone(), droplet2.clone()));
}
}
}
None
}
pub fn to_blobs(&self) -> Vec<SimpleBlob> {
self.droplets.values().map(|d| d.to_blob()).collect()
}
/// Takes a map of droplet ids to droplets (as in that
/// of the planner/executor view) and a vector of blobs
/// (as in that of the chip view) and returns a matching
/// of droplet ids to closest matching blobs.
///
/// Can currently only handle where both views contain
/// the same number of 'droplets'
fn match_with_blobs<B: Blob>(&self, blobs: &[B]) -> Option<Map<DropletId, B>> {
// Ensure lengths are the same
if self.droplets.len() != blobs.len() {
error!("Expected and actual droplets are of different lengths");
return None;
}
let mut result = Map::new(); // to be returned
let mut ids = vec![]; // store corresponding ids to indeces
let mut matches = vec![]; // store similarity between blobs/droplets
let n = blobs.len();
// store the id of each droplet in its corresponding
// index in 'ids', then store the similarity of each
// droplet to each blob in 'matches'
for (&id, droplet) in &self.droplets {
ids.push(id);
for blob in blobs {
let similarity = blob.get_similarity(&droplet);
// must be non-negative for the algorithm to work
assert!(similarity >= 0);
matches.push(similarity);
}
}
// convert the matches vector to a matrix
// input should be [1,2,3,4], where the output
// matrix is [[1,2],[3,4]]
let m: Matrix<i32> = Matrix::from_vec(n, n, matches);
// km is a vector of size n where the value at each index
// corresponds to the index of a blob
let (_c, km) = kuhn_munkres_min(&m);
for i in 0..n {
result.insert(ids[i], blobs[km[i]].clone());
}
Some(result)
}
// this will take commands_to_finalize from the old snapshot into the new
// one if an error is found produced
pub fn correct(&mut self, blobs: &[impl Blob]) -> Option<Snapshot> {
let blob_matching = self.match_with_blobs(blobs)?;
let mut was_error = false;
let new_droplets: Map<_, _> = blob_matching
.iter()
.map(|(&id, blob)| {
let d = self.droplets.get_mut(&id).unwrap();
let d_new = blob.to_droplet(id);
if d.location != d_new.location || d.dimensions != d_new.dimensions {
info!("Found error in droplet {:?}", id);
debug!("Droplet error\n Expected: {:#?}\n Found: {:#?}", d, d_new);
was_error = true;
}
// HACK FIXME this mutation is not great
if (d.volume - d_new.volume).abs() > 1.0 {
info!(
"volume of {} changed: {} -> {}",
id.id, d.volume, d_new.volume
)
}
d.volume = d_new.volume;
(id, d_new)
}).collect();
if was_error {
let mut new_snapshot = Snapshot {
droplets: new_droplets,
commands_to_finalize: Vec::new(),
};
::std::mem::swap(
&mut new_snapshot.commands_to_finalize,
&mut self.commands_to_finalize,
);
Some(new_snapshot)
} else {
None
}
}
pub fn diff_droplet(&self, id: &DropletId, other: &Snapshot) -> DropletDiff {
use self::DropletDiff::*;
let droplet = self
.droplets
.get(id)
.expect("id should be in self snapshot");
if let Some(other_droplet) = other.droplets.get(id) {
// NOTE we only care about location diffs for now
let loc = droplet.location;
let other_loc = other_droplet.location;
if loc != other_loc {
// for now, just assert that we are only moving one spot at a time
// FIXME HACK
// assert_eq!((&loc - &other_loc).norm(), 1);
Moved {
from: loc,
to: other_loc,
}
} else {
DidNotMove
}
} else {
Disappeared
}
}
pub fn get_error_edges(
&self,
planned_outcome: &Snapshot,
actual_outcome: &Snapshot,
) -> Vec<(Location, Location)> {
use self::DropletDiff::*;
self.droplets
.keys()
.filter_map(|id| {
let planned_diff = self.diff_droplet(id, planned_outcome);
let actual_diff = self.diff_droplet(id, actual_outcome);
match (planned_diff, actual_diff) {
(Moved { from, to }, DidNotMove) => {
if (&from - &to).norm() == 1 {
Some((from, to))
} else {
warn!("Droplet {} jumped from {} to {}!", id.id, from, to);
None
}
}
_ => None,
}
}).collect()
}
}
#[derive(Debug)]
pub enum ExecResponse {
Step(Snapshot),
NotReady,
Done,
}
impl GridView {
pub fn new(grid: Grid) -> GridView {
let mut planned = VecDeque::new();
planned.push_back(Snapshot::default());
#[cfg(feature = "pi")]
let pi = match ::std::env::var("PUDDLE_PI") {
Ok(s) => if s == "1" {
let mut pi = RaspberryPi::new().unwrap();
info!("Initialized the pi!");
Some(pi)
} else {
warn!("Couldn't read PUDDLE_PI={}", s);
None
},
Err(_) => {
info!("Did not start the pi!");
None
}
};
GridView {
grid: grid,
planned,
completed: Vec::new(),
done: false,
bad_edges: Set::new(),
#[cfg(feature = "pi")]
pi,
}
}
pub fn close(&mut self) {
info!("Marking gridview as DONE!");
self.done = true;
}
pub fn execute(&mut self) -> ExecResponse {
use self::ExecResponse::*;
// compare with len - 1 because we wouldn't want to "write out" a state
// that hasn't been fully planned
let resp = if let Some(planned_snapshot) = self.planned.pop_front() {
Step(planned_snapshot)
} else if self.done {
Done
} else {
NotReady
};
trace!(
"execute sending {:?}. Completed: {}, planned: {}.",
resp,
self.completed.len(),
self.planned.len(),
);
resp
}
pub fn commit_pending(&mut self, mut snapshot: Snapshot) {
#[cfg(not(feature = "pi"))]
snapshot.finalize();
#[cfg(feature = "pi")]
snapshot.finalize(self.pi.as_mut());
self.completed.push(snapshot);
}
pub fn snapshot(&self) -> &Snapshot {
self.planned.back().unwrap()
}
// TODO probably shouldn't provide this
pub fn snapshot_mut(&mut self) -> &mut Snapshot {
self.planned.back_mut().unwrap()
}
pub fn snapshot_ensure(&mut self) {
if self.planned.is_empty() {
let last = self.completed.last().unwrap();
self.planned.push_back(last.new_with_same_droplets())
}
}
pub fn exec_snapshot(&self) -> &Snapshot {
self.completed.last().unwrap()
}
fn tick(&mut self) {
let new_snapshot = {
let just_planned = self.planned.back().unwrap();
if let Some(col) = just_planned.get_collision() {
panic!("collision: {:#?}", col);
};
just_planned.new_with_same_droplets()
};
self.planned.push_back(new_snapshot);
trace!("TICK! len={}", self.planned.len());
}
fn update(&mut self, id: DropletId, func: impl FnOnce(&mut Droplet)) {
let now = self.planned.back_mut().unwrap();
let droplet = now
.droplets
.get_mut(&id)
.unwrap_or_else(|| panic!("Tried to remove a non-existent droplet: {:?}", id));
func(droplet);
}
pub fn plan_droplet_info(&self, pid_option: Option<ProcessId>) -> Vec<DropletInfo> {
// gets from the planner for now
self.planned.back().unwrap().droplet_info(pid_option)
}
pub fn take_paths(&mut self, paths: &Map<DropletId, Path>, final_tick: bool) {
let max_len = paths.values().map(|path| path.len()).max().unwrap_or(0);
// make sure that all droplets start where they are at this time step
for (id, path) in paths.iter() {
let snapshot = self.planned.back().unwrap();
let droplet = &snapshot.droplets[&id];
assert_eq!(droplet.location, path[0]);
}
for i in 1..max_len {
for (&id, path) in paths.iter() {
if i < path.len() {
self.update(id, |droplet| {
assert!(droplet.location.distance_to(&path[i]) <= 1);
droplet.location = path[i];
});
}
}
if i < max_len - 1 || final_tick {
self.tick();
}
}
}
pub fn subview(
&mut self,
ids: impl IntoIterator<Item = DropletId>,
mapping: Map<Location, Location>,
) -> GridSubView {
GridSubView {
backing_gridview: self,
mapping: mapping,
ids: ids.into_iter().collect(),
}
}
pub fn register(&mut self, cmd: Box<dyn Command>) {
// this goes in the *just planned* thing, not the one currently being planned.
let just_planned = self.planned.len() - 2;
self.planned[just_planned].commands_to_finalize.push(cmd)
}
pub fn rollback(&mut self, new_snapshot: &Snapshot) {
let old_planned: Vec<_> = self.planned.drain(..).collect();
self.planned
.push_back(new_snapshot.new_with_same_droplets());
assert_eq!(self.planned.len(), 1);
for planned_snapshot in old_planned {
planned_snapshot.abort(self)
}
}
pub fn perturb(&self, rng: &mut impl Rng, snapshot: &Snapshot) -> Option<Snapshot> {
let now = snapshot;
let then = self.completed.last()?;
let id = {
let ids: Vec<_> = now.droplets.keys().collect();
match rng.choose(ids.as_slice()) {
Some(&&id) => id,
None => return None,
}
};
let mut now2 = now.new_with_same_droplets();
if let Some(old_droplet) = then.droplets.get(&id) {
let was_there = now2.droplets.insert(id, old_droplet.clone());
assert!(was_there.is_some());
}
Some(now2)
}
pub fn add_error_edges(&mut self, planned: &Snapshot, actual: &Snapshot) {
let previous = self.completed.last().unwrap();
let edges = previous.get_error_edges(planned, actual);
let n_edges = edges.len();
warn!(
"Added error {} edges, now there are {}: {:?}",
n_edges,
self.bad_edges.len() / 2,
edges,
);
for (loc1, loc2) in edges {
// for now, insert edges both ways
self.bad_edges.insert((loc1, loc2));
self.bad_edges.insert((loc2, loc1));
}
}
}
pub struct GridSubView<'a> {
backing_gridview: &'a mut GridView,
mapping: Map<Location, Location>,
ids: Set<DropletId>,
}
impl<'a> GridSubView<'a> {
pub fn tick(&mut self) {
self.backing_gridview.tick()
}
#[cfg(feature = "pi")]
pub fn with_pi<T>(&mut self, f: impl FnOnce(&mut RaspberryPi) -> T) -> Option<T> {
self.backing_gridview.pi.as_mut().map(f)
}
pub fn get_electrode(&self, loc: &Location) -> Option<&Electrode> {
let actual_loc = self.mapping.get(loc)?;
self.backing_gridview.grid.get_cell(&actual_loc)
}
// TODO: translate or somehow hide the untranslated location of this
pub fn get(&self, id: &DropletId) -> &Droplet {
assert!(self.ids.contains(&id));
&self.backing_gridview.snapshot().droplets[id]
}
fn get_mut(&mut self, id: &DropletId) -> &mut Droplet {
assert!(self.ids.contains(&id));
self.backing_gridview
.snapshot_mut()
.droplets
.get_mut(id)
.unwrap()
}
pub fn insert(&mut self, mut droplet: Droplet) {
let new_loc = self.mapping.get(&droplet.location);
trace!("Inserting {:#?} at {:?}", droplet, new_loc);
droplet.location = *new_loc.unwrap();
let was_not_there = self.ids.insert(droplet.id);
assert!(was_not_there);
let snapshot = self.backing_gridview.snapshot_mut();
let was_there = snapshot.droplets.insert(droplet.id, droplet);
assert!(was_there.is_none());
}
pub fn remove(&mut self, id: &DropletId) -> Droplet {
let was_there = self.ids.remove(id);
assert!(was_there);
let snapshot = self.backing_gridview.snapshot_mut();
let mut droplet = snapshot.droplets.remove(id).unwrap();
// FIXME this is pretty dumb
let (unmapped_loc, _) = self
.mapping
.iter()
.find(|(_, &v)| v == droplet.location)
.unwrap();
droplet.location = *unmapped_loc;
droplet
}
fn check_droplet(&self, id: &DropletId) {
// TODO will this have translated or real location??
let droplet = self.get(id);
let mapped_to: Set<_> = self.mapping.values().collect();
// TODO this is pretty slow
for i in 0..droplet.dimensions.y {
for j in 0..droplet.dimensions.x {
let loc = Location {
y: droplet.location.y + i,
x: droplet.location.x + j,
};
if !mapped_to.contains(&loc) {
panic!("{} was unmapped!, mapping: {:#?}", loc, self.mapping);
}
}
}
}
fn update(&mut self, id: &DropletId, func: impl FnOnce(&mut Droplet)) {
func(self.get_mut(id));
self.check_droplet(id);
}
pub fn move_west(&mut self, id: DropletId) {
trace!("Moving droplet {:?} west", id);
self.update(&id, |droplet| {
droplet.location = droplet.location.west();
})
}
pub fn move_east(&mut self, id: DropletId) {
trace!("Moving droplet {:?} east", id);
self.update(&id, |droplet| {
droplet.location = droplet.location.east();
})
}
pub fn move_north(&mut self, id: DropletId) {
trace!("Moving droplet {:?} north", id);
self.update(&id, |droplet| {
droplet.location = droplet.location.north();
})
}
pub fn move_south(&mut self, id: DropletId) {
trace!("Moving droplet {:?} south", id);
self.update(&id, |droplet| {
droplet.location = droplet.location.south();
})
}
}
#[cfg(test)]
pub mod tests {
use super::*;
use grid::parse::tests::parse_strings;
pub fn id2c(id: &DropletId) -> char {
assert!(id.id < 255);
(id.id as u8) as char
}
pub fn c2id(c: char) -> DropletId {
for u in 0x00u8..0xff {
let c2 = u as char;
if c == c2 {
return DropletId {
id: u as usize,
process_id: 0,
};
}
}
panic!("Can't make {} a u8", c);
}
pub fn parse_gridview(strs: &[&str]) -> GridView {
// same chars are guaranteed to have the same ids
let (grid, blobs) = parse_strings(&strs);
let mut snapshot = Snapshot::default();
for (ch, blob) in blobs.iter() {
let id = c2id(*ch);
snapshot.droplets.insert(id, blob.to_droplet(id));
}
let mut gv = GridView::new(grid);
gv.planned[0] = snapshot;
gv
}
pub fn parse_snapshot(strs: &[&str]) -> Snapshot {
let mut gv = parse_gridview(strs);
gv.planned.remove(0).unwrap()
}
fn check_all_matched(
snapshot_strs: &[&str],
blob_strs: &[&str],
) -> Option<Map<DropletId, SimpleBlob>> {
let snapshot = parse_snapshot(&snapshot_strs);
let (_, chip_blobs) = parse_strings(&blob_strs);
let blobs: Vec<SimpleBlob> = chip_blobs.values().cloned().collect();
let result: Map<DropletId, SimpleBlob> = snapshot.match_with_blobs(&blobs)?;
// create the expected map by mapping the ids in the snapshot
// to the associated blob which corresponds to the character
let mut expected: Map<DropletId, SimpleBlob> = Map::new();
for id in snapshot.droplets.keys() {
expected.insert(*id, chip_blobs[&id2c(id)].clone());
}
for id in expected.keys() {
// we can't compare blobs or droplets, so we get the droplet_info
assert_eq!(
result.get(id).map(|blob| blob.to_droplet(*id).info()),
expected.get(id).map(|blob| blob.to_droplet(*id).info())
)
}
Some(result)
}
#[test]
fn test_no_diff() {
let strs = vec![
"aa..........c",
".....bb......",
".............",
".............",
];
assert!(check_all_matched(&strs, &strs).is_some());
}
#[test]
fn test_location_diff() {
let exec_strs = vec![
"aa..........c",
".....bb......",
".............",
".............",
];
let chip_strs = vec![
"aa...........",
"............c",
".....bb......",
".............", |
assert!(check_all_matched(&exec_strs, &chip_strs).is_some());
}
#[test]
fn test_dimension_diff() {
let exec_strs = vec![
"aa..........c",
".....bb......",
".............",
".............",
];
let chip_strs = vec![
"aa.........cc",
".....b.......",
".....b.......",
".............",
];
assert!(check_all_matched(&exec_strs, &chip_strs).is_some());
}
#[test]
fn test_mix_split_diff() {
let exec_strs = vec![
"aa...........",
".....bb..c...",
".............",
".............",
];
let chip_strs = vec![
"aa...........",
".....bbb.....",
".............",
".............",
];
assert!(check_all_matched(&exec_strs, &chip_strs).is_none());
}
#[test]
fn test_droplet_diff() {
use self::DropletDiff::*;
let old = parse_snapshot(&[
".a...........",
".....bb..c...",
".............",
".............",
]);
let new = parse_snapshot(&[
".............",
".a...bb......",
".............",
".............",
]);
// locations for droplet a
let from = Location { y: 0, x: 1 };
let to = Location { y: 1, x: 1 };
assert_eq!(old.diff_droplet(&c2id('a'), &new), Moved { from, to });
assert_eq!(old.diff_droplet(&c2id('b'), &new), DidNotMove);
assert_eq!(old.diff_droplet(&c2id('c'), &new), Disappeared);
let error_edges = {
let planned = &new;
let actual = &old;
old.get_error_edges(planned, actual)
};
assert_eq!(error_edges.len(), 1);
assert_eq!(error_edges[0], (from, to));
}
} | ]; | random_line_split |
gridview.rs | use rand::Rng;
use std::collections::VecDeque;
use pathfinding::kuhn_munkres::kuhn_munkres_min;
use pathfinding::matrix::Matrix;
use command::Command;
use grid::droplet::{Blob, SimpleBlob};
use grid::Electrode;
use plan::Path;
use process::ProcessId;
use util::collections::{Map, Set};
#[cfg(feature = "pi")]
use pi::RaspberryPi;
use super::{Droplet, DropletId, DropletInfo, Grid, Location};
pub struct GridView {
pub grid: Grid,
completed: Vec<Snapshot>,
planned: VecDeque<Snapshot>,
pub done: bool,
pub bad_edges: Set<(Location, Location)>,
#[cfg(feature = "pi")]
pub pi: Option<RaspberryPi>,
}
#[must_use]
#[derive(Debug, Default)]
pub struct Snapshot {
pub droplets: Map<DropletId, Droplet>,
pub commands_to_finalize: Vec<Box<dyn Command>>,
}
#[derive(Debug, PartialEq)]
pub enum DropletDiff {
Disappeared,
DidNotMove,
Moved { from: Location, to: Location },
}
impl Snapshot {
pub fn new_with_same_droplets(&self) -> Snapshot {
let mut new_snapshot = Snapshot::default();
new_snapshot.droplets = self.droplets.clone();
// clear out the destination because we're doing to replan
for d in new_snapshot.droplets.values_mut() {
d.destination = None;
}
new_snapshot
}
#[cfg(not(feature = "pi"))]
fn finalize(&mut self) {
// we need to drain this so we can mutate the command without mutating
// self, as we need to pass self into cmd.finalize
// this feels pretty ugly....
let mut x: Vec<_> = self.commands_to_finalize.drain(..).collect();
for cmd in &mut x {
debug!("Finalizing command: {:#?}", cmd);
cmd.finalize(self)
}
self.commands_to_finalize = x;
}
#[cfg(feature = "pi")]
fn finalize(&mut self, pi: Option<&mut RaspberryPi>) {
// we need to drain this so we can mutate the command without mutating
// self, as we need to pass self into cmd.finalize
// this feels pretty ugly....
let mut x: Vec<_> = self.commands_to_finalize.drain(..).collect();
if let Some(pi) = pi {
for cmd in &mut x {
debug!("Finalizing command: {:#?}", cmd);
cmd.finalize(self, Some(pi))
}
} else {
for cmd in &mut x {
debug!("Finalizing command: {:#?}", cmd);
cmd.finalize(self, None)
}
}
self.commands_to_finalize = x;
}
pub fn abort(mut self, gridview: &mut GridView) {
for mut cmd in self.commands_to_finalize.drain(..) {
debug!("Sending command back for replanning: {:#?}", cmd);
if let Err((mut cmd, err)) = gridview.plan(cmd) {
cmd.abort(err);
}
}
}
pub fn droplet_info(&self, pid_option: Option<ProcessId>) -> Vec<DropletInfo> {
self.droplets
.values()
.filter(|&d| pid_option.map_or(true, |pid| d.id.process_id == pid))
.map(|d| d.info())
.collect()
}
/// Returns an invalid droplet, if any.
fn get_collision(&self) -> Option<(i32, Droplet, Droplet)> {
for (id1, droplet1) in &self.droplets {
for (id2, droplet2) in &self.droplets {
if id1 == id2 {
continue;
}
if droplet1.collision_group == droplet2.collision_group {
continue;
}
let distance = droplet1.collision_distance(droplet2);
if distance <= 0 {
return Some((distance, droplet1.clone(), droplet2.clone()));
}
}
}
None
}
pub fn to_blobs(&self) -> Vec<SimpleBlob> {
self.droplets.values().map(|d| d.to_blob()).collect()
}
/// Takes a map of droplet ids to droplets (as in that
/// of the planner/executor view) and a vector of blobs
/// (as in that of the chip view) and returns a matching
/// of droplet ids to closest matching blobs.
///
/// Can currently only handle where both views contain
/// the same number of 'droplets'
fn match_with_blobs<B: Blob>(&self, blobs: &[B]) -> Option<Map<DropletId, B>> {
// Ensure lengths are the same
if self.droplets.len() != blobs.len() {
error!("Expected and actual droplets are of different lengths");
return None;
}
let mut result = Map::new(); // to be returned
let mut ids = vec![]; // store corresponding ids to indeces
let mut matches = vec![]; // store similarity between blobs/droplets
let n = blobs.len();
// store the id of each droplet in its corresponding
// index in 'ids', then store the similarity of each
// droplet to each blob in 'matches'
for (&id, droplet) in &self.droplets {
ids.push(id);
for blob in blobs {
let similarity = blob.get_similarity(&droplet);
// must be non-negative for the algorithm to work
assert!(similarity >= 0);
matches.push(similarity);
}
}
// convert the matches vector to a matrix
// input should be [1,2,3,4], where the output
// matrix is [[1,2],[3,4]]
let m: Matrix<i32> = Matrix::from_vec(n, n, matches);
// km is a vector of size n where the value at each index
// corresponds to the index of a blob
let (_c, km) = kuhn_munkres_min(&m);
for i in 0..n {
result.insert(ids[i], blobs[km[i]].clone());
}
Some(result)
}
// this will take commands_to_finalize from the old snapshot into the new
// one if an error is found produced
pub fn correct(&mut self, blobs: &[impl Blob]) -> Option<Snapshot> {
let blob_matching = self.match_with_blobs(blobs)?;
let mut was_error = false;
let new_droplets: Map<_, _> = blob_matching
.iter()
.map(|(&id, blob)| {
let d = self.droplets.get_mut(&id).unwrap();
let d_new = blob.to_droplet(id);
if d.location != d_new.location || d.dimensions != d_new.dimensions {
info!("Found error in droplet {:?}", id);
debug!("Droplet error\n Expected: {:#?}\n Found: {:#?}", d, d_new);
was_error = true;
}
// HACK FIXME this mutation is not great
if (d.volume - d_new.volume).abs() > 1.0 {
info!(
"volume of {} changed: {} -> {}",
id.id, d.volume, d_new.volume
)
}
d.volume = d_new.volume;
(id, d_new)
}).collect();
if was_error {
let mut new_snapshot = Snapshot {
droplets: new_droplets,
commands_to_finalize: Vec::new(),
};
::std::mem::swap(
&mut new_snapshot.commands_to_finalize,
&mut self.commands_to_finalize,
);
Some(new_snapshot)
} else {
None
}
}
pub fn diff_droplet(&self, id: &DropletId, other: &Snapshot) -> DropletDiff {
use self::DropletDiff::*;
let droplet = self
.droplets
.get(id)
.expect("id should be in self snapshot");
if let Some(other_droplet) = other.droplets.get(id) {
// NOTE we only care about location diffs for now
let loc = droplet.location;
let other_loc = other_droplet.location;
if loc != other_loc {
// for now, just assert that we are only moving one spot at a time
// FIXME HACK
// assert_eq!((&loc - &other_loc).norm(), 1);
Moved {
from: loc,
to: other_loc,
}
} else {
DidNotMove
}
} else {
Disappeared
}
}
pub fn get_error_edges(
&self,
planned_outcome: &Snapshot,
actual_outcome: &Snapshot,
) -> Vec<(Location, Location)> {
use self::DropletDiff::*;
self.droplets
.keys()
.filter_map(|id| {
let planned_diff = self.diff_droplet(id, planned_outcome);
let actual_diff = self.diff_droplet(id, actual_outcome);
match (planned_diff, actual_diff) {
(Moved { from, to }, DidNotMove) => {
if (&from - &to).norm() == 1 {
Some((from, to))
} else {
warn!("Droplet {} jumped from {} to {}!", id.id, from, to);
None
}
}
_ => None,
}
}).collect()
}
}
#[derive(Debug)]
pub enum ExecResponse {
Step(Snapshot),
NotReady,
Done,
}
impl GridView {
pub fn new(grid: Grid) -> GridView {
let mut planned = VecDeque::new();
planned.push_back(Snapshot::default());
#[cfg(feature = "pi")]
let pi = match ::std::env::var("PUDDLE_PI") {
Ok(s) => if s == "1" {
let mut pi = RaspberryPi::new().unwrap();
info!("Initialized the pi!");
Some(pi)
} else {
warn!("Couldn't read PUDDLE_PI={}", s);
None
},
Err(_) => {
info!("Did not start the pi!");
None
}
};
GridView {
grid: grid,
planned,
completed: Vec::new(),
done: false,
bad_edges: Set::new(),
#[cfg(feature = "pi")]
pi,
}
}
pub fn close(&mut self) {
info!("Marking gridview as DONE!");
self.done = true;
}
pub fn execute(&mut self) -> ExecResponse {
use self::ExecResponse::*;
// compare with len - 1 because we wouldn't want to "write out" a state
// that hasn't been fully planned
let resp = if let Some(planned_snapshot) = self.planned.pop_front() {
Step(planned_snapshot)
} else if self.done {
Done
} else {
NotReady
};
trace!(
"execute sending {:?}. Completed: {}, planned: {}.",
resp,
self.completed.len(),
self.planned.len(),
);
resp
}
pub fn commit_pending(&mut self, mut snapshot: Snapshot) {
#[cfg(not(feature = "pi"))]
snapshot.finalize();
#[cfg(feature = "pi")]
snapshot.finalize(self.pi.as_mut());
self.completed.push(snapshot);
}
pub fn snapshot(&self) -> &Snapshot {
self.planned.back().unwrap()
}
// TODO probably shouldn't provide this
pub fn snapshot_mut(&mut self) -> &mut Snapshot |
pub fn snapshot_ensure(&mut self) {
if self.planned.is_empty() {
let last = self.completed.last().unwrap();
self.planned.push_back(last.new_with_same_droplets())
}
}
pub fn exec_snapshot(&self) -> &Snapshot {
self.completed.last().unwrap()
}
fn tick(&mut self) {
let new_snapshot = {
let just_planned = self.planned.back().unwrap();
if let Some(col) = just_planned.get_collision() {
panic!("collision: {:#?}", col);
};
just_planned.new_with_same_droplets()
};
self.planned.push_back(new_snapshot);
trace!("TICK! len={}", self.planned.len());
}
fn update(&mut self, id: DropletId, func: impl FnOnce(&mut Droplet)) {
let now = self.planned.back_mut().unwrap();
let droplet = now
.droplets
.get_mut(&id)
.unwrap_or_else(|| panic!("Tried to remove a non-existent droplet: {:?}", id));
func(droplet);
}
pub fn plan_droplet_info(&self, pid_option: Option<ProcessId>) -> Vec<DropletInfo> {
// gets from the planner for now
self.planned.back().unwrap().droplet_info(pid_option)
}
pub fn take_paths(&mut self, paths: &Map<DropletId, Path>, final_tick: bool) {
let max_len = paths.values().map(|path| path.len()).max().unwrap_or(0);
// make sure that all droplets start where they are at this time step
for (id, path) in paths.iter() {
let snapshot = self.planned.back().unwrap();
let droplet = &snapshot.droplets[&id];
assert_eq!(droplet.location, path[0]);
}
for i in 1..max_len {
for (&id, path) in paths.iter() {
if i < path.len() {
self.update(id, |droplet| {
assert!(droplet.location.distance_to(&path[i]) <= 1);
droplet.location = path[i];
});
}
}
if i < max_len - 1 || final_tick {
self.tick();
}
}
}
pub fn subview(
&mut self,
ids: impl IntoIterator<Item = DropletId>,
mapping: Map<Location, Location>,
) -> GridSubView {
GridSubView {
backing_gridview: self,
mapping: mapping,
ids: ids.into_iter().collect(),
}
}
pub fn register(&mut self, cmd: Box<dyn Command>) {
// this goes in the *just planned* thing, not the one currently being planned.
let just_planned = self.planned.len() - 2;
self.planned[just_planned].commands_to_finalize.push(cmd)
}
pub fn rollback(&mut self, new_snapshot: &Snapshot) {
let old_planned: Vec<_> = self.planned.drain(..).collect();
self.planned
.push_back(new_snapshot.new_with_same_droplets());
assert_eq!(self.planned.len(), 1);
for planned_snapshot in old_planned {
planned_snapshot.abort(self)
}
}
pub fn perturb(&self, rng: &mut impl Rng, snapshot: &Snapshot) -> Option<Snapshot> {
let now = snapshot;
let then = self.completed.last()?;
let id = {
let ids: Vec<_> = now.droplets.keys().collect();
match rng.choose(ids.as_slice()) {
Some(&&id) => id,
None => return None,
}
};
let mut now2 = now.new_with_same_droplets();
if let Some(old_droplet) = then.droplets.get(&id) {
let was_there = now2.droplets.insert(id, old_droplet.clone());
assert!(was_there.is_some());
}
Some(now2)
}
pub fn add_error_edges(&mut self, planned: &Snapshot, actual: &Snapshot) {
let previous = self.completed.last().unwrap();
let edges = previous.get_error_edges(planned, actual);
let n_edges = edges.len();
warn!(
"Added error {} edges, now there are {}: {:?}",
n_edges,
self.bad_edges.len() / 2,
edges,
);
for (loc1, loc2) in edges {
// for now, insert edges both ways
self.bad_edges.insert((loc1, loc2));
self.bad_edges.insert((loc2, loc1));
}
}
}
pub struct GridSubView<'a> {
backing_gridview: &'a mut GridView,
mapping: Map<Location, Location>,
ids: Set<DropletId>,
}
impl<'a> GridSubView<'a> {
pub fn tick(&mut self) {
self.backing_gridview.tick()
}
#[cfg(feature = "pi")]
pub fn with_pi<T>(&mut self, f: impl FnOnce(&mut RaspberryPi) -> T) -> Option<T> {
self.backing_gridview.pi.as_mut().map(f)
}
pub fn get_electrode(&self, loc: &Location) -> Option<&Electrode> {
let actual_loc = self.mapping.get(loc)?;
self.backing_gridview.grid.get_cell(&actual_loc)
}
// TODO: translate or somehow hide the untranslated location of this
pub fn get(&self, id: &DropletId) -> &Droplet {
assert!(self.ids.contains(&id));
&self.backing_gridview.snapshot().droplets[id]
}
fn get_mut(&mut self, id: &DropletId) -> &mut Droplet {
assert!(self.ids.contains(&id));
self.backing_gridview
.snapshot_mut()
.droplets
.get_mut(id)
.unwrap()
}
pub fn insert(&mut self, mut droplet: Droplet) {
let new_loc = self.mapping.get(&droplet.location);
trace!("Inserting {:#?} at {:?}", droplet, new_loc);
droplet.location = *new_loc.unwrap();
let was_not_there = self.ids.insert(droplet.id);
assert!(was_not_there);
let snapshot = self.backing_gridview.snapshot_mut();
let was_there = snapshot.droplets.insert(droplet.id, droplet);
assert!(was_there.is_none());
}
pub fn remove(&mut self, id: &DropletId) -> Droplet {
let was_there = self.ids.remove(id);
assert!(was_there);
let snapshot = self.backing_gridview.snapshot_mut();
let mut droplet = snapshot.droplets.remove(id).unwrap();
// FIXME this is pretty dumb
let (unmapped_loc, _) = self
.mapping
.iter()
.find(|(_, &v)| v == droplet.location)
.unwrap();
droplet.location = *unmapped_loc;
droplet
}
fn check_droplet(&self, id: &DropletId) {
// TODO will this have translated or real location??
let droplet = self.get(id);
let mapped_to: Set<_> = self.mapping.values().collect();
// TODO this is pretty slow
for i in 0..droplet.dimensions.y {
for j in 0..droplet.dimensions.x {
let loc = Location {
y: droplet.location.y + i,
x: droplet.location.x + j,
};
if !mapped_to.contains(&loc) {
panic!("{} was unmapped!, mapping: {:#?}", loc, self.mapping);
}
}
}
}
fn update(&mut self, id: &DropletId, func: impl FnOnce(&mut Droplet)) {
func(self.get_mut(id));
self.check_droplet(id);
}
pub fn move_west(&mut self, id: DropletId) {
trace!("Moving droplet {:?} west", id);
self.update(&id, |droplet| {
droplet.location = droplet.location.west();
})
}
pub fn move_east(&mut self, id: DropletId) {
trace!("Moving droplet {:?} east", id);
self.update(&id, |droplet| {
droplet.location = droplet.location.east();
})
}
pub fn move_north(&mut self, id: DropletId) {
trace!("Moving droplet {:?} north", id);
self.update(&id, |droplet| {
droplet.location = droplet.location.north();
})
}
pub fn move_south(&mut self, id: DropletId) {
trace!("Moving droplet {:?} south", id);
self.update(&id, |droplet| {
droplet.location = droplet.location.south();
})
}
}
#[cfg(test)]
pub mod tests {
use super::*;
use grid::parse::tests::parse_strings;
pub fn id2c(id: &DropletId) -> char {
assert!(id.id < 255);
(id.id as u8) as char
}
pub fn c2id(c: char) -> DropletId {
for u in 0x00u8..0xff {
let c2 = u as char;
if c == c2 {
return DropletId {
id: u as usize,
process_id: 0,
};
}
}
panic!("Can't make {} a u8", c);
}
pub fn parse_gridview(strs: &[&str]) -> GridView {
// same chars are guaranteed to have the same ids
let (grid, blobs) = parse_strings(&strs);
let mut snapshot = Snapshot::default();
for (ch, blob) in blobs.iter() {
let id = c2id(*ch);
snapshot.droplets.insert(id, blob.to_droplet(id));
}
let mut gv = GridView::new(grid);
gv.planned[0] = snapshot;
gv
}
pub fn parse_snapshot(strs: &[&str]) -> Snapshot {
let mut gv = parse_gridview(strs);
gv.planned.remove(0).unwrap()
}
fn check_all_matched(
snapshot_strs: &[&str],
blob_strs: &[&str],
) -> Option<Map<DropletId, SimpleBlob>> {
let snapshot = parse_snapshot(&snapshot_strs);
let (_, chip_blobs) = parse_strings(&blob_strs);
let blobs: Vec<SimpleBlob> = chip_blobs.values().cloned().collect();
let result: Map<DropletId, SimpleBlob> = snapshot.match_with_blobs(&blobs)?;
// create the expected map by mapping the ids in the snapshot
// to the associated blob which corresponds to the character
let mut expected: Map<DropletId, SimpleBlob> = Map::new();
for id in snapshot.droplets.keys() {
expected.insert(*id, chip_blobs[&id2c(id)].clone());
}
for id in expected.keys() {
// we can't compare blobs or droplets, so we get the droplet_info
assert_eq!(
result.get(id).map(|blob| blob.to_droplet(*id).info()),
expected.get(id).map(|blob| blob.to_droplet(*id).info())
)
}
Some(result)
}
#[test]
fn test_no_diff() {
let strs = vec![
"aa..........c",
".....bb......",
".............",
".............",
];
assert!(check_all_matched(&strs, &strs).is_some());
}
#[test]
fn test_location_diff() {
let exec_strs = vec![
"aa..........c",
".....bb......",
".............",
".............",
];
let chip_strs = vec![
"aa...........",
"............c",
".....bb......",
".............",
];
assert!(check_all_matched(&exec_strs, &chip_strs).is_some());
}
#[test]
fn test_dimension_diff() {
let exec_strs = vec![
"aa..........c",
".....bb......",
".............",
".............",
];
let chip_strs = vec![
"aa.........cc",
".....b.......",
".....b.......",
".............",
];
assert!(check_all_matched(&exec_strs, &chip_strs).is_some());
}
#[test]
fn test_mix_split_diff() {
let exec_strs = vec![
"aa...........",
".....bb..c...",
".............",
".............",
];
let chip_strs = vec![
"aa...........",
".....bbb.....",
".............",
".............",
];
assert!(check_all_matched(&exec_strs, &chip_strs).is_none());
}
#[test]
fn test_droplet_diff() {
use self::DropletDiff::*;
let old = parse_snapshot(&[
".a...........",
".....bb..c...",
".............",
".............",
]);
let new = parse_snapshot(&[
".............",
".a...bb......",
".............",
".............",
]);
// locations for droplet a
let from = Location { y: 0, x: 1 };
let to = Location { y: 1, x: 1 };
assert_eq!(old.diff_droplet(&c2id('a'), &new), Moved { from, to });
assert_eq!(old.diff_droplet(&c2id('b'), &new), DidNotMove);
assert_eq!(old.diff_droplet(&c2id('c'), &new), Disappeared);
let error_edges = {
let planned = &new;
let actual = &old;
old.get_error_edges(planned, actual)
};
assert_eq!(error_edges.len(), 1);
assert_eq!(error_edges[0], (from, to));
}
}
| {
self.planned.back_mut().unwrap()
} | identifier_body |
gridview.rs | use rand::Rng;
use std::collections::VecDeque;
use pathfinding::kuhn_munkres::kuhn_munkres_min;
use pathfinding::matrix::Matrix;
use command::Command;
use grid::droplet::{Blob, SimpleBlob};
use grid::Electrode;
use plan::Path;
use process::ProcessId;
use util::collections::{Map, Set};
#[cfg(feature = "pi")]
use pi::RaspberryPi;
use super::{Droplet, DropletId, DropletInfo, Grid, Location};
pub struct GridView {
pub grid: Grid,
completed: Vec<Snapshot>,
planned: VecDeque<Snapshot>,
pub done: bool,
pub bad_edges: Set<(Location, Location)>,
#[cfg(feature = "pi")]
pub pi: Option<RaspberryPi>,
}
#[must_use]
#[derive(Debug, Default)]
pub struct Snapshot {
pub droplets: Map<DropletId, Droplet>,
pub commands_to_finalize: Vec<Box<dyn Command>>,
}
#[derive(Debug, PartialEq)]
pub enum | {
Disappeared,
DidNotMove,
Moved { from: Location, to: Location },
}
impl Snapshot {
pub fn new_with_same_droplets(&self) -> Snapshot {
let mut new_snapshot = Snapshot::default();
new_snapshot.droplets = self.droplets.clone();
// clear out the destination because we're doing to replan
for d in new_snapshot.droplets.values_mut() {
d.destination = None;
}
new_snapshot
}
#[cfg(not(feature = "pi"))]
fn finalize(&mut self) {
// we need to drain this so we can mutate the command without mutating
// self, as we need to pass self into cmd.finalize
// this feels pretty ugly....
let mut x: Vec<_> = self.commands_to_finalize.drain(..).collect();
for cmd in &mut x {
debug!("Finalizing command: {:#?}", cmd);
cmd.finalize(self)
}
self.commands_to_finalize = x;
}
#[cfg(feature = "pi")]
fn finalize(&mut self, pi: Option<&mut RaspberryPi>) {
// we need to drain this so we can mutate the command without mutating
// self, as we need to pass self into cmd.finalize
// this feels pretty ugly....
let mut x: Vec<_> = self.commands_to_finalize.drain(..).collect();
if let Some(pi) = pi {
for cmd in &mut x {
debug!("Finalizing command: {:#?}", cmd);
cmd.finalize(self, Some(pi))
}
} else {
for cmd in &mut x {
debug!("Finalizing command: {:#?}", cmd);
cmd.finalize(self, None)
}
}
self.commands_to_finalize = x;
}
pub fn abort(mut self, gridview: &mut GridView) {
for mut cmd in self.commands_to_finalize.drain(..) {
debug!("Sending command back for replanning: {:#?}", cmd);
if let Err((mut cmd, err)) = gridview.plan(cmd) {
cmd.abort(err);
}
}
}
pub fn droplet_info(&self, pid_option: Option<ProcessId>) -> Vec<DropletInfo> {
self.droplets
.values()
.filter(|&d| pid_option.map_or(true, |pid| d.id.process_id == pid))
.map(|d| d.info())
.collect()
}
/// Returns an invalid droplet, if any.
fn get_collision(&self) -> Option<(i32, Droplet, Droplet)> {
for (id1, droplet1) in &self.droplets {
for (id2, droplet2) in &self.droplets {
if id1 == id2 {
continue;
}
if droplet1.collision_group == droplet2.collision_group {
continue;
}
let distance = droplet1.collision_distance(droplet2);
if distance <= 0 {
return Some((distance, droplet1.clone(), droplet2.clone()));
}
}
}
None
}
pub fn to_blobs(&self) -> Vec<SimpleBlob> {
self.droplets.values().map(|d| d.to_blob()).collect()
}
/// Takes a map of droplet ids to droplets (as in that
/// of the planner/executor view) and a vector of blobs
/// (as in that of the chip view) and returns a matching
/// of droplet ids to closest matching blobs.
///
/// Can currently only handle where both views contain
/// the same number of 'droplets'
fn match_with_blobs<B: Blob>(&self, blobs: &[B]) -> Option<Map<DropletId, B>> {
// Ensure lengths are the same
if self.droplets.len() != blobs.len() {
error!("Expected and actual droplets are of different lengths");
return None;
}
let mut result = Map::new(); // to be returned
let mut ids = vec![]; // store corresponding ids to indeces
let mut matches = vec![]; // store similarity between blobs/droplets
let n = blobs.len();
// store the id of each droplet in its corresponding
// index in 'ids', then store the similarity of each
// droplet to each blob in 'matches'
for (&id, droplet) in &self.droplets {
ids.push(id);
for blob in blobs {
let similarity = blob.get_similarity(&droplet);
// must be non-negative for the algorithm to work
assert!(similarity >= 0);
matches.push(similarity);
}
}
// convert the matches vector to a matrix
// input should be [1,2,3,4], where the output
// matrix is [[1,2],[3,4]]
let m: Matrix<i32> = Matrix::from_vec(n, n, matches);
// km is a vector of size n where the value at each index
// corresponds to the index of a blob
let (_c, km) = kuhn_munkres_min(&m);
for i in 0..n {
result.insert(ids[i], blobs[km[i]].clone());
}
Some(result)
}
// this will take commands_to_finalize from the old snapshot into the new
// one if an error is found produced
pub fn correct(&mut self, blobs: &[impl Blob]) -> Option<Snapshot> {
let blob_matching = self.match_with_blobs(blobs)?;
let mut was_error = false;
let new_droplets: Map<_, _> = blob_matching
.iter()
.map(|(&id, blob)| {
let d = self.droplets.get_mut(&id).unwrap();
let d_new = blob.to_droplet(id);
if d.location != d_new.location || d.dimensions != d_new.dimensions {
info!("Found error in droplet {:?}", id);
debug!("Droplet error\n Expected: {:#?}\n Found: {:#?}", d, d_new);
was_error = true;
}
// HACK FIXME this mutation is not great
if (d.volume - d_new.volume).abs() > 1.0 {
info!(
"volume of {} changed: {} -> {}",
id.id, d.volume, d_new.volume
)
}
d.volume = d_new.volume;
(id, d_new)
}).collect();
if was_error {
let mut new_snapshot = Snapshot {
droplets: new_droplets,
commands_to_finalize: Vec::new(),
};
::std::mem::swap(
&mut new_snapshot.commands_to_finalize,
&mut self.commands_to_finalize,
);
Some(new_snapshot)
} else {
None
}
}
pub fn diff_droplet(&self, id: &DropletId, other: &Snapshot) -> DropletDiff {
use self::DropletDiff::*;
let droplet = self
.droplets
.get(id)
.expect("id should be in self snapshot");
if let Some(other_droplet) = other.droplets.get(id) {
// NOTE we only care about location diffs for now
let loc = droplet.location;
let other_loc = other_droplet.location;
if loc != other_loc {
// for now, just assert that we are only moving one spot at a time
// FIXME HACK
// assert_eq!((&loc - &other_loc).norm(), 1);
Moved {
from: loc,
to: other_loc,
}
} else {
DidNotMove
}
} else {
Disappeared
}
}
pub fn get_error_edges(
&self,
planned_outcome: &Snapshot,
actual_outcome: &Snapshot,
) -> Vec<(Location, Location)> {
use self::DropletDiff::*;
self.droplets
.keys()
.filter_map(|id| {
let planned_diff = self.diff_droplet(id, planned_outcome);
let actual_diff = self.diff_droplet(id, actual_outcome);
match (planned_diff, actual_diff) {
(Moved { from, to }, DidNotMove) => {
if (&from - &to).norm() == 1 {
Some((from, to))
} else {
warn!("Droplet {} jumped from {} to {}!", id.id, from, to);
None
}
}
_ => None,
}
}).collect()
}
}
#[derive(Debug)]
pub enum ExecResponse {
Step(Snapshot),
NotReady,
Done,
}
impl GridView {
pub fn new(grid: Grid) -> GridView {
let mut planned = VecDeque::new();
planned.push_back(Snapshot::default());
#[cfg(feature = "pi")]
let pi = match ::std::env::var("PUDDLE_PI") {
Ok(s) => if s == "1" {
let mut pi = RaspberryPi::new().unwrap();
info!("Initialized the pi!");
Some(pi)
} else {
warn!("Couldn't read PUDDLE_PI={}", s);
None
},
Err(_) => {
info!("Did not start the pi!");
None
}
};
GridView {
grid: grid,
planned,
completed: Vec::new(),
done: false,
bad_edges: Set::new(),
#[cfg(feature = "pi")]
pi,
}
}
pub fn close(&mut self) {
info!("Marking gridview as DONE!");
self.done = true;
}
pub fn execute(&mut self) -> ExecResponse {
use self::ExecResponse::*;
// compare with len - 1 because we wouldn't want to "write out" a state
// that hasn't been fully planned
let resp = if let Some(planned_snapshot) = self.planned.pop_front() {
Step(planned_snapshot)
} else if self.done {
Done
} else {
NotReady
};
trace!(
"execute sending {:?}. Completed: {}, planned: {}.",
resp,
self.completed.len(),
self.planned.len(),
);
resp
}
pub fn commit_pending(&mut self, mut snapshot: Snapshot) {
#[cfg(not(feature = "pi"))]
snapshot.finalize();
#[cfg(feature = "pi")]
snapshot.finalize(self.pi.as_mut());
self.completed.push(snapshot);
}
pub fn snapshot(&self) -> &Snapshot {
self.planned.back().unwrap()
}
// TODO probably shouldn't provide this
pub fn snapshot_mut(&mut self) -> &mut Snapshot {
self.planned.back_mut().unwrap()
}
pub fn snapshot_ensure(&mut self) {
if self.planned.is_empty() {
let last = self.completed.last().unwrap();
self.planned.push_back(last.new_with_same_droplets())
}
}
pub fn exec_snapshot(&self) -> &Snapshot {
self.completed.last().unwrap()
}
fn tick(&mut self) {
let new_snapshot = {
let just_planned = self.planned.back().unwrap();
if let Some(col) = just_planned.get_collision() {
panic!("collision: {:#?}", col);
};
just_planned.new_with_same_droplets()
};
self.planned.push_back(new_snapshot);
trace!("TICK! len={}", self.planned.len());
}
fn update(&mut self, id: DropletId, func: impl FnOnce(&mut Droplet)) {
let now = self.planned.back_mut().unwrap();
let droplet = now
.droplets
.get_mut(&id)
.unwrap_or_else(|| panic!("Tried to remove a non-existent droplet: {:?}", id));
func(droplet);
}
pub fn plan_droplet_info(&self, pid_option: Option<ProcessId>) -> Vec<DropletInfo> {
// gets from the planner for now
self.planned.back().unwrap().droplet_info(pid_option)
}
pub fn take_paths(&mut self, paths: &Map<DropletId, Path>, final_tick: bool) {
let max_len = paths.values().map(|path| path.len()).max().unwrap_or(0);
// make sure that all droplets start where they are at this time step
for (id, path) in paths.iter() {
let snapshot = self.planned.back().unwrap();
let droplet = &snapshot.droplets[&id];
assert_eq!(droplet.location, path[0]);
}
for i in 1..max_len {
for (&id, path) in paths.iter() {
if i < path.len() {
self.update(id, |droplet| {
assert!(droplet.location.distance_to(&path[i]) <= 1);
droplet.location = path[i];
});
}
}
if i < max_len - 1 || final_tick {
self.tick();
}
}
}
pub fn subview(
&mut self,
ids: impl IntoIterator<Item = DropletId>,
mapping: Map<Location, Location>,
) -> GridSubView {
GridSubView {
backing_gridview: self,
mapping: mapping,
ids: ids.into_iter().collect(),
}
}
pub fn register(&mut self, cmd: Box<dyn Command>) {
// this goes in the *just planned* thing, not the one currently being planned.
let just_planned = self.planned.len() - 2;
self.planned[just_planned].commands_to_finalize.push(cmd)
}
pub fn rollback(&mut self, new_snapshot: &Snapshot) {
let old_planned: Vec<_> = self.planned.drain(..).collect();
self.planned
.push_back(new_snapshot.new_with_same_droplets());
assert_eq!(self.planned.len(), 1);
for planned_snapshot in old_planned {
planned_snapshot.abort(self)
}
}
pub fn perturb(&self, rng: &mut impl Rng, snapshot: &Snapshot) -> Option<Snapshot> {
let now = snapshot;
let then = self.completed.last()?;
let id = {
let ids: Vec<_> = now.droplets.keys().collect();
match rng.choose(ids.as_slice()) {
Some(&&id) => id,
None => return None,
}
};
let mut now2 = now.new_with_same_droplets();
if let Some(old_droplet) = then.droplets.get(&id) {
let was_there = now2.droplets.insert(id, old_droplet.clone());
assert!(was_there.is_some());
}
Some(now2)
}
pub fn add_error_edges(&mut self, planned: &Snapshot, actual: &Snapshot) {
let previous = self.completed.last().unwrap();
let edges = previous.get_error_edges(planned, actual);
let n_edges = edges.len();
warn!(
"Added error {} edges, now there are {}: {:?}",
n_edges,
self.bad_edges.len() / 2,
edges,
);
for (loc1, loc2) in edges {
// for now, insert edges both ways
self.bad_edges.insert((loc1, loc2));
self.bad_edges.insert((loc2, loc1));
}
}
}
pub struct GridSubView<'a> {
backing_gridview: &'a mut GridView,
mapping: Map<Location, Location>,
ids: Set<DropletId>,
}
impl<'a> GridSubView<'a> {
pub fn tick(&mut self) {
self.backing_gridview.tick()
}
#[cfg(feature = "pi")]
pub fn with_pi<T>(&mut self, f: impl FnOnce(&mut RaspberryPi) -> T) -> Option<T> {
self.backing_gridview.pi.as_mut().map(f)
}
pub fn get_electrode(&self, loc: &Location) -> Option<&Electrode> {
let actual_loc = self.mapping.get(loc)?;
self.backing_gridview.grid.get_cell(&actual_loc)
}
// TODO: translate or somehow hide the untranslated location of this
pub fn get(&self, id: &DropletId) -> &Droplet {
assert!(self.ids.contains(&id));
&self.backing_gridview.snapshot().droplets[id]
}
fn get_mut(&mut self, id: &DropletId) -> &mut Droplet {
assert!(self.ids.contains(&id));
self.backing_gridview
.snapshot_mut()
.droplets
.get_mut(id)
.unwrap()
}
pub fn insert(&mut self, mut droplet: Droplet) {
let new_loc = self.mapping.get(&droplet.location);
trace!("Inserting {:#?} at {:?}", droplet, new_loc);
droplet.location = *new_loc.unwrap();
let was_not_there = self.ids.insert(droplet.id);
assert!(was_not_there);
let snapshot = self.backing_gridview.snapshot_mut();
let was_there = snapshot.droplets.insert(droplet.id, droplet);
assert!(was_there.is_none());
}
pub fn remove(&mut self, id: &DropletId) -> Droplet {
let was_there = self.ids.remove(id);
assert!(was_there);
let snapshot = self.backing_gridview.snapshot_mut();
let mut droplet = snapshot.droplets.remove(id).unwrap();
// FIXME this is pretty dumb
let (unmapped_loc, _) = self
.mapping
.iter()
.find(|(_, &v)| v == droplet.location)
.unwrap();
droplet.location = *unmapped_loc;
droplet
}
fn check_droplet(&self, id: &DropletId) {
// TODO will this have translated or real location??
let droplet = self.get(id);
let mapped_to: Set<_> = self.mapping.values().collect();
// TODO this is pretty slow
for i in 0..droplet.dimensions.y {
for j in 0..droplet.dimensions.x {
let loc = Location {
y: droplet.location.y + i,
x: droplet.location.x + j,
};
if !mapped_to.contains(&loc) {
panic!("{} was unmapped!, mapping: {:#?}", loc, self.mapping);
}
}
}
}
fn update(&mut self, id: &DropletId, func: impl FnOnce(&mut Droplet)) {
func(self.get_mut(id));
self.check_droplet(id);
}
pub fn move_west(&mut self, id: DropletId) {
trace!("Moving droplet {:?} west", id);
self.update(&id, |droplet| {
droplet.location = droplet.location.west();
})
}
pub fn move_east(&mut self, id: DropletId) {
trace!("Moving droplet {:?} east", id);
self.update(&id, |droplet| {
droplet.location = droplet.location.east();
})
}
pub fn move_north(&mut self, id: DropletId) {
trace!("Moving droplet {:?} north", id);
self.update(&id, |droplet| {
droplet.location = droplet.location.north();
})
}
pub fn move_south(&mut self, id: DropletId) {
trace!("Moving droplet {:?} south", id);
self.update(&id, |droplet| {
droplet.location = droplet.location.south();
})
}
}
#[cfg(test)]
pub mod tests {
use super::*;
use grid::parse::tests::parse_strings;
pub fn id2c(id: &DropletId) -> char {
assert!(id.id < 255);
(id.id as u8) as char
}
pub fn c2id(c: char) -> DropletId {
for u in 0x00u8..0xff {
let c2 = u as char;
if c == c2 {
return DropletId {
id: u as usize,
process_id: 0,
};
}
}
panic!("Can't make {} a u8", c);
}
pub fn parse_gridview(strs: &[&str]) -> GridView {
// same chars are guaranteed to have the same ids
let (grid, blobs) = parse_strings(&strs);
let mut snapshot = Snapshot::default();
for (ch, blob) in blobs.iter() {
let id = c2id(*ch);
snapshot.droplets.insert(id, blob.to_droplet(id));
}
let mut gv = GridView::new(grid);
gv.planned[0] = snapshot;
gv
}
pub fn parse_snapshot(strs: &[&str]) -> Snapshot {
let mut gv = parse_gridview(strs);
gv.planned.remove(0).unwrap()
}
fn check_all_matched(
snapshot_strs: &[&str],
blob_strs: &[&str],
) -> Option<Map<DropletId, SimpleBlob>> {
let snapshot = parse_snapshot(&snapshot_strs);
let (_, chip_blobs) = parse_strings(&blob_strs);
let blobs: Vec<SimpleBlob> = chip_blobs.values().cloned().collect();
let result: Map<DropletId, SimpleBlob> = snapshot.match_with_blobs(&blobs)?;
// create the expected map by mapping the ids in the snapshot
// to the associated blob which corresponds to the character
let mut expected: Map<DropletId, SimpleBlob> = Map::new();
for id in snapshot.droplets.keys() {
expected.insert(*id, chip_blobs[&id2c(id)].clone());
}
for id in expected.keys() {
// we can't compare blobs or droplets, so we get the droplet_info
assert_eq!(
result.get(id).map(|blob| blob.to_droplet(*id).info()),
expected.get(id).map(|blob| blob.to_droplet(*id).info())
)
}
Some(result)
}
#[test]
fn test_no_diff() {
let strs = vec![
"aa..........c",
".....bb......",
".............",
".............",
];
assert!(check_all_matched(&strs, &strs).is_some());
}
#[test]
fn test_location_diff() {
let exec_strs = vec![
"aa..........c",
".....bb......",
".............",
".............",
];
let chip_strs = vec![
"aa...........",
"............c",
".....bb......",
".............",
];
assert!(check_all_matched(&exec_strs, &chip_strs).is_some());
}
#[test]
fn test_dimension_diff() {
let exec_strs = vec![
"aa..........c",
".....bb......",
".............",
".............",
];
let chip_strs = vec![
"aa.........cc",
".....b.......",
".....b.......",
".............",
];
assert!(check_all_matched(&exec_strs, &chip_strs).is_some());
}
#[test]
fn test_mix_split_diff() {
let exec_strs = vec![
"aa...........",
".....bb..c...",
".............",
".............",
];
let chip_strs = vec![
"aa...........",
".....bbb.....",
".............",
".............",
];
assert!(check_all_matched(&exec_strs, &chip_strs).is_none());
}
#[test]
fn test_droplet_diff() {
use self::DropletDiff::*;
let old = parse_snapshot(&[
".a...........",
".....bb..c...",
".............",
".............",
]);
let new = parse_snapshot(&[
".............",
".a...bb......",
".............",
".............",
]);
// locations for droplet a
let from = Location { y: 0, x: 1 };
let to = Location { y: 1, x: 1 };
assert_eq!(old.diff_droplet(&c2id('a'), &new), Moved { from, to });
assert_eq!(old.diff_droplet(&c2id('b'), &new), DidNotMove);
assert_eq!(old.diff_droplet(&c2id('c'), &new), Disappeared);
let error_edges = {
let planned = &new;
let actual = &old;
old.get_error_edges(planned, actual)
};
assert_eq!(error_edges.len(), 1);
assert_eq!(error_edges[0], (from, to));
}
}
| DropletDiff | identifier_name |
wix.rs | use super::common;
use super::path_utils::{copy, Options};
use super::settings::Settings;
use handlebars::{to_json, Handlebars};
use lazy_static::lazy_static;
use regex::Regex;
use serde::Serialize;
use sha2::Digest;
use uuid::Uuid;
use zip::ZipArchive;
use std::collections::BTreeMap;
use std::fs::{create_dir_all, remove_dir_all, write, File};
use std::io::{Cursor, Read, Write};
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
// URLS for the WIX toolchain. Can be used for crossplatform compilation.
pub const WIX_URL: &str =
"https://github.com/wixtoolset/wix3/releases/download/wix3112rtm/wix311-binaries.zip";
pub const WIX_SHA256: &str = "2c1888d5d1dba377fc7fa14444cf556963747ff9a0a289a3599cf09da03b9e2e";
// For Cross Platform Complilation.
// const VC_REDIST_X86_URL: &str =
// "https://download.visualstudio.microsoft.com/download/pr/c8edbb87-c7ec-4500-a461-71e8912d25e9/99ba493d660597490cbb8b3211d2cae4/vc_redist.x86.exe";
// const VC_REDIST_X86_SHA256: &str =
// "3a43e8a55a3f3e4b73d01872c16d47a19dd825756784f4580187309e7d1fcb74";
// const VC_REDIST_X64_URL: &str =
// "https://download.visualstudio.microsoft.com/download/pr/9e04d214-5a9d-4515-9960-3d71398d98c3/1e1e62ab57bbb4bf5199e8ce88f040be/vc_redist.x64.exe";
// const VC_REDIST_X64_SHA256: &str =
// "d6cd2445f68815fe02489fafe0127819e44851e26dfbe702612bc0d223cbbc2b";
// A v4 UUID that was generated specifically for tauri-bundler, to be used as a
// namespace for generating v5 UUIDs from bundle identifier strings.
const UUID_NAMESPACE: [u8; 16] = [
0xfd, 0x85, 0x95, 0xa8, 0x17, 0xa3, 0x47, 0x4e, 0xa6, 0x16, 0x76, 0x14, 0x8d, 0xfa, 0x0c, 0x7b,
];
// setup for the main.wxs template file using handlebars. Dynamically changes the template on compilation based on the application metadata.
lazy_static! {
static ref HANDLEBARS: Handlebars<'static> = {
let mut handlebars = Handlebars::new();
handlebars
.register_template_string("main.wxs", include_str!("templates/main.wxs"))
.or_else(|e| Err(e.to_string()))
.expect("Failed to setup handlebar template");
handlebars
};
}
/// Mapper between a resource directory name and its ResourceDirectory descriptor.
type ResourceMap = BTreeMap<String, ResourceDirectory>;
/// A binary to bundle with WIX.
/// External binaries or additional project binaries are represented with this data structure.
/// This data structure is needed because WIX requires each path to have its own `id` and `guid`.
#[derive(Serialize)]
struct | {
/// the GUID to use on the WIX XML.
guid: String,
/// the id to use on the WIX XML.
id: String,
/// the binary path.
path: String,
}
/// A Resource file to bundle with WIX.
/// This data structure is needed because WIX requires each path to have its own `id` and `guid`.
#[derive(Serialize, Clone)]
struct ResourceFile {
/// the GUID to use on the WIX XML.
guid: String,
/// the id to use on the WIX XML.
id: String,
/// the file path.
path: String,
}
/// A resource directory to bundle with WIX.
/// This data structure is needed because WIX requires each path to have its own `id` and `guid`.
#[derive(Serialize)]
struct ResourceDirectory {
/// the directory name of the described resource.
name: String,
/// the files of the described resource directory.
files: Vec<ResourceFile>,
/// the directories that are children of the described resource directory.
directories: Vec<ResourceDirectory>,
}
impl ResourceDirectory {
/// Adds a file to this directory descriptor.
fn add_file(&mut self, file: ResourceFile) {
self.files.push(file);
}
/// Generates the wix XML string to bundle this directory resources recursively
fn get_wix_data(self) -> crate::Result<(String, Vec<String>)> {
let mut files = String::from("");
let mut file_ids = Vec::new();
for file in self.files {
file_ids.push(file.id.clone());
files.push_str(
format!(
r#"<Component Id="{id}" Guid="{guid}" Win64="$(var.Win64)" KeyPath="yes"><File Id="PathFile_{id}" Source="{path}" /></Component>"#,
id = file.id,
guid = file.guid,
path = file.path
).as_str()
);
}
let mut directories = String::from("");
for directory in self.directories {
let (wix_string, ids) = directory.get_wix_data()?;
for id in ids {
file_ids.push(id)
}
directories.push_str(wix_string.as_str());
}
let wix_string = format!(
r#"<Directory Id="{name}" Name="{name}">{contents}</Directory>"#,
name = self.name,
contents = format!("{}{}", files, directories)
);
Ok((wix_string, file_ids))
}
}
/// Copies the icons to the binary path, under the `resources` folder,
/// and returns the path to that directory.
fn copy_icons(settings: &Settings) -> crate::Result<PathBuf> {
let base_dir = settings.project_out_directory();
let resource_dir = base_dir.join("resources");
let mut image_path = PathBuf::from(settings.project_out_directory());
// pop off till in tauri_src dir
image_path.pop();
image_path.pop();
// get icon dir and icon file.
let image_path = image_path.join("icons");
let opts = super::path_utils::Options::default();
copy(
image_path,
&resource_dir,
&Options {
copy_files: true,
overwrite: true,
..opts
},
)?;
Ok(resource_dir)
}
/// Function used to download Wix and VC_REDIST. Checks SHA256 to verify the download.
fn download_and_verify(url: &str, hash: &str) -> crate::Result<Vec<u8>> {
common::print_info(format!("Downloading {}", url).as_str())?;
let response = attohttpc::get(url).send()?;
let data: Vec<u8> = response.bytes()?;
common::print_info("validating hash")?;
let mut hasher = sha2::Sha256::new();
hasher.update(&data);
let url_hash = hasher.finalize().to_vec();
let expected_hash = hex::decode(hash)?;
if expected_hash == url_hash {
Ok(data)
} else {
Err(crate::Error::HashError)
}
}
/// The installer directory of the app.
fn app_installer_dir(settings: &Settings) -> crate::Result<PathBuf> {
let arch = match settings.binary_arch() {
"x86" => "x86",
"x86_64" => "x64",
target => {
return Err(crate::Error::ArchError(format!(
"Unsupported architecture: {}",
target
)))
}
};
let package_base_name = format!(
"{}_{}_{}",
settings.main_binary_name().replace(".exe", ""),
settings.version_string(),
arch
);
Ok(
settings
.project_out_directory()
.to_path_buf()
.join(format!("bundle/msi/{}.msi", package_base_name)),
)
}
/// Extracts the zips from Wix and VC_REDIST into a useable path.
fn extract_zip(data: &Vec<u8>, path: &Path) -> crate::Result<()> {
let cursor = Cursor::new(data);
let mut zipa = ZipArchive::new(cursor)?;
for i in 0..zipa.len() {
let mut file = zipa.by_index(i)?;
let dest_path = path.join(file.name());
let parent = dest_path.parent().expect("Failed to get parent");
if !parent.exists() {
create_dir_all(parent)?;
}
let mut buff: Vec<u8> = Vec::new();
file.read_to_end(&mut buff)?;
let mut fileout = File::create(dest_path).expect("Failed to open file");
fileout.write_all(&buff)?;
}
Ok(())
}
/// Generates the UUID for the Wix template.
fn generate_package_guid(settings: &Settings) -> Uuid {
generate_guid(settings.bundle_identifier().as_bytes())
}
/// Generates a GUID.
fn generate_guid(key: &[u8]) -> Uuid {
let namespace = Uuid::from_bytes(UUID_NAMESPACE);
Uuid::new_v5(&namespace, key)
}
// Specifically goes and gets Wix and verifies the download via Sha256
pub fn get_and_extract_wix(path: &Path) -> crate::Result<()> {
common::print_info("Verifying wix package")?;
let data = download_and_verify(WIX_URL, WIX_SHA256)?;
common::print_info("extracting WIX")?;
extract_zip(&data, path)
}
// For if bundler needs DLL files.
// fn run_heat_exe(
// wix_toolset_path: &Path,
// build_path: &Path,
// harvest_dir: &Path,
// platform: &str,
// ) -> Result<(), String> {
// let mut args = vec!["dir"];
// let harvest_str = harvest_dir.display().to_string();
// args.push(&harvest_str);
// args.push("-platform");
// args.push(platform);
// args.push("-cg");
// args.push("AppFiles");
// args.push("-dr");
// args.push("APPLICATIONFOLDER");
// args.push("-gg");
// args.push("-srd");
// args.push("-out");
// args.push("appdir.wxs");
// args.push("-var");
// args.push("var.SourceDir");
// let heat_exe = wix_toolset_path.join("heat.exe");
// let mut cmd = Command::new(&heat_exe)
// .args(&args)
// .stdout(Stdio::piped())
// .current_dir(build_path)
// .spawn()
// .expect("error running heat.exe");
// {
// let stdout = cmd.stdout.as_mut().unwrap();
// let reader = BufReader::new(stdout);
// for line in reader.lines() {
// info!(logger, "{}", line.unwrap());
// }
// }
// let status = cmd.wait().unwrap();
// if status.success() {
// Ok(())
// } else {
// Err("error running heat.exe".to_string())
// }
// }
/// Runs the Candle.exe executable for Wix. Candle parses the wxs file and generates the code for building the installer.
fn run_candle(
settings: &Settings,
wix_toolset_path: &Path,
build_path: &Path,
wxs_file_name: &str,
) -> crate::Result<()> {
let arch = match settings.binary_arch() {
"x86_64" => "x64",
"x86" => "x86",
target => {
return Err(crate::Error::ArchError(format!(
"unsupported target: {}",
target
)))
}
};
let main_binary = settings
.binaries()
.iter()
.find(|bin| bin.main())
.ok_or_else(|| anyhow::anyhow!("Failed to get main binary"))?;
let args = vec![
"-arch".to_string(),
arch.to_string(),
wxs_file_name.to_string(),
format!(
"-dSourceDir={}",
settings.binary_path(main_binary).display()
),
];
let candle_exe = wix_toolset_path.join("candle.exe");
common::print_info(format!("running candle for {}", wxs_file_name).as_str())?;
let mut cmd = Command::new(&candle_exe);
cmd
.args(&args)
.stdout(Stdio::piped())
.current_dir(build_path);
common::print_info("running candle.exe")?;
common::execute_with_output(&mut cmd).map_err(|_| crate::Error::CandleError)
}
/// Runs the Light.exe file. Light takes the generated code from Candle and produces an MSI Installer.
fn run_light(
wix_toolset_path: &Path,
build_path: &Path,
wixobjs: &[&str],
output_path: &Path,
) -> crate::Result<PathBuf> {
let light_exe = wix_toolset_path.join("light.exe");
let mut args: Vec<String> = vec![
"-ext".to_string(),
"WixUIExtension".to_string(),
"-o".to_string(),
output_path.display().to_string(),
];
for p in wixobjs {
args.push(p.to_string());
}
let mut cmd = Command::new(&light_exe);
cmd
.args(&args)
.stdout(Stdio::piped())
.current_dir(build_path);
common::print_info(format!("running light to produce {}", output_path.display()).as_str())?;
common::execute_with_output(&mut cmd)
.map(|_| output_path.to_path_buf())
.map_err(|_| crate::Error::LightError)
}
// fn get_icon_data() -> crate::Result<()> {
// Ok(())
// }
// Entry point for bundling and creating the MSI installer. For now the only supported platform is Windows x64.
pub fn build_wix_app_installer(
settings: &Settings,
wix_toolset_path: &Path,
) -> crate::Result<PathBuf> {
let arch = match settings.binary_arch() {
"x86_64" => "x64",
"x86" => "x86",
target => {
return Err(crate::Error::ArchError(format!(
"unsupported target: {}",
target
)))
}
};
// target only supports x64.
common::print_info(format!("Target: {}", arch).as_str())?;
let output_path = settings
.project_out_directory()
.join("bundle/msi")
.join(arch);
let mut data = BTreeMap::new();
if let Ok(tauri_config) = crate::bundle::tauri_config::get() {
data.insert(
"embedded_server",
to_json(tauri_config.tauri.embedded_server.active),
);
}
data.insert("product_name", to_json(settings.bundle_name()));
data.insert("version", to_json(settings.version_string()));
let manufacturer = settings.bundle_identifier().to_string();
data.insert("manufacturer", to_json(manufacturer.as_str()));
let upgrade_code = Uuid::new_v5(
&Uuid::NAMESPACE_DNS,
format!("{}.app.x64", &settings.main_binary_name()).as_bytes(),
)
.to_string();
data.insert("upgrade_code", to_json(&upgrade_code.as_str()));
let path_guid = generate_package_guid(settings).to_string();
data.insert("path_component_guid", to_json(&path_guid.as_str()));
let shortcut_guid = generate_package_guid(settings).to_string();
data.insert("shortcut_guid", to_json(&shortcut_guid.as_str()));
let app_exe_name = settings.main_binary_name().to_string();
data.insert("app_exe_name", to_json(&app_exe_name));
let binaries = generate_binaries_data(&settings)?;
let binaries_json = to_json(&binaries);
data.insert("binaries", binaries_json);
let resources = generate_resource_data(&settings)?;
let mut resources_wix_string = String::from("");
let mut files_ids = Vec::new();
for (_, dir) in resources {
let (wix_string, ids) = dir.get_wix_data()?;
resources_wix_string.push_str(wix_string.as_str());
for id in ids {
files_ids.push(id);
}
}
data.insert("resources", to_json(resources_wix_string));
data.insert("resource_file_ids", to_json(files_ids));
let main_binary = settings
.binaries()
.iter()
.find(|bin| bin.main())
.ok_or_else(|| anyhow::anyhow!("Failed to get main binary"))?;
let app_exe_source = settings.binary_path(main_binary).display().to_string();
data.insert("app_exe_source", to_json(&app_exe_source));
// copy icons from icons folder to resource folder near msi
let image_path = copy_icons(&settings)?;
let path = image_path.join("icon.ico").display().to_string();
data.insert("icon_path", to_json(path.as_str()));
let temp = HANDLEBARS.render("main.wxs", &data)?;
if output_path.exists() {
remove_dir_all(&output_path).or_else(|e| Err(e))?;
}
create_dir_all(&output_path).or_else(|e| Err(e))?;
let main_wxs_path = output_path.join("main.wxs");
write(&main_wxs_path, temp).or_else(|e| Err(e))?;
let input_basenames = vec!["main"];
for basename in &input_basenames {
let wxs = format!("{}.wxs", basename);
run_candle(settings, &wix_toolset_path, &output_path, &wxs)?;
}
let wixobjs = vec!["main.wixobj"];
let target = run_light(
&wix_toolset_path,
&output_path,
&wixobjs,
&app_installer_dir(settings)?,
)?;
Ok(target)
}
/// Generates the data required for the external binaries and extra binaries bundling.
fn generate_binaries_data(settings: &Settings) -> crate::Result<Vec<Binary>> {
let mut binaries = Vec::new();
let regex = Regex::new(r"[^\w\d\.]")?;
let cwd = std::env::current_dir()?;
for src in settings.external_binaries() {
let src = src?;
let filename = src
.file_name()
.expect("failed to extract external binary filename")
.to_os_string()
.into_string()
.expect("failed to convert external binary filename to string");
let guid = generate_guid(filename.as_bytes()).to_string();
binaries.push(Binary {
guid,
path: cwd
.join(src)
.into_os_string()
.into_string()
.expect("failed to read external binary path"),
id: regex.replace_all(&filename, "").to_string(),
});
}
for bin in settings.binaries() {
let filename = bin.name();
let guid = generate_guid(filename.as_bytes()).to_string();
if !bin.main() {
binaries.push(Binary {
guid,
path: settings
.binary_path(bin)
.into_os_string()
.into_string()
.expect("failed to read binary path"),
id: regex.replace_all(&filename, "").to_string(),
})
}
}
Ok(binaries)
}
/// Generates the data required for the resource bundling on wix
fn generate_resource_data(settings: &Settings) -> crate::Result<ResourceMap> {
let mut resources = ResourceMap::new();
let regex = Regex::new(r"[^\w\d\.]")?;
let cwd = std::env::current_dir()?;
for src in settings.resource_files() {
let src = src?;
let filename = src
.file_name()
.expect("failed to extract resource filename")
.to_os_string()
.into_string()
.expect("failed to convert resource filename to string");
let resource_path = cwd
.join(src.clone())
.into_os_string()
.into_string()
.expect("failed to read resource path");
let resource_entry = ResourceFile {
guid: generate_guid(filename.as_bytes()).to_string(),
path: resource_path,
id: regex.replace_all(&filename, "").to_string(),
};
// split the resource path directories
let mut directories = src
.components()
.filter(|component| {
let comp = component.as_os_str();
comp != "." && comp != ".."
})
.collect::<Vec<_>>();
directories.truncate(directories.len() - 1);
// transform the directory structure to a chained vec structure
for directory in directories {
let directory_name = directory
.as_os_str()
.to_os_string()
.into_string()
.expect("failed to read resource folder name");
// if the directory is already on the map
if resources.contains_key(&directory_name) {
let directory_entry = &mut resources
.get_mut(&directory_name)
.expect("Unable to handle resources");
if directory_entry.name == directory_name {
// the directory entry is the root of the chain
directory_entry.add_file(resource_entry.clone());
} else {
let index = directory_entry
.directories
.iter()
.position(|f| f.name == directory_name);
if index.is_some() {
// the directory entry is already a part of the chain
let dir = directory_entry
.directories
.get_mut(index.expect("Unable to get index"))
.expect("Unable to get directory");
dir.add_file(resource_entry.clone());
} else {
// push it to the chain
directory_entry.directories.push(ResourceDirectory {
name: directory_name.clone(),
directories: vec![],
files: vec![resource_entry.clone()],
});
}
}
} else {
resources.insert(
directory_name.clone(),
ResourceDirectory {
name: directory_name.clone(),
directories: vec![],
files: vec![resource_entry.clone()],
},
);
}
}
}
Ok(resources)
}
| Binary | identifier_name |
wix.rs | use super::common;
use super::path_utils::{copy, Options};
use super::settings::Settings;
use handlebars::{to_json, Handlebars};
use lazy_static::lazy_static;
use regex::Regex;
use serde::Serialize;
use sha2::Digest;
use uuid::Uuid;
use zip::ZipArchive;
use std::collections::BTreeMap;
use std::fs::{create_dir_all, remove_dir_all, write, File};
use std::io::{Cursor, Read, Write};
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
// URLS for the WIX toolchain. Can be used for crossplatform compilation.
pub const WIX_URL: &str =
"https://github.com/wixtoolset/wix3/releases/download/wix3112rtm/wix311-binaries.zip";
pub const WIX_SHA256: &str = "2c1888d5d1dba377fc7fa14444cf556963747ff9a0a289a3599cf09da03b9e2e";
// For Cross Platform Complilation.
// const VC_REDIST_X86_URL: &str =
// "https://download.visualstudio.microsoft.com/download/pr/c8edbb87-c7ec-4500-a461-71e8912d25e9/99ba493d660597490cbb8b3211d2cae4/vc_redist.x86.exe";
// const VC_REDIST_X86_SHA256: &str =
// "3a43e8a55a3f3e4b73d01872c16d47a19dd825756784f4580187309e7d1fcb74";
// const VC_REDIST_X64_URL: &str =
// "https://download.visualstudio.microsoft.com/download/pr/9e04d214-5a9d-4515-9960-3d71398d98c3/1e1e62ab57bbb4bf5199e8ce88f040be/vc_redist.x64.exe";
// const VC_REDIST_X64_SHA256: &str =
// "d6cd2445f68815fe02489fafe0127819e44851e26dfbe702612bc0d223cbbc2b";
// A v4 UUID that was generated specifically for tauri-bundler, to be used as a
// namespace for generating v5 UUIDs from bundle identifier strings.
const UUID_NAMESPACE: [u8; 16] = [
0xfd, 0x85, 0x95, 0xa8, 0x17, 0xa3, 0x47, 0x4e, 0xa6, 0x16, 0x76, 0x14, 0x8d, 0xfa, 0x0c, 0x7b,
];
// setup for the main.wxs template file using handlebars. Dynamically changes the template on compilation based on the application metadata.
lazy_static! {
static ref HANDLEBARS: Handlebars<'static> = {
let mut handlebars = Handlebars::new();
handlebars
.register_template_string("main.wxs", include_str!("templates/main.wxs"))
.or_else(|e| Err(e.to_string()))
.expect("Failed to setup handlebar template");
handlebars
};
}
/// Mapper between a resource directory name and its ResourceDirectory descriptor.
type ResourceMap = BTreeMap<String, ResourceDirectory>;
/// A binary to bundle with WIX.
/// External binaries or additional project binaries are represented with this data structure.
/// This data structure is needed because WIX requires each path to have its own `id` and `guid`.
#[derive(Serialize)]
struct Binary {
/// the GUID to use on the WIX XML.
guid: String,
/// the id to use on the WIX XML.
id: String,
/// the binary path.
path: String,
}
/// A Resource file to bundle with WIX.
/// This data structure is needed because WIX requires each path to have its own `id` and `guid`.
#[derive(Serialize, Clone)]
struct ResourceFile {
/// the GUID to use on the WIX XML.
guid: String,
/// the id to use on the WIX XML.
id: String,
/// the file path.
path: String,
}
/// A resource directory to bundle with WIX.
/// This data structure is needed because WIX requires each path to have its own `id` and `guid`.
#[derive(Serialize)]
struct ResourceDirectory {
/// the directory name of the described resource.
name: String,
/// the files of the described resource directory.
files: Vec<ResourceFile>,
/// the directories that are children of the described resource directory.
directories: Vec<ResourceDirectory>,
}
impl ResourceDirectory {
/// Adds a file to this directory descriptor.
fn add_file(&mut self, file: ResourceFile) {
self.files.push(file);
}
/// Generates the wix XML string to bundle this directory resources recursively
fn get_wix_data(self) -> crate::Result<(String, Vec<String>)> {
let mut files = String::from("");
let mut file_ids = Vec::new();
for file in self.files {
file_ids.push(file.id.clone());
files.push_str(
format!(
r#"<Component Id="{id}" Guid="{guid}" Win64="$(var.Win64)" KeyPath="yes"><File Id="PathFile_{id}" Source="{path}" /></Component>"#,
id = file.id,
guid = file.guid,
path = file.path
).as_str()
);
}
let mut directories = String::from("");
for directory in self.directories {
let (wix_string, ids) = directory.get_wix_data()?;
for id in ids {
file_ids.push(id)
}
directories.push_str(wix_string.as_str());
}
let wix_string = format!(
r#"<Directory Id="{name}" Name="{name}">{contents}</Directory>"#,
name = self.name,
contents = format!("{}{}", files, directories)
);
Ok((wix_string, file_ids))
}
}
/// Copies the icons to the binary path, under the `resources` folder,
/// and returns the path to that directory.
fn copy_icons(settings: &Settings) -> crate::Result<PathBuf> {
let base_dir = settings.project_out_directory();
let resource_dir = base_dir.join("resources");
let mut image_path = PathBuf::from(settings.project_out_directory());
// pop off till in tauri_src dir
image_path.pop();
image_path.pop();
// get icon dir and icon file.
let image_path = image_path.join("icons");
let opts = super::path_utils::Options::default();
copy(
image_path,
&resource_dir,
&Options {
copy_files: true,
overwrite: true,
..opts
},
)?;
Ok(resource_dir)
}
/// Function used to download Wix and VC_REDIST. Checks SHA256 to verify the download.
fn download_and_verify(url: &str, hash: &str) -> crate::Result<Vec<u8>> {
common::print_info(format!("Downloading {}", url).as_str())?;
let response = attohttpc::get(url).send()?;
let data: Vec<u8> = response.bytes()?;
common::print_info("validating hash")?;
let mut hasher = sha2::Sha256::new();
hasher.update(&data);
let url_hash = hasher.finalize().to_vec();
let expected_hash = hex::decode(hash)?;
if expected_hash == url_hash {
Ok(data)
} else {
Err(crate::Error::HashError)
}
}
/// The installer directory of the app.
fn app_installer_dir(settings: &Settings) -> crate::Result<PathBuf> {
let arch = match settings.binary_arch() {
"x86" => "x86",
"x86_64" => "x64",
target => {
return Err(crate::Error::ArchError(format!(
"Unsupported architecture: {}",
target
)))
}
};
let package_base_name = format!(
"{}_{}_{}",
settings.main_binary_name().replace(".exe", ""),
settings.version_string(),
arch
);
Ok(
settings
.project_out_directory()
.to_path_buf()
.join(format!("bundle/msi/{}.msi", package_base_name)),
)
}
/// Extracts the zips from Wix and VC_REDIST into a useable path.
fn extract_zip(data: &Vec<u8>, path: &Path) -> crate::Result<()> {
let cursor = Cursor::new(data);
let mut zipa = ZipArchive::new(cursor)?;
for i in 0..zipa.len() {
let mut file = zipa.by_index(i)?;
let dest_path = path.join(file.name());
let parent = dest_path.parent().expect("Failed to get parent");
if !parent.exists() {
create_dir_all(parent)?;
}
let mut buff: Vec<u8> = Vec::new();
file.read_to_end(&mut buff)?;
let mut fileout = File::create(dest_path).expect("Failed to open file");
fileout.write_all(&buff)?;
}
Ok(())
}
/// Generates the UUID for the Wix template.
fn generate_package_guid(settings: &Settings) -> Uuid {
generate_guid(settings.bundle_identifier().as_bytes())
}
/// Generates a GUID.
fn generate_guid(key: &[u8]) -> Uuid {
let namespace = Uuid::from_bytes(UUID_NAMESPACE);
Uuid::new_v5(&namespace, key)
}
// Specifically goes and gets Wix and verifies the download via Sha256
pub fn get_and_extract_wix(path: &Path) -> crate::Result<()> {
common::print_info("Verifying wix package")?;
let data = download_and_verify(WIX_URL, WIX_SHA256)?;
common::print_info("extracting WIX")?;
extract_zip(&data, path)
}
// For if bundler needs DLL files.
// fn run_heat_exe(
// wix_toolset_path: &Path,
// build_path: &Path,
// harvest_dir: &Path,
// platform: &str,
// ) -> Result<(), String> {
// let mut args = vec!["dir"];
// let harvest_str = harvest_dir.display().to_string();
// args.push(&harvest_str);
// args.push("-platform");
// args.push(platform);
// args.push("-cg");
// args.push("AppFiles");
// args.push("-dr");
// args.push("APPLICATIONFOLDER");
// args.push("-gg");
// args.push("-srd");
// args.push("-out");
// args.push("appdir.wxs");
// args.push("-var");
// args.push("var.SourceDir");
// let heat_exe = wix_toolset_path.join("heat.exe");
// let mut cmd = Command::new(&heat_exe)
// .args(&args)
// .stdout(Stdio::piped())
// .current_dir(build_path)
// .spawn()
// .expect("error running heat.exe");
// {
// let stdout = cmd.stdout.as_mut().unwrap();
// let reader = BufReader::new(stdout);
// for line in reader.lines() {
// info!(logger, "{}", line.unwrap());
// }
// }
// let status = cmd.wait().unwrap();
// if status.success() {
// Ok(())
// } else {
// Err("error running heat.exe".to_string())
// }
// }
/// Runs the Candle.exe executable for Wix. Candle parses the wxs file and generates the code for building the installer.
fn run_candle(
settings: &Settings,
wix_toolset_path: &Path,
build_path: &Path,
wxs_file_name: &str,
) -> crate::Result<()> |
/// Runs the Light.exe file. Light takes the generated code from Candle and produces an MSI Installer.
fn run_light(
wix_toolset_path: &Path,
build_path: &Path,
wixobjs: &[&str],
output_path: &Path,
) -> crate::Result<PathBuf> {
let light_exe = wix_toolset_path.join("light.exe");
let mut args: Vec<String> = vec![
"-ext".to_string(),
"WixUIExtension".to_string(),
"-o".to_string(),
output_path.display().to_string(),
];
for p in wixobjs {
args.push(p.to_string());
}
let mut cmd = Command::new(&light_exe);
cmd
.args(&args)
.stdout(Stdio::piped())
.current_dir(build_path);
common::print_info(format!("running light to produce {}", output_path.display()).as_str())?;
common::execute_with_output(&mut cmd)
.map(|_| output_path.to_path_buf())
.map_err(|_| crate::Error::LightError)
}
// fn get_icon_data() -> crate::Result<()> {
// Ok(())
// }
// Entry point for bundling and creating the MSI installer. For now the only supported platform is Windows x64.
pub fn build_wix_app_installer(
settings: &Settings,
wix_toolset_path: &Path,
) -> crate::Result<PathBuf> {
let arch = match settings.binary_arch() {
"x86_64" => "x64",
"x86" => "x86",
target => {
return Err(crate::Error::ArchError(format!(
"unsupported target: {}",
target
)))
}
};
// target only supports x64.
common::print_info(format!("Target: {}", arch).as_str())?;
let output_path = settings
.project_out_directory()
.join("bundle/msi")
.join(arch);
let mut data = BTreeMap::new();
if let Ok(tauri_config) = crate::bundle::tauri_config::get() {
data.insert(
"embedded_server",
to_json(tauri_config.tauri.embedded_server.active),
);
}
data.insert("product_name", to_json(settings.bundle_name()));
data.insert("version", to_json(settings.version_string()));
let manufacturer = settings.bundle_identifier().to_string();
data.insert("manufacturer", to_json(manufacturer.as_str()));
let upgrade_code = Uuid::new_v5(
&Uuid::NAMESPACE_DNS,
format!("{}.app.x64", &settings.main_binary_name()).as_bytes(),
)
.to_string();
data.insert("upgrade_code", to_json(&upgrade_code.as_str()));
let path_guid = generate_package_guid(settings).to_string();
data.insert("path_component_guid", to_json(&path_guid.as_str()));
let shortcut_guid = generate_package_guid(settings).to_string();
data.insert("shortcut_guid", to_json(&shortcut_guid.as_str()));
let app_exe_name = settings.main_binary_name().to_string();
data.insert("app_exe_name", to_json(&app_exe_name));
let binaries = generate_binaries_data(&settings)?;
let binaries_json = to_json(&binaries);
data.insert("binaries", binaries_json);
let resources = generate_resource_data(&settings)?;
let mut resources_wix_string = String::from("");
let mut files_ids = Vec::new();
for (_, dir) in resources {
let (wix_string, ids) = dir.get_wix_data()?;
resources_wix_string.push_str(wix_string.as_str());
for id in ids {
files_ids.push(id);
}
}
data.insert("resources", to_json(resources_wix_string));
data.insert("resource_file_ids", to_json(files_ids));
let main_binary = settings
.binaries()
.iter()
.find(|bin| bin.main())
.ok_or_else(|| anyhow::anyhow!("Failed to get main binary"))?;
let app_exe_source = settings.binary_path(main_binary).display().to_string();
data.insert("app_exe_source", to_json(&app_exe_source));
// copy icons from icons folder to resource folder near msi
let image_path = copy_icons(&settings)?;
let path = image_path.join("icon.ico").display().to_string();
data.insert("icon_path", to_json(path.as_str()));
let temp = HANDLEBARS.render("main.wxs", &data)?;
if output_path.exists() {
remove_dir_all(&output_path).or_else(|e| Err(e))?;
}
create_dir_all(&output_path).or_else(|e| Err(e))?;
let main_wxs_path = output_path.join("main.wxs");
write(&main_wxs_path, temp).or_else(|e| Err(e))?;
let input_basenames = vec!["main"];
for basename in &input_basenames {
let wxs = format!("{}.wxs", basename);
run_candle(settings, &wix_toolset_path, &output_path, &wxs)?;
}
let wixobjs = vec!["main.wixobj"];
let target = run_light(
&wix_toolset_path,
&output_path,
&wixobjs,
&app_installer_dir(settings)?,
)?;
Ok(target)
}
/// Generates the data required for the external binaries and extra binaries bundling.
fn generate_binaries_data(settings: &Settings) -> crate::Result<Vec<Binary>> {
let mut binaries = Vec::new();
let regex = Regex::new(r"[^\w\d\.]")?;
let cwd = std::env::current_dir()?;
for src in settings.external_binaries() {
let src = src?;
let filename = src
.file_name()
.expect("failed to extract external binary filename")
.to_os_string()
.into_string()
.expect("failed to convert external binary filename to string");
let guid = generate_guid(filename.as_bytes()).to_string();
binaries.push(Binary {
guid,
path: cwd
.join(src)
.into_os_string()
.into_string()
.expect("failed to read external binary path"),
id: regex.replace_all(&filename, "").to_string(),
});
}
for bin in settings.binaries() {
let filename = bin.name();
let guid = generate_guid(filename.as_bytes()).to_string();
if !bin.main() {
binaries.push(Binary {
guid,
path: settings
.binary_path(bin)
.into_os_string()
.into_string()
.expect("failed to read binary path"),
id: regex.replace_all(&filename, "").to_string(),
})
}
}
Ok(binaries)
}
/// Generates the data required for the resource bundling on wix
fn generate_resource_data(settings: &Settings) -> crate::Result<ResourceMap> {
let mut resources = ResourceMap::new();
let regex = Regex::new(r"[^\w\d\.]")?;
let cwd = std::env::current_dir()?;
for src in settings.resource_files() {
let src = src?;
let filename = src
.file_name()
.expect("failed to extract resource filename")
.to_os_string()
.into_string()
.expect("failed to convert resource filename to string");
let resource_path = cwd
.join(src.clone())
.into_os_string()
.into_string()
.expect("failed to read resource path");
let resource_entry = ResourceFile {
guid: generate_guid(filename.as_bytes()).to_string(),
path: resource_path,
id: regex.replace_all(&filename, "").to_string(),
};
// split the resource path directories
let mut directories = src
.components()
.filter(|component| {
let comp = component.as_os_str();
comp != "." && comp != ".."
})
.collect::<Vec<_>>();
directories.truncate(directories.len() - 1);
// transform the directory structure to a chained vec structure
for directory in directories {
let directory_name = directory
.as_os_str()
.to_os_string()
.into_string()
.expect("failed to read resource folder name");
// if the directory is already on the map
if resources.contains_key(&directory_name) {
let directory_entry = &mut resources
.get_mut(&directory_name)
.expect("Unable to handle resources");
if directory_entry.name == directory_name {
// the directory entry is the root of the chain
directory_entry.add_file(resource_entry.clone());
} else {
let index = directory_entry
.directories
.iter()
.position(|f| f.name == directory_name);
if index.is_some() {
// the directory entry is already a part of the chain
let dir = directory_entry
.directories
.get_mut(index.expect("Unable to get index"))
.expect("Unable to get directory");
dir.add_file(resource_entry.clone());
} else {
// push it to the chain
directory_entry.directories.push(ResourceDirectory {
name: directory_name.clone(),
directories: vec![],
files: vec![resource_entry.clone()],
});
}
}
} else {
resources.insert(
directory_name.clone(),
ResourceDirectory {
name: directory_name.clone(),
directories: vec![],
files: vec![resource_entry.clone()],
},
);
}
}
}
Ok(resources)
}
| {
let arch = match settings.binary_arch() {
"x86_64" => "x64",
"x86" => "x86",
target => {
return Err(crate::Error::ArchError(format!(
"unsupported target: {}",
target
)))
}
};
let main_binary = settings
.binaries()
.iter()
.find(|bin| bin.main())
.ok_or_else(|| anyhow::anyhow!("Failed to get main binary"))?;
let args = vec![
"-arch".to_string(),
arch.to_string(),
wxs_file_name.to_string(),
format!(
"-dSourceDir={}",
settings.binary_path(main_binary).display()
),
];
let candle_exe = wix_toolset_path.join("candle.exe");
common::print_info(format!("running candle for {}", wxs_file_name).as_str())?;
let mut cmd = Command::new(&candle_exe);
cmd
.args(&args)
.stdout(Stdio::piped())
.current_dir(build_path);
common::print_info("running candle.exe")?;
common::execute_with_output(&mut cmd).map_err(|_| crate::Error::CandleError)
} | identifier_body |
wix.rs | use super::common;
use super::path_utils::{copy, Options};
use super::settings::Settings;
use handlebars::{to_json, Handlebars};
use lazy_static::lazy_static;
use regex::Regex;
use serde::Serialize;
use sha2::Digest;
use uuid::Uuid;
use zip::ZipArchive;
use std::collections::BTreeMap;
use std::fs::{create_dir_all, remove_dir_all, write, File};
use std::io::{Cursor, Read, Write};
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
// URLS for the WIX toolchain. Can be used for crossplatform compilation.
pub const WIX_URL: &str =
"https://github.com/wixtoolset/wix3/releases/download/wix3112rtm/wix311-binaries.zip";
pub const WIX_SHA256: &str = "2c1888d5d1dba377fc7fa14444cf556963747ff9a0a289a3599cf09da03b9e2e";
// For Cross Platform Complilation.
// const VC_REDIST_X86_URL: &str =
// "https://download.visualstudio.microsoft.com/download/pr/c8edbb87-c7ec-4500-a461-71e8912d25e9/99ba493d660597490cbb8b3211d2cae4/vc_redist.x86.exe";
// const VC_REDIST_X86_SHA256: &str =
// "3a43e8a55a3f3e4b73d01872c16d47a19dd825756784f4580187309e7d1fcb74";
// const VC_REDIST_X64_URL: &str =
// "https://download.visualstudio.microsoft.com/download/pr/9e04d214-5a9d-4515-9960-3d71398d98c3/1e1e62ab57bbb4bf5199e8ce88f040be/vc_redist.x64.exe";
// const VC_REDIST_X64_SHA256: &str =
// "d6cd2445f68815fe02489fafe0127819e44851e26dfbe702612bc0d223cbbc2b";
// A v4 UUID that was generated specifically for tauri-bundler, to be used as a
// namespace for generating v5 UUIDs from bundle identifier strings.
const UUID_NAMESPACE: [u8; 16] = [
0xfd, 0x85, 0x95, 0xa8, 0x17, 0xa3, 0x47, 0x4e, 0xa6, 0x16, 0x76, 0x14, 0x8d, 0xfa, 0x0c, 0x7b,
];
// setup for the main.wxs template file using handlebars. Dynamically changes the template on compilation based on the application metadata.
lazy_static! {
static ref HANDLEBARS: Handlebars<'static> = {
let mut handlebars = Handlebars::new();
handlebars
.register_template_string("main.wxs", include_str!("templates/main.wxs"))
.or_else(|e| Err(e.to_string()))
.expect("Failed to setup handlebar template");
handlebars
};
}
/// Mapper between a resource directory name and its ResourceDirectory descriptor.
type ResourceMap = BTreeMap<String, ResourceDirectory>;
/// A binary to bundle with WIX.
/// External binaries or additional project binaries are represented with this data structure.
/// This data structure is needed because WIX requires each path to have its own `id` and `guid`.
#[derive(Serialize)]
struct Binary {
/// the GUID to use on the WIX XML.
guid: String,
/// the id to use on the WIX XML.
id: String,
/// the binary path.
path: String,
}
/// A Resource file to bundle with WIX.
/// This data structure is needed because WIX requires each path to have its own `id` and `guid`.
#[derive(Serialize, Clone)]
struct ResourceFile {
/// the GUID to use on the WIX XML.
guid: String,
/// the id to use on the WIX XML.
id: String,
/// the file path.
path: String,
}
/// A resource directory to bundle with WIX.
/// This data structure is needed because WIX requires each path to have its own `id` and `guid`.
#[derive(Serialize)]
struct ResourceDirectory {
/// the directory name of the described resource.
name: String,
/// the files of the described resource directory.
files: Vec<ResourceFile>,
/// the directories that are children of the described resource directory.
directories: Vec<ResourceDirectory>,
}
impl ResourceDirectory {
/// Adds a file to this directory descriptor.
fn add_file(&mut self, file: ResourceFile) {
self.files.push(file);
}
/// Generates the wix XML string to bundle this directory resources recursively
fn get_wix_data(self) -> crate::Result<(String, Vec<String>)> {
let mut files = String::from("");
let mut file_ids = Vec::new();
for file in self.files {
file_ids.push(file.id.clone());
files.push_str(
format!(
r#"<Component Id="{id}" Guid="{guid}" Win64="$(var.Win64)" KeyPath="yes"><File Id="PathFile_{id}" Source="{path}" /></Component>"#,
id = file.id,
guid = file.guid,
path = file.path
).as_str()
);
}
let mut directories = String::from("");
for directory in self.directories {
let (wix_string, ids) = directory.get_wix_data()?;
for id in ids {
file_ids.push(id)
}
directories.push_str(wix_string.as_str());
}
let wix_string = format!(
r#"<Directory Id="{name}" Name="{name}">{contents}</Directory>"#,
name = self.name,
contents = format!("{}{}", files, directories)
);
Ok((wix_string, file_ids))
}
}
/// Copies the icons to the binary path, under the `resources` folder,
/// and returns the path to that directory.
fn copy_icons(settings: &Settings) -> crate::Result<PathBuf> {
let base_dir = settings.project_out_directory();
let resource_dir = base_dir.join("resources");
let mut image_path = PathBuf::from(settings.project_out_directory());
// pop off till in tauri_src dir
image_path.pop();
image_path.pop();
// get icon dir and icon file.
let image_path = image_path.join("icons");
let opts = super::path_utils::Options::default();
copy(
image_path,
&resource_dir,
&Options {
copy_files: true,
overwrite: true,
..opts
},
)?;
Ok(resource_dir)
}
/// Function used to download Wix and VC_REDIST. Checks SHA256 to verify the download.
fn download_and_verify(url: &str, hash: &str) -> crate::Result<Vec<u8>> {
common::print_info(format!("Downloading {}", url).as_str())?;
let response = attohttpc::get(url).send()?;
let data: Vec<u8> = response.bytes()?;
common::print_info("validating hash")?;
let mut hasher = sha2::Sha256::new();
hasher.update(&data);
let url_hash = hasher.finalize().to_vec();
let expected_hash = hex::decode(hash)?;
if expected_hash == url_hash {
Ok(data)
} else {
Err(crate::Error::HashError)
}
}
/// The installer directory of the app.
fn app_installer_dir(settings: &Settings) -> crate::Result<PathBuf> {
let arch = match settings.binary_arch() {
"x86" => "x86",
"x86_64" => "x64",
target => {
return Err(crate::Error::ArchError(format!(
"Unsupported architecture: {}",
target
)))
}
};
let package_base_name = format!(
"{}_{}_{}",
settings.main_binary_name().replace(".exe", ""),
settings.version_string(),
arch
);
Ok(
settings
.project_out_directory()
.to_path_buf()
.join(format!("bundle/msi/{}.msi", package_base_name)),
)
}
/// Extracts the zips from Wix and VC_REDIST into a useable path.
fn extract_zip(data: &Vec<u8>, path: &Path) -> crate::Result<()> {
let cursor = Cursor::new(data);
let mut zipa = ZipArchive::new(cursor)?;
for i in 0..zipa.len() {
let mut file = zipa.by_index(i)?;
let dest_path = path.join(file.name());
let parent = dest_path.parent().expect("Failed to get parent");
if !parent.exists() {
create_dir_all(parent)?;
}
let mut buff: Vec<u8> = Vec::new();
file.read_to_end(&mut buff)?;
let mut fileout = File::create(dest_path).expect("Failed to open file");
fileout.write_all(&buff)?;
}
Ok(())
}
/// Generates the UUID for the Wix template.
fn generate_package_guid(settings: &Settings) -> Uuid {
generate_guid(settings.bundle_identifier().as_bytes())
}
/// Generates a GUID.
fn generate_guid(key: &[u8]) -> Uuid {
let namespace = Uuid::from_bytes(UUID_NAMESPACE);
Uuid::new_v5(&namespace, key)
}
// Specifically goes and gets Wix and verifies the download via Sha256
pub fn get_and_extract_wix(path: &Path) -> crate::Result<()> {
common::print_info("Verifying wix package")?;
let data = download_and_verify(WIX_URL, WIX_SHA256)?;
common::print_info("extracting WIX")?;
extract_zip(&data, path)
}
// For if bundler needs DLL files.
// fn run_heat_exe(
// wix_toolset_path: &Path,
// build_path: &Path,
// harvest_dir: &Path,
// platform: &str,
// ) -> Result<(), String> {
// let mut args = vec!["dir"];
// let harvest_str = harvest_dir.display().to_string();
// args.push(&harvest_str);
// args.push("-platform");
// args.push(platform);
// args.push("-cg");
// args.push("AppFiles");
// args.push("-dr");
// args.push("APPLICATIONFOLDER");
// args.push("-gg");
// args.push("-srd");
// args.push("-out");
// args.push("appdir.wxs");
// args.push("-var");
// args.push("var.SourceDir");
// let heat_exe = wix_toolset_path.join("heat.exe");
// let mut cmd = Command::new(&heat_exe)
// .args(&args)
// .stdout(Stdio::piped())
// .current_dir(build_path)
// .spawn()
// .expect("error running heat.exe");
// {
// let stdout = cmd.stdout.as_mut().unwrap();
// let reader = BufReader::new(stdout);
// for line in reader.lines() {
// info!(logger, "{}", line.unwrap());
// }
// }
// let status = cmd.wait().unwrap();
// if status.success() {
// Ok(())
// } else {
// Err("error running heat.exe".to_string())
// }
// }
/// Runs the Candle.exe executable for Wix. Candle parses the wxs file and generates the code for building the installer.
fn run_candle(
settings: &Settings,
wix_toolset_path: &Path,
build_path: &Path,
wxs_file_name: &str,
) -> crate::Result<()> {
let arch = match settings.binary_arch() {
"x86_64" => "x64",
"x86" => "x86",
target => {
return Err(crate::Error::ArchError(format!(
"unsupported target: {}",
target
)))
}
};
let main_binary = settings
.binaries()
.iter()
.find(|bin| bin.main())
.ok_or_else(|| anyhow::anyhow!("Failed to get main binary"))?;
let args = vec![
"-arch".to_string(),
arch.to_string(),
wxs_file_name.to_string(),
format!(
"-dSourceDir={}",
settings.binary_path(main_binary).display()
),
];
let candle_exe = wix_toolset_path.join("candle.exe");
common::print_info(format!("running candle for {}", wxs_file_name).as_str())?;
let mut cmd = Command::new(&candle_exe);
cmd
.args(&args)
.stdout(Stdio::piped())
.current_dir(build_path);
common::print_info("running candle.exe")?;
common::execute_with_output(&mut cmd).map_err(|_| crate::Error::CandleError)
}
/// Runs the Light.exe file. Light takes the generated code from Candle and produces an MSI Installer.
fn run_light(
wix_toolset_path: &Path,
build_path: &Path,
wixobjs: &[&str],
output_path: &Path,
) -> crate::Result<PathBuf> {
let light_exe = wix_toolset_path.join("light.exe");
let mut args: Vec<String> = vec![
"-ext".to_string(),
"WixUIExtension".to_string(),
"-o".to_string(),
output_path.display().to_string(),
];
for p in wixobjs {
args.push(p.to_string());
}
let mut cmd = Command::new(&light_exe);
cmd
.args(&args)
.stdout(Stdio::piped())
.current_dir(build_path);
common::print_info(format!("running light to produce {}", output_path.display()).as_str())?;
common::execute_with_output(&mut cmd)
.map(|_| output_path.to_path_buf())
.map_err(|_| crate::Error::LightError)
}
// fn get_icon_data() -> crate::Result<()> {
// Ok(())
// }
// Entry point for bundling and creating the MSI installer. For now the only supported platform is Windows x64.
pub fn build_wix_app_installer(
settings: &Settings,
wix_toolset_path: &Path,
) -> crate::Result<PathBuf> {
let arch = match settings.binary_arch() {
"x86_64" => "x64",
"x86" => "x86",
target => {
return Err(crate::Error::ArchError(format!(
"unsupported target: {}",
target
)))
}
};
// target only supports x64.
common::print_info(format!("Target: {}", arch).as_str())?;
let output_path = settings
.project_out_directory()
.join("bundle/msi")
.join(arch);
let mut data = BTreeMap::new();
if let Ok(tauri_config) = crate::bundle::tauri_config::get() {
data.insert(
"embedded_server",
to_json(tauri_config.tauri.embedded_server.active),
);
}
data.insert("product_name", to_json(settings.bundle_name()));
data.insert("version", to_json(settings.version_string()));
let manufacturer = settings.bundle_identifier().to_string();
data.insert("manufacturer", to_json(manufacturer.as_str()));
let upgrade_code = Uuid::new_v5(
&Uuid::NAMESPACE_DNS,
format!("{}.app.x64", &settings.main_binary_name()).as_bytes(),
)
.to_string();
data.insert("upgrade_code", to_json(&upgrade_code.as_str()));
let path_guid = generate_package_guid(settings).to_string();
data.insert("path_component_guid", to_json(&path_guid.as_str()));
let shortcut_guid = generate_package_guid(settings).to_string();
data.insert("shortcut_guid", to_json(&shortcut_guid.as_str()));
let app_exe_name = settings.main_binary_name().to_string();
data.insert("app_exe_name", to_json(&app_exe_name));
let binaries = generate_binaries_data(&settings)?;
let binaries_json = to_json(&binaries);
data.insert("binaries", binaries_json);
let resources = generate_resource_data(&settings)?;
let mut resources_wix_string = String::from("");
let mut files_ids = Vec::new();
for (_, dir) in resources {
let (wix_string, ids) = dir.get_wix_data()?;
resources_wix_string.push_str(wix_string.as_str());
for id in ids {
files_ids.push(id);
}
}
data.insert("resources", to_json(resources_wix_string));
data.insert("resource_file_ids", to_json(files_ids));
let main_binary = settings
.binaries()
.iter()
.find(|bin| bin.main())
.ok_or_else(|| anyhow::anyhow!("Failed to get main binary"))?;
let app_exe_source = settings.binary_path(main_binary).display().to_string();
data.insert("app_exe_source", to_json(&app_exe_source));
// copy icons from icons folder to resource folder near msi
let image_path = copy_icons(&settings)?;
let path = image_path.join("icon.ico").display().to_string();
data.insert("icon_path", to_json(path.as_str()));
let temp = HANDLEBARS.render("main.wxs", &data)?;
if output_path.exists() {
remove_dir_all(&output_path).or_else(|e| Err(e))?;
}
create_dir_all(&output_path).or_else(|e| Err(e))?;
let main_wxs_path = output_path.join("main.wxs");
write(&main_wxs_path, temp).or_else(|e| Err(e))?;
let input_basenames = vec!["main"];
for basename in &input_basenames {
let wxs = format!("{}.wxs", basename);
run_candle(settings, &wix_toolset_path, &output_path, &wxs)?;
}
let wixobjs = vec!["main.wixobj"];
let target = run_light(
&wix_toolset_path,
&output_path,
&wixobjs,
&app_installer_dir(settings)?,
)?;
Ok(target)
}
/// Generates the data required for the external binaries and extra binaries bundling.
fn generate_binaries_data(settings: &Settings) -> crate::Result<Vec<Binary>> {
let mut binaries = Vec::new();
let regex = Regex::new(r"[^\w\d\.]")?;
let cwd = std::env::current_dir()?;
for src in settings.external_binaries() {
let src = src?;
let filename = src
.file_name()
.expect("failed to extract external binary filename")
.to_os_string()
.into_string()
.expect("failed to convert external binary filename to string");
let guid = generate_guid(filename.as_bytes()).to_string();
binaries.push(Binary {
guid,
path: cwd
.join(src)
.into_os_string()
.into_string()
.expect("failed to read external binary path"),
id: regex.replace_all(&filename, "").to_string(),
});
}
for bin in settings.binaries() {
let filename = bin.name();
let guid = generate_guid(filename.as_bytes()).to_string();
if !bin.main() {
binaries.push(Binary {
guid,
path: settings
.binary_path(bin)
.into_os_string()
.into_string()
.expect("failed to read binary path"),
id: regex.replace_all(&filename, "").to_string(),
})
}
}
Ok(binaries)
}
/// Generates the data required for the resource bundling on wix
fn generate_resource_data(settings: &Settings) -> crate::Result<ResourceMap> {
let mut resources = ResourceMap::new();
let regex = Regex::new(r"[^\w\d\.]")?;
let cwd = std::env::current_dir()?;
for src in settings.resource_files() {
let src = src?;
let filename = src
.file_name()
.expect("failed to extract resource filename")
.to_os_string()
.into_string()
.expect("failed to convert resource filename to string");
let resource_path = cwd
.join(src.clone())
.into_os_string()
.into_string()
.expect("failed to read resource path");
let resource_entry = ResourceFile {
guid: generate_guid(filename.as_bytes()).to_string(),
path: resource_path,
id: regex.replace_all(&filename, "").to_string(),
};
// split the resource path directories
let mut directories = src
.components()
.filter(|component| {
let comp = component.as_os_str();
comp != "." && comp != ".."
})
.collect::<Vec<_>>();
directories.truncate(directories.len() - 1);
// transform the directory structure to a chained vec structure
for directory in directories {
let directory_name = directory
.as_os_str()
.to_os_string()
.into_string()
.expect("failed to read resource folder name");
// if the directory is already on the map
if resources.contains_key(&directory_name) {
let directory_entry = &mut resources
.get_mut(&directory_name)
.expect("Unable to handle resources");
if directory_entry.name == directory_name {
// the directory entry is the root of the chain
directory_entry.add_file(resource_entry.clone());
} else {
let index = directory_entry | .directories
.iter()
.position(|f| f.name == directory_name);
if index.is_some() {
// the directory entry is already a part of the chain
let dir = directory_entry
.directories
.get_mut(index.expect("Unable to get index"))
.expect("Unable to get directory");
dir.add_file(resource_entry.clone());
} else {
// push it to the chain
directory_entry.directories.push(ResourceDirectory {
name: directory_name.clone(),
directories: vec![],
files: vec![resource_entry.clone()],
});
}
}
} else {
resources.insert(
directory_name.clone(),
ResourceDirectory {
name: directory_name.clone(),
directories: vec![],
files: vec![resource_entry.clone()],
},
);
}
}
}
Ok(resources)
} | random_line_split | |
collision.go | package game
import (
"errors"
"fmt"
"math"
"github.com/ghthor/filu/rpg2d/coord"
"github.com/ghthor/filu/rpg2d/entity"
"github.com/ghthor/filu/rpg2d/quad"
"github.com/ghthor/filu/sim/stime"
)
type narrowPhaseLocker struct {
*ActorIndexLocker
}
type narrowPhase struct {
actorIndex ActorIndex
// Reset at the beginning of every ResolveCollisions call
solved []quad.Collision
// Generated at the beginning of every ResolveCollisions call
collisionIndex quad.CollisionIndex
}
func newNarrowPhaseLocker(actorMap *ActorIndexLocker) narrowPhaseLocker {
return narrowPhaseLocker{actorMap}
}
func newNarrowPhase(actorIndex ActorIndex) narrowPhase {
return narrowPhase{actorIndex, make([]quad.Collision, 0, 10), nil}
}
// Returns if the collision exists in the
// slice of collisions that have been
// solved during this narrow phase tick.
func (phase narrowPhase) hasSolved(c quad.Collision) bool {
for _, solved := range phase.solved {
if c.IsSameAs(solved) {
return true
}
}
return false
}
func (phase narrowPhaseLocker) ResolveCollisions(cg *quad.CollisionGroup, now stime.Time) ([]entity.Entity, []entity.Entity) {
defer phase.ActorIndexLocker.RUnlock()
return newNarrowPhase(phase.ActorIndexLocker.RLock()).ResolveCollisions(cg, now)
}
// Implementation of the quad.NarrowPhaseHandler interface.
func (phase narrowPhase) ResolveCollisions(cg *quad.CollisionGroup, now stime.Time) ([]entity.Entity, []entity.Entity) {
// Reset the resolved slice
phase.solved = phase.solved[:0]
// Generate a collision index for the collision group
phase.collisionIndex = cg.CollisionIndex()
// A map to store entities that still remain in the world
remaining := make(map[entity.Id]entity.Entity, len(cg.Entities))
remainingSlice := func() []entity.Entity {
// Build a slice from the `remaining` map
s := make([]entity.Entity, 0, len(remaining))
for _, e := range remaining {
s = append(s, e)
}
return s
}
for _, c := range cg.Collisions {
if phase.hasSolved(c) {
continue
}
var entities []entity.Entity
// Resolve type of entity in collision.A
switch e := c.A.(type) {
case actorEntity:
// Resolve the type of entity in collision.B
entities = phase.resolveActorEntity(phase.actorIndex[e.ActorId()], c.B, c, now)
default:
switch e := c.B.(type) {
case actorEntity:
// Resolve the type of entity in collision.B
entities = phase.resolveActorEntity(phase.actorIndex[e.ActorId()], c.A, c, now)
}
}
// As collisions are solved they return entities
// that have been created or modified and we store
// them in a map by their Id. Multiple collisions
// may modify and entity, therefor we only will
// one version of the entity back to engine when
// we return.
for _, e := range entities {
remaining[e.Id()] = e
}
}
return remainingSlice(), nil
}
func (phase *narrowPhase) resolveActorEntity(a *actor, with entity.Entity, collision quad.Collision, now stime.Time) []entity.Entity {
switch e := with.(type) {
case actorEntity:
b := phase.actorIndex[e.ActorId()]
return phase.solveActorActor(&solverActorActor{}, a, b, collision)
case assailEntity:
return phase.solveActorAssail(a, e, collision, now)
case wallEntity:
a.revertMoveAction()
return []entity.Entity{a.Entity(), e}
}
return nil
} | }
var percentDamage float64
switch a.pathAction {
case nil:
if a.Cell() == assail.Cell() {
percentDamage = 1.0
}
default:
coordCollision := coord.NewCellCollision(*a.pathAction, assail.Cell())
percentDamage = coordCollision.OverlapAt(now)
}
damage := int(math.Floor(float64(assail.damage) * percentDamage))
a.hp -= damage
if a.hp <= 0 {
a.hp = 100
a.actorEntity.cell = origin
a.actorEntity.facing = coord.South
a.actorEntity.pathAction = nil
}
return []entity.Entity{a.Entity()}
}
func newActorActorCollision(a, b *actor) (*actor, *actor, coord.Collision) {
var collision coord.Collision
switch {
case a.pathAction == nil && b.pathAction != nil:
a, b = b, a
fallthrough
case a.pathAction != nil && b.pathAction == nil:
collision = coord.NewCellCollision(*a.pathAction, b.Cell())
// A or B may have had a previous collision resolved that
// caused this collision to not be possible anymore.
// It is more relevant to return nil here then a
// coord.Collision with type CT_NONE
if collision.Type() == coord.CT_NONE {
return a, b, nil
}
case a.pathAction != nil && b.pathAction != nil:
pathCollision := coord.NewPathCollision(*a.pathAction, *b.pathAction)
// coord.NewPathCollision can flip the,
// A and B paths to simplify the number
// of collision types. This normalizes
// actor A with pathCollision.A
if *a.pathAction != pathCollision.A {
a, b = b, a
}
collision = pathCollision
case a.pathAction == nil && b.pathAction == nil:
// This case handles actors being on the same square,
// but not moving at all.
// There isn't a coord.CollisionType for this case.
// Maybe there should be?
return a, b, nil
default:
panic(fmt.Sprintf("impossible collision between {%v} {%v}", a, b))
}
return a, b, collision
}
type node struct {
actor *actor
entity entity.Entity
}
// Move forward in the directed graph. This movement is based on
// which entity is occupying the destination of the other's path action.
func followGraph(a, b *actor, collision quad.Collision) node {
// normalize a, b to collision.[A, B]
if a.actorEntity.Id() != collision.A.Id() {
a, b = b, a
}
var actor *actor
var entity entity.Entity
switch {
case a.pathAction.Orig == b.pathAction.Dest:
entity = collision.A
actor = a
case b.pathAction.Orig == a.pathAction.Dest:
entity = collision.B
actor = b
default:
panic(fmt.Sprintf("unexpected graph state %v between %v & %v", collision, a, b))
}
return node{actor, entity}
}
// Used to figure out which actor is "A" if
// the collision was CT_A_INTO_B instead of CT_NONE
func currentNode(a, b *actor, collision quad.Collision) *actor {
switch {
case a.pathAction.Orig == b.pathAction.Dest:
return b
case b.pathAction.Orig == a.pathAction.Dest:
return a
default:
panic(fmt.Sprintf("unexpected graph state %v between %v & %v", collision, a, b))
}
}
// Compare entity Id's with the entities in
// a collision and return the one that isn't
// the actor.
func otherEntityIn(a *actor, collision quad.Collision) entity.Entity {
var e entity.Entity
// figure out is prioritized actor is A or B in the collision
switch {
case a.actorEntity.Id() != collision.A.Id():
e = collision.A
case a.actorEntity.Id() != collision.B.Id():
e = collision.B
default:
panic(fmt.Sprintf("unexpected graph state %v actor %v", collision, a))
}
return e
}
// Store what actor's have been visited during
// a recursive solve. Used to avoid infinite
// recursion through a cycle in the graph.
type solverActorActor struct {
visited []*actor
}
func (s solverActorActor) hasVisited(actor *actor) bool {
for _, a := range s.visited {
if actor == a {
return true
}
}
return false
}
func (phase *narrowPhase) solveActorActor(solver *solverActorActor, a, b *actor, collision quad.Collision) []entity.Entity {
// When this functions returns the
// collision will have been solved
defer func() {
phase.solved = append(phase.solved, collision)
}()
var entities []entity.Entity
attemptSolve:
a, b, coordCollision := newActorActorCollision(a, b)
if coordCollision == nil {
goto resolved
}
switch coordCollision.Type() {
case coord.CT_NONE:
// This may not be entirely accurate.
// We should walk through the collision index
// of our partner too see if they should resolve
// some of there collisions first. They may
// appear to be moving to us right now, but
// have a collision that when solved will
// render them motionless, thus we would become
// motionless as well.
e, err := phase.solveDependencies(solver, a, b, collision)
switch err {
case nil:
if len(e) > 0 {
entities = append(entities, e...)
}
// Try solving again
goto attemptSolve
case errCycleDetected:
// Detected a cycle, we can't move
currentNode(a, b, collision).revertMoveAction()
goto resolved
case errNoDependencies:
// All dependencies have been solved
// We can move
goto resolved
}
case coord.CT_CELL_DEST:
a.revertMoveAction()
goto resolved
case coord.CT_SWAP:
a.revertMoveAction()
b.revertMoveAction()
goto resolved
case coord.CT_A_INTO_B_FROM_SIDE:
// This may not be entirely accurate.
// We should walk through the collision index
// of our partner too see if they should resolve
// some of there collisions first. They may
// appear to be moving to us right now, but
// have a collision that when solved will
// render them motionless, thus we would become
// motionless as well.
e, err := phase.solveDependencies(solver, a, b, collision)
switch err {
case nil:
if len(e) > 0 {
entities = append(entities, e...)
}
// Try solving again
goto attemptSolve
case errCycleDetected:
a.revertMoveAction()
goto resolved
case errNoDependencies:
if a.pathAction.End() >= b.pathAction.End() {
goto resolved
}
a.revertMoveAction()
goto resolved
}
case coord.CT_A_INTO_B:
a.revertMoveAction()
goto resolved
case coord.CT_HEAD_TO_HEAD:
fallthrough
case coord.CT_FROM_SIDE:
if a.pathAction.Start() < b.pathAction.Start() {
// A has already won the destination
b.revertMoveAction()
goto resolved
} else if a.pathAction.Start() > b.pathAction.Start() {
// B has already won the destination
a.revertMoveAction()
goto resolved
}
// Start values are equal
if a.pathAction.End() < b.pathAction.End() {
// A is moving faster and wins the destination
b.revertMoveAction()
goto resolved
} else if a.pathAction.End() > b.pathAction.End() {
// B is moving faster and wins the destination
a.revertMoveAction()
goto resolved
}
// End values are equal
// Movement direction priority goes in this order
// N -> E -> S -> W
if a.facing < b.facing {
// A's movement direction has a higher priority
b.revertMoveAction()
goto resolved
} else {
// B's movement direction has a higher priority
a.revertMoveAction()
goto resolved
}
}
resolved:
return append(entities, a.Entity(), b.Entity())
}
var errNoDependencies = errors.New("no dependencies")
var errCycleDetected = errors.New("cycle detected")
// Error can be errNoDependencies, errCycleDetected or nil
func (phase *narrowPhase) solveDependencies(solver *solverActorActor, a, b *actor, collision quad.Collision) ([]entity.Entity, error) {
node := followGraph(a, b, collision)
// Mark what actors have been visited
if a != node.actor {
solver.visited = append(solver.visited, a)
} else {
solver.visited = append(solver.visited, b)
}
// If the next node only has one collision
// then there are no dependencies and the
// collision can be solved
if len(phase.collisionIndex[node.entity]) == 1 {
return nil, errNoDependencies
}
// Walk through the directed graph of collisions and solve
// all the collisions that the collision depends on.
for _, c := range phase.collisionIndex[node.entity] {
// Ignore the collision that caused us to recurse
if c.IsSameAs(collision) {
continue
}
// Avoid solving a collision that's already been solving.
if phase.hasSolved(c) {
continue
}
e := otherEntityIn(node.actor, c)
switch e := e.(type) {
case actorEntity:
actor := phase.actorIndex[e.ActorId()]
// Detect cycles
if solver.hasVisited(actor) {
return nil, errCycleDetected
}
// Recurse
return phase.solveActorActor(solver, node.actor, actor, c), nil
}
}
return nil, errNoDependencies
} |
func (phase *narrowPhase) solveActorAssail(a *actor, assail assailEntity, collision quad.Collision, now stime.Time) []entity.Entity {
// Don't damage yourself
if assail.spawnedBy == a.actorEntity.Id() {
return []entity.Entity{a.Entity()} | random_line_split |
collision.go | package game
import (
"errors"
"fmt"
"math"
"github.com/ghthor/filu/rpg2d/coord"
"github.com/ghthor/filu/rpg2d/entity"
"github.com/ghthor/filu/rpg2d/quad"
"github.com/ghthor/filu/sim/stime"
)
type narrowPhaseLocker struct {
*ActorIndexLocker
}
type narrowPhase struct {
actorIndex ActorIndex
// Reset at the beginning of every ResolveCollisions call
solved []quad.Collision
// Generated at the beginning of every ResolveCollisions call
collisionIndex quad.CollisionIndex
}
func newNarrowPhaseLocker(actorMap *ActorIndexLocker) narrowPhaseLocker {
return narrowPhaseLocker{actorMap}
}
func newNarrowPhase(actorIndex ActorIndex) narrowPhase {
return narrowPhase{actorIndex, make([]quad.Collision, 0, 10), nil}
}
// Returns if the collision exists in the
// slice of collisions that have been
// solved during this narrow phase tick.
func (phase narrowPhase) hasSolved(c quad.Collision) bool {
for _, solved := range phase.solved |
return false
}
func (phase narrowPhaseLocker) ResolveCollisions(cg *quad.CollisionGroup, now stime.Time) ([]entity.Entity, []entity.Entity) {
defer phase.ActorIndexLocker.RUnlock()
return newNarrowPhase(phase.ActorIndexLocker.RLock()).ResolveCollisions(cg, now)
}
// Implementation of the quad.NarrowPhaseHandler interface.
func (phase narrowPhase) ResolveCollisions(cg *quad.CollisionGroup, now stime.Time) ([]entity.Entity, []entity.Entity) {
// Reset the resolved slice
phase.solved = phase.solved[:0]
// Generate a collision index for the collision group
phase.collisionIndex = cg.CollisionIndex()
// A map to store entities that still remain in the world
remaining := make(map[entity.Id]entity.Entity, len(cg.Entities))
remainingSlice := func() []entity.Entity {
// Build a slice from the `remaining` map
s := make([]entity.Entity, 0, len(remaining))
for _, e := range remaining {
s = append(s, e)
}
return s
}
for _, c := range cg.Collisions {
if phase.hasSolved(c) {
continue
}
var entities []entity.Entity
// Resolve type of entity in collision.A
switch e := c.A.(type) {
case actorEntity:
// Resolve the type of entity in collision.B
entities = phase.resolveActorEntity(phase.actorIndex[e.ActorId()], c.B, c, now)
default:
switch e := c.B.(type) {
case actorEntity:
// Resolve the type of entity in collision.B
entities = phase.resolveActorEntity(phase.actorIndex[e.ActorId()], c.A, c, now)
}
}
// As collisions are solved they return entities
// that have been created or modified and we store
// them in a map by their Id. Multiple collisions
// may modify and entity, therefor we only will
// one version of the entity back to engine when
// we return.
for _, e := range entities {
remaining[e.Id()] = e
}
}
return remainingSlice(), nil
}
func (phase *narrowPhase) resolveActorEntity(a *actor, with entity.Entity, collision quad.Collision, now stime.Time) []entity.Entity {
switch e := with.(type) {
case actorEntity:
b := phase.actorIndex[e.ActorId()]
return phase.solveActorActor(&solverActorActor{}, a, b, collision)
case assailEntity:
return phase.solveActorAssail(a, e, collision, now)
case wallEntity:
a.revertMoveAction()
return []entity.Entity{a.Entity(), e}
}
return nil
}
func (phase *narrowPhase) solveActorAssail(a *actor, assail assailEntity, collision quad.Collision, now stime.Time) []entity.Entity {
// Don't damage yourself
if assail.spawnedBy == a.actorEntity.Id() {
return []entity.Entity{a.Entity()}
}
var percentDamage float64
switch a.pathAction {
case nil:
if a.Cell() == assail.Cell() {
percentDamage = 1.0
}
default:
coordCollision := coord.NewCellCollision(*a.pathAction, assail.Cell())
percentDamage = coordCollision.OverlapAt(now)
}
damage := int(math.Floor(float64(assail.damage) * percentDamage))
a.hp -= damage
if a.hp <= 0 {
a.hp = 100
a.actorEntity.cell = origin
a.actorEntity.facing = coord.South
a.actorEntity.pathAction = nil
}
return []entity.Entity{a.Entity()}
}
func newActorActorCollision(a, b *actor) (*actor, *actor, coord.Collision) {
var collision coord.Collision
switch {
case a.pathAction == nil && b.pathAction != nil:
a, b = b, a
fallthrough
case a.pathAction != nil && b.pathAction == nil:
collision = coord.NewCellCollision(*a.pathAction, b.Cell())
// A or B may have had a previous collision resolved that
// caused this collision to not be possible anymore.
// It is more relevant to return nil here then a
// coord.Collision with type CT_NONE
if collision.Type() == coord.CT_NONE {
return a, b, nil
}
case a.pathAction != nil && b.pathAction != nil:
pathCollision := coord.NewPathCollision(*a.pathAction, *b.pathAction)
// coord.NewPathCollision can flip the,
// A and B paths to simplify the number
// of collision types. This normalizes
// actor A with pathCollision.A
if *a.pathAction != pathCollision.A {
a, b = b, a
}
collision = pathCollision
case a.pathAction == nil && b.pathAction == nil:
// This case handles actors being on the same square,
// but not moving at all.
// There isn't a coord.CollisionType for this case.
// Maybe there should be?
return a, b, nil
default:
panic(fmt.Sprintf("impossible collision between {%v} {%v}", a, b))
}
return a, b, collision
}
type node struct {
actor *actor
entity entity.Entity
}
// followGraph moves forward in the directed graph. This movement is based
// on which entity is occupying the destination of the other's path action:
// the returned node is the actor (and matching collision entity) whose
// origin cell is the other actor's destination.
//
// Panics if neither actor's origin is the other's destination.
func followGraph(a, b *actor, collision quad.Collision) node {
	// normalize a, b to collision.[A, B]
	if a.actorEntity.Id() != collision.A.Id() {
		a, b = b, a
	}

	var actor *actor
	var entity entity.Entity

	switch {
	case a.pathAction.Orig == b.pathAction.Dest:
		// B is moving into the cell A is leaving; A is the next node.
		entity = collision.A
		actor = a
	case b.pathAction.Orig == a.pathAction.Dest:
		// A is moving into the cell B is leaving; B is the next node.
		entity = collision.B
		actor = b
	default:
		panic(fmt.Sprintf("unexpected graph state %v between %v & %v", collision, a, b))
	}

	return node{actor, entity}
}
// currentNode returns the actor that is moving into the cell the other
// actor is vacating — i.e. which actor is "A" when the collision was
// CT_A_INTO_B instead of CT_NONE. Panics if neither actor's origin is
// the other's destination.
func currentNode(a, b *actor, collision quad.Collision) *actor {
	if a.pathAction.Orig == b.pathAction.Dest {
		return b
	}
	if b.pathAction.Orig == a.pathAction.Dest {
		return a
	}
	panic(fmt.Sprintf("unexpected graph state %v between %v & %v", collision, a, b))
}
// otherEntityIn compares entity Ids with the entities in a collision and
// returns the one that isn't the given actor. Panics when the actor's Id
// matches both entities in the collision.
func otherEntityIn(a *actor, collision quad.Collision) entity.Entity {
	actorId := a.actorEntity.Id()

	// Figure out whether the prioritized actor is A or B in the collision.
	if actorId != collision.A.Id() {
		return collision.A
	}
	if actorId != collision.B.Id() {
		return collision.B
	}
	panic(fmt.Sprintf("unexpected graph state %v actor %v", collision, a))
}
// solverActorActor stores which actors have been visited during a
// recursive solve. Used to avoid infinite recursion through a cycle
// in the collision graph.
type solverActorActor struct {
	visited []*actor // actors already expanded by this solve, in visit order
}
// hasVisited reports whether the given actor has already been expanded
// during this recursive solve (pointer identity comparison).
func (s solverActorActor) hasVisited(actor *actor) bool {
	for i := range s.visited {
		if s.visited[i] == actor {
			return true
		}
	}
	return false
}
// solveActorActor resolves a collision between two actors. Collisions the
// pair depends on are solved first via solveDependencies, which may
// recurse back into this function. The collision is always marked solved
// when this function returns (deferred append to phase.solved).
//
// Returns the entities that remain after resolution: any entities
// produced while solving dependencies, plus both actors.
func (phase *narrowPhase) solveActorActor(solver *solverActorActor, a, b *actor, collision quad.Collision) []entity.Entity {
	// When this function returns the
	// collision will have been solved
	defer func() {
		phase.solved = append(phase.solved, collision)
	}()

	var entities []entity.Entity

attemptSolve:
	a, b, coordCollision := newActorActorCollision(a, b)
	if coordCollision == nil {
		// Nothing to resolve; both actors keep their state.
		goto resolved
	}

	switch coordCollision.Type() {
	case coord.CT_NONE:
		// This may not be entirely accurate.
		// We should walk through the collision index
		// of our partner to see if they should resolve
		// some of their collisions first. They may
		// appear to be moving to us right now, but
		// have a collision that when solved will
		// render them motionless, thus we would become
		// motionless as well.
		e, err := phase.solveDependencies(solver, a, b, collision)
		switch err {
		case nil:
			if len(e) > 0 {
				entities = append(entities, e...)
			}
			// Try solving again
			goto attemptSolve

		case errCycleDetected:
			// Detected a cycle, we can't move
			currentNode(a, b, collision).revertMoveAction()
			goto resolved

		case errNoDependencies:
			// All dependencies have been solved
			// We can move
			goto resolved
		}

	case coord.CT_CELL_DEST:
		// B occupies A's destination cell; A's move is undone.
		a.revertMoveAction()
		goto resolved

	case coord.CT_SWAP:
		// The actors attempted to swap cells; neither may move.
		a.revertMoveAction()
		b.revertMoveAction()
		goto resolved

	case coord.CT_A_INTO_B_FROM_SIDE:
		// This may not be entirely accurate.
		// We should walk through the collision index
		// of our partner to see if they should resolve
		// some of their collisions first. They may
		// appear to be moving to us right now, but
		// have a collision that when solved will
		// render them motionless, thus we would become
		// motionless as well.
		e, err := phase.solveDependencies(solver, a, b, collision)
		switch err {
		case nil:
			if len(e) > 0 {
				entities = append(entities, e...)
			}
			// Try solving again
			goto attemptSolve

		case errCycleDetected:
			a.revertMoveAction()
			goto resolved

		case errNoDependencies:
			// A may keep moving only if it finishes no earlier
			// than B vacates the destination cell.
			if a.pathAction.End() >= b.pathAction.End() {
				goto resolved
			}
			a.revertMoveAction()
			goto resolved
		}

	case coord.CT_A_INTO_B:
		a.revertMoveAction()
		goto resolved

	case coord.CT_HEAD_TO_HEAD:
		fallthrough
	case coord.CT_FROM_SIDE:
		// Contested destination: earlier start wins, then earlier
		// end (faster mover), then facing-direction priority.
		if a.pathAction.Start() < b.pathAction.Start() {
			// A has already won the destination
			b.revertMoveAction()
			goto resolved
		} else if a.pathAction.Start() > b.pathAction.Start() {
			// B has already won the destination
			a.revertMoveAction()
			goto resolved
		}

		// Start values are equal
		if a.pathAction.End() < b.pathAction.End() {
			// A is moving faster and wins the destination
			b.revertMoveAction()
			goto resolved
		} else if a.pathAction.End() > b.pathAction.End() {
			// B is moving faster and wins the destination
			a.revertMoveAction()
			goto resolved
		}

		// End values are equal
		// Movement direction priority goes in this order
		// N -> E -> S -> W
		if a.facing < b.facing {
			// A's movement direction has a higher priority
			b.revertMoveAction()
			goto resolved
		} else {
			// B's movement direction has a higher priority
			a.revertMoveAction()
			goto resolved
		}
	}

resolved:
	return append(entities, a.Entity(), b.Entity())
}
// Sentinel errors returned by solveDependencies to drive the retry /
// give-up logic in solveActorActor.
var errNoDependencies = errors.New("no dependencies")
var errCycleDetected = errors.New("cycle detected")
// solveDependencies walks one node forward in the directed collision graph
// from the (a, b) collision and solves a collision that node depends on,
// recursing through solveActorActor.
//
// Error can be errNoDependencies, errCycleDetected or nil:
//   - nil: one dependency was solved; the returned entities are its result
//     and the caller should retry its own collision.
//   - errNoDependencies: the next node has nothing left to solve.
//   - errCycleDetected: following the graph revisited an actor already
//     seen by this solver.
func (phase *narrowPhase) solveDependencies(solver *solverActorActor, a, b *actor, collision quad.Collision) ([]entity.Entity, error) {
	node := followGraph(a, b, collision)

	// Mark what actors have been visited
	if a != node.actor {
		solver.visited = append(solver.visited, a)
	} else {
		solver.visited = append(solver.visited, b)
	}

	// If the next node only has one collision
	// then there are no dependencies and the
	// collision can be solved
	if len(phase.collisionIndex[node.entity]) == 1 {
		return nil, errNoDependencies
	}

	// Walk through the directed graph of collisions and solve
	// all the collisions that the collision depends on.
	for _, c := range phase.collisionIndex[node.entity] {
		// Ignore the collision that caused us to recurse
		if c.IsSameAs(collision) {
			continue
		}

		// Avoid solving a collision that's already been solved.
		if phase.hasSolved(c) {
			continue
		}

		e := otherEntityIn(node.actor, c)
		switch e := e.(type) {
		case actorEntity:
			actor := phase.actorIndex[e.ActorId()]

			// Detect cycles
			if solver.hasVisited(actor) {
				return nil, errCycleDetected
			}

			// Recurse
			return phase.solveActorActor(solver, node.actor, actor, c), nil
		}
	}

	return nil, errNoDependencies
}
| {
if c.IsSameAs(solved) {
return true
}
} | conditional_block |
collision.go | package game
import (
"errors"
"fmt"
"math"
"github.com/ghthor/filu/rpg2d/coord"
"github.com/ghthor/filu/rpg2d/entity"
"github.com/ghthor/filu/rpg2d/quad"
"github.com/ghthor/filu/sim/stime"
)
// narrowPhaseLocker wraps an ActorIndexLocker so collision resolution can
// hold a read lock on the actor index for the duration of a tick.
type narrowPhaseLocker struct {
	*ActorIndexLocker
}

// narrowPhase resolves the collisions of a single narrow-phase tick
// against a snapshot of the actor index.
type narrowPhase struct {
	actorIndex ActorIndex

	// Reset at the beginning of every ResolveCollisions call
	solved []quad.Collision

	// Generated at the beginning of every ResolveCollisions call
	collisionIndex quad.CollisionIndex
}
// newNarrowPhaseLocker wraps an actor index locker for narrow-phase use.
func newNarrowPhaseLocker(actorMap *ActorIndexLocker) narrowPhaseLocker {
	return narrowPhaseLocker{actorMap}
}

// newNarrowPhase builds a narrowPhase with room for 10 solved collisions
// before reallocating; the collision index is generated per tick.
func newNarrowPhase(actorIndex ActorIndex) narrowPhase {
	return narrowPhase{actorIndex, make([]quad.Collision, 0, 10), nil}
}
// hasSolved reports whether collision c has already been solved during
// this narrow-phase tick.
func (phase narrowPhase) hasSolved(c quad.Collision) bool {
	for i := range phase.solved {
		if c.IsSameAs(phase.solved[i]) {
			return true
		}
	}
	return false
}
// ResolveCollisions acquires a read lock on the actor index for the
// duration of the narrow phase, delegating the actual work to a freshly
// constructed narrowPhase.
func (phase narrowPhaseLocker) ResolveCollisions(cg *quad.CollisionGroup, now stime.Time) ([]entity.Entity, []entity.Entity) {
	defer phase.ActorIndexLocker.RUnlock()
	return newNarrowPhase(phase.ActorIndexLocker.RLock()).ResolveCollisions(cg, now)
}
// ResolveCollisions is the implementation of the quad.NarrowPhaseHandler
// interface. It walks every collision in the group, dispatching each
// actor-involving collision to resolveActorEntity, and returns the
// entities that remain in the world. The second return value (removed
// entities) is always nil here.
func (phase narrowPhase) ResolveCollisions(cg *quad.CollisionGroup, now stime.Time) ([]entity.Entity, []entity.Entity) {
	// Reset the resolved slice
	phase.solved = phase.solved[:0]

	// Generate a collision index for the collision group
	phase.collisionIndex = cg.CollisionIndex()

	// A map to store entities that still remain in the world
	remaining := make(map[entity.Id]entity.Entity, len(cg.Entities))
	remainingSlice := func() []entity.Entity {
		// Build a slice from the `remaining` map
		s := make([]entity.Entity, 0, len(remaining))
		for _, e := range remaining {
			s = append(s, e)
		}
		return s
	}

	for _, c := range cg.Collisions {
		// Collisions may have been solved transitively while
		// solving an earlier collision's dependencies.
		if phase.hasSolved(c) {
			continue
		}

		var entities []entity.Entity

		// Resolve type of entity in collision.A
		switch e := c.A.(type) {
		case actorEntity:
			// Resolve the type of entity in collision.B
			entities = phase.resolveActorEntity(phase.actorIndex[e.ActorId()], c.B, c, now)
		default:
			switch e := c.B.(type) {
			case actorEntity:
				// B is the actor; resolve the type of entity in collision.A
				entities = phase.resolveActorEntity(phase.actorIndex[e.ActorId()], c.A, c, now)
			}
		}

		// As collisions are solved they return entities
		// that have been created or modified and we store
		// them in a map by their Id. Multiple collisions
		// may modify an entity, therefore we only return
		// one version of the entity back to the engine.
		for _, e := range entities {
			remaining[e.Id()] = e
		}
	}

	return remainingSlice(), nil
}
// resolveActorEntity dispatches an actor-vs-entity collision to the
// solver matching the entity's concrete type. Entity types without a
// solver resolve to nil (nothing changed).
func (phase *narrowPhase) resolveActorEntity(a *actor, with entity.Entity, collision quad.Collision, now stime.Time) []entity.Entity {
	switch e := with.(type) {
	case actorEntity:
		b := phase.actorIndex[e.ActorId()]
		return phase.solveActorActor(&solverActorActor{}, a, b, collision)

	case assailEntity:
		return phase.solveActorAssail(a, e, collision, now)

	case wallEntity:
		// Walls are immovable: the actor's move is simply undone.
		a.revertMoveAction()
		return []entity.Entity{a.Entity(), e}
	}

	return nil
}
// solveActorAssail applies damage from an assail (attack) entity to an
// actor. A stationary actor on the assail's cell takes full damage; a
// moving actor takes damage proportional to its cell overlap at `now`.
// An actor never damages itself with its own assail.
//
// When hp drops to 0 or below the actor is reset: hp back to 100, moved
// to `origin` facing south, with any path action cleared.
func (phase *narrowPhase) solveActorAssail(a *actor, assail assailEntity, collision quad.Collision, now stime.Time) []entity.Entity {
	// Don't damage yourself
	if assail.spawnedBy == a.actorEntity.Id() {
		return []entity.Entity{a.Entity()}
	}

	var percentDamage float64

	switch a.pathAction {
	case nil:
		// Standing still: all or nothing based on cell occupancy.
		if a.Cell() == assail.Cell() {
			percentDamage = 1.0
		}
	default:
		// Moving: scale damage by the overlap with the assail's cell.
		coordCollision := coord.NewCellCollision(*a.pathAction, assail.Cell())
		percentDamage = coordCollision.OverlapAt(now)
	}

	damage := int(math.Floor(float64(assail.damage) * percentDamage))

	a.hp -= damage
	if a.hp <= 0 {
		// NOTE(review): `origin` is a package-level value not visible in
		// this chunk — presumably the world origin cell; confirm. The
		// reset to 100 hp looks like a respawn.
		a.hp = 100
		a.actorEntity.cell = origin
		a.actorEntity.facing = coord.South
		a.actorEntity.pathAction = nil
	}

	return []entity.Entity{a.Entity()}
}
func newActorActorCollision(a, b *actor) (*actor, *actor, coord.Collision) {
var collision coord.Collision
switch {
case a.pathAction == nil && b.pathAction != nil:
a, b = b, a
fallthrough
case a.pathAction != nil && b.pathAction == nil:
collision = coord.NewCellCollision(*a.pathAction, b.Cell())
// A or B may have had a previous collision resolved that
// caused this collision to not be possible anymore.
// It is more relevant to return nil here then a
// coord.Collision with type CT_NONE
if collision.Type() == coord.CT_NONE {
return a, b, nil
}
case a.pathAction != nil && b.pathAction != nil:
pathCollision := coord.NewPathCollision(*a.pathAction, *b.pathAction)
// coord.NewPathCollision can flip the,
// A and B paths to simplify the number
// of collision types. This normalizes
// actor A with pathCollision.A
if *a.pathAction != pathCollision.A {
a, b = b, a
}
collision = pathCollision
case a.pathAction == nil && b.pathAction == nil:
// This case handles actors being on the same square,
// but not moving at all.
// There isn't a coord.CollisionType for this case.
// Maybe there should be?
return a, b, nil
default:
panic(fmt.Sprintf("impossible collision between {%v} {%v}", a, b))
}
return a, b, collision
}
type node struct {
actor *actor
entity entity.Entity
}
// Move forward in the directed graph. This movement is based on
// which entity is occupying the destination of the other's path action.
func followGraph(a, b *actor, collision quad.Collision) node {
// normalize a, b to collision.[A, B]
if a.actorEntity.Id() != collision.A.Id() {
a, b = b, a
}
var actor *actor
var entity entity.Entity
switch {
case a.pathAction.Orig == b.pathAction.Dest:
entity = collision.A
actor = a
case b.pathAction.Orig == a.pathAction.Dest:
entity = collision.B
actor = b
default:
panic(fmt.Sprintf("unexpected graph state %v between %v & %v", collision, a, b))
}
return node{actor, entity}
}
// Used to figure out which actor is "A" if
// the collision was CT_A_INTO_B instead of CT_NONE
func currentNode(a, b *actor, collision quad.Collision) *actor |
// Compare entity Id's with the entities in
// a collision and return the one that isn't
// the actor.
func otherEntityIn(a *actor, collision quad.Collision) entity.Entity {
var e entity.Entity
// figure out is prioritized actor is A or B in the collision
switch {
case a.actorEntity.Id() != collision.A.Id():
e = collision.A
case a.actorEntity.Id() != collision.B.Id():
e = collision.B
default:
panic(fmt.Sprintf("unexpected graph state %v actor %v", collision, a))
}
return e
}
// Store what actor's have been visited during
// a recursive solve. Used to avoid infinite
// recursion through a cycle in the graph.
type solverActorActor struct {
visited []*actor
}
func (s solverActorActor) hasVisited(actor *actor) bool {
for _, a := range s.visited {
if actor == a {
return true
}
}
return false
}
func (phase *narrowPhase) solveActorActor(solver *solverActorActor, a, b *actor, collision quad.Collision) []entity.Entity {
// When this functions returns the
// collision will have been solved
defer func() {
phase.solved = append(phase.solved, collision)
}()
var entities []entity.Entity
attemptSolve:
a, b, coordCollision := newActorActorCollision(a, b)
if coordCollision == nil {
goto resolved
}
switch coordCollision.Type() {
case coord.CT_NONE:
// This may not be entirely accurate.
// We should walk through the collision index
// of our partner too see if they should resolve
// some of there collisions first. They may
// appear to be moving to us right now, but
// have a collision that when solved will
// render them motionless, thus we would become
// motionless as well.
e, err := phase.solveDependencies(solver, a, b, collision)
switch err {
case nil:
if len(e) > 0 {
entities = append(entities, e...)
}
// Try solving again
goto attemptSolve
case errCycleDetected:
// Detected a cycle, we can't move
currentNode(a, b, collision).revertMoveAction()
goto resolved
case errNoDependencies:
// All dependencies have been solved
// We can move
goto resolved
}
case coord.CT_CELL_DEST:
a.revertMoveAction()
goto resolved
case coord.CT_SWAP:
a.revertMoveAction()
b.revertMoveAction()
goto resolved
case coord.CT_A_INTO_B_FROM_SIDE:
// This may not be entirely accurate.
// We should walk through the collision index
// of our partner too see if they should resolve
// some of there collisions first. They may
// appear to be moving to us right now, but
// have a collision that when solved will
// render them motionless, thus we would become
// motionless as well.
e, err := phase.solveDependencies(solver, a, b, collision)
switch err {
case nil:
if len(e) > 0 {
entities = append(entities, e...)
}
// Try solving again
goto attemptSolve
case errCycleDetected:
a.revertMoveAction()
goto resolved
case errNoDependencies:
if a.pathAction.End() >= b.pathAction.End() {
goto resolved
}
a.revertMoveAction()
goto resolved
}
case coord.CT_A_INTO_B:
a.revertMoveAction()
goto resolved
case coord.CT_HEAD_TO_HEAD:
fallthrough
case coord.CT_FROM_SIDE:
if a.pathAction.Start() < b.pathAction.Start() {
// A has already won the destination
b.revertMoveAction()
goto resolved
} else if a.pathAction.Start() > b.pathAction.Start() {
// B has already won the destination
a.revertMoveAction()
goto resolved
}
// Start values are equal
if a.pathAction.End() < b.pathAction.End() {
// A is moving faster and wins the destination
b.revertMoveAction()
goto resolved
} else if a.pathAction.End() > b.pathAction.End() {
// B is moving faster and wins the destination
a.revertMoveAction()
goto resolved
}
// End values are equal
// Movement direction priority goes in this order
// N -> E -> S -> W
if a.facing < b.facing {
// A's movement direction has a higher priority
b.revertMoveAction()
goto resolved
} else {
// B's movement direction has a higher priority
a.revertMoveAction()
goto resolved
}
}
resolved:
return append(entities, a.Entity(), b.Entity())
}
var errNoDependencies = errors.New("no dependencies")
var errCycleDetected = errors.New("cycle detected")
// Error can be errNoDependencies, errCycleDetected or nil
func (phase *narrowPhase) solveDependencies(solver *solverActorActor, a, b *actor, collision quad.Collision) ([]entity.Entity, error) {
node := followGraph(a, b, collision)
// Mark what actors have been visited
if a != node.actor {
solver.visited = append(solver.visited, a)
} else {
solver.visited = append(solver.visited, b)
}
// If the next node only has one collision
// then there are no dependencies and the
// collision can be solved
if len(phase.collisionIndex[node.entity]) == 1 {
return nil, errNoDependencies
}
// Walk through the directed graph of collisions and solve
// all the collisions that the collision depends on.
for _, c := range phase.collisionIndex[node.entity] {
// Ignore the collision that caused us to recurse
if c.IsSameAs(collision) {
continue
}
// Avoid solving a collision that's already been solving.
if phase.hasSolved(c) {
continue
}
e := otherEntityIn(node.actor, c)
switch e := e.(type) {
case actorEntity:
actor := phase.actorIndex[e.ActorId()]
// Detect cycles
if solver.hasVisited(actor) {
return nil, errCycleDetected
}
// Recurse
return phase.solveActorActor(solver, node.actor, actor, c), nil
}
}
return nil, errNoDependencies
}
| {
switch {
case a.pathAction.Orig == b.pathAction.Dest:
return b
case b.pathAction.Orig == a.pathAction.Dest:
return a
default:
panic(fmt.Sprintf("unexpected graph state %v between %v & %v", collision, a, b))
}
} | identifier_body |
collision.go | package game
import (
"errors"
"fmt"
"math"
"github.com/ghthor/filu/rpg2d/coord"
"github.com/ghthor/filu/rpg2d/entity"
"github.com/ghthor/filu/rpg2d/quad"
"github.com/ghthor/filu/sim/stime"
)
type narrowPhaseLocker struct {
*ActorIndexLocker
}
type narrowPhase struct {
actorIndex ActorIndex
// Reset at the beginning of every ResolveCollisions call
solved []quad.Collision
// Generated at the beginning of every ResolveCollisions call
collisionIndex quad.CollisionIndex
}
func newNarrowPhaseLocker(actorMap *ActorIndexLocker) narrowPhaseLocker {
return narrowPhaseLocker{actorMap}
}
func newNarrowPhase(actorIndex ActorIndex) narrowPhase {
return narrowPhase{actorIndex, make([]quad.Collision, 0, 10), nil}
}
// Returns if the collision exists in the
// slice of collisions that have been
// solved during this narrow phase tick.
func (phase narrowPhase) hasSolved(c quad.Collision) bool {
for _, solved := range phase.solved {
if c.IsSameAs(solved) {
return true
}
}
return false
}
func (phase narrowPhaseLocker) ResolveCollisions(cg *quad.CollisionGroup, now stime.Time) ([]entity.Entity, []entity.Entity) {
defer phase.ActorIndexLocker.RUnlock()
return newNarrowPhase(phase.ActorIndexLocker.RLock()).ResolveCollisions(cg, now)
}
// Implementation of the quad.NarrowPhaseHandler interface.
func (phase narrowPhase) ResolveCollisions(cg *quad.CollisionGroup, now stime.Time) ([]entity.Entity, []entity.Entity) {
// Reset the resolved slice
phase.solved = phase.solved[:0]
// Generate a collision index for the collision group
phase.collisionIndex = cg.CollisionIndex()
// A map to store entities that still remain in the world
remaining := make(map[entity.Id]entity.Entity, len(cg.Entities))
remainingSlice := func() []entity.Entity {
// Build a slice from the `remaining` map
s := make([]entity.Entity, 0, len(remaining))
for _, e := range remaining {
s = append(s, e)
}
return s
}
for _, c := range cg.Collisions {
if phase.hasSolved(c) {
continue
}
var entities []entity.Entity
// Resolve type of entity in collision.A
switch e := c.A.(type) {
case actorEntity:
// Resolve the type of entity in collision.B
entities = phase.resolveActorEntity(phase.actorIndex[e.ActorId()], c.B, c, now)
default:
switch e := c.B.(type) {
case actorEntity:
// Resolve the type of entity in collision.B
entities = phase.resolveActorEntity(phase.actorIndex[e.ActorId()], c.A, c, now)
}
}
// As collisions are solved they return entities
// that have been created or modified and we store
// them in a map by their Id. Multiple collisions
// may modify and entity, therefor we only will
// one version of the entity back to engine when
// we return.
for _, e := range entities {
remaining[e.Id()] = e
}
}
return remainingSlice(), nil
}
func (phase *narrowPhase) resolveActorEntity(a *actor, with entity.Entity, collision quad.Collision, now stime.Time) []entity.Entity {
switch e := with.(type) {
case actorEntity:
b := phase.actorIndex[e.ActorId()]
return phase.solveActorActor(&solverActorActor{}, a, b, collision)
case assailEntity:
return phase.solveActorAssail(a, e, collision, now)
case wallEntity:
a.revertMoveAction()
return []entity.Entity{a.Entity(), e}
}
return nil
}
func (phase *narrowPhase) solveActorAssail(a *actor, assail assailEntity, collision quad.Collision, now stime.Time) []entity.Entity {
// Don't damage yourself
if assail.spawnedBy == a.actorEntity.Id() {
return []entity.Entity{a.Entity()}
}
var percentDamage float64
switch a.pathAction {
case nil:
if a.Cell() == assail.Cell() {
percentDamage = 1.0
}
default:
coordCollision := coord.NewCellCollision(*a.pathAction, assail.Cell())
percentDamage = coordCollision.OverlapAt(now)
}
damage := int(math.Floor(float64(assail.damage) * percentDamage))
a.hp -= damage
if a.hp <= 0 {
a.hp = 100
a.actorEntity.cell = origin
a.actorEntity.facing = coord.South
a.actorEntity.pathAction = nil
}
return []entity.Entity{a.Entity()}
}
func | (a, b *actor) (*actor, *actor, coord.Collision) {
var collision coord.Collision
switch {
case a.pathAction == nil && b.pathAction != nil:
a, b = b, a
fallthrough
case a.pathAction != nil && b.pathAction == nil:
collision = coord.NewCellCollision(*a.pathAction, b.Cell())
// A or B may have had a previous collision resolved that
// caused this collision to not be possible anymore.
// It is more relevant to return nil here then a
// coord.Collision with type CT_NONE
if collision.Type() == coord.CT_NONE {
return a, b, nil
}
case a.pathAction != nil && b.pathAction != nil:
pathCollision := coord.NewPathCollision(*a.pathAction, *b.pathAction)
// coord.NewPathCollision can flip the,
// A and B paths to simplify the number
// of collision types. This normalizes
// actor A with pathCollision.A
if *a.pathAction != pathCollision.A {
a, b = b, a
}
collision = pathCollision
case a.pathAction == nil && b.pathAction == nil:
// This case handles actors being on the same square,
// but not moving at all.
// There isn't a coord.CollisionType for this case.
// Maybe there should be?
return a, b, nil
default:
panic(fmt.Sprintf("impossible collision between {%v} {%v}", a, b))
}
return a, b, collision
}
type node struct {
actor *actor
entity entity.Entity
}
// Move forward in the directed graph. This movement is based on
// which entity is occupying the destination of the other's path action.
func followGraph(a, b *actor, collision quad.Collision) node {
// normalize a, b to collision.[A, B]
if a.actorEntity.Id() != collision.A.Id() {
a, b = b, a
}
var actor *actor
var entity entity.Entity
switch {
case a.pathAction.Orig == b.pathAction.Dest:
entity = collision.A
actor = a
case b.pathAction.Orig == a.pathAction.Dest:
entity = collision.B
actor = b
default:
panic(fmt.Sprintf("unexpected graph state %v between %v & %v", collision, a, b))
}
return node{actor, entity}
}
// Used to figure out which actor is "A" if
// the collision was CT_A_INTO_B instead of CT_NONE
func currentNode(a, b *actor, collision quad.Collision) *actor {
switch {
case a.pathAction.Orig == b.pathAction.Dest:
return b
case b.pathAction.Orig == a.pathAction.Dest:
return a
default:
panic(fmt.Sprintf("unexpected graph state %v between %v & %v", collision, a, b))
}
}
// Compare entity Id's with the entities in
// a collision and return the one that isn't
// the actor.
func otherEntityIn(a *actor, collision quad.Collision) entity.Entity {
var e entity.Entity
// figure out is prioritized actor is A or B in the collision
switch {
case a.actorEntity.Id() != collision.A.Id():
e = collision.A
case a.actorEntity.Id() != collision.B.Id():
e = collision.B
default:
panic(fmt.Sprintf("unexpected graph state %v actor %v", collision, a))
}
return e
}
// Store what actor's have been visited during
// a recursive solve. Used to avoid infinite
// recursion through a cycle in the graph.
type solverActorActor struct {
visited []*actor
}
func (s solverActorActor) hasVisited(actor *actor) bool {
for _, a := range s.visited {
if actor == a {
return true
}
}
return false
}
func (phase *narrowPhase) solveActorActor(solver *solverActorActor, a, b *actor, collision quad.Collision) []entity.Entity {
// When this functions returns the
// collision will have been solved
defer func() {
phase.solved = append(phase.solved, collision)
}()
var entities []entity.Entity
attemptSolve:
a, b, coordCollision := newActorActorCollision(a, b)
if coordCollision == nil {
goto resolved
}
switch coordCollision.Type() {
case coord.CT_NONE:
// This may not be entirely accurate.
// We should walk through the collision index
// of our partner too see if they should resolve
// some of there collisions first. They may
// appear to be moving to us right now, but
// have a collision that when solved will
// render them motionless, thus we would become
// motionless as well.
e, err := phase.solveDependencies(solver, a, b, collision)
switch err {
case nil:
if len(e) > 0 {
entities = append(entities, e...)
}
// Try solving again
goto attemptSolve
case errCycleDetected:
// Detected a cycle, we can't move
currentNode(a, b, collision).revertMoveAction()
goto resolved
case errNoDependencies:
// All dependencies have been solved
// We can move
goto resolved
}
case coord.CT_CELL_DEST:
a.revertMoveAction()
goto resolved
case coord.CT_SWAP:
a.revertMoveAction()
b.revertMoveAction()
goto resolved
case coord.CT_A_INTO_B_FROM_SIDE:
// This may not be entirely accurate.
// We should walk through the collision index
// of our partner too see if they should resolve
// some of there collisions first. They may
// appear to be moving to us right now, but
// have a collision that when solved will
// render them motionless, thus we would become
// motionless as well.
e, err := phase.solveDependencies(solver, a, b, collision)
switch err {
case nil:
if len(e) > 0 {
entities = append(entities, e...)
}
// Try solving again
goto attemptSolve
case errCycleDetected:
a.revertMoveAction()
goto resolved
case errNoDependencies:
if a.pathAction.End() >= b.pathAction.End() {
goto resolved
}
a.revertMoveAction()
goto resolved
}
case coord.CT_A_INTO_B:
a.revertMoveAction()
goto resolved
case coord.CT_HEAD_TO_HEAD:
fallthrough
case coord.CT_FROM_SIDE:
if a.pathAction.Start() < b.pathAction.Start() {
// A has already won the destination
b.revertMoveAction()
goto resolved
} else if a.pathAction.Start() > b.pathAction.Start() {
// B has already won the destination
a.revertMoveAction()
goto resolved
}
// Start values are equal
if a.pathAction.End() < b.pathAction.End() {
// A is moving faster and wins the destination
b.revertMoveAction()
goto resolved
} else if a.pathAction.End() > b.pathAction.End() {
// B is moving faster and wins the destination
a.revertMoveAction()
goto resolved
}
// End values are equal
// Movement direction priority goes in this order
// N -> E -> S -> W
if a.facing < b.facing {
// A's movement direction has a higher priority
b.revertMoveAction()
goto resolved
} else {
// B's movement direction has a higher priority
a.revertMoveAction()
goto resolved
}
}
resolved:
return append(entities, a.Entity(), b.Entity())
}
var errNoDependencies = errors.New("no dependencies")
var errCycleDetected = errors.New("cycle detected")
// Error can be errNoDependencies, errCycleDetected or nil
func (phase *narrowPhase) solveDependencies(solver *solverActorActor, a, b *actor, collision quad.Collision) ([]entity.Entity, error) {
node := followGraph(a, b, collision)
// Mark what actors have been visited
if a != node.actor {
solver.visited = append(solver.visited, a)
} else {
solver.visited = append(solver.visited, b)
}
// If the next node only has one collision
// then there are no dependencies and the
// collision can be solved
if len(phase.collisionIndex[node.entity]) == 1 {
return nil, errNoDependencies
}
// Walk through the directed graph of collisions and solve
// all the collisions that the collision depends on.
for _, c := range phase.collisionIndex[node.entity] {
// Ignore the collision that caused us to recurse
if c.IsSameAs(collision) {
continue
}
// Avoid solving a collision that's already been solving.
if phase.hasSolved(c) {
continue
}
e := otherEntityIn(node.actor, c)
switch e := e.(type) {
case actorEntity:
actor := phase.actorIndex[e.ActorId()]
// Detect cycles
if solver.hasVisited(actor) {
return nil, errCycleDetected
}
// Recurse
return phase.solveActorActor(solver, node.actor, actor, c), nil
}
}
return nil, errNoDependencies
}
| newActorActorCollision | identifier_name |
main.go | package main
/*
Minimal tool to automate release creation.
Create:
- git tag
- homebrew bottle
- linux tarball
- GitHub release with asset link(s)
Update:
- Homebrew formula tap with new release & SHAs
*/
import (
"bytes"
"compress/gzip"
"context"
"crypto/sha256"
"fmt"
"io/ioutil"
"log"
"os"
"os/exec"
"path/filepath"
"regexp"
"strings"
"text/template"
"github.com/google/go-github/github"
"github.com/mholt/archiver"
"github.com/pkg/errors"
"github.com/spf13/pflag"
"golang.org/x/oauth2"
)
const (
repoOwner = "dollarshaveclub"
repoName = "psst"
)
| var trowner, trname string
var hbrev, brbd uint
var osvs []string
var logger = log.New(os.Stderr, "", log.LstdFlags)
// ferr prints a formatted message (with trailing newline) to stdout and
// terminates the process with exit status 1.
func ferr(msg string, args ...interface{}) {
	fmt.Fprintf(os.Stdout, msg+"\n", args...)
	os.Exit(1)
}
var ghc *github.Client
// init wires up the command-line flags, validates required inputs, and
// initializes the shared GitHub client. Any validation failure exits the
// process via ferr.
//
// NOTE(review): flag parsing and network-client construction in init()
// makes this package hard to test; consider moving into main().
func init() {
	pflag.StringVar(&rname, "release", "", "release name (ex: v1.0.0)")
	pflag.StringVar(&npath, "notes-path", "relnotes.md", "path to release notes")
	pflag.StringVar(&commitsha, "commit", "", "commit SHA to release")
	pflag.StringVar(&taprepo, "tap-repo", "dollarshaveclub/homebrew-public", "name of tap GitHub repository ([owner]/[repo])")
	pflag.StringVar(&tapref, "tap-repo-ref", "master", "tap repository ref (branch/tag/SHA)")
	pflag.StringVar(&fpath, "formula", "Formula/psst.rb", "path to formula within tap repo")
	pflag.StringVar(&ftpath, "formula-template", "Formula/psst.rb.tmpl", "path to formula template within tap repo")
	pflag.StringVar(&targetoslist, "macos-versions", "el_capitan,high_sierra,sierra", "Supported MacOS versions (comma-delimited)")
	pflag.UintVar(&hbrev, "homebrew-rev", 0, "Homebrew revision (bump to force reinstall/rebuild)")
	pflag.UintVar(&brbd, "bottle-rebuild", 1, "Bottle rebuild (bump to force bottle reinstall)")
	pflag.BoolVar(&draft, "draft", false, "Draft release (unpublished)")
	pflag.BoolVar(&prerelease, "prerelease", false, "Prerelease")
	pflag.BoolVar(&dobuild, "build", true, "Build binaries first")
	pflag.Parse()

	// --tap-repo must be of the form [owner]/[repo]
	trs := strings.Split(taprepo, "/")
	if len(trs) != 2 {
		ferr("malformed tap repo (expected [owner]/[repo]): %v", taprepo)
	}
	if rname == "" {
		ferr("release name is required")
	}
	trowner = trs[0]
	trname = trs[1]

	// BUG FIX: strings.Split never returns an empty slice — splitting ""
	// yields [""] — so the original `len(osvs) == 0` guard could never
	// fire and an empty --macos-versions was silently accepted.
	osvs = strings.Split(targetoslist, ",")
	if targetoslist == "" || len(osvs) == 0 {
		ferr("At least one MacOS version is required")
	}

	ghtoken = os.Getenv("GITHUB_TOKEN")
	if ghtoken == "" {
		ferr("GITHUB_TOKEN missing from environment")
	}
	if err := checkFiles(npath); err != nil {
		ferr("file path error: %v", err)
	}
	checkLocalRepoVersion()
	ghc = newGHClient()
}
// newGHClient builds a GitHub API client authenticated with the static
// OAuth2 token read from GITHUB_TOKEN in init.
func newGHClient() *github.Client {
	tc := oauth2.NewClient(context.Background(), oauth2.StaticTokenSource(
		&oauth2.Token{AccessToken: ghtoken},
	))
	return github.NewClient(tc)
}
// checkLocalRepoVersion verifies that the local repository's HEAD matches
// the commit SHA requested for release (--commit). Exits via ferr on a
// mismatch or when git cannot be run.
func checkLocalRepoVersion() {
	cmd := exec.Command("git", "rev-parse", "HEAD")
	out, err := cmd.Output()
	if err != nil {
		ferr("error getting git command output: %v", err)
	}
	// git appends a trailing newline to the SHA
	if strings.TrimRight(string(out), "\n") != commitsha {
		ferr("current git revision does not match requested release version: %v (expected %v)", string(out), commitsha)
	}
}
// checkFiles verifies that every path exists on disk, returning a wrapped
// error for the first path that cannot be stat'd, or nil when all exist.
func checkFiles(paths ...string) error {
	for _, path := range paths {
		_, err := os.Stat(path)
		if err != nil {
			return errors.Wrap(err, "file error")
		}
	}
	return nil
}
// createGitTag creates an annotated git tag named after the release
// (rname) pointing at commitsha, then creates the matching
// refs/tags/<name> reference, both through the GitHub API.
func createGitTag() error {
	objType := "commit"
	tagMsg := fmt.Sprintf("release %v", rname)
	log.Printf("creating tag...\n")
	_, _, err := ghc.Git.CreateTag(context.Background(), repoOwner, repoName, &github.Tag{
		Tag:     &rname,
		Message: &tagMsg,
		Object: &github.GitObject{
			Type: &objType,
			SHA:  &commitsha,
		},
	})
	if err != nil {
		return errors.Wrap(err, "error creating tag")
	}
	refName := fmt.Sprintf("refs/tags/%v", rname)
	log.Printf("creating tag ref...\n")
	_, _, err = ghc.Git.CreateRef(context.Background(), repoOwner, repoName, &github.Reference{
		Ref: &refName,
		Object: &github.GitObject{
			Type: &objType,
			SHA:  &commitsha,
		},
	})
	if err != nil {
		return errors.Wrap(err, "error creating tag ref")
	}
	return nil
}
// bottleDefinition describes one published Homebrew bottle: the hash of
// the bottle tarball and the macOS version codename it targets.
type bottleDefinition struct {
	Hash     string // hex-encoded SHA-256 of the bottle tarball
	TargetOS string // Homebrew macOS codename (e.g. "sierra")
}
// formulaTemplateData is the data supplied to the tap's formula template
// when rendering the Homebrew formula for a release.
type formulaTemplateData struct {
	Tag              string             // release/tag name
	CommitSHA        string             // commit the release was built from
	HomebrewRevision uint               // non-zero bumps the formula revision (forces reinstall)
	BaseDownloadURL  string             // base URL for the release's downloadable assets
	Bottled          bool               // whether bottles are published for this release
	BottleRebuild    uint               // bottle rebuild counter (forces bottle reinstall)
	BottleDefs       []bottleDefinition // one bottle entry per supported macOS version
}
// populate fills the template data from the package-level release
// configuration plus the supplied bottle definitions.
func (ftd *formulaTemplateData) populate(bdefs []bottleDefinition) {
	ftd.Tag = rname
	ftd.CommitSHA = commitsha
	ftd.BaseDownloadURL = fmt.Sprintf("https://github.com/%v/%v/releases/download/%v", repoOwner, repoName, rname)
	ftd.Bottled = true
	ftd.BottleRebuild = brbd
	ftd.BottleDefs = bdefs
	// Only record a non-zero homebrew revision; zero means "unset".
	if hbrev > 0 {
		ftd.HomebrewRevision = hbrev
	}
}
// header is prepended to every generated formula to warn against manual edits.
const header = "# GENERATED FROM TEMPLATE. DO NOT EDIT!\n"
// generateFormula fetches the formula template from the tap repository,
// executes it with ftd, and returns the rendered formula bytes with the
// generated-file header prepended, or an error, if any.
func generateFormula(ftd formulaTemplateData) ([]byte, error) {
	logger.Printf("Generating Homebrew formula")
	// fetch template source from the tap repo at the configured ref
	opts := &github.RepositoryContentGetOptions{Ref: tapref}
	fc, _, _, err := ghc.Repositories.GetContents(context.Background(), trowner, trname, ftpath, opts)
	if err != nil {
		return nil, errors.Wrap(err, "error getting formula template")
	}
	raw, err := fc.GetContent()
	if err != nil {
		return nil, errors.Wrap(err, "error getting formula template content")
	}
	// render the formula
	tmpl, err := template.New("formula").Parse(raw)
	if err != nil {
		return nil, errors.Wrap(err, "error parsing formula template")
	}
	var out bytes.Buffer
	out.WriteString(header)
	if err := tmpl.Execute(&out, &ftd); err != nil {
		return nil, errors.Wrap(err, "error executing template")
	}
	return out.Bytes(), nil
}
// pushFormula commits the generated formula bytes to the tap repository,
// replacing the existing formula file on the tapref branch.
func pushFormula(fd []byte) error {
	logger.Printf("Pushing Homebrew formula")
	// The existing file's blob SHA is required to update it in place.
	cur, _, _, err := ghc.Repositories.GetContents(context.Background(), trowner, trname, fpath, &github.RepositoryContentGetOptions{Ref: tapref})
	if err != nil {
		return errors.Wrap(err, "error getting formula contents")
	}
	msg := fmt.Sprintf("updated for release %v", rname)
	fileOpts := &github.RepositoryContentFileOptions{
		Message: &msg,
		Content: fd,
		SHA:     cur.SHA,
		Branch:  &tapref,
	}
	if _, _, err = ghc.Repositories.UpdateFile(context.Background(), trowner, trname, fpath, fileOpts); err != nil {
		return errors.Wrap(err, "error updating formula")
	}
	return nil
}
const (
	// linuxBinName is the filename used for the Linux amd64 release binary.
	linuxBinName = "psst-linux-amd64"
)
// buildopts are the extra `go build` arguments; the -ldflags value is a
// Sprintf template whose two %v verbs receive the commit SHA and release name.
var buildopts = []string{"-ldflags", "-X github.com/dollarshaveclub/psst/cmd.CommitSHA=%v -X github.com/dollarshaveclub/psst/cmd.Version=%v -X github.com/dollarshaveclub/psst/cmd.CompiledDirectory=github -X github.com/dollarshaveclub/psst/cmd.CompiledStorage=vault -X github.com/dollarshaveclub/psst/cmd.Org=dollarshaveclub"}
// buildBins builds the macOS and Linux amd64 binaries into ./bins and
// gzip-compresses the Linux binary. Builds run in the parent directory
// (the repo root) with version info injected via -ldflags.
func buildBins() error {
	if err := os.MkdirAll("bins", os.ModeDir|0755); err != nil {
		return errors.Wrap(err, "error creating bins directory")
	}
	cwd, err := os.Getwd()
	if err != nil {
		return errors.Wrap(err, "error getting working directory")
	}
	wd := filepath.Join(cwd, "..")
	// Format the ldflags into a local copy so buildBins stays idempotent:
	// the original mutated package-level buildopts in place, so a second
	// call would Sprintf an already-formatted string (no verbs left).
	opts := append([]string(nil), buildopts...)
	opts[1] = fmt.Sprintf(opts[1], commitsha, rname)
	build := func(osn string) ([]byte, error) {
		cmd := exec.Command("go", append([]string{"build"}, opts...)...)
		cmd.Env = append(os.Environ(), fmt.Sprintf("GOOS=%v", osn), "GOARCH=amd64")
		cmd.Dir = wd
		return cmd.CombinedOutput()
	}
	logger.Printf("Building binaries...\n")
	logger.Printf("...macOS amd64")
	if out, err := build("darwin"); err != nil {
		return errors.Wrapf(err, "error running build command: %s", out)
	}
	if err := os.Rename(filepath.Join(wd, "psst"), filepath.Join(cwd, "bins", "psst-darwin")); err != nil {
		return errors.Wrap(err, "error renaming binary")
	}
	logger.Printf("...Linux amd64")
	if out, err := build("linux"); err != nil {
		return errors.Wrapf(err, "error running build command: %s", out)
	}
	lfn := filepath.Join(cwd, "bins", linuxBinName)
	if err := os.Rename(filepath.Join(wd, "psst"), lfn); err != nil {
		return errors.Wrap(err, "error renaming binary")
	}
	// compress linux binary
	logger.Printf("...compressing Linux binary\n")
	d, err := ioutil.ReadFile(lfn)
	if err != nil {
		return errors.Wrap(err, "error reading linux binary")
	}
	f, err := os.Create(lfn + ".gz")
	if err != nil {
		return errors.Wrap(err, "error creating compressed linux binary")
	}
	defer f.Close()
	gw := gzip.NewWriter(f)
	if _, err := gw.Write(d); err != nil {
		gw.Close()
		return errors.Wrap(err, "error writing compressed linux binary")
	}
	// Close flushes the gzip stream; the original deferred this and
	// dropped the error, which could silently leave a truncated archive.
	if err := gw.Close(); err != nil {
		return errors.Wrap(err, "error finalizing compressed linux binary")
	}
	return nil
}
// cpifneeded "copies" src to dest by creating a hard link, but only when
// dest does not already exist; an existing dest is left untouched.
func cpifneeded(src, dest string) error {
	_, err := os.Stat(dest)
	switch {
	case err == nil:
		return nil // destination already present; nothing to do
	case os.IsNotExist(err):
		return os.Link(src, dest)
	default:
		return errors.Wrap(err, "error getting destination")
	}
}
// bottleNameTmpl renders the canonical Homebrew bottle filename from the
// release version, optional homebrew revision, target OS and rebuild number.
var bottleNameTmpl = template.Must(template.New("bn").Parse("psst-{{ .Release }}{{ if .HomebrewRevision }}_{{ .HomebrewRevision }}{{ end }}.{{ .OS }}.bottle.{{ .BottleRebuild }}.tar.gz"))
// createBottle synthetically assembles a Homebrew bottle tarball for the
// first supported macOS version and hard-links it for the rest (the
// binary is identical across versions). It returns the bottle
// definitions (hash + target OS), the local bottle filenames, and an
// error, if any.
func createBottle() ([]bottleDefinition, []string, error) {
	logger.Printf("Creating Homebrew bottle...\n")
	cwd, err := os.Getwd()
	if err != nil {
		return nil, nil, errors.Wrap(err, "error getting working directory")
	}
	// Bottle layout: psst/<version>/{bin/psst, README.md, .brew, INSTALL_RECEIPT.json}
	rver := regexp.MustCompile("([0-9.]+)").FindString(rname)
	basepath := filepath.Join(".", "psst", rver)
	binpath := filepath.Join(basepath, "bin")
	if err := os.MkdirAll(binpath, os.ModeDir|0755); err != nil {
		return nil, nil, errors.Wrap(err, "error creating bottle directory path")
	}
	// .brew
	if err := os.MkdirAll(filepath.Join(basepath, ".brew"), os.ModeDir|0755); err != nil {
		return nil, nil, errors.Wrap(err, "error creating .brew directory")
	}
	// copy README
	if err := cpifneeded(filepath.Join(cwd, "..", "README.md"), filepath.Join(basepath, "README.md")); err != nil {
		return nil, nil, errors.Wrap(err, "error copying README")
	}
	// copy binary
	if err := cpifneeded(filepath.Join("bins", "psst-darwin"), filepath.Join(binpath, "psst")); err != nil {
		return nil, nil, errors.Wrap(err, "error copying binary")
	}
	// INSTALL_RECEIPT.json
	ir, err := ioutil.ReadFile("INSTALL_RECEIPT.json.tmpl")
	if err != nil {
		return nil, nil, errors.Wrap(err, "error reading install receipt template")
	}
	tmpl, err := template.New("instrcpt").Parse(string(ir))
	if err != nil {
		// FIX: this parse error was previously assigned but never checked.
		return nil, nil, errors.Wrap(err, "error parsing install receipt template")
	}
	d := struct {
		Release          string
		OS               string
		HomebrewRevision uint
		BottleRebuild    uint
	}{
		Release:       rver,
		BottleRebuild: brbd,
	}
	if hbrev > 0 {
		d.HomebrewRevision = hbrev
	}
	buf := bytes.NewBuffer([]byte{})
	if err := tmpl.Execute(buf, &d); err != nil {
		return nil, nil, errors.Wrap(err, "error executing install receipt template")
	}
	if err := ioutil.WriteFile(filepath.Join(basepath, "INSTALL_RECEIPT.json"), buf.Bytes(), os.ModePerm); err != nil {
		return nil, nil, errors.Wrap(err, "error writing install receipt")
	}
	// tar it up
	if err := os.MkdirAll("bottle", os.ModeDir|0755); err != nil {
		return nil, nil, errors.Wrap(err, "error creating bottle directory")
	}
	buf = bytes.NewBuffer([]byte{})
	d.OS = osvs[0]
	if err := bottleNameTmpl.Execute(buf, &d); err != nil {
		return nil, nil, errors.Wrap(err, "error executing bottle filename template: "+d.OS)
	}
	bp := filepath.Join("bottle", buf.String())
	if err := archiver.TarGz.Make(bp, []string{"psst"}); err != nil {
		return nil, nil, errors.Wrap(err, "error creating bottle tarball")
	}
	// Get hash of bottle, populate bottle definitions
	bd, err := ioutil.ReadFile(bp)
	if err != nil {
		return nil, nil, errors.Wrap(err, "error reading bottle")
	}
	sha := fmt.Sprintf("%x", sha256.Sum256(bd))
	bdefs := []bottleDefinition{
		{
			Hash:     sha,
			TargetOS: osvs[0],
		},
	}
	lps := []string{bp}
	// link other bottles: every bottle is the same tarball, so reuse the
	// first one's hash and just hard-link the file under each OS name.
	for _, osn := range osvs[1:] {
		d.OS = osn
		buf = bytes.NewBuffer([]byte{})
		if err := bottleNameTmpl.Execute(buf, &d); err != nil {
			return nil, nil, errors.Wrap(err, "error executing bottle filename template: "+d.OS)
		}
		p := filepath.Join("bottle", buf.String())
		if err := cpifneeded(bp, p); err != nil {
			return nil, nil, errors.Wrap(err, "error linking bottle")
		}
		lps = append(lps, p)
		bdefs = append(bdefs, bottleDefinition{
			Hash:     sha,
			TargetOS: osn,
		})
	}
	return bdefs, lps, nil
}
func createGHRelease(assetpaths []string) error {
rel := github.RepositoryRelease{
TagName: &rname,
//TargetCommitish: &commitsha,
Name: &rname,
Draft: &draft,
Prerelease: &prerelease,
}
nd, err := ioutil.ReadFile(npath)
if err != nil {
return errors.Wrap(err, "error reading release notes")
}
notes := string(nd)
rel.Body = ¬es
logger.Printf("Creating GitHub release")
ro, _, err := ghc.Repositories.CreateRelease(context.Background(), repoOwner, repoName, &rel)
if err != nil {
return errors.Wrap(err, "error creating release")
}
for _, ap := range assetpaths {
f, err := os.Open(ap)
if err != nil {
return errors.Wrap(err, "error opening asset")
}
defer f.Close()
logger.Printf("Uploading asset %v...", ap)
resp, _, err := ghc.Repositories.UploadReleaseAsset(context.Background(), repoOwner, repoName, *ro.ID, &github.UploadOptions{Name: filepath.Base(ap)}, f)
if err != nil {
return errors.Wrap(err, "error uploading asset")
}
logger.Printf("...%v\n", resp.GetBrowserDownloadURL())
}
return nil
}
// cleanup removes the scratch directories/files created by a release run.
func cleanup() error {
	logger.Printf("Cleaning up")
	scratch := []string{"./bins", "./bottle", "./psst"}
	for _, p := range scratch {
		if err := os.RemoveAll(p); err != nil {
			return errors.Wrap(err, "error removing path")
		}
	}
	return nil
}
// main drives the release end to end: build binaries (unless --build=false),
// assemble the Homebrew bottle, generate and push the formula, tag the
// commit, create the GitHub release with all assets, then clean up.
// Any failure aborts the program via ferr.
func main() {
	if dobuild {
		if err := buildBins(); err != nil {
			ferr("error building binaries: %v", err)
		}
	}
	bds, lps, err := createBottle()
	if err != nil {
		ferr("error creating bottle: %v", err)
	}
	ftd := formulaTemplateData{}
	ftd.populate(bds)
	fd, err := generateFormula(ftd)
	if err != nil {
		ferr("error generating formula: %v", err)
	}
	if err = pushFormula(fd); err != nil {
		ferr("error pushing formula: %v", err)
	}
	if err := createGitTag(); err != nil {
		ferr("error creating tag: %v", err)
	}
	cwd, err := os.Getwd()
	if err != nil {
		ferr("error getting working directory: %v", err)
	}
	// Release assets: compressed Linux binary plus every bottle tarball.
	assetpaths := append([]string{filepath.Join(cwd, "bins", linuxBinName+".gz")}, lps...)
	if err = createGHRelease(assetpaths); err != nil {
		ferr("error creating GitHub release: %v", err)
	}
	if err := cleanup(); err != nil {
		ferr("error cleaning up: %v", err)
	}
	logger.Printf("Done")
}
var rname, npath, commitsha, ghtoken, taprepo, tapref, fpath, ftpath, targetoslist string
var draft, prerelease, dobuild bool
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.