index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
67,137 | Alfiesan/earthquakePrediction | refs/heads/master | /RNN_Code/firstRNN.py |
import pandas as pd
import numpy as np
import matplotlib.pylab as plt
%matplotlib qt
from scipy import signal
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Conv2D
from keras.layers import Dropout
from keras.layers import LSTM
from keras.layers import TimeDistributed
from sklearn.metrics import mean_absolute_error
from sklearn.preprocessing import StandardScaler
from keras.models import load_model
"""
bin1 = dill.load(open("all_bins/r_bin_12227.dill", "rb"))
all_bins = []
for i in range(1, 153584):
bin_name = "all_bins/r_bin_" + str(i) + ".dill"
curr_bin = dill.load(open(bin_name, "rb"))
if(curr_bin[3] == -1):
bin_data = curr_bin[2]
if(bin_data.size == 4095):
bin_data = np.append(bin_data, 0)
if(bin_data.size == 8192):
bin_data = bin_data[:4096]
bin_data = np.append(bin_data, curr_bin[1])
all_bins.append(bin_data)
newData = pd.DataFrame(all_bins)
newData.to_csv("all_quakes.csv")
"""
data = pd.read_csv("all_quakes.csv")
X = data.iloc[:, 1:4097].values
y = data.iloc[:, 4097].values
X = X.astype(float)
X_fft2 = np.zeros((153567, 4096), dtype=np.complex64)  # note: immediately overwritten below
X_fft2 = np.fft.fft2(X, (153567, 4096))
X_fft = np.zeros((153567, 4096), dtype=np.complex64)
fftArr = np.array([])
for i in range(0, 153567):
    X_fft[i] = np.fft.fft(X[i])
    #fftArr = np.append(fftArr, X_fft[i])
X_lot = np.array([])
for i in range(0, 100):
    X_lot = np.append(X_lot, X[i])
f, t, Sxx = signal.spectrogram(x = X_fft2[2])
plt.pcolormesh(t, f, Sxx)
plt.ylabel('Frequency [Hz]')
plt.xlabel('Time [sec]')
plt.show()
a = X[0]
b = X_fft2[2]
plt.plot(b)
plt.plot(X_fft[0])
plt.plot(X_fft[1156])
# start indices of all 17 quakes
quake_starts = [0, 1380, 12225, 25551, 33873, 45802, 53371, 60004, 75141, 82570, 91626, 102364, 112724, 121020, 129069, 142932, 151821]
feature_data = pd.read_csv("features_including_fft.csv")
X_features = feature_data.iloc[:, 1:].values
sc_X = StandardScaler()
X_features = sc_X.fit_transform(X_features)
# 5.67 avg with 3.04 mean error
"""
# find all the bins that are the start of a quake
for i in range(1, 153584):
bin_name = "all_bins/r_bin_" + str(i) + ".dill"
curr_bin = dill.load(open(bin_name, "rb"))
if(curr_bin[3] != -1):
print(i)
"""
#not 153584?
# get window start indexes for the training data so that no 36-bin window crosses a quake boundary
indexs = np.array([])
for start in range(0, 35):
    for i in range(start, 129068, 36):
        need_break = False
        for j in range(1, 16):
            if i < quake_starts[j] and i + 35 > quake_starts[j]:
                need_break = True
        if need_break:
            continue
        indexs = np.append(indexs, i)
# second pass: every start index (stride 1), with the same boundary check
for i in range(0, 129068):
    need_break = False
    for j in range(1, 16):
        if i < quake_starts[j] and i + 35 > quake_starts[j]:
            need_break = True
    if need_break:
        continue
    indexs = np.append(indexs, i)
indexs = indexs.astype(np.int32)
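# Hedged sketch (not in the original file): the same "window straddles a quake
# start" test as a reusable helper; it mirrors the i < q < i + 35 check above.
def window_crosses_quake(i, quake_starts):
    """True if any quake start falls strictly inside the 36-bin window at i."""
    return any(i < q and i + 35 > q for q in quake_starts[1:16])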
# shuffles indexes
np.random.shuffle(indexs)
# expand each start index into its 36 consecutive bin indices, taking the
# ttf of the window's last bin as the target
y_data = np.zeros((indexs.size, 1))
all_indexs = np.zeros((indexs.size, 36))
for i in range(0, indexs.size):
    y_data[i] = y[indexs[i] + 35]
    all_indexs[i] = np.arange(indexs[i], indexs[i] + 36)
all_indexs.resize((indexs.size, 36))
all_indexs = all_indexs.astype(np.int32)
data_in3d = np.zeros((indexs.size, 36, 31))
for i in range(0, indexs.size):
    for j in range(0, 36):
        data_in3d[i, j, :] = X_features[all_indexs[i, j]]
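# Hedged aside (not in the original script): NumPy fancy indexing builds the
# same (n_windows, 36, 31) tensor without the double loop.
data_in3d_fast = X_features[all_indexs]  # gathers the rows of every window at once
assert np.allclose(data_in3d_fast, data_in3d)
assert np.allclose(y[indexs + 35].reshape(-1, 1), y_data)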
model = Sequential()
# data_in3d has 31 features per time step, so input_shape must be (36, 31);
# the original (36, 12) would fail at fit time.
model.add(TimeDistributed(Dense(units=256, activation='relu', kernel_initializer='uniform'), input_shape=(36, 31)))
model.add(TimeDistributed(Dense(units=128, activation='relu', kernel_initializer='uniform')))
model.add(TimeDistributed(Dense(units=128, activation='relu', kernel_initializer='uniform')))
model.add(TimeDistributed(Dense(units=64, activation='relu', kernel_initializer='uniform')))
model.add(LSTM(units=8, input_shape=(36, 64), kernel_initializer='uniform'))  # input_shape is ignored here (not the first layer)
model.add(Dense(units=1, kernel_initializer='uniform'))
model.compile(optimizer='adam', loss='mean_squared_error', metrics=['accuracy'])  # note: accuracy is not meaningful for regression
# loading a saved model here replaces the freshly built one above
model = load_model("actually_working_Poisson.h5")
model.fit(data_in3d, y_data, batch_size=10000, epochs=10)
y_pred = model.predict(data_in3d)
y_pred_df = pd.DataFrame(y_pred)
y_pred_df.to_csv("y_pred_df.csv")
y_test_df = pd.DataFrame(y_data)
y_test_df.to_csv("y_test_df.csv")
model.save("actually_working_Poisson_George_is_the_best.h5")
model = load_model("featureRNN_v1.h5")
X = np.absolute(X)
X_features = np.zeros((153567, 12))
for i in range(0, 153567):
    X_features[i, 0] = np.mean(X[i])
    X_features[i, 1] = np.median(X[i])
    X_features[i, 2] = np.std(X[i])
    X_features[i, 3] = np.max(X[i])
    X_features[i, 4] = np.min(X[i])
    X_features[i, 5] = np.var(X[i])
    X_features[i, 6] = np.ptp(X[i])  # peak-to-peak is like range
    X_features[i, 7] = np.percentile(X[i], q=10)
    X_features[i, 8] = np.percentile(X[i], q=25)  # we can also grab percentiles
    X_features[i, 9] = np.percentile(X[i], q=50)
    X_features[i, 10] = np.percentile(X[i], q=75)
    X_features[i, 11] = np.percentile(X[i], q=90)
feature_data = pd.DataFrame(X_features)
feature_data.to_csv("data_abs_features.csv")
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,138 | Alfiesan/earthquakePrediction | refs/heads/master | /TinyFFNWithStats.py |
# coding: utf-8
# In[1]:
import os, sys
currentFolder = os.path.abspath('')
projectFolder = 'F:/myProjects/tfKeras/UCSC/CMPS242/earthquake/'
sys.path.append(str(projectFolder))
#exec(open("inc_notebook.py").read())
# In[2]:
import logging, sys, math,os
exec(open("estimator/initKeras.py").read())
# In[3]:
from matplotlib import pyplot as plt
#get_ipython().run_line_magic('matplotlib', 'auto')
import seaborn as sns
sns.set(style="darkgrid")
# In[4]:
if sys.modules.get('library.MultipleBinDataGenerator', False) != False:
    del sys.modules['library.MultipleBinDataGenerator']
if sys.modules.get('MultipleBinDataGenerator', False) != False:
    del sys.modules['MultipleBinDataGenerator']
from library.MultipleBinDataGenerator import *
logging.warning( "MultipleBinDataGenerator loaded" )
trainGenerator = MultipleBinDataGenerator(batch_size=20, windowSize = 10, stride = 10)
# In[5]:
#aBatch = trainGenerator.__getitem__(0)
# In[5]:
if sys.modules.get('library.LivePlotKeras', False) != False:
    del sys.modules['library.LivePlotKeras']
if sys.modules.get('LivePlotKeras', False) != False:
    del sys.modules['LivePlotKeras']
from library.LivePlotKeras import *
logging.warning( "LivePlotKeras loaded" )
livePlotKeras = LivePlotKeras()
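# Hedged aside (not in the original): importlib.reload is the idiomatic way to
# pick up edits to an already-imported module during notebook development;
# the module alias below is hypothetical.
import importlib
import library.LivePlotKeras as _live_plot_module
_live_plot_module = importlib.reload(_live_plot_module)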
# In[6]:
trainGenerator.__len__()
# In[7]:
model_input = layers.Input( shape = ( 15 + 6 * 27 + 2 + 15 + 3 * 27, ) )  # = 275, the OneStatsEmbedding feature count
# In[8]:
x = layers.Dense(64)(model_input)
x = layers.LeakyReLU(alpha=0.1)(x)
x = layers.Dropout(0.2)(x)
x = layers.Dense(32)(x)
x = layers.LeakyReLU(alpha=0.1)(x)
x = layers.Dropout(0.2)(x)
x = layers.Dense(16)(x)
x = layers.LeakyReLU(alpha=0.1)(x)
x = layers.Dropout(0.2)(x)
x = layers.Dense(1, activation=activations.relu)(x)
model = models.Model(model_input, x, name = "TinyFFN")
model.summary()
# In[9]:
model.compile(optimizer=optimizers.Adam(lr=0.001),
              loss=losses.MSE,
              metrics=[metrics.MSE, metrics.MAE])
# In[10]:
sys.path.remove(str(projectFolder))
os.chdir(currentFolder)
# In[ ]:
np.seterr(invalid='ignore')
np.warnings.filterwarnings('ignore')
history = model.fit_generator(generator=trainGenerator,
                              use_multiprocessing=True,
                              workers=4,
                              initial_epoch=1,
                              epochs=10,
                              max_queue_size=20,  # 'max_q_size' was the Keras 1 name for this argument
                              steps_per_epoch=trainGenerator.__len__(),
                              callbacks=[livePlotKeras])
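# Hedged note (not in the original): if trainGenerator is a keras.utils.Sequence,
# steps_per_epoch just restates its __len__, which Keras would infer by default.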
# In[ ]:
aBatch = trainGenerator.__getitem__(0)
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,139 | Alfiesan/earthquakePrediction | refs/heads/master | /library/MultipleBinDataGenerator.py | import numpy as np
import logging, dill, fnmatch, os, math, gc
from data_analysis.library.Bin import Bin
from data_analysis.library.BinIO import BinIO
from data_analysis.library.Scalers import Scalers
from library.RegressionDataGenerator import RegressionDataGenerator
from embedding.OneStatsEmbedding import *
from embedding.CNNStatsEmbedding import *
from embedding.MultipleBinEmbeddingType import *
from embedding.EmbeddingCache import EmbeddingCache
from embedding.EmbeddingIO import EmbeddingIO
class MultipleBinDataGenerator(RegressionDataGenerator):
    def __init__(self, binType='pos', embedding=MultipleBinEmbeddingType.ONE_STATS,
                 startBinId=1, windowSize=36, stride=36,
                 list_IDs=None, numBins=153584, batch_size=16,
                 n_channels=1, shuffle=False):
        self.binType = binType
        self.embedding = embedding
        self.binIO = BinIO()
        self.startBinId = startBinId
        self.numBins = numBins
        self.scalers = Scalers()
        self.windowSize = windowSize
        self.stride = stride
        self.lastWindowBins = {}
        # Make IDs here.
        if list_IDs is None:
            list_IDs = self.getListIds()
        self.embedder = self.getEmbedder()
        self.embeddingIO = EmbeddingIO()
        if self.stride > self.windowSize:
            logging.warning("stride is greater than windowSize")
        self.addDimToX = False
        logging.warning(f"shuffling: {shuffle}")
        if embedding == MultipleBinEmbeddingType.ONE_STATS:
            self.dim = (self.embedder.numberOfFeatures)
        elif embedding == MultipleBinEmbeddingType.CNN_STATS:
            self.addDimToX = True
            self.dim = self.embedder.dim
        super(MultipleBinDataGenerator, self).__init__(list_IDs, batch_size, dim=self.dim, shuffle=shuffle)
        pass
    def getListIds(self):
        return list(range(self.startBinId, self.numBins + 1))

    def getEmbedder(self):
        if self.embedding == MultipleBinEmbeddingType.ONE_STATS:
            if self.binType == 'nor':
                return OneStatsEmbedding(self.scalers.getScaler('scaler'))
            elif self.binType == 'pos':
                return OneStatsEmbedding(self.scalers.getScaler('absScaler'))
        if self.embedding == MultipleBinEmbeddingType.CNN_STATS:
            if self.binType == 'nor':
                return CNNStatsEmbedding(self.scalers.getScaler('scaler'), binsPerEmbedding=self.windowSize)
            elif self.binType == 'pos':
                return CNNStatsEmbedding(self.scalers.getScaler('absScaler'), binsPerEmbedding=self.windowSize)
    def getNumberOfBatches(self):
        if self.stride >= self.windowSize:
            return math.floor(self.numBins / (self.stride * self.batch_size))
        else:
            return math.floor((self.numBins + 1 - self.windowSize) / (self.stride * self.batch_size))  # TODO verify this equation.

    def __len__(self):
        return self.getNumberOfBatches()
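    # Hedged worked example (not in the original): with the defaults
    # numBins=153584, stride=windowSize=36 and batch_size=16, __len__ gives
    # floor(153584 / (36 * 16)) = floor(153584 / 576) = 266 batches per epoch.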
    def __getitem__(self, batchIndex):
        'Generate one batch of data'
        if self.embedding == MultipleBinEmbeddingType.ONE_STATS:
            X = np.empty((self.batch_size, self.dim))
        if self.embedding == MultipleBinEmbeddingType.CNN_STATS:
            X = np.empty((self.batch_size, *self.dim))
        y = np.empty(self.batch_size)
        #print( X.shape )
        #print(self.dim)
        embeddingId = batchIndex * self.batch_size + 1
        try:
            for i in range(self.batch_size):
                embeddingCache = self.embeddingIO.readById(embeddingId, self.embedder.type)
                # print(embeddingCache.features.shape)
                if self.addDimToX:
                    # print('reshaping to', -1, self.embedder.numberOfFeatures, 1)
                    x = embeddingCache.features
                    X[i,] = x.reshape(-1, self.embedder.numberOfFeatures, 1)
                    # print(f'shape of x{x.shape} and shape of reshaped: {X[i,].shape}')
                else:
                    X[i,] = embeddingCache.features
                y[i] = embeddingCache.ttf
                embeddingId += 1
        except Exception as e:
            logging.warning(f"Batch exception: {e}")
        #print(X.shape)
        return X, y
    # def __getitemFromBins__(self, batchIndex):
    #     'Generate one batch of data'
    #     X = np.empty((self.batch_size, self.dim))
    #     y = np.empty(self.batch_size)
    #     #print( X.shape )
    #     #print(self.dim)
    #     sampleStartId = batchIndex * self.batch_size * self.stride + 1
    #     for i in range(self.batch_size):
    #         X[i,], y[i] = self.getEmbeddingAndOutput(sampleStartId)
    #         sampleStartId += self.stride
    #     #print(X.shape)
    #     return X, y
    def getEmbeddingAndOutput(self, startBinId):  # should cache the last window as there will be overlapping bins.
        endBinId = startBinId + self.windowSize
        bins = []
        for binId in range(startBinId, endBinId):
            try:
                if binId in self.lastWindowBins:
                    bins.append(self.lastWindowBins[binId])
                else:
                    bins.append(self.binIO.readBinById(binId, self.binType))
            except Exception as e:
                logging.warning(f"Batch bin exception. Might be safe to continue. {e}")
        lastBin = bins[-1]
        # cache bins
        self.lastWindowBins = {}
        for aBin in bins:
            self.lastWindowBins[aBin.binId] = aBin
        # Generate data
        features = self.embedder.fromBins(bins)
        #print(features.shape)
        return features, lastBin.ttf
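    # Hedged note (not in the original): when stride < windowSize, consecutive
    # windows share windowSize - stride bins, so the lastWindowBins cache above
    # avoids re-reading those overlapping bins from disk.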
    def cacheEmbeddingByBatch(self, startEmbeddingId=1, stopAfter=0):
        embeddingId = startEmbeddingId
        startBinId = (embeddingId - 1) * self.stride + 1
        while startBinId + self.stride <= self.numBins and (stopAfter == 0 or stopAfter >= startBinId):
            # print(startBinId)
            features, ttf = self.getEmbeddingAndOutput(startBinId)
            embeddingCache = EmbeddingCache(embeddingId=embeddingId,
                                            firstBinId=startBinId,
                                            type=str(self.embedding.value) + '-w' + str(self.windowSize) + 's-' + str(self.stride),
                                            features=features,
                                            ttf=ttf)
            self.embeddingIO.save(embeddingCache, self.embedder.type)
            if embeddingId % 1000 == 0:
                logging.debug(f"cached {embeddingId} now collecting garbage")
                gc.collect()  # TODO do it in another thread
            startBinId += self.stride
            embeddingId += 1
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,140 | Alfiesan/earthquakePrediction | refs/heads/master | /data_analysis/library/PositiveBinManager.py | import numpy as np
import logging, dill
from .Bin import Bin
from .BinManager import BinManager
from .BinProcessor import BinProcessor
from .BinIO import BinIO
class PositiveBinManager:
    def __init__(self):
        self.binManager = BinManager()
        self.binProcessor = BinProcessor()
        self.binIO = BinIO()
        self.numRawBins = 153584
        self.positiveBinType = 'pos'
        pass

    # For positive bins
    def createPositiveBins(self, fromId=1, toId=0):
        """ It makes all the acoustic data from raw bins positive """
        if toId == 0:
            toId = self.numRawBins
        for binId in range(fromId, toId + 1):
            if (binId % 2000) == 0:
                print(f'processed {binId}th positive bin')
            positiveBin = self.binProcessor.makeDataPositive(self.binManager.readRawBinById(binId))
            self.binIO.saveBin(positiveBin, self.positiveBinType)
        pass

    def readPositiveBinById(self, binId):
        return self.binIO.readBinById(binId, self.positiveBinType)

    def countPositiveBin(self):
        return self.binIO.countBin(self.positiveBinType)

    def readPositiveBins(self, fromId, size):
        return self.binIO.readBins(fromId, size, self.positiveBinType)
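    # Hedged usage sketch (not in the original):
    #   manager = PositiveBinManager()
    #   manager.createPositiveBins(fromId=1, toId=100)  # make the acoustic data of the first 100 raw bins positive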
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,141 | Alfiesan/earthquakePrediction | refs/heads/master | /embedding/Embedding.py | import numpy as np
from data_analysis.library.Bin import Bin
from embedding.SourceCardinality import SourceCardinality
# Base class which all embedding classes need to implement
class Embedding:
    def __init__(self, sourceCardinality=SourceCardinality.SINGLE):
        self.sourceCardinality = sourceCardinality
        pass

    def fromBin(self, aBin: Bin):
        raise Exception(f"{type(self)} has not implemented fromBin")

    def fromBins(self, aBin: Bin):
        raise Exception(f"{type(self)} has not implemented fromBins")
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,142 | Alfiesan/earthquakePrediction | refs/heads/master | /embedding/OneStatsEmbedding.py | import numpy as np
import logging
from scipy import stats
from data_analysis.library.Bin import Bin
from data_analysis.library.BinProcessor import BinProcessor
from embedding.Embedding import Embedding
from embedding.SourceCardinality import SourceCardinality
from embedding.Stats import Stats
import pandas as pd
class OneStatsEmbedding(Embedding):
    def __init__(self, scaler=None):
        self.type = 'one-stats'
        self.numberOfFeatures = 15 + 6 * 27 + 2 + 15 + 3 * 27  # = 275
        self.scaler = scaler
        self.stats = Stats()
        super(OneStatsEmbedding, self).__init__(sourceCardinality=SourceCardinality.MULTI)
        pass

    def fromBins(self, bins: Bin):
        # 1. get all data & scale it using the scaler
        data = []
        # ttfs = []
        for aBin in bins:
            data.extend(aBin.data)
            # ttfs.append(aBin.ttf)
        return self.fromUnnormalizedNumpyData(data)

    def fromUnnormalizedNumpyData(self, data):
        data = np.array(data).reshape(-1, 1)
        if self.scaler is not None:
            data = self.scaler.transform(data)
        return self.fromNormalizedNumpyData(data)

    def fromNormalizedNumpyData(self, data):
        with np.errstate(invalid='ignore'):
            dataSeries = pd.Series(data.flatten())
            embedding = self.stats.getBasicStatsList(data)  # 15  (maybe this function should use the series)
            embedding.extend(self.stats.getTrendStatsList(dataSeries))  # 6 * 27
            embedding.extend(self.stats.getLinearSeasonalityStatsList(data, True))  # 2
            embedding.extend(self.stats.getFirstOrderSeasonalityStatsList(dataSeries))  # 15 + 3 * 27
            # embedding.extend(self.stats.getTTFDiffStatsList(ttfs))  # 15
            # print(f"embedding length: {len(embedding)}")
            # 3. return stats
            return np.array(embedding)

    def fromUnnormalizedDfData(self, df):
        return self.fromUnnormalizedNumpyData(df.acoustic_data.values)

    def fromBinsDf(self, df):
        return self.fromUnnormalizedDfData(df)
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,143 | Alfiesan/earthquakePrediction | refs/heads/master | /embedding/SourceCardinality.py | from enum import Enum
class SourceCardinality(Enum):
    SINGLE = 1
    MULTI = 2
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,144 | Alfiesan/earthquakePrediction | refs/heads/master | /RNN_Code/RNN_finding_minimum.py |
import pandas as pd
import numpy as np
import matplotlib.pylab as plt
%matplotlib qt
import scipy
import dill
from keras.models import Sequential, load_model
from keras.layers import Dense, Conv2D, Dropout, LSTM, RNN, TimeDistributed
from sklearn.metrics import mean_absolute_error
from sklearn.preprocessing import StandardScaler
from keras import optimizers, Model
data = pd.read_csv("train_chunk1.csv")
"""
data = pd.read_csv("all_quakes.csv")
X = data.iloc[:, 1:4097].values
y = data.iloc[:, 4097].values
X = X.astype(np.int16)
X = np.absolute(X)
X_features = np.zeros((153567, 31))
for i in range(0, 153567):
X_features[i, 0] = np.mean(X[i])
X_features[i, 1] = np.median(X[i])
X_features[i, 2] = np.std(X[i])
X_features[i, 3] = np.max(X[i])
X_features[i, 4] = np.var(X[i])
X_features[i, 5] = np.ptp(X[i])
X_features[i, 6] = np.percentile(X[i], q = 10)
X_features[i, 7] = np.percentile(X[i], q = 25)
X_features[i, 8] = np.percentile(X[i], q = 50)
X_features[i, 9] = np.percentile(X[i], q = 75)
X_features[i, 10] = np.percentile(X[i], q = 90)
X_features[i, 11] = scipy.stats.entropy(X[i])
X_features[i, 12] = scipy.stats.kurtosis(X[i])
X_features[i, 13] = scipy.stats.skew(X[i])
if (i <= 153566):
X_features[i, 14] = np.correlate(X[i], X[i + 1]) #Corr of two consecutive bins
if (i <= 153556):
X_features[i, 15] = np.correlate(X[i], X[i + 10]) #Corr of 10 consecutive bins
X_fft = np.zeros((153567,4096))
fftArr = np.array([])
for i in range(0, 153567):
X_fft[i] = np.fft.fft(X[i])
for i in range(0, 153567):
X_features[i, 16] = np.mean(X_fft[i])
X_features[i, 17] = np.median(X_fft[i])
X_features[i, 18] = np.std(X_fft[i])
X_features[i, 19] = np.max(X_fft[i])
X_features[i, 20] = np.var(X_fft[i])
X_features[i, 21] = np.ptp(X_fft[i])
X_features[i, 22] = np.percentile(X_fft[i], q = 10)
X_features[i, 23] = np.percentile(X_fft[i], q = 25)
X_features[i, 24] = np.percentile(X_fft[i], q = 50)
X_features[i, 25] = np.percentile(X_fft[i], q = 75)
X_features[i, 26] = np.percentile(X_fft[i], q = 90)
X_features[i, 27] = scipy.stats.kurtosis(X_fft[i])
X_features[i, 28] = scipy.stats.skew(X_fft[i])
if (i <= 153566):
X_features[i, 29] = np.correlate(X_fft[i], X_fft[i + 1]) #Corr of two consecutive bins
if (i <= 153556):
X_features[i, 30] = np.correlate(X_fft[i], X_fft[i + 10]) #Corr of 10 consecutive bins
X_fea = pd.DataFrame(X_features)
X_fea.to_csv("features_including_fft.csv")
"""
data = pd.read_csv("all_quakes.csv")
y = data.iloc[:, 4097].values
# start indices of all 17 quakes
quake_starts = [0, 1380, 12225, 25551, 33873, 45802, 53371, 60004, 75141, 82570, 91626, 102364, 112724, 121020, 129069, 142932, 151821]
feature_data = pd.read_csv("features_including_fft.csv")
X_features = feature_data.iloc[:, 1:].values
sc_X = StandardScaler()
X_features = sc_X.fit_transform(X_features)
# 5.67 avg with 3.04 mean error
"""
# find all the bins that are the start of a quake
for i in range(1, 153584):
bin_name = "all_bins/r_bin_" + str(i) + ".dill"
curr_bin = dill.load(open(bin_name, "rb"))
if(curr_bin[3] != -1):
print(i)
"""
#not 153584?
# get window start indexes for the training data so that no 36-bin window crosses a quake boundary
indexs = np.array([])
for start in range(0, 35):
    for i in range(start, 129069, 36):
        need_break = False
        for j in range(1, 16):
            if i < quake_starts[j] and i + 35 > quake_starts[j]:
                need_break = True
        if need_break:
            continue
        indexs = np.append(indexs, i)
indexs = indexs.astype(np.int32)
# shuffles indexes
np.random.shuffle(indexs)
# expand each start index into its 36 consecutive bin indices, with the last
# bin's ttf as the target
y_data = np.zeros((indexs.size, 1))
all_indexs = np.zeros((indexs.size, 36))
for i in range(0, indexs.size):
    y_data[i] = y[indexs[i] + 35]
    all_indexs[i] = np.arange(indexs[i], indexs[i] + 36)
all_indexs.resize((indexs.size, 36))
all_indexs = all_indexs.astype(np.int32)
data_in3d = np.zeros((indexs.size, 36, 31))
for i in range(0, indexs.size):
    for j in range(0, 36):
        data_in3d[i, j, :] = X_features[all_indexs[i, j]]
historys = []
scores = np.array([])
count = 0
mae = 3
# NOTE: as written this loop never terminates (there is no break); it was
# presumably interrupted by hand. The validation set data_in3dT / y_dataT is
# built further down in this script and must exist before running this cell.
while True:
    count = count + 1
    model = Sequential()
    model.add(TimeDistributed(Dense(units=256, activation='relu', kernel_initializer='uniform'), input_shape=(36, 31)))
    model.add(Dropout(.2))
    model.add(TimeDistributed(Dense(units=256, activation='relu', kernel_initializer='uniform')))
    model.add(Dropout(.2))
    model.add(TimeDistributed(Dense(units=128, activation='relu', kernel_initializer='uniform')))
    model.add(Dropout(.2))
    model.add(TimeDistributed(Dense(units=64, activation='relu', kernel_initializer='uniform')))
    model.add(Dropout(.2))
    model.add(TimeDistributed(Dense(units=64, activation='relu', kernel_initializer='uniform')))
    model.add(Dropout(.2))
    model.add(LSTM(units=64, input_shape=(36, 64), kernel_initializer='uniform'))  # input_shape is ignored here (not the first layer)
    model.add(Dropout(.2))
    model.add(Dense(units=1, kernel_initializer='uniform'))
    model.compile(optimizer='adam', loss='mean_absolute_error', metrics=['accuracy'])
    history = model.fit(data_in3d, y_data, batch_size=10000, epochs=15, validation_data=(data_in3dT, y_dataT))
    historys.append(history)
    y_pred = model.predict(data_in3d)
    mae = mean_absolute_error(y_data, y_pred)
    scores = np.append(scores, mae)
y_pred_df = pd.DataFrame(y_pred)
y_pred_df.to_csv("y_pred_df.csv")
y_test_df = pd.DataFrame(y_data)
y_test_df.to_csv("y_test_df.csv")
model.save("overfittedv1.h5")
model = load_model("featureRNN_v1.h5")
model = load_model("actually_working.h5")
mean_absolute_error(y_data, y_pred)
indexsT = np.array([])
for start in range(129069, 129104):
    for i in range(start, 151821, 36):
        need_break = False
        for j in range(1, 16):
            if i < quake_starts[j] and i + 35 > quake_starts[j]:
                need_break = True
        if need_break:
            continue
        indexsT = np.append(indexsT, i)
indexsT = indexsT.astype(np.int32)
# shuffles indexes
np.random.shuffle(indexsT)
y_dataT = np.zeros((indexsT.size, 1))
all_indexsT = np.zeros((indexsT.size, 36))
for i in range(0, indexsT.size):
    y_dataT[i] = y[indexsT[i] + 35]
    all_indexsT[i] = np.arange(indexsT[i], indexsT[i] + 36)
all_indexsT.resize((indexsT.size, 36))
all_indexsT = all_indexsT.astype(np.int32)
data_in3dT = np.zeros((indexsT.size, 36, 31))
for i in range(0, indexsT.size):
    for j in range(0, 36):
        data_in3dT[i, j, :] = X_features[all_indexsT[i, j]]
y_predT = model.predict(data_in3dT)
mae = mean_absolute_error(y_dataT, y_predT)
print(mae)
plt.plot(y_dataT[:1000])
plt.plot(y_predT[:1000])
for i in range(0, 18):
    plt.plot(historys[i].history['loss'])  # plot each run's training-loss curve (History objects are not plottable directly)
x1 = model.layers[-1]
x2 = model.layers[-2]
x3 = model.layers[-3]
x4 = model.layers[-4]
x5 = model.layers[-5]
x6 = model.layers[-6]
x7 = model.layers[-7]
d1 = Dropout(.8)
d2 = Dropout(.8)
d3 = Dropout(.8)
d4 = Dropout(.8)
d5 = Dropout(.8)
d6 = Dropout(.8)
d7 = Dropout(.8)
x = d7(x7.output)
x = x6(x)
x = d6(x)
x = x5(x)
x = d5(x)
x = x4(x)
x = d4(x)
x = x3(x)
x = d3(x)
x = x2(x)
x = d2(x)
x1 = x1(x)
model2 = Model(inputs=model.input, outputs=x1)  # Keras 2 spells these inputs/outputs, not input/output
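# Hedged note (not in the original): model2 rewires the trained layers with
# heavy Dropout(0.8) between them. Standard Keras dropout is inactive at
# predict time, so model2.predict(...) should match the original model; the
# extra dropout only takes effect if model2 is trained further (or dropout is
# forced on at inference).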
seq = pd.read_csv("sample_submission.csv")
seq_data = seq.iloc[:, 0].values
seq_data = seq_data.astype(str)
full_data = np.zeros((seq_data.size, 36, 31))
for k in range(seq_data.size):
    if k % 100 == 0:
        print(k)
    file = 'split/' + seq_data[k] + '.csv'
    data = pd.read_csv(file)
    data = data.iloc[:, :].values
    data_bins = np.zeros((36, 4096))
    for j in range(0, 36):
        data_bins[j] = np.reshape(data[j*4096:(j*4096)+4096], (4096))
    data_bins = np.absolute(data_bins)
    X_features = np.zeros((36, 31))
    for i in range(0, 36):
        X_features[i, 0] = np.mean(data_bins[i])
        X_features[i, 1] = np.median(data_bins[i])
        X_features[i, 2] = np.std(data_bins[i])
        X_features[i, 3] = np.max(data_bins[i])
        X_features[i, 4] = np.var(data_bins[i])
        X_features[i, 5] = np.ptp(data_bins[i])
        X_features[i, 6] = np.percentile(data_bins[i], q=10)
        X_features[i, 7] = np.percentile(data_bins[i], q=25)
        X_features[i, 8] = np.percentile(data_bins[i], q=50)
        X_features[i, 9] = np.percentile(data_bins[i], q=75)
        X_features[i, 10] = np.percentile(data_bins[i], q=90)
        X_features[i, 11] = scipy.stats.entropy(data_bins[i])
        X_features[i, 12] = scipy.stats.kurtosis(data_bins[i])
        X_features[i, 13] = scipy.stats.skew(data_bins[i])
        if i < 35:
            X_features[i, 14] = np.correlate(data_bins[i], data_bins[i + 1])  # corr of two consecutive bins
        if i < 26:
            X_features[i, 15] = np.correlate(data_bins[i], data_bins[i + 10])  # corr of bins 10 apart
    X_fft = np.zeros((36, 4096))
    fftArr = np.array([])
    for i in range(0, 36):
        X_fft[i] = np.fft.fft(data_bins[i])  # imaginary parts are dropped into the real-valued array
    for i in range(0, 36):
        X_features[i, 16] = np.mean(X_fft[i])
        X_features[i, 17] = np.median(X_fft[i])
        X_features[i, 18] = np.std(X_fft[i])
        X_features[i, 19] = np.max(X_fft[i])
        X_features[i, 20] = np.var(X_fft[i])
        X_features[i, 21] = np.ptp(X_fft[i])
        X_features[i, 22] = np.percentile(X_fft[i], q=10)
        X_features[i, 23] = np.percentile(X_fft[i], q=25)
        X_features[i, 24] = np.percentile(X_fft[i], q=50)
        X_features[i, 25] = np.percentile(X_fft[i], q=75)
        X_features[i, 26] = np.percentile(X_fft[i], q=90)
        X_features[i, 27] = scipy.stats.kurtosis(X_fft[i])
        X_features[i, 28] = scipy.stats.skew(X_fft[i])
        if i < 35:
            X_features[i, 29] = np.correlate(X_fft[i], X_fft[i + 1])  # corr of two consecutive bins
        if i < 26:
            X_features[i, 30] = np.correlate(X_fft[i], X_fft[i + 10])  # corr of bins 10 apart
    X_features = sc_X.transform(X_features)
    full_data[k] = X_features
np.save("all_data", full_data)
allData = np.load("all_data.npy")
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,145 | Alfiesan/earthquakePrediction | refs/heads/master | /embedding/BinToEmbedding.py | import numpy as np
from data_analysis.library.Bin import Bin | {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,146 | Alfiesan/earthquakePrediction | refs/heads/master | /data_analysis/library/DataFilters.py | import numpy as np
import pandas as pd
import collections
import logging
from sklearn import preprocessing
TimeSlice = collections.namedtuple('TimeSlice', 'ttf data')
class DataFilter:
    def __init__(self):
        self.featureSize = 150_000  # a chunk of 0.0375 seconds of seismic data (ordered in time), recorded at 4 MHz,
        # hence 150,000 data points; the output is the time remaining until the following lab earthquake, in seconds.
        """
        self.sourceSSD = '/home/exx/muktadir/data/train.csv'
        self.sourceHDD = '/home/exx/muktadir/data/train.csv'
        self.destFolderSSD = '/home/exx/muktadir/data/'
        self.destFolderHDD = '/home/exx/muktadir/data/'
        """
        self.sourceSSD = 'C:/earthquake/train.csv'
        self.sourceHDD = 'F:/myProjects/cmps242/earthquake/data/train.csv'
        self.destFolderSSD = 'C:/earthquake/'
        self.destFolderHDD = 'F:/myProjects/cmps242/earthquake/data/'
        pass

    def createChunkIterator(self, chunkSizeInM=100, ttfDtype=np.float64):
        chunkSize = chunkSizeInM * 1000000
        return pd.read_csv(
            self.sourceSSD,
            chunksize=chunkSize,
            dtype={'acoustic_data': np.int16, 'time_to_failure': ttfDtype}
        )
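    # Hedged usage sketch (not in the original): stream train.csv in 100M-row
    # chunks and keep every 4096th row, e.g.
    #   df = DataFilter()
    #   sampled = df.getPositionalDataFromChunks(df.createChunkIterator(), start=0, step=4096)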
    def loadCSVFromHDD(self, filename, ttfDtype=np.float64):
        location = self.destFolderHDD + filename
        return pd.read_csv(location, dtype={'acoustic_data': np.int16, 'time_to_failure': ttfDtype})

    def getPositionalDataInNP(self, df, start, step):
        return df[start::step].values

    def getPositionalDataFromChunks(self, chunks: pd.DataFrame, start, step, ignore_index=True):
        """Assumes that positions are preserved across chunks"""
        data = pd.DataFrame()
        for chunk in chunks:
            data = data.append(chunk[start::step], ignore_index=ignore_index)
        return data

    def getPositionalDataInNPFromChunks(self, chunks: pd.DataFrame, start, step):
        """Assumes that positions are preserved across chunks"""
        dataList = []
        for chunk in chunks:
            dataList.append(chunk.values.tolist())
        return np.array(dataList)

    def saveDF(self, df, filename, index=False):
        df.to_csv(self.destFolderHDD + filename, index=index, chunksize=10000)
        pass

    def savePositionalDFFromChunks(self, chunks: pd.DataFrame, start, step, ignore_index=True, rename=True):
        df = self.getPositionalDataFromChunks(chunks, start, step, ignore_index)
        if rename:
            df.columns = ['acoustic', 'ttf']
        filename = 'every_' + str(step) + '_from_' + str(start) + '.csv'
        self.saveDF(df, filename)
        pass
    def getBins(self, df):
        """ TODO: Fix this. A bin boundary can fall anywhere, and there are two corner cases: the ttf diff is big (~0.001) or negative (right after an earthquake). """
        curTime = -1
        data = []
        tempSlice = []
        for row in df.itertuples(index=False):
            if curTime != row.time_to_failure:
                if curTime > -1:
                    # save it
                    print(f"appending {curTime} with {len(tempSlice)} data points")
                    data.append(TimeSlice(ttf=curTime, data=np.array(tempSlice)))
                tempSlice = []
                curTime = row.time_to_failure
            tempSlice.append(row.acoustic_data)
        return data
    def printBinBoundary(self, df, binNo):
        # TODO: fix - each packet is supposed to have 4096 samples
        start = 4096 * binNo - 10
        for i in range(20):
            start = start + 1
            if start in df.index:
                diff = df.time_to_failure[start-1] - df.time_to_failure[start]
                if diff < 0.00001:
                    print(f" {start-1}, {start}: {diff}")
                else:
                    logging.warning(f" {start-1}, {start}: {diff}")
        pass
    def getBin(self, df, binNo):
        """ TODO: This method won't work for bins too far in, or right after an earthquake """
        start = 4096 * (binNo - 1)
        samples = []
        # fix start if it's not binNo 1
        if binNo > 1:
            diff = df.time_to_failure[start-1] - df.time_to_failure[start]
            while diff < 0.00001:
                start = start - 1
                diff = df.time_to_failure[start-1] - df.time_to_failure[start]
        diff = df.time_to_failure[start+4094] - df.time_to_failure[start+4095]
        if diff < 0.00001:
            samples = df[start:start+4096]
        else:
            samples = df[start:start+4095]
        return samples
    def getBinStats(self, df, binNo):
        binDf = self.getBin(df, binNo)
        dic = {}
        dic['mean'] = binDf.time_to_failure.mean()
        dic['var'] = binDf.time_to_failure.var()
        dic['median'] = binDf.time_to_failure.median()
        dic['max'] = binDf.time_to_failure.max()
        dic['min'] = binDf.time_to_failure.min()
        dic['dif_max_min'] = dic['max'] - dic['min']
        dic['dif_median_mean'] = dic['median'] - dic['mean']
        return dic

    def normalizeDF(self, df):
        # note: preprocessing.normalize expects a 2D array; a Series needs .values.reshape(-1, 1)
        df.acoustic_data = preprocessing.normalize(df.acoustic_data)
        pass

    def scaleDF(self, df):
        df.acoustic_data = preprocessing.scale(df.acoustic_data)
        pass
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,147 | Alfiesan/earthquakePrediction | refs/heads/master | /embedding/Stats.py | import numpy as np
import logging
from scipy import stats
import pandas as pd
from data_analysis.library.Bin import Bin
from sklearn.linear_model import *
class Stats:
    def getBasicStatsList(self, data: np.ndarray):
        data = data[np.isfinite(data)]
        scistats = stats.describe(data)
        embedding = []
        embedding.append(scistats.mean)
        embedding.append(scistats.variance)
        embedding.append(np.median(data))
        embedding.append(scistats.skewness)
        embedding.append(scistats.kurtosis)
        embedding.append(scistats.minmax[1])
        embedding.append(scistats.minmax[0])
        embedding.append(scistats.minmax[1] - scistats.minmax[0])
        embedding.append(np.quantile(data, 0.99))
        embedding.append(np.quantile(data, 0.95))
        embedding.append(np.quantile(data, 0.90))
        embedding.append(np.quantile(data, 0.01))
        embedding.append(np.quantile(data, 0.05))
        embedding.append(np.quantile(data, 0.10))
        embedding.append(scistats.variance - scistats.mean)
        # 15 features, up to variance - mean
        return embedding
def getTrendStatsList(self, x:pd.core.series.Series, windows = [5, 10, 20, 40, 100, 1000]):
embedding = []
for w in windows:
x_roll_abs_mean = x.abs().rolling(w).mean().dropna().values
x_roll_mean = x.rolling(w).mean().dropna().values
x_roll_std = x.rolling(w).std().dropna().values
x_roll_min = x.rolling(w).min().dropna().values
x_roll_max = x.rolling(w).max().dropna().values
embedding.append( x_roll_std.mean() )
embedding.append( x_roll_std.std())
embedding.append( x_roll_std.max())
embedding.append( x_roll_std.min())
embedding.append( np.quantile(x_roll_std, 0.01))
embedding.append( np.quantile(x_roll_std, 0.05))
embedding.append( np.quantile(x_roll_std, 0.10))
embedding.append( np.quantile(x_roll_std, 0.95))
embedding.append( np.quantile(x_roll_std, 0.99))
embedding.append( x_roll_mean.mean())
embedding.append( x_roll_mean.std())
embedding.append( x_roll_mean.max())
embedding.append( x_roll_mean.min())
embedding.append( np.quantile(x_roll_mean, 0.05))
embedding.append( np.quantile(x_roll_mean, 0.95))
embedding.append( x_roll_abs_mean.mean())
embedding.append( x_roll_abs_mean.std())
embedding.append( np.quantile(x_roll_abs_mean, 0.05))
embedding.append( np.quantile(x_roll_abs_mean, 0.95))
embedding.append( x_roll_min.std())
embedding.append( x_roll_min.max())
embedding.append( np.quantile(x_roll_min, 0.05))
embedding.append( np.quantile(x_roll_min, 0.95))
embedding.append( x_roll_max.std())
embedding.append( x_roll_max.min())
embedding.append( np.quantile(x_roll_max, 0.05))
embedding.append( np.quantile(x_roll_max, 0.95))
# 27 features per window
# 6 x 27 = 162 features with the default windows
return embedding
def getLinearSeasonalityStatsList(self, arr, abs_values=False):
"""Fit a univariate linear regression over the index and return the slope and the intercept."""
embedding = []
idx = np.array(range(len(arr)))
if abs_values:
arr = np.abs(arr)
lr = LinearRegression()
lr.fit(idx.reshape(-1, 1), arr)
embedding.append( lr.coef_[0] )
embedding.append( lr.intercept_ )
return embedding
def getFirstOrderSeasonalityStatsList(self, x:pd.core.series.Series):
seasonalData = x.diff()
embedding = self.getBasicStatsList(seasonalData.values) #15
embedding.extend( self.getTrendStatsList(seasonalData, windows=[5,10, 20])) # 3 * 27
return embedding
def getTTFDiffStatsList(self, ttfs):
return self.getBasicStatsList(ttfs)
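# Hypothetical smoke test (not part of the repo) for the feature counts claimed in the
# comments above: 15 basic features, and 27 per window -> 6 x 27 = 162 with the default windows.
if __name__ == '__main__':
    series = pd.Series(np.random.RandomState(0).randn(5000))
    featurizer = Stats()
    assert len(featurizer.getBasicStatsList(series.values)) == 15
    assert len(featurizer.getTrendStatsList(series)) == 162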
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,148 | Alfiesan/earthquakePrediction | refs/heads/master | /library/OneStatsGeneratorForTestPos.py | from library.TestIO import TestIO
from os.path import dirname, basename, isfile
import glob
import pandas as pd
import numpy as np
import logging, math, re
from embedding.OneStatsEmbedding import OneStatsEmbedding
from embedding.EmbeddingCache import EmbeddingCacheTest
from data_analysis.library.Scalers import Scalers
class OneStatsGeneratorForTestPos:
def __init__(self, windowSize = 200):
self.embeddingType = 'one-stats-test'
self.io = TestIO(self.embeddingType)
self.scalers = Scalers()
self.windowSize = windowSize
self.lastEmbeddingId = 0
self.numEmbeddings = 0
self.numberOfEmbeddingPerFile = math.ceil((150_000 - 36 * 4096) / windowSize)
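# (150_000 - 36 * 4096) = 2_544 leftover samples per 150_000-sample test file;
# ceil(2_544 / 200) = 13 embeddings per file with the default windowSize of 200.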
self.embedder = OneStatsEmbedding( self.scalers.getScaler('absScaler') ) # positive scaler
pass
def generateEmbeddings(self):
csvPaths = glob.glob(dirname(self.io.sourceFolder)+"/*.csv")
i = 0
for path in csvPaths:
self.createEmbeddingsFromPath(path)
i += 1
if i % 100 == 0:
print(f"processed {i} files")
print(f"generated {self.lastEmbeddingId} embeddings")
print(f"generated {self.lastEmbeddingId} embeddings")
def createEmbeddingsFromPath(self, path):
df = pd.read_csv(
path,
dtype = {'acoustic_data': np.int16}
)
df.acoustic_data = df.acoustic_data.abs() # converting to positive vals.
start = 0
for _ in range(self.numberOfEmbeddingPerFile):
end = start + 4096
binDf = df[start: end]
# create embedding
self.createEmbedddingFromBinDf(binDf)
start = end
pass
def createEmbedddingFromBinDf(self, binDf):
features = self.embedder.fromUnnormalizedDfData(binDf)
self.lastEmbeddingId += 1
embedding = EmbeddingCacheTest(embeddingId=self.lastEmbeddingId, type=self.embeddingType, features = features)
self.io.save(embedding)
pass
def getBatch(self, batchNo, batchSize=16):
start = (batchNo - 1) * batchSize + 1
end = start + batchSize
batchList = []
for embeddingId in range(start, end):
try:
embedding = self.io.readById(embeddingId)
batchList.append(embedding.features)
except Exception as e:
logging.warning(f'encountered exception while reading embedding #{embeddingId}: {e}. Silently stopping the batch')
break
pass
return np.array(batchList)
def batches(self, batchSize = 16):
# numBatches = math.ceil(self.numEmbeddings / 16)
# for i in range(numBatches):
# yield self.getBatch(i+1, batchSize)
# pass
i = 0
while True:
i = i + 1
data = self.getBatch(i, batchSize)
if len(data) == 0:
break
yield data
def batchesByFile(self):
csvPaths = glob.glob(dirname(self.io.sourceFolder)+"/*.csv")
i = 0
for path in csvPaths:
i = i + 1
data = self.getBatch(i, self.numberOfEmbeddingPerFile)
if len(data) == 0:
break
yield self.getTestName(path), data
def getTestName(self, path):
# print(path)
return re.findall(r'.*[\/\\]([a-zA-Z0-9_]+)\.csv$', path)[0]
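# Hypothetical usage sketch; assumes the dill'ed scalers exist under ./scalers/ and that
# generateEmbeddings() has already populated the embedding folder:
if __name__ == '__main__':
    gen = OneStatsGeneratorForTestPos(windowSize = 200)
    for testName, features in gen.batchesByFile():
        print(testName, features.shape)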
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,149 | Alfiesan/earthquakePrediction | refs/heads/master | /data_analysis/library/BinJoiner.py | # 1. start from an earth quake and go backward. Join every n bins together
# 2. start anywhere and join a window of n bins till an earthquake
# 3. Add an especial seperator for bin time diff. Fill with zeros? experiment.
import numpy as np
import logging, dill, fnmatch, os
from .Bin import Bin
from .BinIO import BinIO
class BinJoiner:
def __init__(self):
pass | {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
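# A minimal sketch (hypothetical, not in the repo) of strategy 2 above, using the np and Bin
# imports at the top of this file: join a window of n consecutive bins into a single Bin,
# keeping the last bin's ttf as the target. Quake handling and the separator idea from
# point 3 are deliberately omitted.
def joinWindow(bins, n):
    window = bins[:n]
    data = np.concatenate([b.data for b in window])
    return Bin(binId = window[0].binId,
               ttf = window[-1].ttf,
               data = data,
               quakeIndex = -1,
               trIndexStart = window[0].trIndexStart)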
67,150 | Alfiesan/earthquakePrediction | refs/heads/master | /data_analysis/library/BinProcessor.py | import numpy as np
from scipy import stats
from .Bin import Bin
import seaborn as sns
sns.set(style="darkgrid")
class BinProcessor:
def __init__(self):
pass
def getBinStats(self, aBin):
scistats = stats.describe( aBin.data )
dic = {}
dic['mean'] = scistats.mean
dic['var'] = scistats.variance
dic['median'] = np.median( aBin.data )
dic['skewness'] = scistats.skewness
dic['kurtosis'] = scistats.kurtosis
dic['max'] = scistats.minmax[1]
dic['min'] = scistats.minmax[0]
dic['dif_max_min'] = dic['max'] - dic['min']
dic['dif_median_mean'] = dic['median'] - dic['mean']
return dic
def makeDataPositive(self, aBin):
data = np.abs(aBin.data)
return self.updateData(aBin, data)
def updateData(self, aBin, newData):
return Bin(binId = aBin.binId,
ttf = aBin.ttf,
data = newData,
quakeIndex = aBin.quakeIndex,
trIndexStart = aBin.trIndexStart
)
def plot(self, aBin, ax=None):
x = np.arange(len(aBin.data))
sns.scatterplot(x=x, y=aBin.data, s=10, ax=ax,
label=f'{aBin.binId}-ttf-{aBin.ttf}')
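# Hypothetical usage sketch on a synthetic Bin (run as a module so the relative imports resolve):
if __name__ == '__main__':
    demo = Bin(binId = 1, ttf = 1.4, data = np.random.randn(4096), quakeIndex = -1, trIndexStart = 0)
    print(BinProcessor().getBinStats(demo))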
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,151 | Alfiesan/earthquakePrediction | refs/heads/master | /data_analysis/library/Stats150K.py | import numpy as np
class Stats150K:
def __init__(self):
pass
def createFromDf(self, df, windowSize=150_000, stopAfter = 0, addBinNoToDf = False, dontSaveToDisk = False):
nextId = 0
start = nextId * windowSize
nextId += 1
end = start + 150_000
while end <= len(df) and (stopAfter == 0 or stopAfter >= nextId):
nextDf = df[start:end]
print( f"size of next df {len(nextDf)}, start {start}, end {end}")
start = nextId * windowSize
nextId += 1
end = start + 150_000
pass
def resample(self, df, start, size):
return df
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,152 | Alfiesan/earthquakePrediction | refs/heads/master | /embedding/EmbeddingIO.py | import numpy as np
import logging, dill, fnmatch, os
from embedding.EmbeddingCache import EmbeddingCache
class EmbeddingIO:
def __init__(self):
"""
self.destFolderSSD = '/home/exx/muktadir/data/'
self.destFolderHDD = '/home/exx/muktadir/data/'
"""
self.destFolderSSD = 'C:/earthquake/'
self.destFolderHDD = 'F:/myProjects/cmps242/earthquake/data/'
self.destFolder = self.destFolderSSD
pass
def save(self, anEm, emType):
fname = self.getFileName(anEm.embeddingId, emType)
# print( fname)
with open(fname, 'wb') as outfile:
dill.dump(anEm, outfile)
pass
def getFileName(self, embeddingId, emType):
return self.getFolder(emType) + self.getRelativeFileName(embeddingId, emType)
def getFolder(self, emType):
return self.destFolder + emType + '-embedding/'
def getRelativeFileName(self, embeddingId, emType):
return 'em_' + str( embeddingId ) + '.dill'
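# e.g. getFileName(5, 'one-stats-test') -> 'C:/earthquake/one-stats-test-embedding/em_5.dill'
# with the default destFolder above.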
def readById(self, embeddingId, emType):
fname = self.getFileName(embeddingId, emType)
return self.read(fname)
def read(self, fname):
with open(fname, 'rb') as f:
out = dill.load(f)
return out | {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,153 | Alfiesan/earthquakePrediction | refs/heads/master | /data_analysis/library/BinIO.py | import numpy as np
import logging, dill, fnmatch, os
from .Bin import Bin
class BinIO:
def __init__(self):
"""
self.sourceSSD = 'C:/earthquake/train.csv'
self.sourceHDD = 'F:/myProjects/cmps242/earthquake/data/train.csv'
self.destFolderSSD = 'C:/earthquake/'
self.destFolderHDD = 'F:/myProjects/cmps242/earthquake/data/'
self.sourceSSD = '/home/exx/muktadir/data/train.csv'
self.sourceHDD = '/home/exx/muktadir/data/train.csv'
self.destFolderSSD = '/home/exx/muktadir/data/'
self.destFolderHDD = '/home/exx/muktadir/data/'
"""
self.sourceSSD = 'C:/earthquake/train.csv'
self.sourceHDD = 'F:/myProjects/cmps242/earthquake/data/train.csv'
self.destFolderSSD = 'C:/earthquake/'
self.destFolderHDD = 'F:/myProjects/cmps242/earthquake/data/'
self.destFolder = self.destFolderSSD
pass
def saveBin(self, aBin, binType):
fname = self.getBinFileName(aBin.binId, binType)
# print( fname)
with open(fname, 'wb') as outfile:
dill.dump(aBin, outfile)
pass
def getBinFileName(self, binId, binType):
return self.getBinFolder(binType) + self.getRelativeFileName(binId, binType)
def getBinFolder(self, binType):
return self.destFolder + binType + '-bins/'
def getRelativeFileName(self, binId, binType):
return binType + '_bin_' + str( binId ) + '.dill'
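# e.g. getBinFileName(12227, 'r') -> 'C:/earthquake/r-bins/r_bin_12227.dill' with the
# default destFolder above (the r_bin_*.dill files loaded elsewhere in this repo).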
def readBinById(self, binId, binType):
fname = self.getBinFileName(binId, binType)
return self.readBin(fname)
def readBin(self, fname):
with open(fname, 'rb') as f:
out = dill.load(f)
return out
def countBin(self, binType):
return len( os.listdir( self.getBinFolder(binType) ) )
def readBins(self, fromId, size, binType):
bins = []
for i in range(size):
bins.append( self.readBinById(fromId + i, binType) )
return bins
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,154 | Alfiesan/earthquakePrediction | refs/heads/master | /data_analysis/library/Bin.py | import collections
Bin = collections.namedtuple( 'Bin', 'binId, ttf, data, quakeIndex, trIndexStart' ) #quakeIndex -1 means no quake in this bin
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,155 | Alfiesan/earthquakePrediction | refs/heads/master | /embedding/EmbeddingCache.py | import collections
EmbeddingCache = collections.namedtuple( 'EmbeddingCache', 'embeddingId, firstBinId, type, features, ttf' )
EmbeddingCacheTest = collections.namedtuple( 'EmbeddingCacheTest', 'embeddingId, type, features' ) | {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,156 | Alfiesan/earthquakePrediction | refs/heads/master | /data_analysis/library/RawBinManager.py | import numpy as np
import pandas as pd
import collections
import logging, dill, fnmatch, os
from .Bin import Bin
from .BinIO import BinIO
from sklearn import preprocessing
from .Scalers import Scalers
class RawBinManager:
def __init__(self, binType = 'r', makePositive = False, normalize = False, scale=False ):
self.binIO = BinIO()
self.scalers = Scalers()
self.binType = binType
# self.rawBinPrefix = binType + '_'
# self.rawBinFolder = self.destFolderHDD + binType + '-bins/'
self.curStatId = 0
self.stats = {}
self.makePositive = makePositive
self.normalize = normalize
self.scale = scale
self.scaler = None
self.normalizer = None
pass
def createRawBinsFromDf(self, df, stopAfter = 0, addBinNoToDf = False, dontSaveRawToDisk = False):
if self.makePositive:
df.acoustic_data = np.abs(df.acoustic_data)
reshapedAcousticDataForPreprocessing = df.acoustic_data.values.reshape(-1, 1)
if self.normalize:
df['norm'] = self.scalers.getScaler('absNormalizer').transform(reshapedAcousticDataForPreprocessing)
logging.warning('abs normalized df')
if self.scale:
df['scaled'] = self.scalers.getScaler('absScaler').transform(reshapedAcousticDataForPreprocessing)
logging.warning('abs scaled df')
else:
reshapedAcousticDataForPreprocessing = df.acoustic_data.values.reshape(-1, 1)
if self.normalize:
df['norm'] = self.scalers.getScaler('normalizer').transform(reshapedAcousticDataForPreprocessing)
logging.warning('normalized df')
if self.scale:
df['scaled'] = self.scalers.getScaler('scaler').transform(reshapedAcousticDataForPreprocessing)
logging.warning('scaled df')
if addBinNoToDf is True:
df['binNo'] = np.zeros(len(df), dtype=np.int32)
# 1. init stats
self.initStatsForCurrentDf(df)
# 2. Loop over bins
nextId = 0
index = -1
nextBinDf, index = self.getNextBinDf(df, index)
print( f"last index: {index} and number records in nextdf { nextBinDf.shape[0] } {nextBinDf.empty is False}" )
while ( (nextBinDf.empty is False ) and (nextId <= stopAfter or stopAfter == 0) ):
nextId = nextId + 1
nextBin = self.convertDfIntoBinTuple(nextId, nextBinDf)
if (nextId % 2000) == 0:
print( f'processed {nextId}th raw bin' )
# 3. create bin stats
self.addBinStats(nextBin)
# 4. save bins
if dontSaveRawToDisk is False:
self.saveRawBin(nextBin, self.binType)
if self.normalize:
self.saveRawBin(self.getNormalBin(nextBin, nextBinDf), self.binType + 'nor')
if self.scale:
self.saveRawBin(self.getScaledBin(nextBin, nextBinDf), self.binType + 'scaled')
# 5. augment df?
if addBinNoToDf is True:
self.addBinNoToDf(df, nextBinDf, nextId)
# 6. next
nextBinDf, index = self.getNextBinDf(df, index)
if dontSaveRawToDisk is False:
print(f'saved {nextId} bins to {self.binIO.getBinFolder(self.binType)} folder')
else:
print(f'Processed {nextId} bins, but not saved.')
pass
def initStatsForCurrentDf(self, df):
self.curStatId = len(df)
self.stats[self.curStatId] = {}
self.stats[self.curStatId]["earthquakeBinIds"] = []
self.stats[self.curStatId]["sizeFrequencies"] = {}
self.stats[self.curStatId]["binIdsBySize"] = {}
pass
def addBinStats(self, nextBin):
sizeFrequencies = self.stats[self.curStatId]["sizeFrequencies"]
binIdsBySize = self.stats[self.curStatId]["binIdsBySize"]
sizeKey = len(nextBin.data)
if sizeKey not in sizeFrequencies:
sizeFrequencies[sizeKey] = 0
binIdsBySize[sizeKey] = []
sizeFrequencies[sizeKey] = sizeFrequencies[sizeKey] + 1
binIdsBySize[sizeKey].append(nextBin.binId)
pass
def addBinNoToDf(self, df, nextBinDf, nextId):
#print( nextBinDf.head(5) )
for row in nextBinDf.itertuples(index = True):
#print(f'adding binId {nextId} to row {row.Index}')
df.loc[row.Index, 'binNo'] = nextId # chained indexing would assign to a copy
pass
def getNextBinDf(self, df, lastIndex = -1):
"""
index is the end point of the last bin
"""
start = lastIndex + 1
if start >= df.shape[0]:
return pd.DataFrame(), lastIndex
end = start + 4094
while (end < df.shape[0]):
if (end + 1) == df.shape[0]:
break
diff = df.time_to_failure[end] - df.time_to_failure[end+1]
if diff > 0.00001:
break
end = end + 1
return df[start:end+1], end
def convertDfIntoBinTuple(self, nextId, nextBinDf):
"""code smell: does earthquake calculations."""
data = nextBinDf.acoustic_data.values
ttf = nextBinDf.iloc[-1].time_to_failure
quakeIndex = -1
for i in range(1, len(data)):
if nextBinDf.time_to_failure.iloc[i-1] - nextBinDf.time_to_failure.iloc[i] < -0.001:
#negative value means ttf jumped. #todo confirm that this is correct. It can be incorrect.
quakeIndex = i-1
self.stats[self.curStatId]["earthquakeBinIds"].append( nextId )
print( f'bin {nextId} has a quake at index {quakeIndex}' )
break
return Bin(binId = nextId,
ttf = ttf,
data = data,
quakeIndex = quakeIndex,
trIndexStart = nextBinDf.index[0]
)
def getNormalBin(self, rawBin, rawBinDf):
return Bin(binId = rawBin.binId,
ttf = rawBin.ttf,
data = rawBinDf.norm.values,
quakeIndex = rawBin.quakeIndex,
trIndexStart = rawBin.trIndexStart
)
def getScaledBin(self, rawBin, rawBinDf):
return Bin(binId = rawBin.binId,
ttf = rawBin.ttf,
data = rawBinDf.scaled.values,
quakeIndex = rawBin.quakeIndex,
trIndexStart = rawBin.trIndexStart
)
def saveRawBin(self, nextBin, binType ):
self.binIO.saveBin( nextBin, binType )
pass
def readRawBinById(self, binId, binType):
return self.binIO.readBinById(binId, binType)
def countRawBin(self):
return self.binIO.countBin(self.binType)
def readRawBins(self, fromId, size):
return self.binIO.readBins(fromId, size, self.binType)
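# Hypothetical smoke test on a tiny synthetic frame (column names match the training csv);
# nothing is written to disk. Run as a module so the relative imports resolve.
if __name__ == '__main__':
    n = 10_000
    df = pd.DataFrame({
        'acoustic_data': np.random.randint(-5500, 5500, size = n).astype(np.int16),
        'time_to_failure': np.linspace(1.0, 0.99, n),
    })
    RawBinManager(binType = 'r').createRawBinsFromDf(df, dontSaveRawToDisk = True)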
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,157 | Alfiesan/earthquakePrediction | refs/heads/master | /embedding/MultipleBinEmbeddingType.py | from enum import Enum
class MultipleBinEmbeddingType(Enum):
ONE_STATS = 1
EACH_STATS = 2
CNN_STATS = 3
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,158 | Alfiesan/earthquakePrediction | refs/heads/master | /data_analysis/library/BinNormalizer.py | import numpy as np
import logging, dill
from .Bin import Bin
from .BinProcessor import BinProcessor
from .BinIO import BinIO
class BinNormalizer:
def __init__(self, min = -5500, max = 5500):
self.binProcessor = BinProcessor()
self.binIO = BinIO()
self.min = min
self.max = max
self.range = max - min
self.numBins = 153584
self.toBinType = 'nor'
pass
def normByMinMax(self, aBin: Bin):
data = (aBin.data - self.min) / self.range
return self.binProcessor.updateData(aBin, data)
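# e.g. with the defaults min = -5500, max = 5500: a raw value of 0 maps to
# (0 - (-5500)) / 11000 = 0.5, so amplitudes in [-5500, 5500] land in [0, 1].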
def createNormalizedBins(self, binType = 'r', fromId = 1, toId = 0 ):
""" It makes all the acoustic data from raw bins positive """
if toId == 0:
toId = self.numBins
for binId in range(fromId, toId + 1):
if (binId % 2000) == 0:
print( f'processed {binId}th bin' )
fromBin = self.binIO.readBinById(binId, binType)
toBin = self.normByMinMax(fromBin)
self.binIO.saveBin( toBin, self.toBinType )
pass
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,159 | Alfiesan/earthquakePrediction | refs/heads/master | /data_analysis/library/Scalers.py | from sklearn import preprocessing
import numpy as np
import logging, dill
class Scalers:
def __init__(self):
#self.scalerFolder = '/home/exx/muktadir/earthquakePrediction/scalers/'
self.scalerFolder = './scalers/'
pass
def createScalers(self, df):
reshapedAcousticDataForPreprocessing = df.acoustic_data.values.reshape(-1, 1)
absValues = np.abs( reshapedAcousticDataForPreprocessing )
self.normalizer = preprocessing.MinMaxScaler((0,5)).fit(reshapedAcousticDataForPreprocessing)
with open(self.scalerFolder + 'normalizer', 'wb') as outfile:
dill.dump(self.normalizer, outfile)
self.scaler = preprocessing.RobustScaler().fit(reshapedAcousticDataForPreprocessing)
with open(self.scalerFolder + 'scaler', 'wb') as outfile:
dill.dump(self.scaler, outfile)
self.absNormalizer = preprocessing.MinMaxScaler((0,5)).fit(absValues)
with open(self.scalerFolder + 'absNormalizer', 'wb') as outfile:
dill.dump(self.absNormalizer, outfile)
self.absScaler = preprocessing.RobustScaler().fit(absValues)
with open(self.scalerFolder + 'absScaler', 'wb') as outfile:
dill.dump(self.absScaler, outfile)
pass
def getScaler(self, name):
fname = self.scalerFolder + name
with open(fname, 'rb') as f:
out = dill.load(f)
return out
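# Hypothetical usage sketch, assuming createScalers(df) was run once on the training frame
# so that './scalers/absScaler' exists on disk:
#   absScaler = Scalers().getScaler('absScaler')
#   scaled = absScaler.transform(np.abs(values).reshape(-1, 1))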
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,160 | Alfiesan/earthquakePrediction | refs/heads/master | /RNN_Code/raw_data_NN.py |
import pandas as pd
import numpy as np
import matplotlib.pylab as plt
import keras
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dropout
from sklearn.metrics import mean_absolute_error
import dill
bin1 = dill.load(open("r_bin_1.dill", "rb"))
all_bins = []
#50085878
for i in range(1, 30000):
bin_name = "r_bin_" + str(i) + ".dill"
curr_bin = dill.load(open(bin_name, "rb"))
bin_data = curr_bin[2]
if(bin_data.size == 4095):
bin_data = np.append(bin_data, 0)
bin_data = np.append(bin_data, curr_bin[1])
all_bins.append(bin_data)
sqArr = np.array(all_bins)
newData = pd.DataFrame(all_bins)
newData.to_csv("first_2+_quakes.csv")
data = pd.read_csv("first_2+_quakes.csv")
X = newData.iloc[:, 1:4097].values
y = newData.iloc[:, 4097].values
X = X.astype(np.int16)
y = y.astype(np.float64)
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2)
model = Sequential()
model.add(Dense(units = 8192, activation = 'relu', kernel_initializer = 'uniform', input_dim = 4096))
model.add(Dropout(.2))
model.add(Dense(units = 4096, activation = 'relu', kernel_initializer = 'uniform'))
model.add(Dropout(.2))
model.add(Dense(units = 4096, activation = 'relu', kernel_initializer = 'uniform'))
model.add(Dropout(.2))
model.add(Dense(units = 4096, activation = 'relu', kernel_initializer = 'uniform'))
model.add(Dropout(.2))
model.add(Dense(units = 4096, activation = 'relu', kernel_initializer = 'uniform'))
model.add(Dropout(.2))
model.add(Dense(units = 2048, activation = 'relu', kernel_initializer = 'uniform'))
model.add(Dense(units = 1024, activation = 'relu', kernel_initializer = 'uniform'))
model.add(Dense(units = 1, kernel_initializer = 'uniform'))
model.compile(optimizer = 'adam', loss = 'mean_absolute_error', metrics = ['mae']) # accuracy is not meaningful for a regression target
model.fit(X_train, y_train, batch_size = 1000, epochs = 100, verbose = 2)
y_pred = model.predict(X_test)
mean_absolute_error(y_test, y_pred) | {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,161 | Alfiesan/earthquakePrediction | refs/heads/master | /library/SingleBinDataGenerator.py | import numpy as np
import logging, dill, fnmatch, os
from data_analysis.library.Bin import Bin
from data_analysis.library.BinIO import BinIO
from library.RegressionDataGenerator import RegressionDataGenerator
from embedding.BinEmbedding import BinEmbedding
class SingleBinDataGenerator(RegressionDataGenerator):
def __init__(self, binType='nor', embedding='bin', startBinId = 1, numBins = 153584, dim=(64,64), batch_size=32, n_channels=1, shuffle=False):
self.binType = binType
self.embedding = embedding
self.binIO = BinIO()
self.startBinId = startBinId
self.numBins = numBins
# Make IDs here.
list_IDs = self.getListIds()
self.embedder = self.getEmbedder()
super(SingleBinDataGenerator, self).__init__(list_IDs, batch_size, dim, n_channels, shuffle)
pass
def getListIds(self):
if self.embedding == 'bin':
return list(range(self.startBinId, self.numBins +1))
def getEmbedder(self):
if self.embedding == 'bin':
return BinEmbedding(4096)
def __getitem__(self, index):
'Generate one batch of data'
# Generate indexes of the batch
indexes = self.indexes[index*self.batch_size:(index+1)*self.batch_size]
# Find list of IDs
list_IDs_temp = [self.list_IDs[k] for k in indexes]
# Generate data
X, y = self.__data_generation(list_IDs_temp)
return X, y
def __data_generation(self, list_IDs_temp):
'Generates data containing batch_size samples' # X : (n_samples, *dim, n_channels)
# Initialization
X = np.empty((self.batch_size, *self.dim, self.n_channels))
y = np.empty(self.batch_size)
# Generate data
for i, ID in enumerate(list_IDs_temp):
# read the bin
aBin = self.binIO.readBinById(ID, self.binType)
# Store sample
X[i,] = self.embedder.fromBin(aBin)
# Store class
y[i] = aBin.ttf
print( X.shape )
print( y.shape )
return X, y
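# Hypothetical usage sketch, assuming the 'nor' bins exist on disk and that
# RegressionDataGenerator (not shown here) builds self.indexes from list_IDs:
#   gen = SingleBinDataGenerator(binType = 'nor', dim = (64, 64), batch_size = 32)
#   X, y = gen[0]   # X: (32, 64, 64, 1), y: (32,)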
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,162 | Alfiesan/earthquakePrediction | refs/heads/master | /embedding/CNNStatsEmbedding.py | import numpy as np
from data_analysis.library.Bin import Bin
from embedding.Embedding import Embedding
from embedding.SourceCardinality import SourceCardinality
from embedding.OneStatsEmbedding import OneStatsEmbedding
class CNNStatsEmbedding(Embedding):
"""similar features in a column. We will run 2-D CNNN with 1-D kernel"""
def __init__(self, scaler = None, binsPerEmbedding = 36):
self.type = 'cnn-stats'
self.scaler = scaler
self.binsPerEmbedding = binsPerEmbedding
self.embedding = OneStatsEmbedding(scaler)
self.dim = (binsPerEmbedding, self.embedding.numberOfFeatures, 1)
super(CNNStatsEmbedding, self).__init__(sourceCardinality = SourceCardinality.MULTI)
self.numberOfFeatures = self.embedding.numberOfFeatures
pass
    def fromBins(self, bins):  # bins: iterable of Bin, not a single Bin
        # 1. get all data & scale it using the scaler
        data = []
for aBin in bins:
binStats = self.embedding.fromUnnormalizedNumpyData(aBin.data)
data.append(binStats)
return np.array(data)
def fromBinsDf(self, df):
start = 0
data = []
for _ in range(self.binsPerEmbedding):
end = start + 4096
binStats = self.embedding.fromUnnormalizedNumpyData(df[start: end])
data.append(binStats)
start = end
return np.array(data)
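# Illustrative usage sketch (hedged: OneStatsEmbedding's feature extraction
# lives in another module, so this only runs with the package importable; the
# random signal is made up):
#
#   raw = np.abs(np.random.randn(36 * 4096))    # 36 bins of 4096 samples
#   emb = CNNStatsEmbedding(binsPerEmbedding=36)
#   features = emb.fromBinsDf(raw)              # shape (36, numberOfFeatures)
#   features = features.reshape(*emb.dim)       # (36, numberOfFeatures, 1)
#   # rows = bins, columns = stats -> input for a 2-D CNN with a 1-D kernel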
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,163 | Alfiesan/earthquakePrediction | refs/heads/master | /library/LivePlotKeras.py | import keras
from matplotlib import pyplot as plt
from IPython.display import clear_output
import seaborn as sns
sns.set(style="darkgrid")
class LivePlotKeras(keras.callbacks.Callback):
def on_train_begin(self, logs={}):
self.i = 0
self.x = []
self.losses = []
self.val_losses = []
self.fig = plt.figure(figsize=(20, 10))
self.logs = []
def on_epoch_end(self, epoch, logs={}):
self.logs.append(logs)
self.x.append(self.i)
self.losses.append(logs.get('mean_squared_error'))
self.val_losses.append(logs.get('val_mean_squared_error'))
self.i += 1
        clear_output(wait=True)
        plt.close(self.fig)  # drop the previous figure so memory does not grow per epoch
        self.fig = plt.figure(figsize=(20, 10))
plt.plot(self.x, self.losses, label="train")
plt.plot(self.x, self.val_losses, label="validation")
plt.legend()
plt.show()
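# Minimal runnable sketch (the toy data is made up). The callback reads
# 'mean_squared_error' / 'val_mean_squared_error' from the epoch logs, so the
# model must be compiled with that metric and fit with validation data:
if __name__ == "__main__":
    import numpy as np
    from keras.models import Sequential
    from keras.layers import Dense
    X = np.random.rand(256, 8)
    y = np.random.rand(256)
    model = Sequential([Dense(16, activation='relu', input_shape=(8,)),
                        Dense(1)])
    model.compile(optimizer='adam', loss='mse',
                  metrics=['mean_squared_error'])
    model.fit(X, y, validation_split=0.2, epochs=3,
              callbacks=[LivePlotKeras()])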
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,164 | Alfiesan/earthquakePrediction | refs/heads/master | /library/EmbeddingStatsGeneratorForTestPos.py | from library.TestIO import TestIO
from os.path import dirname, basename, isfile
import glob, gc, logging
import pandas as pd
import numpy as np
import math, re
from embedding.OneStatsEmbedding import *
from embedding.CNNStatsEmbedding import *
from embedding.EmbeddingCache import EmbeddingCacheTest
from data_analysis.library.Scalers import Scalers
class EmbeddingStatsGeneratorForTestPos:
def __init__(self, windowSize = 200, embeddingType = 'one-stats-test', binsPerEmbedding = 36):
self.embeddingType = embeddingType
self.io = TestIO(self.embeddingType)
self.scalers = Scalers()
self.windowSize = windowSize
self.lastEmbeddingId = 0
self.binsPerEmbedding = binsPerEmbedding
self.numberOfTestFiles = 2624
self.numberOfEmbeddingPerFile = math.ceil((150_000 - binsPerEmbedding * 4096) / windowSize)
self.numEmbeddings = self.numberOfTestFiles * self.numberOfEmbeddingPerFile
self.addDimToX = False
if embeddingType == 'one-stats-test':
self.embedder = OneStatsEmbedding( self.scalers.getScaler('absScaler') ) # positive scaler
elif embeddingType == 'cnn-stats-test':
self.addDimToX = True
self.embedder = CNNStatsEmbedding( self.scalers.getScaler('absScaler'), binsPerEmbedding=binsPerEmbedding ) # positive scaler
pass
def generateEmbeddings(self, skipFiles=0):
csvPaths = glob.glob(dirname(self.io.sourceFolder)+"/*.csv")
i = 0
for path in csvPaths:
if i % 100 == 0:
gc.collect() # TODO do it in another thread
print(f"processed {i} files")
print(f"generated {self.lastEmbeddingId} embeddings")
i += 1
if skipFiles > 0 and i < skipFiles:
self.lastEmbeddingId += self.numberOfEmbeddingPerFile
continue
self.createEmbeddingsFromPath(path)
print(f"generated {self.lastEmbeddingId} embeddings")
pass
def createEmbeddingsFromPath(self, path):
df = pd.read_csv(
path,
dtype = {'acoustic_data': np.int16}
)
df.acoustic_data = df.acoustic_data.abs() # converting to positive vals.
# stats from 4096 * binsPerEmbedding
# windowSize is the slide
start = 0
for _ in range(self.numberOfEmbeddingPerFile):
end = start + 4096 * self.binsPerEmbedding
binDf = df[start: end]
# create embedding
            self.createEmbeddingFromBinsDf(binDf)
start += self.windowSize
pass
    def createEmbeddingFromBinsDf(self, binDf):
features = self.embedder.fromBinsDf(binDf)
self.lastEmbeddingId += 1
embedding = EmbeddingCacheTest(embeddingId=self.lastEmbeddingId, type=self.embeddingType, features = features)
self.io.save(embedding)
pass
def getBatch(self, batchNo, batchSize=16):
start = (batchNo - 1) * batchSize + 1
        end = start + batchSize
batchList = []
for embeddingId in range(start, end):
try:
embedding = self.io.readById(embeddingId)
x = embedding.features
if self.addDimToX:
batchList.append(x.reshape(-1, self.embedder.numberOfFeatures, 1))
else:
batchList.append(x)
except Exception as e:
                logging.warning(f'encountered exception while reading embedding #{embeddingId}: {e}. Stopping this batch early')
break
pass
return np.array(batchList)
def batches(self, batchSize = 16):
i = 0
while True:
i = i + 1
data = self.getBatch(i, batchSize)
if len(data) == 0:
break
yield data
def batchesByFile(self):
csvPaths = glob.glob(dirname(self.io.sourceFolder)+"/*.csv")
i = 0
for path in csvPaths:
i = i + 1
data = self.getBatch(i, self.numberOfEmbeddingPerFile)
if len(data) == 0:
break
yield self.getTestName(path), data
def getTestName(self, path):
# print(path)
return re.findall(r'.*[\/\\]([a-zA-Z0-9_]+)\.csv$', path)[0]
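# Illustrative usage sketch (hedged: paths and file counts come from TestIO
# and the Kaggle test set, so this only runs with that data in place; 'model'
# and the aggregation are hypothetical):
#
#   gen = EmbeddingStatsGeneratorForTestPos(windowSize=200,
#                                           embeddingType='cnn-stats-test')
#   gen.generateEmbeddings()              # one-off feature extraction
#   for testName, batch in gen.batchesByFile():
#       preds = model.predict(batch)      # 'model' assumed trained
#       submission[testName] = preds.mean()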
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,165 | Alfiesan/earthquakePrediction | refs/heads/master | /embedding/BinEmbedding.py | import numpy as np
import logging
from data_analysis.library.Bin import Bin
from embedding.Embedding import Embedding
from embedding.SourceCardinality import SourceCardinality
class BinEmbedding(Embedding):
def __init__(self, binSize = 4096 ):
self.binSize = binSize
self.rowDim = 64
self.colDim = int( self.binSize / self.rowDim )
if self.binSize % self.rowDim != 0:
logging.error(f"{binSize} is not divisible by {self.rowDim}")
raise Exception(f"{binSize} is not divisible by {self.rowDim}")
        super(BinEmbedding, self).__init__(sourceCardinality = SourceCardinality.SINGLE)
pass
def fromBin(self, aBin: Bin):
curBinSize = len(aBin.data)
data = None
if curBinSize < self.binSize:
data = self.inflateBinData(aBin, self.binSize)
elif curBinSize > self.binSize:
data = self.reduceBinDataWithQuake(aBin, self.binSize)
else:
data = aBin.data
return data.reshape([self.rowDim,self.colDim, 1])
def inflateBinData(self, aBin, binSize):
itemsToInflate = binSize - len(aBin.data)
last = [aBin.data[-1]] * itemsToInflate
return np.append( aBin.data, last )
def reduceBinDataWithQuake(self, aBin, binSize):
data = None
logging.debug(f"bin {aBin.binId} has been reduced")
if( aBin.quakeIndex >= binSize ): #take the second part
logging.debug(f"bin {aBin.binId} has quakeIndex at {aBin.quakeIndex}")
            data = aBin.data[-binSize:]
else:
data = aBin.data[0: binSize]
return data
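# Minimal runnable sketch (assumptions: the project package is importable and
# SimpleNamespace stands in for the real Bin entity from
# data_analysis.library.Bin). Shows the padding path and the reshape:
if __name__ == "__main__":
    from types import SimpleNamespace
    short_bin = SimpleNamespace(data=np.arange(4000), binId=1, quakeIndex=-1)
    emb = BinEmbedding(4096)
    image = emb.fromBin(short_bin)
    print(image.shape)  # (64, 64, 1); the last 96 samples repeat data[-1]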
| {"/library/TestIO.py": ["/embedding/EmbeddingIO.py"], "/TinyFFNWithStats.py": ["/library/MultipleBinDataGenerator.py", "/library/LivePlotKeras.py"], "/library/MultipleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/MultipleBinEmbeddingType.py", "/embedding/EmbeddingCache.py", "/embedding/EmbeddingIO.py"], "/data_analysis/library/PositiveBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/embedding/Embedding.py": ["/data_analysis/library/Bin.py", "/embedding/SourceCardinality.py"], "/embedding/OneStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/Stats.py"], "/embedding/BinToEmbedding.py": ["/data_analysis/library/Bin.py"], "/embedding/Stats.py": ["/data_analysis/library/Bin.py"], "/library/OneStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinJoiner.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py"], "/data_analysis/library/BinProcessor.py": ["/data_analysis/library/Bin.py"], "/embedding/EmbeddingIO.py": ["/embedding/EmbeddingCache.py"], "/data_analysis/library/BinIO.py": ["/data_analysis/library/Bin.py"], "/data_analysis/library/RawBinManager.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/data_analysis/library/Scalers.py"], "/data_analysis/library/BinNormalizer.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinProcessor.py", "/data_analysis/library/BinIO.py"], "/library/SingleBinDataGenerator.py": ["/data_analysis/library/Bin.py", "/data_analysis/library/BinIO.py", "/embedding/BinEmbedding.py"], "/embedding/CNNStatsEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py", "/embedding/SourceCardinality.py", "/embedding/OneStatsEmbedding.py"], "/library/EmbeddingStatsGeneratorForTestPos.py": ["/library/TestIO.py", "/embedding/OneStatsEmbedding.py", "/embedding/CNNStatsEmbedding.py", "/embedding/EmbeddingCache.py", "/data_analysis/library/Scalers.py"], "/embedding/BinEmbedding.py": ["/data_analysis/library/Bin.py", "/embedding/Embedding.py"]} |
67,187 | bohdana-kuzmenko/sayollo | refs/heads/main | /app/infrastructure/repositories/sqllight/sdk_repo.py | from sqlalchemy import Column, String, Integer
from sqlalchemy.sql import func
from app.domain.entities.sdk import SDK
from app.domain.repositories.sdk_repo import SDKBaseRepo
from app.infrastructure.repositories.sqllight.base_repo import (
SQLLightBaseRepo, BaseSQLLightRepoError)
from app.infrastructure.repositories.sqllight import Base
class SDKDTO(Base):
__tablename__ = "sdks"
sdk_version = Column(String, primary_key=True, nullable=False, index=True)
ad_requests = Column(Integer, nullable=False, default=0)
impression_requests = Column(Integer, nullable=False, default=0)
def to_entity(self) -> SDK:
return SDK(
sdk_version=self.sdk_version,
ad_requests=self.ad_requests,
impression_requests=self.impression_requests,
)
@staticmethod
def from_entity(sdk: SDK) -> "SDKDTO":
return SDKDTO(
sdk_version=sdk.sdk_version,
ad_requests=sdk.ad_requests,
impression_requests=sdk.impression_requests,
)
def __repr__(self):
return '<SDK %r>' % self.sdk_version
class SDKRepo(SDKBaseRepo, SQLLightBaseRepo):
request_types = ('ad_requests', 'impression_requests')
@SQLLightBaseRepo.commit_action
def create(self, sdk: SDK):
sdk_dto = SDKDTO.from_entity(sdk)
self.session.add(sdk_dto)
def get_avg_impressions(self):
return SDKDTO.query.with_entities(
func.avg(SDKDTO.impression_requests).label('avg')).first().avg
def get_avg_ad_requests(self):
return SDKDTO.query.with_entities(
func.avg(SDKDTO.ad_requests).label('avg')).first().avg
@SQLLightBaseRepo.commit_action
def increment_request(self, sdk: str, request_type: str):
if request_type not in SDKRepo.request_types:
raise BaseSQLLightRepoError('unsupported request type')
sdk_entity = SDKDTO.query.filter_by(sdk_version=sdk).first()
        if sdk_entity:
            # assigning a SQL expression (Column + 1) makes SQLAlchemy emit an
            # atomic server-side UPDATE ... SET <col> = <col> + 1
            setattr(sdk_entity, request_type,
                    getattr(SDKDTO, request_type) + 1)
else:
self.create(SDK(**{"sdk_version": sdk, request_type: 1}))
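# Illustrative usage sketch (hedged: `session` comes from the sqllight
# package __init__; the version string is made up):
#
#   repo = SDKRepo(session)
#   repo.increment_request('1.2.3', 'ad_requests')  # creates row on first hit
#   print(repo.get_avg_ad_requests())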
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,188 | bohdana-kuzmenko/sayollo | refs/heads/main | /setup.py | from setuptools import setup, find_packages
with open('requirements.txt') as requirements_txt:
install_requires = requirements_txt.read().splitlines()
setup(
name='sayollo',
version='0.0.1',
description='Test',
author='Bohdana Kuzmenko',
author_email='bogdana.kuzmenko.16@gmail.com',
packages=find_packages(),
install_requires=install_requires,
) | {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,189 | bohdana-kuzmenko/sayollo | refs/heads/main | /app/domain/repositories/sdk_repo.py | from abc import abstractmethod
from typing import Optional
from app.domain.entities.sdk import SDK
from app.domain.repositories import BaseRepo
class SDKBaseRepo(BaseRepo):
@abstractmethod
def create(self, sdk: SDK) -> Optional[SDK]:
raise NotImplementedError
@abstractmethod
def get_avg_impressions(self):
raise NotImplementedError
@abstractmethod
def get_avg_ad_requests(self):
raise NotImplementedError
@abstractmethod
def increment_request(self, sdk_version: str, request_type: str):
raise NotImplementedError
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,190 | bohdana-kuzmenko/sayollo | refs/heads/main | /app/application/flask/views/ad_view.py | from flask import Response, request
from flask_restful import Resource
import requests
from app.application.flask.helpers.response import make_custom_response
from app.application.services.sdk_service import SDKService
from app.application.services.user_service import UserService
from app.domain.entities.request import RequestSchema
class AdView(Resource):
request_type = "ad_requests"
def __init__(self, sdk_service: SDKService, user_service: UserService):
self.sdk_service = sdk_service
self.user_service = user_service
@make_custom_response
def get(self):
api_url = ('https://6u3td6zfza.execute-api.us-east-2.amazonaws.com/'
'prod/ad/vast')
        response = requests.get(api_url)
ad_request = RequestSchema().load(request.args)
self.sdk_service.increment(
ad_request.get('sdk_version'), self.request_type)
self.user_service.increment(
ad_request.get('username'), self.request_type)
return Response(response.text, mimetype='text/xml')
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,191 | bohdana-kuzmenko/sayollo | refs/heads/main | /app/application/flask/helpers/response.py | from http import HTTPStatus
from flask import Response, make_response
def make_custom_response(fn):
def wrapped(self, *args, **kwargs):
try:
response = fn(self, *args, **kwargs)
except Exception as e:
response = make_response(
getattr(e, 'message', repr(e)),
HTTPStatus.INTERNAL_SERVER_ERROR)
        else:
            if not isinstance(response, Response):
                response = make_response(response, HTTPStatus.OK)
        # return outside the try block: a `return` inside `finally` would
        # silently swallow any exception raised while building the response
        return response
return wrapped
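# Minimal runnable sketch ('DemoView' is hypothetical; a request context is
# needed because make_response touches the current Flask app):
if __name__ == "__main__":
    from flask import Flask
    app = Flask(__name__)
    class DemoView:
        @make_custom_response
        def get(self):
            return "ok"                    # plain value -> wrapped as 200
        @make_custom_response
        def boom(self):
            raise ValueError("bad input")  # exception -> repr(e) with 500
    with app.test_request_context('/'):
        print(DemoView().get().status_code)   # 200
        print(DemoView().boom().status_code)  # 500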
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,192 | bohdana-kuzmenko/sayollo | refs/heads/main | /app/domain/entities/sdk.py | from dataclasses import dataclass
from typing import Optional
@dataclass
class SDK(object):
sdk_version: str
ad_requests: Optional[int] = 0
impression_requests: Optional[int] = 0
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,193 | bohdana-kuzmenko/sayollo | refs/heads/main | /app/application/services/user_service.py | from app.domain.entities.user import User
from app.domain.repositories.user_repo import UserBaseRepo
class UserService(object):
def __init__(self, repo: UserBaseRepo):
self.repo = repo
def create(self, user: User):
return self.repo.create(user)
def increment(self, username, request_type):
return self.repo.increment_request(username, request_type)
def avg_impressions(self):
return self.repo.get_avg_impressions()
def avg_ad_requests(self):
return self.repo.get_avg_ad_requests()
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,194 | bohdana-kuzmenko/sayollo | refs/heads/main | /app/domain/entities/user.py | from dataclasses import dataclass
from typing import Optional
@dataclass
class User(object):
username: str
ad_requests: Optional[int] = 0
impression_requests: Optional[int] = 0
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,195 | bohdana-kuzmenko/sayollo | refs/heads/main | /app/application/flask/run.py | from flask import Flask
from flask_restful import Api
from app.application.flask.views.ad_view import AdView
from app.application.flask.views.impression_view import ImpressionView
from app.application.flask.views.stats_view import StatsView
class FlaskAPIRunner(object):
def __init__(self, sdk_service, user_service):
self.sdk_service = sdk_service
self.user_service = user_service
def run(self):
app = Flask(__name__)
api = Api(app)
api.add_resource(
            AdView, '/api/v1/ad',
resource_class_kwargs={
'sdk_service': self.sdk_service,
'user_service': self.user_service,
})
api.add_resource(
            ImpressionView, '/api/v1/impression',
resource_class_kwargs={
'sdk_service': self.sdk_service,
'user_service': self.user_service,
})
api.add_resource(
            StatsView, '/api/v1/stats',
resource_class_kwargs={
'sdk_service': self.sdk_service,
'user_service': self.user_service,
})
app.run(port=5001)
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,196 | bohdana-kuzmenko/sayollo | refs/heads/main | /app/infrastructure/repositories/sqllight/user_repo.py | from sqlalchemy import Column, String, Integer, func
from app.domain.entities.user import User
from app.domain.repositories.user_repo import UserBaseRepo
from app.infrastructure.repositories.sqllight.base_repo import (
SQLLightBaseRepo, BaseSQLLightRepoError)
from app.infrastructure.repositories.sqllight import Base
class UserDTO(Base):
__tablename__ = "users"
username = Column(String, primary_key=True, nullable=False, index=True)
ad_requests = Column(Integer, nullable=False, default=0)
impression_requests = Column(Integer, nullable=False, default=0)
def to_entity(self) -> User:
return User(
username=self.username,
ad_requests=self.ad_requests,
impression_requests=self.impression_requests
)
@staticmethod
def from_entity(user: User) -> "UserDTO":
return UserDTO(
username=user.username,
ad_requests=user.ad_requests,
impression_requests=user.impression_requests,
)
def __repr__(self):
return '<User %r>' % self.username
class UserRepo(UserBaseRepo, SQLLightBaseRepo):
request_types = ('ad_requests', 'impression_requests')
@SQLLightBaseRepo.commit_action
def create(self, user: User):
user_dto = UserDTO.from_entity(user)
self.session.add(user_dto)
def get_avg_impressions(self):
return UserDTO.query.with_entities(
func.avg(UserDTO.impression_requests).label('avg')).first().avg
def get_avg_ad_requests(self):
return UserDTO.query.with_entities(
func.avg(UserDTO.ad_requests).label('avg')).first().avg
@SQLLightBaseRepo.commit_action
def increment_request(self, username: str, request_type: str):
if request_type not in UserRepo.request_types:
raise BaseSQLLightRepoError('unsupported request type')
user = UserDTO.query.filter_by(username=username).first()
        if user:
            # same atomic server-side increment pattern as in SDKRepo
            setattr(user, request_type, getattr(UserDTO, request_type) + 1)
else:
self.create(User(**{"username": username, request_type: 1}))
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,197 | bohdana-kuzmenko/sayollo | refs/heads/main | /main.py | from app.application.flask.run import FlaskAPIRunner
from app.application.services.sdk_service import SDKService
from app.application.services.user_service import UserService
from app.infrastructure.repositories.sqllight.sdk_repo import \
SDKRepo
from app.infrastructure.repositories.sqllight.user_repo import \
UserRepo
from app.infrastructure.repositories.sqllight import session, create_tables
def main():
create_tables()
sqllight_repo = SDKRepo(session)
sdk_service = SDKService(sqllight_repo)
user_repo = UserRepo(session)
user_service = UserService(user_repo)
FlaskAPIRunner(sdk_service, user_service).run()
if __name__ == '__main__':
main()
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,198 | bohdana-kuzmenko/sayollo | refs/heads/main | /app/application/flask/views/impression_view.py | from flask import request, Response
from flask_restful import Resource
from app.application.flask.helpers.response import make_custom_response
from app.application.services.sdk_service import SDKService
from app.application.services.user_service import UserService
from app.domain.entities.request import RequestSchema
class ImpressionView(Resource):
request_type = "impression_requests"
def __init__(self, sdk_service: SDKService, user_service: UserService):
self.sdk_service = sdk_service
self.user_service = user_service
@make_custom_response
def get(self):
ad_request = RequestSchema().load(request.args)
self.sdk_service.increment(
ad_request.get('sdk_version'), self.request_type)
self.user_service.increment(
ad_request.get('username'), self.request_type)
return Response(status=200)
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,199 | bohdana-kuzmenko/sayollo | refs/heads/main | /app/infrastructure/repositories/sqllight/base_repo.py | from sqlalchemy.orm import Session
class BaseSQLLightRepoError(Exception):
pass
class SQLLightBaseRepo(object):
def __init__(self, session: Session):
self.session: Session = session
@staticmethod
def commit_action(fn):
def wrapped(self, *args, **kwargs):
result = fn(self, *args, **kwargs)
self.session.commit()
return result
return wrapped
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,200 | bohdana-kuzmenko/sayollo | refs/heads/main | /app/application/services/sdk_service.py | from app.domain.entities.sdk import SDK
from app.domain.repositories.sdk_repo import SDKBaseRepo
class SDKService(object):
def __init__(self, repo: SDKBaseRepo):
self.repo = repo
def create(self, sdk: SDK):
return self.repo.create(sdk)
def increment(self, sdk_version, request_type):
return self.repo.increment_request(sdk_version, request_type)
def avg_impressions(self):
return self.repo.get_avg_impressions()
def avg_ad_requests(self):
return self.repo.get_avg_ad_requests()
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,201 | bohdana-kuzmenko/sayollo | refs/heads/main | /app/infrastructure/repositories/sqllight/__init__.py | from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
SQLALCHEMY_DATABASE_URL = "sqlite:///foo.db"
engine = create_engine(
SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False, },
)
session = scoped_session(sessionmaker(
bind=engine, autocommit=False, autoflush=False))
Base = declarative_base()
Base.query = session.query_property()
def create_tables():
Base.metadata.create_all(bind=engine)
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,202 | bohdana-kuzmenko/sayollo | refs/heads/main | /app/domain/entities/request.py | from dataclasses import dataclass
from marshmallow import fields, Schema, EXCLUDE
@dataclass
class Request(object):
sdk_version: str
session_id: str
platform: str
username: str
country_code: str
class RequestSchema(Schema):
class Meta:
index_errors = True
unknown = EXCLUDE
sdk_version = fields.String(required=True)
session_id = fields.String(required=True)
platform = fields.String(required=True)
username = fields.String(required=True)
country_code = fields.String(required=True)
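# A minimal usage sketch (not part of the original module): RequestSchema.load
# validates a payload dict and, with no post_load hook, returns a plain dict
# that can be unpacked into the Request dataclass. The payload values below
# are made up for illustration:
#
#     payload = {
#         "sdk_version": "1.2.0",
#         "session_id": "abc123",
#         "platform": "android",
#         "username": "demo_user",
#         "country_code": "US",
#         "extra_field": "ignored",  # dropped because Meta.unknown = EXCLUDE
#     }
#     request = Request(**RequestSchema().load(payload))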
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,203 | bohdana-kuzmenko/sayollo | refs/heads/main | /app/domain/repositories/user_repo.py | from abc import abstractmethod
from typing import Optional
from app.domain.entities.user import User
from app.domain.repositories import BaseRepo
class UserBaseRepo(BaseRepo):
@abstractmethod
def create(self, user: User) -> Optional[User]:
raise NotImplementedError
@abstractmethod
def get_avg_impressions(self):
raise NotImplementedError
@abstractmethod
def get_avg_ad_requests(self):
raise NotImplementedError
@abstractmethod
def increment_request(self, username: str, request_type: str):
raise NotImplementedError
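# A hypothetical in-memory sketch of this contract, for illustration only;
# the real implementation is /app/infrastructure/repositories/sqllight/user_repo.py,
# and BaseRepo (not shown here) may declare further abstract methods:
#
#     class InMemoryUserRepo(UserBaseRepo):
#         def __init__(self):
#             self.users = {}
#         def create(self, user: User) -> Optional[User]:
#             self.users[user.username] = user  # assumes User has a username field
#             return user
#         def get_avg_impressions(self):
#             return 0
#         def get_avg_ad_requests(self):
#             return 0
#         def increment_request(self, username: str, request_type: str):
#             pass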
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,204 | bohdana-kuzmenko/sayollo | refs/heads/main | /app/application/flask/views/stats_view.py | from flask import request, jsonify
from flask_restful import Resource
from marshmallow import ValidationError
from app.application.flask.helpers.response import make_custom_response
from app.application.services.sdk_service import SDKService
from app.application.services.user_service import UserService
class StatsView(Resource):
def __init__(self, sdk_service: SDKService, user_service: UserService):
self.sdk_service = sdk_service
self.user_service = user_service
@make_custom_response
def get(self):
filter_type = request.args.get('filter_type')
if not filter_type:
raise ValidationError("No filter type have been provided")
services = {
'user': self.user_service,
'sdk': self.sdk_service,
}
if filter_type not in services:
raise ValidationError("Unrecognized filer type have been provided")
avg_impressions = services[filter_type].avg_impressions()
avg_ad_requests = services[filter_type].avg_ad_requests()
rate = avg_impressions / avg_ad_requests if avg_ad_requests else 0
return jsonify({
"avg_impressions": avg_impressions,
"avg_ad_requests": avg_ad_requests,
"rate": rate
})
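# Example request against this view (the route path depends on how run.py
# registers the resource; "/stats" is assumed here for illustration):
#
#     GET /stats?filter_type=sdk
#     -> {"avg_impressions": ..., "avg_ad_requests": ..., "rate": ...}
#
# where rate is avg_impressions / avg_ad_requests, or 0 when there are no
# ad requests; a missing or unknown filter_type raises ValidationError.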
| {"/app/infrastructure/repositories/sqllight/sdk_repo.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/domain/repositories/sdk_repo.py": ["/app/domain/entities/sdk.py"], "/app/application/flask/views/ad_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/user_service.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py"], "/app/application/flask/run.py": ["/app/application/flask/views/ad_view.py", "/app/application/flask/views/impression_view.py", "/app/application/flask/views/stats_view.py"], "/app/infrastructure/repositories/sqllight/user_repo.py": ["/app/domain/entities/user.py", "/app/domain/repositories/user_repo.py", "/app/infrastructure/repositories/sqllight/base_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/main.py": ["/app/application/flask/run.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/infrastructure/repositories/sqllight/sdk_repo.py", "/app/infrastructure/repositories/sqllight/user_repo.py", "/app/infrastructure/repositories/sqllight/__init__.py"], "/app/application/flask/views/impression_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py", "/app/domain/entities/request.py"], "/app/application/services/sdk_service.py": ["/app/domain/entities/sdk.py", "/app/domain/repositories/sdk_repo.py"], "/app/domain/repositories/user_repo.py": ["/app/domain/entities/user.py"], "/app/application/flask/views/stats_view.py": ["/app/application/flask/helpers/response.py", "/app/application/services/sdk_service.py", "/app/application/services/user_service.py"]} |
67,213 | brunosch99/Data-Engineering-Challenge | refs/heads/master | /config.py | user = "postgres"
password = "desafio2019"
host = "192.168.99.100"
port = "5432"
database = "marketing_campaign" | {"/marketing_ingestion.py": ["/config.py"]} |
67,214 | brunosch99/Data-Engineering-Challenge | refs/heads/master | /marketing_ingestion.py | #!/usr/bin/env python
# coding: utf-8
import json
import psycopg2
import pandas as pd
import numpy as np
import config as cfg
def get_campaign_id(line):
"""
    Receives a line from a url and extracts the campaign_id if present
    Returns the campaign_id; if it doesn't exist, returns 0
"""
if len(line.split('?')) == 2:
if len(line.split('?')[1].split('&')) == 1:
return int(line.split('?')[1].split('=')[1])
else:
return int(line.split('?')[1].split('&')[1].split('=')[1])
#Returns a default value for campaign_id
return 0
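# Worked examples of the positional parsing above (URLs are illustrative):
#   get_campaign_id("https://site.com/?campaign_id=5")                  -> 5
#   get_campaign_id("https://site.com/?ad_creative_id=9&campaign_id=5") -> 5
#   get_campaign_id("https://site.com/")                                -> 0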
def get_ad_creative_id(line):
"""
    Receives a line from a url and extracts the ad_creative_id if present
    Returns the ad_creative_id; if it doesn't exist, returns 0
"""
if len(line.split('?')) == 2:
if len(line.split('?')[1].split('&')) == 2:
return int(line.split('?')[1].split('&')[0].split('=')[1])
#Returns a default value for ad_creative_id
return 0
def generate_create_table_script(df, table_name):
"""
Receives a dataframe and a table name
Returns a SQL script that creates the table with the received parameter name and with the fields and its datatypes from the dataframe
"""
fields_and_types = []
for column in df.columns:
if str(df[column].dtype) == 'int64':
fields_and_types.append([column,"INT"])
elif str(df[column].dtype) == 'float64':
fields_and_types.append([column,"FLOAT"])
else:
fields_and_types.append([column,"VARCHAR(100)"])
script = "CREATE TABLE {} (".format(table_name)
for f in fields_and_types:
script += f[0] +" "+ f[1]
if f != fields_and_types[len(fields_and_types)-1]:
script+=","
script+=");"
return script
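# Example output (illustrative): for a dataframe with an int64 column "clicks"
# and an object column "campaign_name",
#   generate_create_table_script(df, "costs")
# returns "CREATE TABLE costs (clicks INT,campaign_name VARCHAR(100));"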
def run_query(query, commit=False):
"""
    Receives a query and a commit flag
    Runs the query with the global connection variable and commits if the flag equals True
"""
global connection
if connection:
cursor = connection.cursor()
cursor.execute(query)
if commit:
connection.commit()
def connect_database(database):
"""
Receives a database to connect and return its connection
Uses the configs from the config.py file
"""
try:
connection = psycopg2.connect(user = cfg.user,
password = cfg.password,
host = cfg.host,
port = cfg.port,
database = database)
print("Connection Success!")
return connection
    except psycopg2.Error:
        print("Failed connection!")
        return None
def generate_insert_script(dictionary, table):
"""
Receives a dictionary and a table
Returns a SQL script that inserts on that table the information from the dictionary received
"""
script = "INSERT INTO {} VALUES(".format(table)
for k in dictionary:
if type(dictionary[k]) == int or type(dictionary[k]) == float:
script+=str(dictionary[k])+","
else:
script+="'"+str(dictionary[k])+"'"+","
script+=")"
script = script.replace(",)",");")
return script
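# Example output (illustrative):
#   generate_insert_script({"clicks": 10, "name": "ad_1"}, "costs")
# returns "INSERT INTO costs VALUES(10,'ad_1');"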
def load_dataframe_into_table(df, table):
"""
    Receives a dataframe and a table name
    Inserts all of the dataframe's rows into the given table
"""
    # For better performance the dataframe is converted into a list of dicts
dictionary = df.to_dict('records')
for d in dictionary:
if d == dictionary[len(dictionary)-1]:
run_query(generate_insert_script(d, table), True)
else:
run_query(generate_insert_script(d, table))
def insert_dfs_into_database(df_table):
for dt in df_table:
print("Creating {} table".format(dt[1]))
run_query(generate_create_table_script(dt[0], dt[1]), True)
print("Inserting into {}".format(dt[1]))
load_dataframe_into_table(dt[0], dt[1])
#Defining files paths
google_ad_path = r'C:\Users\BlueShift\Documents\Data-Engineering-Challenge\datasets\google_ads_media_costs.jsonl'
facebook_ad_path = r'C:\Users\BlueShift\Documents\Data-Engineering-Challenge\datasets\facebook_ads_media_costs.jsonl'
pageview_path = r'C:\Users\BlueShift\Documents\Data-Engineering-Challenge\datasets\pageview.txt'
customer_leads_funnel_path = r'C:\Users\BlueShift\Documents\Data-Engineering-Challenge\datasets\customer_leads_funnel.csv'
#Matrix that will contain a df and a name for its table in the database
df_table = []
#Generating google_ad_df from google_ads_media_costs.jsonl
google_ad_df = pd.read_json(google_ad_path, lines=True)
#Generating facebook_ad_df from facebook_ads_media_costs.jsonl
facebook_ad_df = pd.read_json(facebook_ad_path, lines=True)
#Generating pageview_df from pageview.txt
#Some columns were removed and some were created using other columns
pageview_df = pd.read_csv(pageview_path, delimiter=' ', header=None)
pageview_df.drop([1,4,5,7,8,10,11], axis=1, inplace=True)
pageview_df.columns = ['ip', 'date', 'hour', 'url', 'device_id', 'referer']
pageview_df['datetime'] = pageview_df['date'].astype('str').apply(lambda line: line.replace("[", "")) + " " + pageview_df['hour'].astype('str').apply(lambda line: line.replace("]", ""))
pageview_df['campaign_id'] = pageview_df['url'].apply(get_campaign_id).astype('int64')
pageview_df['ad_creative_id'] = pageview_df['url'].apply(get_ad_creative_id)
pageview_df.drop(['date', 'hour'], axis=1, inplace=True)
#Generating customer_leads_funnel_df from customer_leads_funnel.csv
customer_leads_funnel_df = pd.read_csv(customer_leads_funnel_path, header=None)
customer_leads_funnel_df.columns = ['device_id', 'lead_id', 'registered_at', 'credit_decision', 'credit_decision_at', 'signed_at', 'revenue']
customer_leads_funnel_df['signed_at'].fillna('-', inplace = True)
customer_leads_funnel_df['revenue'].fillna(0, inplace = True)
df_table.append([google_ad_df,"google_ads_media_costs"])
df_table.append([facebook_ad_df,"facebook_ads_media_costs"])
df_table.append([pageview_df,"pageview"])
df_table.append([customer_leads_funnel_df,"customer_leads_funnel"])
#Connects to database
connection = connect_database("marketing_campaign")
if connection is not None:
insert_dfs_into_database(df_table)
print("Ingestion Finished")
create_campaign_stats_query = """
CREATE TABLE campaign_stats as(
SELECT c.*, l.device_id, l.lead_id, l.credit_decision, l.revenue
FROM
(SELECT G.google_campaign_id as campaign_id,
G.google_campaign_name as campaign_name,
G.ad_creative_id as ad_creative_id,
G.ad_creative_name as ad_creative_name,
SUM(G.clicks) as clicks,
SUM(G.impressions) as impressions,
SUM(G.cost) as cost
FROM google_ads_media_costs G
GROUP BY G.google_campaign_id,
G.google_campaign_name,
G.ad_creative_id,
G.ad_creative_name
UNION ALL
SELECT F.facebook_campaign_id AS campaign_id,
F.facebook_campaign_name AS campaign_name,
0 as ad_creative_id,
null as ad_creative_name,
SUM(F.clicks) AS clicks,
SUM(F.impressions) AS impressions,
SUM(F.cost) AS cost
FROM facebook_ads_media_costs F
GROUP BY F.facebook_campaign_id,
F.facebook_campaign_name) C
INNER JOIN pageview P
ON P.campaign_id = C.campaign_id AND P.ad_creative_id = C.ad_creative_id
INNER JOIN customer_leads_funnel L
ON L.device_id = p.device_id);
"""
run_query(create_campaign_stats_query, True)
connection.close()
else:
print("No database connection!") | {"/marketing_ingestion.py": ["/config.py"]} |
67,218 | frankyangTW/deep_demosaicing | refs/heads/master | /model.py | from keras.models import *
from keras.layers import *
from keras.optimizers import *
import keras
import keras.backend as K
import tensorflow as tf
def conv_lrelu_conv_lrelu(inputs, filters):
conv = Conv2D(filters, [3, 3], padding='same')(inputs)
lrelu = LeakyReLU(alpha=0.3)(conv)
conv = Conv2D(filters, [3, 3], padding='same')(lrelu)
lrelu = LeakyReLU(alpha=0.3)(conv)
return conv, lrelu
def conv_lrelu_conv_lrelu_pool(inputs, filters):
conv, lrelu = conv_lrelu_conv_lrelu(inputs, filters)
pool = MaxPooling2D(pool_size=(2, 2), padding='same')(lrelu)
return conv, pool
def upconv_concat_conv_lrelu_conv_lrelu(inputs, concat, filters):
upconv = Conv2DTranspose(filters, kernel_size=2, strides=[2, 2], padding='same')(inputs)
upconv = Concatenate(axis=3)([upconv, concat])
conv, lrelu = conv_lrelu_conv_lrelu(upconv, filters)
return lrelu
def space_to_depth(x):
return tf.space_to_depth(x, 2)
def depth_to_space(x):
return tf.depth_to_space(x, 2)
def PSNR(y_true, y_pred):
def log10(x):
numerator = K.log(x)
denominator = K.log(K.constant(10, dtype=numerator.dtype))
return numerator / denominator
mse = K.mean((y_pred - y_true) ** 2)
return 10 * log10(1 / mse)
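# Sanity check for PSNR on images scaled to [0, 1]: an MSE of 1e-4 gives
# 10 * log10(1 / 1e-4) = 40 dB, and halving the MSE adds about 3 dB.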
def create_model(depth=True):
inputs = Input((None, None, 3))
if depth:
to_depth = Lambda(space_to_depth)(inputs)
conv1, pool1 = conv_lrelu_conv_lrelu_pool(inputs=to_depth, filters=32)
else:
conv1, pool1 = conv_lrelu_conv_lrelu_pool(inputs=inputs, filters=32)
conv2, pool2 = conv_lrelu_conv_lrelu_pool(inputs=pool1, filters=64)
conv3, pool3 = conv_lrelu_conv_lrelu_pool(inputs=pool2, filters=128)
conv4, lrelu = conv_lrelu_conv_lrelu(inputs=pool3, filters=256)
lrelu = upconv_concat_conv_lrelu_conv_lrelu(lrelu, conv3, 128)
lrelu = upconv_concat_conv_lrelu_conv_lrelu(lrelu, conv2, 64)
lrelu = upconv_concat_conv_lrelu_conv_lrelu(lrelu, conv1, 32)
if depth:
out = Conv2D(12, [1, 1])(lrelu)
out = Lambda(depth_to_space)(out)
else:
out = Conv2D(3, [1, 1])(lrelu)
    model = Model(inputs = [inputs], outputs = [out])
# model.summary()
model.compile(optimizer = Adam(lr = 1e-4), loss = 'mean_squared_error', metrics=[PSNR])
return model
def conv_lrelu_conv_lrelu_conv_lrelu_residual(inputs, filters):
conv = Conv2D(filters, [1, 1], padding='same')(inputs)
lrelu = LeakyReLU(alpha=0.3)(conv)
conv = Conv2D(filters, [3, 3], padding='same')(lrelu)
lrelu = LeakyReLU(alpha=0.3)(conv)
conv = Conv2D(filters, [1, 1], padding='same')(lrelu)
lrelu = LeakyReLU(alpha=0.3)(conv)
out = Add()([lrelu, inputs])
return LeakyReLU(alpha=0.3)(out)
def residual_model():
inputs = Input((None, None, 3))
conv1 = Conv2D(32, [1, 1], padding='same')(inputs)
conv1 = conv_lrelu_conv_lrelu_conv_lrelu_residual(inputs=conv1, filters=32)
conv2 = Conv2D(64, [1, 1], padding='same')(conv1)
conv2 = conv_lrelu_conv_lrelu_conv_lrelu_residual(inputs=conv2, filters=64)
conv3 = Conv2D(128, [1, 1], padding='same')(conv2)
conv3 = conv_lrelu_conv_lrelu_conv_lrelu_residual(inputs=conv3, filters=128)
out = Conv2D(3, [1, 1])(conv3)
    model = Model(inputs = [inputs], outputs = [out])
# model.summary()
model.compile(optimizer = Adam(lr = 1e-4), loss = 'mean_squared_error', metrics=[PSNR])
return model
def residual_to_depth_model():
inputs = Input((None, None, 3))
to_depth = Lambda(space_to_depth)(inputs)
conv1 = Conv2D(32, [1, 1], padding='same')(to_depth)
conv1 = conv_lrelu_conv_lrelu_conv_lrelu_residual(inputs=conv1, filters=32)
conv2 = Conv2D(64, [1, 1], padding='same')(conv1)
conv2 = conv_lrelu_conv_lrelu_conv_lrelu_residual(inputs=conv2, filters=64)
conv3 = Conv2D(128, [1, 1], padding='same')(conv2)
conv3 = conv_lrelu_conv_lrelu_conv_lrelu_residual(inputs=conv3, filters=128)
out = Conv2D(12, [1, 1])(conv3)
out = Lambda(depth_to_space)(out)
    model = Model(inputs = [inputs], outputs = [out])
# model.summary()
model.compile(optimizer = Adam(lr = 1e-4), loss = 'mean_squared_error', metrics=[PSNR])
return model
| {"/train.py": ["/model.py", "/utils.py"]} |
67,219 | frankyangTW/deep_demosaicing | refs/heads/master | /train.py | import numpy as np
import matplotlib.pyplot as plt
import glob
from keras.models import *
from model import create_model, residual_model, residual_to_depth_model
import cv2
from utils import *
from tqdm import tqdm
from keras.callbacks import *
limit_gpu()
print ("Loading images")
image_folder = '/data/frank/images/'
filelist = glob.glob(image_folder + '*.jpg')[:230]
images = []
for i in tqdm(range(len(filelist))):
images.append(plt.imread(filelist[i])[:2048, :2048].astype(np.uint8))
print ("Done")
print ("Loading Validation Set")
# hold out the last 30 images for validation
val_x, val_y = get_val_test_data(filelist[-30:], images[-30:])
print ("Done")
print ("Creating Model")
checkpoint = ModelCheckpoint("saved_models/residual_to_depth/weights.{epoch:d}-{val_loss:f}.hdf5",
monitor='val_loss',
verbose=0,
save_best_only=False,
save_weights_only=False,
mode='auto',
period=5)
tensorboard = TensorBoard(log_dir='./logs/residual_to_depth')
model = residual_to_depth_model()
model.summary()
print ("Done")
print ("Start Training")
history = model.fit_generator(image_generator(filelist[:-30], images[:-30]),
steps_per_epoch=1000,
epochs=100,
validation_data = (val_x, val_y),
callbacks=[checkpoint, tensorboard])
 | {"/train.py": ["/model.py", "/utils.py"]} |
67,220 | frankyangTW/deep_demosaicing | refs/heads/master | /utils.py | import numpy as np
import tensorflow as tf
import os
from keras import backend as K
import subprocess
import cv2
import matplotlib.pyplot as plt
def limit_gpu():
os.environ["CUDA_VISIBLE_DEVICES"] = str(np.argmax([int(x.split()[2]) for x in subprocess.Popen("nvidia-smi -q -d Memory | grep -A4 GPU | grep Free", shell=True, stdout=subprocess.PIPE).stdout.readlines()]))
print (os.environ["CUDA_VISIBLE_DEVICES"])
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
# config.log_device_placement = True
sess = tf.Session(config=config)
K.tensorflow_backend.set_session(sess)
return
def mosaic(A):
output = np.zeros(A.shape)
H, W, D = A.shape
R_locations = np.zeros([H, W])
R_locations[::2, ::2] = 1
B_locations = np.zeros([H, W])
B_locations[1::2, 1::2] = 1
G_locations = np.zeros([H, W])
G_locations[::2, 1::2] = 1
G_locations[1::2, ::2] = 1
output = R_locations * A[:, :, 0] + G_locations * A[:, :, 1] + B_locations * A[:, :, 2]
return output
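# The masks above lay out an RGGB Bayer tile:
#   even rows:  R G R G ...
#   odd rows:   G B G B ...
# so for a 2x2 image, output[0,0] keeps the red channel, output[0,1] and
# output[1,0] the green channel, and output[1,1] the blue channel.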
def get_val_test_data(filelist, images):
val_x = []
val_y = []
for i in range(30):
img = images[i][:128, :128]
bayer_img = mosaic(img)
debayered_img = cv2.demosaicing(bayer_img.astype(np.uint8), cv2.COLOR_BayerBG2RGB)
val_x.append(debayered_img / 255)
val_y.append(img / 255)
val_x = np.array(val_x)
val_y = np.array(val_y)
# test_x = []
# test_y = []
# for i in range(30):
# f = np.random.randint(0, len(filelist))
# if images[f].shape[0] < 1024 or images[f].shape[1] < 1024:
# continue
# img = images[f][:1024, :1024]
# bayer_img = mosaic(img)
# debayered_img = cv2.demosaicing(bayer_img.astype(np.uint8), cv2.COLOR_BayerBG2RGB)
# test_x.append(debayered_img / 255)
# test_y.append(img / 255)
# test_x = np.array(test_x)
# test_y = np.array(test_y)
# print (val_x.shape, test_x.shape)
return val_x, val_y
def sample_images(filelist, num_imgs=10):
imgs = []
    for i in range(num_imgs):
img_file = filelist[i]
img = plt.imread(img_file)
imgs.append(img)
return imgs
def image_generator(filelist, images):
h, w = 128, 128
while 1:
train_X = []
train_y = []
f = np.random.randint(0, len(filelist), 32)
for i in f:
img = images[i]
x = np.random.randint(0, img.shape[1] - w)
y = np.random.randint(0, img.shape[0] - h)
train_y.append(img[y:y+h, x:x+w])
bayer_img = mosaic(img[y:y+h, x:x+w])
debayered_img = cv2.demosaicing(bayer_img.astype(np.uint8), cv2.COLOR_BayerBG2RGB)
train_X.append(debayered_img)
train_X = np.array(train_X)
train_y = np.array(train_y)
yield (train_X / 255, train_y / 255)
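# Usage sketch (with `filelist` and `images` loaded as in train.py):
#   gen = image_generator(filelist, images)
#   batch_x, batch_y = next(gen)
# batch_x and batch_y both have shape (32, 128, 128, 3) with values in [0, 1];
# batch_x is the naively demosaiced input, batch_y the ground-truth crop.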
| {"/train.py": ["/model.py", "/utils.py"]} |
67,221 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/urls.py | from django.urls import path
from . import views
from rest_framework_jwt.views import refresh_jwt_token
urlpatterns = [
path("csrf", views.get_csrf),
path("user", views.UserView.as_view()),
path("users", views.UserListView.as_view()),
path("refresh_token", refresh_jwt_token),
path("login", views.login),
path("listings", views.ListingView.as_view()),
path("listing/<int:pk>/update", views.ListingUpdateView.as_view()),
path("amenities", views.AmenityListView.as_view()),
path("updateAmenities", views.AmenityUpdateView.as_view()),
path("updateRules", views.RulesCreateUpdateView.as_view()),
path("reservations", views.ReservationView.as_view()),
path("owner_reservations", views.get_user_reservations),
path("approve_reservation", views.approve_reservation),
path("stay/<int:pk>", views.StayView.as_view()),
path("stays", views.StayListView.as_view()),
path("conversations", views.ConversationListView.as_view()),
path("conversation/<int:pk>", views.MessageListView.as_view())
] | {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,222 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/migrations/0003_auto_20200816_1825.py | # Generated by Django 3.1 on 2020-08-16 18:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0002_address_conversation_listing_listingphoto_message_reservation'),
]
operations = [
migrations.AddField(
model_name='listing',
name='description',
field=models.TextField(default='not available', max_length=500),
preserve_default=False,
),
migrations.AddField(
model_name='listing',
name='headline',
field=models.CharField(default='not available', max_length=255),
preserve_default=False,
),
]
| {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,223 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/serializers.py | from rest_framework import serializers
from .models import *
from random import randint
from django.core.validators import MaxLengthValidator, MinValueValidator, MaxValueValidator, MinLengthValidator
class CreateUserSerializer(serializers.Serializer):
password = serializers.CharField(max_length=100)
email = serializers.EmailField()
firstname = serializers.CharField(max_length=100)
lastname = serializers.CharField(max_length=100)
birthdate = serializers.DateField()
def save(self):
username = f'{self.data["firstname"]}_{self.data["lastname"]}{randint(0,200)}'
new_user = User(
email=self.data["email"],
first_name=self.data["firstname"],
last_name = self.data["lastname"],
birthdate = self.data["birthdate"],
username=username
)
new_user.set_password(self.data["password"])
new_user.save()
return new_user
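# Usage sketch (payload values are illustrative):
#   serializer = CreateUserSerializer(data={
#       "password": "s3cret", "email": "ada@example.com", "firstname": "Ada",
#       "lastname": "Lovelace", "birthdate": "1815-12-10",
#   })
#   if serializer.is_valid():
#       user = serializer.save()  # username becomes e.g. "Ada_Lovelace42"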
class UserSerializer(serializers.Serializer):
email = serializers.EmailField()
first_name = serializers.CharField(max_length=100)
last_name = serializers.CharField(max_length=100)
birthdate = serializers.DateField()
username = serializers.CharField()
class ReviewSerializer(serializers.ModelSerializer):
user = UserSerializer()
class Meta:
model = Review
fields = "__all__"
class ListingPhotoSerializer(serializers.ModelSerializer):
class Meta:
model = ListingPhoto
fields = "__all__"
class AddressSerializer(serializers.ModelSerializer):
class Meta:
model = Address
fields = "__all__"
class ListingReservationSerializer(serializers.ModelSerializer):
photos = ListingPhotoSerializer(many=True, read_only=True)
address = AddressSerializer()
owner = UserSerializer()
class Meta:
model = Listing
fields = ["id", "headline", "photos", "owner", "address"]
class ReservationSerializer(serializers.ModelSerializer):
listing = ListingReservationSerializer()
user = UserSerializer()
class Meta:
model = Reservation
fields = "__all__"
class RulesSerializer(serializers.ModelSerializer):
class Meta:
model = Rules
fields = [
"smoking",
"pets",
"parties",
"check_in",
"check_out",
"additional"
]
class CreateRulesSerializer(serializers.ModelSerializer):
class Meta:
model = Rules
fields = "__all__"
class AmenitySerializer(serializers.ModelSerializer):
class Meta:
model = Amenity
fields = "__all__"
class ListingSerializer(serializers.ModelSerializer):
owner = UserSerializer()
address = AddressSerializer()
photos = ListingPhotoSerializer(many=True, read_only=True)
reviews = ReviewSerializer(many=True, read_only=True)
amenities = AmenitySerializer(source="amenity_set", many=True, read_only=True)
rules = RulesSerializer()
class Meta:
model = Listing
fields = [
'id',
'owner',
'address',
'description',
'headline',
'photos',
"reservations",
"price_per_night",
"room_type",
"reviews",
"rules",
"amenities"
]
class CreateListingSerializer(serializers.Serializer):
description = serializers.CharField(max_length=500)
headline = serializers.CharField(max_length=255)
price_per_night = serializers.DecimalField(max_digits=5, decimal_places=2)
room_type = serializers.ChoiceField(choices=["P", "S", "W"])
def create(self, validated_data):
return Listing.objects.create(**validated_data)
class ListingQuerySerializer(serializers.Serializer):
city = serializers.CharField(max_length=50)
state = serializers.CharField(max_length=2)
class MessageSerializer(serializers.Serializer):
sender = serializers.CharField()
message = serializers.CharField()
id = serializers.CharField()
time = serializers.CharField()
class Meta:
model = Message
fields = ["id", "sender", "message", "time"]
class ConversationSerializer(serializers.Serializer):
sender = UserSerializer()
receiver = UserSerializer()
id = serializers.CharField()
messages = MessageSerializer(many=True)
class Meta:
model = Conversation
fields = "__all__"
| {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,224 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/migrations/0013_reservation_accepted.py | # Generated by Django 3.1 on 2020-08-30 21:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0012_auto_20200826_1059'),
]
operations = [
migrations.AddField(
model_name='reservation',
name='accepted',
field=models.BooleanField(default=False),
),
]
| {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,225 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/migrations/0002_address_conversation_listing_listingphoto_message_reservation.py | # Generated by Django 3.1 on 2020-08-15 18:39
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import localflavor.us.models
class Migration(migrations.Migration):
dependencies = [
('main', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Address',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('street', models.CharField(max_length=255)),
('city', models.CharField(max_length=255)),
('state', localflavor.us.models.USStateField(max_length=2)),
('zip_code', localflavor.us.models.USZipCodeField(max_length=10)),
('created_at', models.DateTimeField(auto_now_add=True)),
],
),
migrations.CreateModel(
name='Conversation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_created=True)),
('last_modified', models.DateTimeField()),
('receiver', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='receiver', to=settings.AUTH_USER_MODEL)),
('sender', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sender', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Listing',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('address', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='main.address')),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Reservation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('from_date', models.DateField()),
('to_date', models.DateField()),
('listing', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.listing')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Message',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('message', models.TextField(max_length=500)),
('conversation', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.conversation')),
('sender', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='ListingPhoto',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('filepath', models.FilePathField()),
('listing', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.listing')),
],
),
]
| {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,226 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/migrations/0007_user_superhost.py | # Generated by Django 3.1 on 2020-08-22 10:35
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0006_auto_20200817_2014'),
]
operations = [
migrations.AddField(
model_name='user',
name='superhost',
field=models.BooleanField(default=False),
),
]
| {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,227 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/migrations/0011_auto_20200826_1045.py | # Generated by Django 3.1 on 2020-08-26 10:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0010_auto_20200826_0955'),
]
operations = [
migrations.AlterField(
model_name='amenity',
name='listings',
field=models.ManyToManyField(related_name='amentities', related_query_name='amenity', to='main.Listing'),
),
]
| {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,228 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/freebnb/routing.py | from django.urls import path
from channels.routing import ProtocolTypeRouter, URLRouter
from channels.auth import AuthMiddlewareStack
from main.consumers import ChatConsumer
chat = ProtocolTypeRouter({
"websocket": AuthMiddlewareStack(
URLRouter([
path("messages/<int:conversation_id>", ChatConsumer)
])
)
}) | {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,229 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/models.py | from django.db import models
from django.db.models import Q
from django.contrib.auth.models import AbstractUser
from localflavor.us.models import USStateField, USZipCodeField
from django.core.validators import MaxValueValidator, MinValueValidator
# Create your models here.
class User(AbstractUser):
email = models.EmailField(unique=True)
birthdate = models.DateField(null=True)
superhost = models.BooleanField(default=False)
class Address(models.Model):
street = models.CharField(max_length=255)
city = models.CharField(max_length=255)
state = USStateField()
zip_code = USZipCodeField()
created_at = models.DateTimeField(auto_now_add=True)
class Listing(models.Model):
address = models.OneToOneField(Address, on_delete=models.CASCADE)
owner = models.ForeignKey(User, on_delete=models.CASCADE, related_name="listings", related_query_name="listing")
headline = models.CharField(max_length=255)
description = models.TextField(max_length=500)
price_per_night = models.DecimalField(max_digits=4, decimal_places=2)
room_type = models.CharField(max_length=1, choices=[("P", "Private Room"), ("S", "Shared Room"), ("W", "Whole House")])
class Amenity(models.Model):
amenity = models.CharField(max_length=50)
listings = models.ManyToManyField(Listing)
class Rules(models.Model):
listing = models.OneToOneField(Listing, on_delete=models.CASCADE, related_name="rules", related_query_name="rules")
smoking = models.BooleanField(default=False)
pets = models.BooleanField(default=False)
parties = models.BooleanField(default=False)
check_in = models.IntegerField(default=12)
check_out = models.IntegerField(default=10)
additional = models.TextField(max_length=500)
class Review(models.Model):
listing = models.ForeignKey(Listing, on_delete=models.CASCADE, related_name="reviews", related_query_name="review")
user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="reviews", related_query_name="review")
review = models.TextField(max_length=500)
rating = models.IntegerField(default=1, validators=[
MaxValueValidator(5),
MinValueValidator(1)
])
class ListingPhoto(models.Model):
image = models.ImageField(upload_to="listings/")
listing = models.ForeignKey(Listing, on_delete=models.CASCADE, related_name="photos", related_query_name="photo")
class Reservation(models.Model):
from_date = models.DateField()
to_date = models.DateField()
total_price = models.IntegerField()
listing = models.ForeignKey(Listing, on_delete=models.CASCADE, related_name="reservations", related_query_name="reservation")
user = models.ForeignKey(User, on_delete=models.CASCADE)
accepted = models.BooleanField(default=False)
class ConversationManager(models.Manager):
def get_user_convos(self, user):
q = Q(sender=user) | Q(receiver=user)
return self.get_queryset().filter(q)
def get_prev_convo(self, user1, user2):
q1 = Q(sender=user1) & Q(receiver=user2)
q2 = Q(sender=user2) & Q(receiver=user1)
return self.get_queryset().filter(q1 | q2)
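# Usage sketch: the Q objects let these helpers match either side of a thread.
#   Conversation.objects.get_user_convos(alice)      # alice is sender OR receiver
#   Conversation.objects.get_prev_convo(alice, bob)  # alice<->bob, either direction
# (alice and bob are illustrative User instances.)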
class Conversation(models.Model):
objects = ConversationManager()
created_at = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField(auto_now_add=True)
sender = models.ForeignKey(User, on_delete=models.CASCADE, related_name="conversations", related_query_name="conversation")
receiver = models.ForeignKey(User, on_delete=models.CASCADE)
class Message(models.Model):
sender = models.ForeignKey(User, on_delete=models.CASCADE)
message = models.TextField(max_length=500)
conversation = models.ForeignKey(Conversation, on_delete=models.CASCADE, related_name="messages", related_query_name="message")
time = models.TextField(max_length=15, default="")
| {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,230 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/migrations/0009_auto_20200826_0945.py | # Generated by Django 3.1 on 2020-08-26 09:45
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0008_auto_20200824_0003'),
]
operations = [
migrations.RenameModel(
old_name='Reviews',
new_name='Review',
),
]
| {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,231 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/migrations/0008_auto_20200824_0003.py | # Generated by Django 3.1 on 2020-08-24 00:03
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('main', '0007_user_superhost'),
]
operations = [
migrations.AlterField(
model_name='listing',
name='price_per_night',
field=models.DecimalField(decimal_places=2, max_digits=4),
),
migrations.CreateModel(
name='Rules',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('smoking', models.BooleanField(default=False)),
('pets', models.BooleanField(default=False)),
('parties', models.BooleanField(default=False)),
('check_in', models.IntegerField(default=12)),
('check_out', models.IntegerField(default=10)),
('additional', models.TextField(max_length=500)),
('listing', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='rules', related_query_name='rules', to='main.listing')),
],
),
migrations.CreateModel(
name='Reviews',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('review', models.TextField(max_length=500)),
('rating', models.IntegerField(default=1, validators=[django.core.validators.MaxValueValidator(5), django.core.validators.MinValueValidator(1)])),
('listing', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reviews', related_query_name='review', to='main.listing')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reviews', related_query_name='review', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Amenity',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('amenity', models.CharField(max_length=50)),
('listings', models.ManyToManyField(to='main.Listing')),
],
),
]
| {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,232 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/migrations/0006_auto_20200817_2014.py | # Generated by Django 3.1 on 2020-08-17 20:14
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('main', '0005_auto_20200817_1445'),
]
operations = [
migrations.AddField(
model_name='listing',
name='price_per_night',
field=models.IntegerField(default=10),
preserve_default=False,
),
migrations.AddField(
model_name='listing',
name='room_type',
field=models.CharField(choices=[('P', 'Private Room'), ('S', 'Shared Room'), ('W', 'Whole House')], default='W', max_length=1),
preserve_default=False,
),
migrations.AddField(
model_name='reservation',
name='total_price',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AlterField(
model_name='conversation',
name='receiver',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='conversation',
name='sender',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='conversations', related_query_name='conversation', to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='listing',
name='owner',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='listings', related_query_name='listing', to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='reservation',
name='listing',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reservations', related_query_name='reservation', to='main.listing'),
),
]
| {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,233 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/migrations/0005_auto_20200817_1445.py | # Generated by Django 3.1 on 2020-08-17 14:45
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('main', '0004_auto_20200817_0015'),
]
operations = [
migrations.AlterField(
model_name='listingphoto',
name='image',
field=models.ImageField(upload_to='listings/'),
),
migrations.AlterField(
model_name='listingphoto',
name='listing',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='photos', related_query_name='photo', to='main.listing'),
),
migrations.AlterField(
model_name='message',
name='conversation',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='messages', related_query_name='message', to='main.conversation'),
),
migrations.AlterField(
model_name='reservation',
name='listing',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='listings', related_query_name='listing', to='main.listing'),
),
]
| {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,234 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/migrations/0004_auto_20200817_0015.py | # Generated by Django 3.1 on 2020-08-17 00:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0003_auto_20200816_1825'),
]
operations = [
migrations.RemoveField(
model_name='listingphoto',
name='filepath',
),
migrations.AddField(
model_name='listingphoto',
name='image',
field=models.ImageField(default='notavail.png', upload_to='../../src/assetts'),
preserve_default=False,
),
]
| {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,235 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/migrations/0010_auto_20200826_0955.py | # Generated by Django 3.1 on 2020-08-26 09:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('main', '0009_auto_20200826_0945'),
]
operations = [
migrations.AlterField(
model_name='rules',
name='listing',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='rules', related_query_name='rules', to='main.listing'),
),
]
| {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,236 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/views.py | from .serializers import *
from rest_framework.views import APIView
from rest_framework.generics import RetrieveAPIView, ListAPIView, UpdateAPIView, CreateAPIView
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes
from rest_framework.response import Response
from .models import User, Listing, Address, ListingPhoto, Reservation
from rest_framework import authentication, permissions
from rest_framework.parsers import MultiPartParser, JSONParser, FormParser
from django.middleware.csrf import get_token
from datetime import datetime
from rest_framework_jwt.settings import api_settings
from dateutil import parser as dateutil_parser
@api_view(["get"])
def get_csrf(request):
return Response({ "csrf": get_token(request) })
class UserView(APIView):
permission_classes = [permissions.AllowAny]
def get(self, request):
return Response({ "msg": "this is a success!"})
def post(self, request, format=None):
# convert mm/dd/yyyy birth being sent to python friendly format
birthdate = datetime.strptime(request.data["birthdate"], "%m/%d/%Y")
request.data["birthdate"] = str(birthdate.date())
# create serialized user
user = CreateUserSerializer(data=request.data)
# check if valid, create token, send user info and token back to server
if user.is_valid():
new_user = user.save()
payload = api_settings.JWT_PAYLOAD_HANDLER(new_user)
token = api_settings.JWT_ENCODE_HANDLER(payload)
return Response({
"status": "success",
"username": new_user.username,
"email": new_user.email,
"firstname": new_user.first_name,
"lastname": new_user.last_name,
"token": token
})
else:
return Response({ "status": "error", "errors": user.errors})
@api_view(["GET"])
@permission_classes([permissions.IsAuthenticated])
def get_user_reservations(request):
try:
user = request.user
approved_reservations = Reservation.objects.filter(listing__owner=user, accepted=True)
pending_reservations = Reservation.objects.filter(listing__owner=user, accepted=False)
return Response({ "status": "success", "approved": ReservationSerializer(approved_reservations, many=True).data,
"pending": ReservationSerializer(pending_reservations, many=True).data
})
except Exception:
return Response({ "status": "error" })
@api_view(["PATCH"])
@permission_classes([permissions.IsAuthenticated])
def approve_reservation(request):
user = request.user
try:
reservation = Reservation.objects.get(pk=request.data["id"])
        if user != reservation.listing.owner:
            return Response({"status": "error", "msg": "not authorized"})
        reservation.accepted = True
        reservation.save()
        # notify the guest that their stay request was accepted
        convo = Conversation.objects.get_prev_convo(user, reservation.user).first()
        if convo:
            Message.objects.create(
                sender=user,
                message=f"{user.first_name} has accepted your request to stay",
                time=datetime.now().strftime("%m/%d/%Y %H:%M"),
                conversation=convo
            )
        return Response({"status": "success"})
    except Exception:
        return Response({ "status": "error"})
class ListingView(APIView):
permission_classes = [permissions.IsAuthenticated]
serializer_class = ListingSerializer
parser_classes = [MultiPartParser, FormParser, JSONParser]
def get(self, request, format=None):
user = request.user
queryset = Listing.objects.filter(owner=user)
return Response({ "listings": ListingSerializer(queryset, many=True).data })
def post(self, request, format=None):
user = request.user
address = AddressSerializer(data=request.data)
if not address.is_valid():
return Response({ "status": "error", "errors": address.errors })
serialized_listing = CreateListingSerializer(data=request.data)
if not serialized_listing.is_valid():
return Response({ "status": "error", "errors": serialized_listing.errors })
listing = serialized_listing.save(owner=user, address=address.save())
try:
image=request.FILES["photos"]
        except KeyError:
return Response({ "error": "please include a photo!"})
listingphoto = ListingPhoto(listing=listing, image=image)
listingphoto.save()
return Response({ "status": "success", "listing": ListingSerializer(instance=listing).data})
def delete(self, request, format=None):
try:
Listing.objects.filter(id=request.data["id"]).delete()
return Response({ "status": "success" })
        except Exception:
return Response({ "status": "error" })
class UserListView(APIView):
query_set = User.objects.all()
serializer_class = CreateUserSerializer
permission_classes = [permissions.IsAuthenticatedOrReadOnly]
def get(self, request):
        users = UserSerializer(User.objects.all(), many=True).data
return Response({ "users": users })
@api_view(["POST"])
@permission_classes([permissions.AllowAny])
def login(request):
email = request.data["email"]
password = request.data["password"]
try:
user = User.objects.get(email=email)
    except User.DoesNotExist:
return Response({ "error": "email not found "})
if not user.check_password(password):
return Response({ "error": "password incorrect "})
payload = api_settings.JWT_PAYLOAD_HANDLER(user)
token = api_settings.JWT_ENCODE_HANDLER(payload)
return Response({
"token": token,
"status": "success",
"user": UserSerializer(instance=user).data
})
class ReservationView(APIView):
permission_classes = [permissions.IsAuthenticated]
serializer_class = ReservationSerializer
def get(self, request, format=None):
user = request.user
queryset = Reservation.objects.filter(user=user)
return Response({ "reservations": ReservationSerializer(queryset, many=True).data })
def post(self, request, format=None):
user = request.user
toDate = dateutil_parser.isoparse(request.data["toDate"])
fromDate = dateutil_parser.isoparse(request.data["fromDate"])
totalDays = toDate - fromDate
price = round(int(totalDays.days) * float(request.data["price"]), 2)
listing = Listing.objects.get(pk=request.data["id"])
        # any existing reservation overlapping the requested date range
        reservations = Reservation.objects.filter(listing=listing, from_date__lt=toDate, to_date__gt=fromDate)
# stop user from booking their own place
if listing.owner == user:
return Response({ "status": "error", "msg": "you cannot book your own place"})
# if no reservations exists for current dates make one
if not reservations:
try:
reservation = Reservation(
user=user,
to_date = toDate,
from_date = fromDate,
listing=listing,
total_price = price
)
reservation.save()
# send owner of lister a message alerting them about the request
                conversation = Conversation.objects.get_prev_convo(user, listing.owner).first()
                message = f"{user.first_name} would like to book your place"
                # if no conversation currently exists between requester and owner make one
                if conversation is None:
                    conversation = Conversation.objects.create(
                        sender=user,
                        receiver=listing.owner
                    )
                # add an automated message to the conversation
                Message.objects.create(
                    message=message,
                    sender=user,
                    conversation=conversation,
                    time=datetime.now().strftime("%m/%d/%Y %H:%M")
                )
return Response({ "status": "success"}, status=200)
except Exception:
return Response({ "status": "error"}, status=401)
else:
return Response({ "status": "error", "msg": "dates are not valid" }, status=401)
class StayView(RetrieveAPIView):
permission_classes = [permissions.AllowAny]
serializer_class = ListingSerializer
queryset = Listing.objects.all()
class ListingUpdateView(UpdateAPIView):
permission_classes = [permissions.IsAuthenticated]
serializer_class = ListingSerializer
queryset = Listing.objects.all()
lookup_field = "pk"
def update(self, request, *args, **kwargs):
listing = ListingSerializer(instance=self.get_object(), data=request.data, partial=True)
address = AddressSerializer(instance=self.get_object().address, data=request.data, partial=True)
if listing.is_valid():
listing.save()
else:
return Response({"status": "invalid listing data"})
if address.is_valid():
address.save()
else:
return Response({"status": "invalid address data"})
if "photos" in request.FILES:
photo = ListingPhoto(listing=self.get_object(), image=request.FILES["photos"])
return Response({"status": "success" })
class StayListView(APIView):
permission_classes = [permissions.AllowAny]
def get(self, request, format=None):
query = {}
if request.GET.get("city"):
query["address__city__contains"] = request.GET.get("city")
if request.GET.get("state"):
query["address__state__contains"] = request.GET.get("state")
if request.GET.get("priceHigh"):
query["price_per_night__lte"] = request.GET.get("priceHigh")
if request.GET.get("priceLow"):
query["price_per_night__gte"] = request.GET.get("priceLow")
exclude = {}
if request.GET.get("toDate"):
exclude["reservation__to_date__gte"] = datetime.strptime("%Y-%m-%d", request.GET.get("toDate"))
if request.GET.get("fromDate"):
exclude["reservation__from_date__lte"] = datetime.strptime("%Y-%m-%d", request.GET.get("fromDate"))
queryset = Listing.objects.filter(**query).exclude(**exclude)
return Response({ "stays": ListingSerializer(queryset, many=True).data})
class AmenityListView(ListAPIView):
permission_classes = [permissions.IsAuthenticatedOrReadOnly]
serializer_class = AmenitySerializer
queryset = Amenity.objects.all()
class AmenityUpdateView(UpdateAPIView):
permission_classes = [permissions.IsAuthenticated]
serializer_class = AmenitySerializer
queryset = Amenity.objects.all()
def put(self, request):
amenities = request.data["amenities"]
id = request.data["id"]
listing = Listing.objects.get(id=id)
for amenity in amenities:
a = Amenity.objects.get(amenity=amenity["amenity"])
if amenity["checked"]:
listing.amenity_set.add(a)
else:
listing.amenity_set.remove(a)
return Response({ "status": "success"})
class RulesCreateUpdateView(CreateAPIView):
permission_classes = [permissions.IsAuthenticated]
queryset = Rules.objects.all()
serializer_class = RulesSerializer
def post(self, request, *args, **kwargs):
listing = Listing.objects.get(id=request.data["listing"])
try:
rules = listing.rules
rules_serializer = CreateRulesSerializer(instance=rules, data=request.data, partial=True)
if rules_serializer.is_valid():
rules_serializer.save()
else:
return Response({ "status": "error"}, status=500)
        except Rules.DoesNotExist:
            request.data["listing"] = listing
            rules = Rules(**request.data)
            try:
                rules.save()
            except Exception:
                return Response({"status": "error"}, status=500)
return Response(data={"status": "success"})
class ConversationListView(APIView):
permission_classes = [permissions.IsAuthenticated]
serializer_class = ConversationSerializer
queryset = Conversation.objects.all()
def get(self, request, format=None):
try:
user = request.user
conversations = Conversation.objects.get_user_convos(user)
return Response({
"convos": ConversationSerializer(conversations, many=True).data,
"status": "success"
})
except Exception as e:
print(e)  # Exception.with_traceback() on the bare class raised a TypeError
return Response({ "status": "error" })
class MessageListView(APIView):
permission_classes = [permissions.IsAuthenticated]
queryset = Conversation.objects.all()
serializer_class = MessageSerializer
lookup_field = "pk"
def get(self, request, *args, **kwargs):
user = request.user
convo = Conversation.objects.get(pk=kwargs["pk"])
if convo.receiver == user or convo.sender == user:
messages = convo.messages.all()
return Response({ "status": "success", "messages": MessageSerializer(messages, many=True).data})
else:
return Response({ "status": "error", "msg": "not authorized"}) | {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,237 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/consumers.py | from channels.consumer import AsyncConsumer
from channels.db import database_sync_to_async
from channels.exceptions import StopConsumer
from rest_framework_jwt.authentication import jwt_get_username_from_payload, jwt_decode_handler
import json
from .models import Conversation, Message, User
class ChatConsumer(AsyncConsumer):
async def websocket_connect(self, event):
# get token from query string by converting byte -> string -> parse
query_string = self.scope["query_string"].decode("utf8")
token = query_string.split("=")[1]
self.conversation_id = self.scope["url_route"]["kwargs"]["conversation_id"]
# if we successfully get a user, store it on self.user;
# the conversation id is kept in self.conversation_id
if token:
payload = jwt_decode_handler(token)
user = jwt_get_username_from_payload(payload)
self.user = user
self.room_name = f"conversation_{self.conversation_id}"
await self.secure_conversation(self.conversation_id, self.user)
if user:
await self.channel_layer.group_add(
self.room_name,
self.channel_name,
)
await self.send({
"type": "websocket.accept"
})
else:
await self.send({
"type": "websocket.close"
})
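# Example connect URL (hypothetical host, route, and ids; the actual path
# depends on the project's channels routing):
#   ws://localhost:8000/ws/chat/42/?token=<jwt>
# the token is pulled from the query string above and the consumer joins
# the "conversation_42" channel group.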
async def chat_message(self, event):
data = json.loads(event["text"])
await self.send({
"type": "websocket.send",
"text": event["text"],
})
await self.save_message(data)
async def websocket_receive(self, event):
await self.channel_layer.group_send(
self.room_name,
{
"type": "chat_message",
"text": event["text"]
}
)
async def websocket_disconnect(self, event):
print("disconnected")
await self.send({
"type": "websocket.close"
})
raise StopConsumer()
@database_sync_to_async
def secure_conversation(self, conversation_id, user):
"""
make sure the user is either the receiver or the sender recorded in this conversation
"""
convo = Conversation.objects.filter(id=conversation_id)[0]
# `user` is the username string extracted from the JWT, so compare against
# the participants' usernames; the original `or` rejected every connection
if convo.receiver.username != user and convo.sender.username != user:
return self.send({
"type": "websocket.close"
})
@database_sync_to_async
def save_message(self, data):
user = User.objects.get(username=self.user)
convo = Conversation.objects.get(id=self.conversation_id)
return Message.objects.create(message=data["message"], time=data["time"], sender=user, conversation=convo) | {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,238 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/migrations/0016_message_time.py | # Generated by Django 3.1 on 2020-09-09 10:20
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0015_auto_20200905_0030'),
]
operations = [
migrations.AddField(
model_name='message',
name='time',
field=models.TextField(default='', max_length=15),
),
]
| {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,239 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/migrations/0012_auto_20200826_1059.py | # Generated by Django 3.1 on 2020-08-26 10:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0011_auto_20200826_1045'),
]
operations = [
migrations.AlterField(
model_name='amenity',
name='listings',
field=models.ManyToManyField(to='main.Listing'),
),
]
| {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,240 | ccunnin8/FreeBNB | refs/heads/master | /freebnb/freebnb/server/freebnb/main/migrations/0015_auto_20200905_0030.py | # Generated by Django 3.1 on 2020-09-05 00:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0014_auto_20200905_0029'),
]
operations = [
migrations.AlterField(
model_name='conversation',
name='last_modified',
field=models.DateTimeField(auto_now_add=True),
),
]
| {"/freebnb/freebnb/server/freebnb/main/serializers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"], "/freebnb/freebnb/server/freebnb/main/consumers.py": ["/freebnb/freebnb/server/freebnb/main/models.py"]} |
67,257 | RyanKung/qubit | refs/heads/master | /qubit/__main__.py | from .wsgiapp import app
def main() -> None:
host, port = '0.0.0.0', 8888
print(app.url_map)
app.run(host, port, debug=True, use_reloader=False)
if __name__ == '__main__':
main()
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,258 | RyanKung/qubit | refs/heads/master | /qubit/types/__init__.py | from .qubit import Qubit
from .states import States
__all__ = ['Qubit', 'States']
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,259 | RyanKung/qubit | refs/heads/master | /qubit/measure/pandas.py | import pandas
from functools import partial
import json
from types import GeneratorType
__all__ = ['pandas', 'LazyQueryReader']
class LazyQueryReader(object):
def __init__(self, queryer: GeneratorType):
self.g = queryer
self.count = 0
def read(self, n=0):
try:
query_res = list(next(self.g))
if not query_res:
return ''
# serialise the batch we already pulled; the original called next(self.g)
# a second time here, silently dropping every other batch
res = ','.join(map(partial(json.dumps, default=str), query_res[0]))
if res:
self.count = self.count + 1
return ('%s\n' % res).encode()
except StopIteration:
return ''
def __iter__(self):
return self
def __next__(self):
return self.read()
def read_generator(gen: GeneratorType, keys=[]):
return pandas.read_csv(LazyQueryReader(gen),
lineterminator='\n',
names=list(keys),
engine='python')
pandas.read_gen = read_generator
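# Usage sketch for pandas.read_gen (hypothetical generator, not part of this
# module; each next() yields one batch of row tuples):
#   gen = iter([[(1, 'a')], [(2, 'b')]])
#   df = pandas.read_gen(gen, keys=['id', 'name'])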
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,260 | RyanKung/qubit | refs/heads/master | /qubit/apis/states.py | import datetime
from itertools import groupby
from qubit.types import States
from qubit.core import app
from .utils import resp_wrapper as wrapper
from .utils import jsonize
__all__ = ['states_api', 'states_period_api']
@app.route('/qubit/<id>/from/<start>/to/<end>/', methods=['GET'])
@jsonize
@wrapper
def states_api(id, start, end):
data = States.select(id, start, end)
return [d._asdict() for d in data]
@app.route('/qubit/<id>/period/<period>/', methods=['GET'])
@app.route('/qubit/<id>/period/<period>/<cycle>/', methods=['GET'])
@jsonize
@wrapper
def states_period_api(id, period, cycle=1):
return States.get_period(id, period, cycle)
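# Example request shapes for these routes (hypothetical qubit id):
#   GET /qubit/1/from/2016-01-01/to/2016-02-01/   -> raw states in range
#   GET /qubit/1/period/days/7/                   -> per-day aggregates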
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,261 | RyanKung/qubit | refs/heads/master | /qubit/io/celery/config.py | from qubit.config import MQ_BROKER, REDIS_BACKEND
TIMEZONE = 'Europe/London'
ENABLE_UTC = True
BROKER_URL = MQ_BROKER
CELERY_RESULT_BACKEND = REDIS_BACKEND
CELERY_ACCEPT_CONTENT = ['application/json', 'application/x-python-serialize']
CELERY_TASK_RESULT_EXPIRES = 18000 # 5 hours.
CELERY_ALWAYS_EAGER = False
CELERY_DEFAULT_QUEUE = 'qubit.tasks.default'
CELERY_DEFAULT_EXCHANGE = 'qubit.tasks.default'
CELERY_DEFAULT_ROUTING_KEY = 'default'
# These settings are used to fix the `celeryev.xxx queue huge length` problem:
# http://stackoverflow.com/questions/30227266/what-is-the-celeryev-queue-for
# http://stackoverflow.com/questions/17778715/celeryev-queue-in-rabbitmq-becomes-very-large
# DOC:
# http://celery.readthedocs.io/en/latest/configuration.html#celery-event-queue-ttl
CELERY_SEND_EVENTS = True
CELERY_EVENT_QUEUE_TTL = 60
CELERY_EVENT_QUEUE_EXPIRES = 60 # Will delete all celeryev. queues without consumers after 1 minute.
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,262 | RyanKung/qubit | refs/heads/master | /tests/apis/__init__.py | from functools import partial, reduce
from operator import add
import werkzeug.test
from qubit.wsgiapp import app
__all__ = ['client', 'request', 'get']
client = werkzeug.test.Client(app)
environ_overrides = {'REMOTE_ADDR': '127.0.0.1:8086'}
def request(*args, **kwargs):
resp = partial(client.open,
environ_overrides=environ_overrides,
content_type='application/json')(*args, **kwargs)[0]
return reduce(add, map(bytes, resp)).decode()
def get(*args, **kwargs):
resp = client.open(environ_overrides=environ_overrides,
*args, **kwargs)[0]
return reduce(add, map(bytes, resp)).decode()
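# Usage sketch (hypothetical route; werkzeug's Client.open accepts the usual
# path/method/data keywords):
#   body = get('/qubit/admin/', method='GET')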
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,263 | RyanKung/qubit | refs/heads/master | /qubit/io/pulsar.py | from functools import partial
import pulsar
__all__ = ['period_task', 'async_task']
def period_task(fn):
def task_wrapper(actor):
return fn()
if pulsar.get_actor():
pulsar.spawn(period_task=task_wrapper)
return fn
# `async` became a reserved word in Python 3.7, so both the decorator name and
# the attribute it sets are renamed; call sites using `fn.async` need updating
def async_task(fn):
def task_wrapper(actor, *args, **kwargs):
return fn(*args, **kwargs)
fn.async_call = lambda *k, **kw: pulsar.spawn(start=partial(task_wrapper, *k, **kw))
return fn
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,264 | RyanKung/qubit | refs/heads/master | /qubit/io/celery/__init__.py | import os
from typing import Callable
from celery import Celery
from celery import Task
from .utils import task_method
from .types import PeriodTask
__all__ = ['queue', 'period_task', 'task_method', 'Entanglement']
class Entanglement(Task):
abstract = True
os.environ['CELERY_CONFIG_MODULE'] = 'qubit.io.celery.config'
queue = Celery()
def period_task(fn: Callable, period=20, name='lambda'):
if isinstance(fn, task_method):
fn = fn.task
if not period_task.__dict__.get('tasks'):
period_task.__dict__['tasks'] = []
period_task.__dict__['tasks'].append(
PeriodTask(period / 1000, fn, name)
)
return fn
@queue.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
for task in period_task.tasks:
sender.add_periodic_task(task.period, task.task.s(), name=task.name)
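# Usage sketch (hypothetical task; note the period argument is given in
# milliseconds and divided by 1000 above):
#   @period_task
#   @queue.task
#   def heartbeat():
#       print('tick')   # scheduled every 0.02s with the default period=20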
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,265 | RyanKung/qubit | refs/heads/master | /qubit/io/postgres/postgres.py | try:
import psycopg2
import psycopg2.pool
except ImportError:  # fall back to psycopg2cffi (e.g. under PyPy)
from psycopg2cffi import compat
compat.register()
import psycopg2
import psycopg2.pool
from qubit.config import PGSQL_PARAM
__all__ = ['connection', 'pool']
def connection():
if not getattr(connection, '_conn', None):
connection._conn = psycopg2.connect(**PGSQL_PARAM)
return connection._conn
# pool for handing out new connections on demand
pool = psycopg2.pool.SimpleConnectionPool(1, 60 * 1000, **PGSQL_PARAM)
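# Usage sketch for the pool (always return connections when done):
#   conn = pool.getconn()
#   try:
#       with conn.cursor() as cur:
#           cur.execute('SELECT 1')
#   finally:
#       pool.putconn(conn)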
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,266 | RyanKung/qubit | refs/heads/master | /qubit/measure/__init__.py | from .pandas import pandas
__all__ = ['pandas']
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,267 | RyanKung/qubit | refs/heads/master | /qubit/io/postgres/__init__.py | from .postgres import connection, pool
from . import types
from .queryset import QuerySet
__all__ = ['connection', 'pool', 'types', 'QuerySet']
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,268 | RyanKung/qubit | refs/heads/master | /qubit/io/utils.py | import pulsar
import asyncio
from functools import wraps, partial
from typing import Callable
from types import coroutine
from threading import Thread
from multiprocessing import Process
from flask import request
__all__ = ['syncio', 'sync2async']
loop = pulsar.get_event_loop()
def syncio(fn, loop):
@wraps(fn)
def wrapper(*args, **kwargs):
return loop.run_until_complete(fn(*args, **kwargs))
return wrapper
def sync2async(fn: Callable) -> coroutine:
async def handler(*args, **kwargs):
def wrapper(ft: asyncio.Future):
print('call wrapper')
res = fn(*args, **kwargs)
ft.set_result(res)
loop.stop()
future = asyncio.Future()
loop.call_later(0, partial(wrapper, future))
return future
return handler
def with_new_thread(fn):
def _(*args, **kwargs):
def _(loop):
asyncio.set_event_loop(loop)
loop.run_forever()
loop = asyncio.new_event_loop()
Thread(target=_, args=(loop,)).start()
future = asyncio.run_coroutine_threadsafe(fn(*args, **kwargs), loop)
return future.result()
return _
def with_loop(fn):
def _(*args, **kwargs):
try:
loop = request.environ.get('pulsar.connection')._loop
return loop.run_until_complete(fn(*args, **kwargs))
except OSError as ex:
print('event loop unavailable (%s), creating a new one' % ex)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
res = loop.run_until_complete(fn(*args, **kwargs))
return res
except RuntimeError as ex:
# if ex.args[0] == 'Event loop is running.':
# res = with_new_thread(fn)(*args, **kwargs)
# return res
# if ex.args[0] == 'Event loop is closed':
# res = with_new_thread(fn)(*args, **kwargs)
# return res
raise ex
return _
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,269 | RyanKung/qubit | refs/heads/master | /qubit/views/admin.py | from flask import render_template
from qubit.core.app import app
__all__ = ['admin']
@app.route('/qubit/admin/')
def admin():
return render_template('index.html')
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,270 | RyanKung/qubit | refs/heads/master | /qubit/views/__init__.py | from . import admin
__all__ = ['admin']
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,271 | RyanKung/qubit | refs/heads/master | /qubit/utils.py | import time
from functools import wraps
__all__ = ['timer']
def timer(fn):
@wraps(fn)
def handler(*args, **kwargs):
start = time.time()
res = fn(*args, **kwargs)
end = time.time()
cost = str((end - start) * 1000.0)
print('calling %s cost %s ms' % (fn.__name__, cost))
return res
return handler
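# Usage sketch:
#   @timer
#   def slow():
#       time.sleep(0.1)
#   slow()   # prints roughly "calling slow cost 100.0 ms"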
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,272 | RyanKung/qubit | refs/heads/master | /qubit/io/celery/types.py | import celery
from typing import NamedTuple
PeriodTask = NamedTuple('PeriodTask', [
('period', float),
('task', celery.Task),
('name', str)
])
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,273 | RyanKung/qubit | refs/heads/master | /qubit/config.py | import os
__all__ = ['PGSQL_PARAM', 'MQ_BROKER', 'REDIS_BACKEND', 'STATIC_PATH', 'STATIC_URL']
os.environ['PGOPTIONS'] = '-c statement_timeout=10000'
PGSQL_PARAM = dict(user='ryan',
host='127.0.0.1',
database='qubit',
connect_timeout=3,
port=5432)
MQ_PARAMS = {"host": "127.0.0.1", "port": 5672}
REDIS_PARMAS = {"host": "127.0.0.1", "port": 6379}
REDIS_BACKEND = "redis://%s:%s" % (REDIS_PARMAS['host'], REDIS_PARMAS['port'])
MQ_BROKER = "amqp://%s:%s//" % (MQ_PARAMS['host'], MQ_PARAMS['port'])
STATIC_PATH = 'static/dist'
STATIC_URL = '/static'
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,274 | RyanKung/qubit | refs/heads/master | /qubit/io/postgres/queryset.py | from . import utils
from .postgres import pool, connection
from qubit.utils import timer
__all__ = ['QuerySet', 'LazyQuery']
@timer
def query(sql):
print('sql', sql)
conn = connection()
conn.set_session(autocommit=True)
cur = conn.cursor()
cur.execute(sql)
res = cur.fetchall()
cur.close()
return res
@timer
def update(sql):
print('sql', sql)
conn = connection()
conn.set_session(autocommit=True)
cur = conn.cursor()
cur.execute(sql)
res = cur.fetchall()
if not res:
return False
cur.close()
return res if len(res) > 1 else res[0]
@timer
def insert(sql):
print('sql', sql)
conn = connection()
conn.set_session(autocommit=True)
cur = conn.cursor()
cur.execute(sql)
res = cur.fetchone()
if not res:
return False
cur.close()
return res if len(res) > 1 else res[0]
class LazyQuery():
def __init__(self, sql, fields=None):
self.sql = sql
self.conn = pool.getconn()
self.conn.set_session(autocommit=True)
self.cur = self.conn.cursor()
self.cur.execute(self.sql)
self.fields = fields
def __iter__(self):
return self
def __next__(self):
res = self.cur.fetchone()
if res:
# return (not yield) so this stays an ordinary iterator method, and zip
# the field names against the whole row rather than its first column
return dict(zip(self.fields, res)) if self.fields else res
else:
pool.putconn(self.conn)
raise StopIteration
def read(self, n=0): # file-like shim for pandas
try:
# the original referenced a nonexistent self.g; pull from this iterator
row = next(self)
values = row.values() if isinstance(row, dict) else row
return ','.join(map(str, values)) + '\n'
except StopIteration:
return ''
class QuerySet(object):
_sql = {
'get_list': 'SELECT {fields} from {table} {extra} LIMIT {size} OFFSET {offset}',
'filter': 'SELECT {fields} from {table} WHERE {rule} LIMIT {size} OFFSET {offset}',
'count': 'SELECT COUNT({field}) FROM {table}',
'count_on_rule': 'SELECT COUNT({field}) FROM {table} WHERE {rule}',
'orderby': 'ORDER BY {field}',
'nearby': 'select {fields} difference from {table} where {rule} and {value} > {column} limit 1',
'orderby_decr': 'ORDER BY {field} DESC',  # DESC, not DECR
'filter_with_orderby': "SELECT {fields} from {table} WHERE {rule} ORDER BY {sort_key} LIMIT {size} OFFSET {offset};",
'filter_with_orderby_decr': "SELECT {fields} from {table} WHERE {rule} ORDER BY {sort_key} DESC LIMIT {size} OFFSET {offset};",
'filter_in': "SELECT {fields} FROM {table} WHERE {key} IN ({targets});",
'filter_in_range': "SELECT {fields} FROM {table} WHERE {rule} and {key} <= {end} and {key} >= {start};",
'find_in_range': "SELECT {fields} FROM {table} WHERE {key} <= {end} and {key} >= {start};",
'find_near': "SELECT {fields} FROM {table} WHERE {key} >= {start};",
'insert': 'INSERT INTO {table} ({keys}) VALUES ({values}) RETURNING id;',
'replace': 'REPLACE INTO {table} ({keys}) VALUES ({values})',  # MySQL-only syntax; Postgres needs INSERT ... ON CONFLICT
'delete': "DELETE FROM {table} WHERE {rules} RETURNING id",
'update': "UPDATE {table} SET {key_value_pairs} WHERE {rules} RETURNING id",
'append_array': "UPDATE {table} SET {key} = array_append({key}, {value}) WHERE id='{id}' RETURNING id",
'get_via_id': "SELECT {fields} from {table} WHERE id='{id}'",
'update_via_id': "UPDATE {table} SET {key_value_pairs} WHERE id='{id}' RETURNING id",
'delete_via_id': "DELETE FROM {table} WHERE id='{id}' RETURNING id",
'incr': "UPDATE {table} SET {key}={key}+'{num}' WHERE id='{id}' RETURNING id",
'decr': "UPDATE {table} SET {key}={key}-'{num}' WHERE id='{id}' RETURNING id",
'search': "SELECT {fields} FROM {table} WHERE {extra} {key} LIKE '%{value}%' LIMIT {size} OFFSET {offset}",
'insert_or_update': "INSERT INTO {table} ({keys}) VALUES ({values}) ON DUPLICATE KEY UPDATE {key_value_pairs};"  # MySQL-only; Postgres uses ON CONFLICT ... DO UPDATE
}
def __init__(self, table):
self.table = table
self.fields = table._fields
self.tablename = table.__name__
def format(self, data):
if not isinstance(data, dict):
# str(data.encode('utf8')) produced a "b'...'" literal; escape directly
return utils.escape(str(data))
if not all(f in self.fields for f in data.keys()):
raise Exception("Unknown Fields", set(data.keys()) - set(self.fields))
try:
res = {k: utils.escape(v) for k, v in data.items()}
return res
except Exception:
raise Exception("Series Failed")
def nearby(self, value, column, *args, **kwargs):
data = self.format(kwargs)
res = query(self._sql['nearby'].format(**{
'table': self.tablename,
'fields': utils.concat(map(utils.wrap_key, self.fields)),
'value': utils.escape(value),
'column': utils.escape(column),
'rule': utils.get_and_seg(data)
}))
return res
def get(self, oid):
res = query(self._sql['get_via_id'].format(**{
'table': self.tablename,
'fields': utils.concat(map(utils.wrap_key, self.fields)),
'id': oid
}))
return res and dict(zip(self.fields, res[0])) if res else None
def get_by(self, *args, **kwargs):
data = self.format(kwargs)
res = query(self._sql['filter'].format(**{
'table': self.tablename,
'rule': utils.get_and_seg(data),
'size': '1',
'offset': '0',
'fields': utils.concat(map(utils.wrap_key, self.fields)),
}))
return res and dict(zip(self.fields, res[0]))
def search(self, key, value, start, limit, filters=''):
return query(self._sql['search'].format(**{
'table': self.tablename,
'fields': utils.concat(map(utils.wrap_key, self.fields)),
'key': self.format(key),
'value': self.format(value),
'offset': str(int(start)),
'size': str(int(limit)),
'extra': filters and utils.get_pairs(filters) + 'and' or ''
}))
def get_list(self, size=100, offset=0, sort_key='') -> list:
if isinstance(sort_key, list):
sort_key = utils.concat(map(utils.set_desc, sort_key))
else:
sort_key = sort_key and utils.set_desc(sort_key) or ''
res = query(self._sql['get_list'].format(**{
'table': self.tablename,
'fields': utils.concat(map(utils.wrap_key, self.fields)),
'size': str(int(size)),
'offset': str(int(offset)),
'extra': sort_key and self._sql['orderby'].format(**{
'field': sort_key
}) or ''
}))
return [dict(zip(self.fields, r)) for r in res]
def find_in(self, key, targets, fields=[]) -> dict:
return query(self._sql['filter_in'].format(**{
'table': self.tablename,
'fields': utils.concat(map(utils.wrap_key, fields or self.fields)),
'key': key,
'targets': utils.concat(map(utils.wrap_value, targets))
}))
def find_in_range_lazy(self, key, start, end, fields=[], *args, **kwargs) -> dict:
data = self.format(kwargs)
return LazyQuery(self._sql['filter_in_range'].format(**{
'table': self.tablename,
'fields': utils.concat(map(utils.wrap_key, fields or self.fields)),
'key': key,
'rule': utils.get_and_seg(data),
'start': utils.wrap_value(start),
'end': utils.wrap_value(end)
}), self.fields)
def find_near_lazy(self, key, start, end, fields=[], *args, **kwargs) -> dict:
data = self.format(kwargs)
return LazyQuery(self._sql['find_near'].format(**{
'table': self.tablename,
'fields': utils.concat(map(utils.wrap_key, fields or self.fields)),
'key': key,
'rule': utils.get_and_seg(data),
'start': utils.wrap_value(start),
'end': utils.wrap_value(end)
}), self.fields)
def find_near(self, key, start, end, fields=[], *args, **kwargs) -> dict:
data = self.format(kwargs)
res = query(self._sql['find_near'].format(**{
'table': self.tablename,
'fields': utils.concat(map(utils.wrap_key, fields or self.fields)),
'key': key,
'rule': utils.get_and_seg(data),
'start': utils.wrap_value(start),
'end': utils.wrap_value(end)
}))
return [dict(zip(self.fields, r)) for r in res]
def find_in_range(self, key, start, end, fields=[], *args, **kwargs) -> dict:
data = self.format(kwargs)
res = query(self._sql['filter_in_range'].format(**{
'table': self.tablename,
'fields': utils.concat(map(utils.wrap_key, fields or self.fields)),
'key': key,
'rule': utils.get_and_seg(data),
'start': utils.wrap_value(start),
'end': utils.wrap_value(end)
}))
return [dict(zip(self.fields, r)) for r in res]
def count(self, field):
field = utils.escape(field) or '*'
return query(self._sql['count'].format(**{
'table': self.tablename,
'field': field
}))
def count_on_rule(self, field, rule):
rule = self.format(rule)
field = utils.escape(field)
return query(self._sql['count_on_rule'].format(**{
'table': self.tablename,
'rule': utils.get_and_seg(rule),
'field': field
}))
def filter(self, limit=100, offset=0, sort_key='', *args, **kwargs):
data = self.format(kwargs)
res = query(self._sql['filter'].format(**{
'table': self.tablename,
'rule': utils.get_and_seg(data),
'size': str(int(limit)),
'fields': utils.concat(map(utils.wrap_key, self.fields)),
'offset': str(int(offset))
}))
return [dict(zip(self.fields, r)) for r in res]
def sortby(self, sort_key='id', offset=0, limit=100, extra="", decr=False, *args, **kwargs):
data = self.format(kwargs)
if isinstance(sort_key, list):
sort_key = utils.concat(map(utils.set_desc, sort_key))
else:
sort_key = utils.set_desc(sort_key)
tmpl = decr and 'filter_with_orderby_decr' or 'filter_with_orderby'
return query(self._sql[tmpl].format(**{
'table': self.tablename,
'rule': utils.get_and_seg(data),
'size': str(int(limit)),
'sort_key': sort_key,
'offset': str(int(offset)),
'fields': utils.concat(map(utils.wrap_key, self.fields)),
}))
def insert(self, *args, **kwargs):
data = self.format(kwargs)
return insert(self._sql['insert'].format(**{
'table': self.tablename,
'keys': utils.concat(map(utils.wrap_key, data.keys())),
'values': utils.concat(map(utils.wrap_value, data.values()))
}))
def replace(self, *args, **kwargs):
data = self.format(kwargs)
return insert(self._sql['replace'].format(**{
'table': self.tablename,
'keys': utils.concat(map(utils.wrap_key, data.keys())),
'values': utils.concat(map(utils.wrap_value, data.values()))
}))
def update(self, oid, *args, **kwargs):
data = self.format(kwargs)
pairs = utils.get_pairs(data)
return update(self._sql['update_via_id'].format(**{
'id': oid,
'table': self.tablename,
'key_value_pairs': pairs
}))
def append_array(self, oid, key, value):
return update(self._sql['append_array'].format(**{
'id': oid,
'table': self.tablename,
'key': key,
'value': value
}))
def insert_or_update(self, *args, **kwargs) -> dict:
data = self.format(kwargs)
# _sql is a dict, so index it rather than call it; values also need
# wrap_value, not wrap_key
return insert(self._sql['insert_or_update'].format(**{
'table': self.tablename,
'keys': utils.concat(map(utils.wrap_key, data.keys())),
'values': utils.concat(map(utils.wrap_value, data.values())),
'key_value_pairs': utils.get_pairs(data)
}))
def update_by(self, rules, *args, **kwargs):
data = self.format(kwargs)
rules = self.format(rules)
return update(self._sql['update'].format(**{
'table': self.tablename,
'rules': utils.get_and_seg(rules),
'key_value_pairs': utils.get_pairs(data)
}))
def delete(self, oid):
return update(self._sql['delete_via_id'].format(**{
'table': self.tablename,
'id': oid
}))
def delete_by(self, *args, **kwargs):
data = self.format(kwargs)
return update(self._sql['delete'].format(**{
'table': self.tablename,
'rules': utils.get_and_seg(data)
}))
def incr(self, oid, key, num):
return update(self._sql['incr'].format(**{
'id': oid,
'table': self.tablename,
'key': key,
'num': num
}))
def decr(self, oid, key, num):
return update(self._sql['decr'].format(**{
'id': oid,
'table': self.tablename,
'key': key,
'num': num
}))
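# Usage sketch (assuming a table prototype like qubit.types.states.States.prototype,
# i.e. a namedtuple-style class with _fields and a __name__ matching the table):
#   manager = QuerySet(States.prototype)
#   new_id = manager.insert(qubit=1, datum='{}', tags='demo', ts='2016-01-01')
#   manager.get(new_id)          # -> dict of field -> value, or None
#   manager.filter(qubit=1)      # -> list of matching rows as dicts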
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,275 | RyanKung/qubit | refs/heads/master | /qubit/types/states.py | #! -*- eval: (venv-workon "qubit"); -*-
from itertools import groupby, starmap
import datetime
from dateutil.relativedelta import relativedelta
from qubit.core.utils import tail
from qubit.measure import pandas
from qubit.io.postgres import types
from qubit.io.postgres import QuerySet
from qubit.io.redis import cache
from qubit.types.utils import DateRange
__all__ = ['States']
METRIC = ('years', 'months', 'weeks', 'days', 'hours', 'minutes', 'seconds')
class States(object):
prototype = types.Table('states', [
('qubit', types.integer),
('datum', types.json),
('tags', types.text),
('ts', types.timestamp)
])
manager = QuerySet(prototype)
@classmethod
def create(cls, qubit: str, datum: dict,
ts=None, tags=None):
'''
Create a new state record; `ts` defaults to the time of the call
(a `ts=datetime.datetime.now()` default would be frozen at import,
and a mutable `tags=[]` default would be shared between calls)
'''
if ts is None:
ts = datetime.datetime.now()
return dict(id=cls.manager.insert(
qubit=qubit, datum=datum,
ts=ts, tags=tags or []))
@classmethod
def format(cls, state_data: dict):
'''
map a dict of state data onto self.prototype
'''
return cls.prototype(
qubit=state_data['qubit'],
datum=state_data['datum'],
tags=state_data.get('tags'),
ts=state_data['ts'])
@classmethod
def select(cls, qid, start, end=None, lazy=False):
'''
query states within [start, end]; `end` defaults to the time of
the call (an `end=datetime.datetime.now()` default would be frozen
at import)
'''
if end is None:
end = datetime.datetime.now()
res = cls.manager.find_in_range(
qubit=qid, key='ts', start=start, end=end)
return map(cls.format, res)
@classmethod
def select_lazy(cls, qid, start, end):
'''
query states via [start, end]
'''
return cls.manager.find_in_range_lazy(
qubit=qid, key='ts', start=start, end=end)
@classmethod
def pick(cls, sid, ts):
return cls.manager.nearby(
column='ts', value=ts, qubit=sid)[0]
@classmethod
def get_via_qid(cls, qid):
return cls.manager.get_by(qubit=qid)
@classmethod
def measure(cls, qid: str, sec: str) -> list:
now = datetime.datetime.now()
delta = datetime.timedelta(seconds=float(sec))  # keyword is 'seconds', not 'second'; coerce the string
return cls.manager.find_near_lazy(
qubit=qid, key='ts', start=now - delta)
@staticmethod
def shift(t: datetime.datetime, k: str, v: int):
return t - relativedelta(**{k: v})
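# e.g. shift(datetime.datetime(2016, 3, 1), 'months', 2) gives
# datetime.datetime(2016, 1, 1, 0, 0): the metric name is passed to
# relativedelta as a keyword argument and subtracted from t.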
@classmethod
def get_period(cls, qid: str, period: str,
cycle: int, group_by=None) -> list:
cycle = int(cycle)
if cycle > 12: # refuse large data querying
return []
period_group_method = {
'days': lambda d: d.ts.day,
'weeks': lambda d: d.ts.isocalendar()[1],
'months': lambda d: d.ts.month,
'years': lambda d: d.ts.year,
'seconds': lambda d: d.ts.second,
'minutes': lambda d: d.ts.minute,
'hours': lambda d: d.ts.hour
}[period]
def query(start, end) -> tuple:
grouped = groupby(cls.select(qid, start, end), period_group_method)
def calcu(data: dict) -> dict:
ts = max(data.keys())
df = pandas.DataFrame(data).T.describe()
res = df.to_dict('index')
return (ts, res)
def map2df(g):  # g is a (key, group) pair from itertools.groupby
return calcu({x.ts: x.datum for x in tail(g)})
return tuple(map(map2df, grouped))
if METRIC.index(period) > 3:
end = datetime.datetime.now()
start = cls.shift(end, str(period), int(cycle))
return query(start, end)
else:
dates = list(DateRange(period, cycle))
return tuple(starmap(cache()(query), dates[:-1])) + (query(*(dates[-1])), )
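# Editor's sketch (not part of the original file):
#
#     States.get_period(qid='1', period='days', cycle=7)
#
# summarises the past week of states for qubit 1 grouped by day, with a
# pandas describe() per group; closed periods are cached via `cache()`,
# only the still-open one is re-queried, and sub-day metrics
# (hours/minutes/seconds) take the uncached shift branch instead.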
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,276 | RyanKung/qubit | refs/heads/master | /qubit/io/redis/__init__.py | import redis
import simplejson as json
from functools import wraps
from pulsar.apps.data import create_store
from qubit.config import REDIS_PARMAS, REDIS_BACKEND
__all__ = ['client', 'store', 'pubsub',
'clear']
client = redis.StrictRedis(**REDIS_PARMAS)
store = create_store(REDIS_BACKEND)
pubsub = store.pubsub()
def clear(flag=None):
if not flag:
flag = "*"
else:
flag += ':*'  # wildcard so argument-suffixed keys also match
keys = client.keys('qubit::%s' % flag)
for k in keys:
res = client.delete(k.decode())
print('deleting %s %s' % (k.decode(), bool(res)))
def cache(ttl=100, flag=None):
def wrapper(fn):
@wraps(fn)
def handler(*args, **kwargs):
key = "qubit::{fn_name}:{args}".format(**{
'fn_name': flag or fn.__name__,
'args': str(args)
})
cached_data = client.get(key)
if cached_data:
return json.loads(cached_data.decode())['data']
else:
res = fn(*args, **kwargs)
client.set(key, json.dumps(dict(data=res), namedtuple_as_object=True))
client.expire(key, ttl)
return res
return handler
return wrapper
clear()
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,277 | RyanKung/qubit | refs/heads/master | /tests/types/test_crud.py | from qubit.types import Qubit
def test_qubit():
data = {
'name': 'test_qubit',
'entangle': 'Spout:tester',
'flying': True
}
Qubit.create(**data)
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,278 | RyanKung/qubit | refs/heads/master | /qubit/io/celery/utils.py | __all__ = ['task_method']
class task_method(object):
def __init__(self, task, *args, **kwargs):
self.task = task
def __get__(self, obj, type=None):
if obj is None:
return self.task
task = self.task.__class__()
task.__self__ = obj
return task
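# Editor's sketch (not part of the original file): task_method is a
# descriptor letting a Celery task double as a method; on the class it
# yields the bare task, on an instance a fresh copy bound via __self__:
#
#     class Worker:
#         run = task_method(some_task)  # some_task: a hypothetical task
#
#     Worker.run    # -> some_task itself
#     Worker().run  # -> a new task of the same class with __self__ set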
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,279 | RyanKung/qubit | refs/heads/master | /tests/apis/test_curd.py | import json
import datetime
import time
from tests.apis import request
from tests.apis import get
def create_qubit(entangle, name='a qubit'):
qubit_data = {
'name': name,
'entangle': entangle
}
res = json.loads(request(path='/qubit/', data=json.dumps(qubit_data), method='POST'))
assert res['result'] == 'ok'
qid = res['id']
return qid
def entangle(q1, q2):
res = json.loads(request(path='/qubit/entangle/%s/' % q1, data=json.dumps({
'id': q2
}), method='POST'))
assert res['result'] == 'ok'
return res
def get_hours_data(qid):
time.sleep(2)
end = datetime.datetime.now()
delta = datetime.timedelta(hours=1)
start = end - delta
res = json.loads(get(path='/qubit/%s/from/%s/to/%s/' % (
qid, str(start), str(end))))
return res['data']
def feed_random_data(spout='tester'):
data = {
'datum': {
'a': time.time()
},
'ts': str(datetime.datetime.now())
}
res = json.loads(request(path='/qubit/spout/%s/' % spout,
data=json.dumps(data), method='PUT'))
assert res['result'] == 'ok'
def test_crud():
code = '1'
data = {
'name': 'tester',
'monad': code,
'rate': 1
}
res = json.loads(request(path='/qubit/',
data=json.dumps(data), method='POST'))
assert res['result'] == 'ok'
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,280 | RyanKung/qubit | refs/heads/master | /tests/io/test_db.py | from qubit.io.postgres import connection
def test_db():
assert connection
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,281 | RyanKung/qubit | refs/heads/master | /qubit/core/utils.py | def car(lst: list):
return lst[0]
def cdr(lst: list):
return lst[1:]
def tail(lst: list):
return lst[-1]
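# e.g. car([1, 2, 3]) == 1, cdr([1, 2, 3]) == [2, 3], tail([1, 2, 3]) == 3;
# despite the Lisp-flavoured names, tail returns the last element rather
# than the rest of the list.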
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,282 | RyanKung/qubit | refs/heads/master | /qubit/__init__.py | from .wsgiapp import app, middleware
__author__ = [('Ryan Kung', 'ryankung@ieee.org')]
__version__ = '0.0.1'
__all__ = ['app', 'middleware']
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,283 | RyanKung/qubit | refs/heads/master | /tests/apis/test_cpu.py | import json
import time
import datetime
from tests.apis import request, get
from operator import sub
from functools import partial
def get_hours_data(qid):
time.sleep(2)
end = datetime.datetime.now()
delta = datetime.timedelta(hours=1)
start = end - delta
res = json.loads(get(path='/qubit/%s/from/%s/to/%s/' % (
qid, str(start), str(end))))
return res['data']
def test_cpu_case():
qubit_code = '''
import psutil
from functools import partial
get_rate = partial(psutil.cpu_percent, interval=1)
datum = get_rate()
'''
qubit_data = {
'name': 'cpu_example',
'monad': qubit_code,
'rate': 100,
'is_spout': True,
'is_stem': True,
'flying': True,
'store': False,
'comment': '''A sample qubit for testing
basic usage of qubit chains'''
}
gen_cpu_qubit = partial(request, path='/qubit/', method='POST', data=json.dumps(qubit_data))
q1 = json.loads(gen_cpu_qubit())['id']
another_qubit_data = {
'name': 'another_qubit',
'monad': '''
datum = datum
''',
'entangle': 'Stem:%s' % q1,
'is_spout': False,
'is_stem': False,
'flying': True,
'store': True,
'comment': 'another qubit'
}
gen_another_qubit = partial(request, path='/qubit/', method='POST', data=json.dumps(another_qubit_data))
q2 = json.loads(gen_another_qubit())['id']
assert sub(int(q2), int(q1)) == 1
time.sleep(10)
data1 = get_hours_data(q1)
data2 = get_hours_data(q2)
assert len(data1) == 0
assert len(data2) > 5
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |
67,284 | RyanKung/qubit | refs/heads/master | /tests/__init__.py | from qubit.io.postgres import connection as conn
from schema.utils import execute_file
__all__ = ['create_table', 'drop_table']
def drop_table():
file_path = 'schema/drop.sql'
execute_file(file_path, conn)
def create_table():
drop_table()
file_path = 'schema/schema.sql'
execute_file(file_path, conn)
drop_table()
create_table()
| {"/qubit/__main__.py": ["/qubit/wsgiapp.py"], "/qubit/types/__init__.py": ["/qubit/types/qubit.py", "/qubit/types/states.py"], "/qubit/apis/states.py": ["/qubit/types/__init__.py", "/qubit/apis/utils.py"], "/qubit/io/celery/config.py": ["/qubit/config.py"], "/tests/apis/__init__.py": ["/qubit/wsgiapp.py"], "/qubit/io/celery/__init__.py": ["/qubit/io/celery/utils.py", "/qubit/io/celery/types.py"], "/qubit/io/postgres/postgres.py": ["/qubit/config.py"], "/qubit/measure/__init__.py": ["/qubit/measure/pandas.py"], "/qubit/io/postgres/__init__.py": ["/qubit/io/postgres/postgres.py", "/qubit/io/postgres/queryset.py"], "/qubit/views/admin.py": ["/qubit/core/app.py"], "/qubit/io/postgres/queryset.py": ["/qubit/io/postgres/__init__.py", "/qubit/io/postgres/postgres.py", "/qubit/utils.py"], "/qubit/types/states.py": ["/qubit/core/utils.py", "/qubit/measure/__init__.py", "/qubit/io/postgres/__init__.py", "/qubit/io/redis/__init__.py", "/qubit/types/utils.py"], "/qubit/io/redis/__init__.py": ["/qubit/config.py"], "/tests/types/test_crud.py": ["/qubit/types/__init__.py"], "/tests/apis/test_curd.py": ["/tests/apis/__init__.py"], "/tests/io/test_db.py": ["/qubit/io/postgres/__init__.py"], "/qubit/__init__.py": ["/qubit/wsgiapp.py"], "/tests/apis/test_cpu.py": ["/tests/apis/__init__.py"], "/tests/__init__.py": ["/qubit/io/postgres/__init__.py", "/schema/utils.py"], "/schema/utils.py": ["/qubit/io/postgres/__init__.py"], "/qubit/apis/qubit.py": ["/qubit/types/__init__.py", "/qubit/types/utils.py", "/qubit/apis/utils.py"], "/qubit/wsgiapp.py": ["/qubit/io/celery/__init__.py", "/qubit/middleware/__init__.py", "/qubit/types/__init__.py", "/qubit/apis/__init__.py", "/qubit/views/__init__.py"], "/qubit/core/app.py": ["/qubit/config.py"]} |