index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
68,872 | arthurdehgan/sleep | refs/heads/master | /utils.py | """Functions used to compute and analyse EEG/MEG data with pyriemann."""
print(
"Warning, this package is no longer updated, there might be bugs here and there even if they were not reported to me. Please use the new version at https://github.com/arthurdehgan/NeuroPy-MLToolbox."
)
import time
import functools
from itertools import permutations, product
from sklearn.base import clone, BaseEstimator
from sklearn.model_selection import LeavePGroupsOut
from sklearn.metrics import accuracy_score, roc_auc_score
from scipy.io import loadmat, savemat
from scipy.signal import welch
from scipy.stats import zscore
import numpy as np
from numpy.random import permutation
from path import Path as path
from joblib import Parallel, delayed
def proper_loadmat(file_path):
    """Load a .mat file and strip scipy's metadata entries.

    Returns a dict mapping each MATLAB variable name to its value,
    squeezed and converted to plain Python lists/scalars.
    """
    raw = loadmat(file_path)
    return {
        key: value.squeeze().tolist()
        for key, value in raw.items()
        if not key.startswith("__")
    }
def super_count(liste):
    """Count occurrences of each item in *liste*.

    Idiom improvement: uses collections.Counter instead of a manual
    counting loop; returns a plain dict so existing callers relying on
    dict semantics (missing keys raise KeyError) are unaffected.
    """
    from collections import Counter

    return dict(Counter(liste))
def timer(func):
    """Decorator that reports how long the wrapped function took to run."""

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        t0 = time.perf_counter()
        result = func(*args, **kwargs)
        duration = elapsed_time(t0, time.perf_counter())
        print('"{}" executed in {}'.format(func.__name__, duration))
        return result

    return wrapper
def _cross_val(train_index, test_index, estimator, X, y):
    """Fit a clone of *estimator* on one train split and predict its test split.

    Returns (predictions, ground_truth) for the test indices.
    """
    model = clone(estimator)
    model.fit(X[train_index], y[train_index])
    return model.predict(X[test_index]), y[test_index]
def cross_val_scores(estimator, cv, X, y, groups=None, n_jobs=1):
    """Computes all crossval on the chosen estimator, cross-val and dataset.
    To use instead of sklearn cross_val_score if you want both roc_auc and
    acc in one go."""
    model = clone(estimator)
    splitter = clone(cv, safe=False)
    folds = Parallel(n_jobs=n_jobs)(
        delayed(_cross_val)(tr_idx, te_idx, model, X, y)
        for tr_idx, te_idx in splitter.split(X=X, y=y, groups=groups)
    )
    # AUC is only computed for single-feature data when more than one group
    # is left out (same condition as the original implementation).
    compute_auc = not X.shape[1] > 1 and cv.n_groups > 1
    accuracy, auc_list = [], []
    for y_pred, y_test in folds:
        accuracy.append(accuracy_score(y_test, y_pred))
        auc_list.append(roc_auc_score(y_test, y_pred) if compute_auc else 0)
    return accuracy, auc_list
def _permutations(iterable, size, limit=None):
"""Combinations Generator"""
i = 0
for elem in permutations(iterable, size):
yield elem
i += 1
if limit is not None and i == limit:
break
def permutation_test(estimator, cv, X, y, groups=None, n_perm=0, n_jobs=1):
    """Compute permutation accuracy and ROC-AUC scores.

    Shuffles labels (and groups, consistently) *n_perm* times and runs the
    full cross-validation each time.

    Returns:
        (acc_pscores, auc_pscores): per-permutation mean accuracy and AUC.
    """
    acc_pscores, auc_pscores = [], []
    for _ in range(n_perm):
        perm_index = permutation(len(y))
        clf = clone(estimator)
        y_perm = y[perm_index]
        # Bug fix: the original indexed `groups` unconditionally, raising
        # TypeError when groups is None (a value classification() allows).
        # Also coerce to ndarray so plain-list groups can be fancy-indexed.
        groups_perm = (
            np.asarray(groups)[perm_index] if groups is not None else None
        )
        perm_acc, perm_auc = cross_val_scores(clf, cv, X, y_perm, groups_perm, n_jobs)
        acc_pscores.append(np.mean(perm_acc))
        auc_pscores.append(np.mean(perm_auc))
    return acc_pscores, auc_pscores
def classification(estimator, cv, X, y, groups=None, perm=None, n_jobs=1):
    """Do a classification.
    Parameters:
        estimator: a classifier object from sklearn
        cv: a cross-validation object from sklearn
        X: The Data, array of size n_samples x n_features
        y: the labels, array of size n_samples
        groups: optional, groups for groups based cross-validations
        perm: optional, None means no permutations will be computed
            otherwise set her the number of permutations
        n_jobs: optional, default: 1, number of threads to use during
            for the cross-validations. higher means faster. setting to -1 will use
            all available threads - Warning: may sow down computer.
    Returns:
        save: a dictionnary countaining:
            acc_score: the mean score across all cross-validations using the
                accuracy scoring method
            auc_score: the mean score across all cross-validations using the
                roc_auc scoring method
            acc: the list of all cross-validations accuracy scores
            auc: the list of all cross-validations roc_auc scores
        if permutation is not None it also countains:
            auc_pvalue: the pvalue using roc_auc as a scoring method
            acc_pvalue: the pvalue using accuracy as a scoring method
            auc_pscores: a list of all permutation auc scores
            acc_pscores: a list of all permutation accuracy scores
    """
    y = np.asarray(y)
    X = np.asarray(X)
    if len(X) != len(y):
        raise ValueError(
            "Dimension mismatch for X and y : {}, {}".format(len(X), len(y))
        )
    if groups is not None:
        try:
            if len(y) != len(groups):
                raise ValueError("dimension mismatch for groups and y")
        except TypeError:
            # Bug fix: the original printed a message and called exit(),
            # killing the interpreter from library code; raise instead so
            # callers can catch and handle the error.
            raise TypeError(
                "Error in classification: y or groups is not a list or similar structure"
            )
    clf = clone(estimator)
    accuracies, aucs = cross_val_scores(clf, cv, X, y, groups, n_jobs)
    acc_score = np.mean(accuracies)
    auc_score = np.mean(aucs)
    save = {
        "acc_score": [acc_score],
        "auc_score": [auc_score],
        "acc": accuracies,
        "auc": aucs,
        "n_splits": cv.get_n_splits(X, y, groups),
    }
    if perm is not None:
        acc_pscores, auc_pscores = permutation_test(clf, cv, X, y, groups, perm, n_jobs)
        acc_pvalue = compute_pval(acc_score, acc_pscores)
        auc_pvalue = compute_pval(auc_score, auc_pscores)
        save.update(
            {
                "auc_pvalue": auc_pvalue,
                "acc_pvalue": acc_pvalue,
                "auc_pscores": auc_pscores,
                "acc_pscores": acc_pscores,
            }
        )
    return save
def compute_pval(score, perm_scores):
    """Compute the p-value of *score* within a permutation distribution.

    Uses the standard permutation-test estimator:
    (count(perm >= score) + 1) / (n_perm + 1).
    """
    # Bug fix: perm_scores arrives as a plain Python list (see
    # permutation_test), and `list >= float` raises TypeError; coerce to
    # an ndarray so the element-wise comparison works.
    perm_scores = np.asarray(perm_scores)
    n_perm = len(perm_scores)
    pvalue = (np.sum(perm_scores >= score) + 1.0) / (n_perm + 1)
    return pvalue
def computePSD(signal, window, overlap, fmin, fmax, fs):
"""Compute PSD."""
f, psd = welch(
signal, fs=fs, window="hamming", nperseg=window, noverlap=overlap, nfft=None
)
psd = np.mean(psd[(f >= fmin) * (f <= fmax)])
return psd
def create_groups(y):
    """Generate groups from labels of shape (subject x labels).

    Flattens the per-subject label arrays into one 1-D label vector and
    builds a matching group vector holding the subject index of each label.
    """
    subs = np.asarray([np.asarray(np.ravel(sub)) for sub in y])
    groups = np.concatenate(
        [np.full(len(sub.ravel()), idx, dtype=int) for idx, sub in enumerate(subs)]
    ).ravel()
    labels = np.concatenate([sub.ravel() for sub in subs], axis=0).ravel()
    return labels, groups
def elapsed_time(t0, t1, formating=True):
    """Time lapsed between t0 and t1.

    Returns the absolute difference between the two time stamps, either as
    a human-readable string (default) or as the raw value when
    formating=False.

    Parameters
    ----------
    t0: float, initial measure of time (eg. at the begining of the script)
    t1: float, time at the end of the script or function execution
    formating: bool, disable string formatting
    """
    delta = abs(t1 - t0)
    if not formating:
        return delta
    minute, hour, day = 60, 3600, 24 * 3600
    n_days = delta // day
    n_hours = (delta - day * n_days) // hour
    n_minutes = (delta - day * n_days - hour * n_hours) // minute
    n_seconds = delta - day * n_days - hour * n_hours - minute * n_minutes
    if delta > day:
        return "{:.0f}j, {:.0f}h:{:.0f}m:{:.0f}s".format(
            n_days, n_hours, n_minutes, n_seconds
        )
    if delta > hour:
        return "{:.0f}h:{:.0f}m:{:.0f}s".format(n_hours, n_minutes, n_seconds)
    if delta > minute:
        return "{:.0f}m:{:.0f}s".format(n_minutes, n_seconds)
    return "{:.4f}s".format(n_seconds)
def rm_outliers(data, rm_outl=2):
    """Drop samples whose z-score magnitude exceeds *rm_outl* (default 2)."""
    keep_mask = np.abs(zscore(data)) <= rm_outl
    return data[np.where(keep_mask)[0]]
def prepare_data(
    data,
    labels=None,
    rm_outl=None,
    key="data",
    n_trials=None,
    random_state=0,
    zscore=False,
):
    """Prepare per-subject data for classification.

    Parameters:
        data: iterable of per-subject trial matrices
        labels: one label per subject, or None when n_trials is given
        rm_outl: optional z-score threshold used to remove outlier trials
        key: unused here; kept for backward compatibility with callers
        n_trials: optional number of trials to subsample per subject
        random_state: seed for the subsampling RNG
        zscore: if True, z-score each subject's prepared data

    Returns:
        (data, labels, groups) ready for the classification helpers.
    """
    # Bug fix: the boolean `zscore` parameter shadows scipy.stats.zscore at
    # function scope, so the original `zscore(prep_submat)` call invoked a
    # bool and raised TypeError. Re-import under a private name.
    from scipy.stats import zscore as _zscore

    final_data = None
    if rm_outl is not None:
        data = np.asarray([rm_outliers(sub, rm_outl) for sub in data])
    sizes = [len(sub) for sub in data]
    if n_trials is not None:
        n_sub_min = min(sizes)
        if n_trials > n_sub_min:
            print(
                "can't take {} trials, will take the minimum amout {} instead".format(
                    n_trials, n_sub_min
                )
            )
            n_trials = n_sub_min
        labels = np.asarray([[lab] * n_trials for lab in labels])
    elif labels is not None:
        labels = np.asarray([[labels[i]] * size for i, size in enumerate(sizes)])
    else:
        raise Exception(
            "Error: either specify a number of trials and the "
            + "labels will be generated or give the original labels"
        )
    labels, groups = create_groups(labels)
    for submat in data:
        if submat.shape[0] == 1:
            submat = submat.ravel()
        if n_trials is not None:
            # Deterministic subsampling: same seed -> same trial selection.
            index = np.random.RandomState(random_state).choice(
                range(len(submat)), n_trials, replace=False
            )
            prep_submat = submat[index]
        else:
            prep_submat = submat
        if zscore:
            prep_submat = _zscore(prep_submat)
        final_data = (
            prep_submat
            if final_data is None
            else np.concatenate((prep_submat, final_data))
        )
    return np.asarray(final_data), labels, groups
# def prepare_data(
# dico, labels=None, rm_outl=None, key="data", n_trials=None, random_state=0
# ):
# data = dico[key].ravel()
# data = np.asarray([sub.squeeze() for sub in data])
# final_data = None
# if rm_outl is not None:
# data = np.asarray([rm_outliers(sub, rm_outl) for sub in data])
#
# sizes = [len(sub) for sub in data]
# if n_trials is not None:
# n_sub_min = min(sizes)
# if n_trials > n_sub_min:
# print(
# "can't take {} trials, will take the minimum amout {} instead".format(
# n_trials, n_sub_min
# )
# )
# n_trials = n_sub_min
#
# labels = np.asarray([[lab] * n_trials for lab in labels])
# elif labels is not None:
# labels = np.asarray([labels[i] * size for i, size in enumerate(sizes)])
# else:
# raise Exception(
# "Error: either specify a number of trials and the "
# + "labels will be generated or give the original labels"
# )
# labels, groups = create_groups(labels)
#
# for submat in data:
# if submat.shape[0] == 1:
# submat = submat.ravel()
# if n_trials is not None:
# index = np.random.RandomState(random_state).choice(
# range(len(submat)), n_trials, replace=False
# )
# prep_submat = submat[index]
# else:
# prep_submat = submat
#
# final_data = (
# prep_submat
# if final_data is None
# else np.concatenate((prep_submat, final_data))
# )
#
# return np.asarray(final_data), labels, groups
def load_hypno(sub):
    """Read subject *sub*'s hypnogram file and return its stage codes.

    Keeps the first character of each line, skipping separator ('-') and
    blank lines.
    """
    hypno_dir = path(
        "/home/arthur/Documents/data/sleep_data/sleep_raw_data/hypnograms"
    )
    stages = []
    with open(hypno_dir / "hyp_per_s{}.txt".format(sub)) as hyp_file:
        for line in hyp_file:
            if line[0] not in ["-", "\n"]:
                stages.append(line[0])
        return stages
# def visu_hypno(sub):
# hypno = list(map(int, load_hypno(sub)))
# plt.plot(hypno)
# plt.show()
def empty_stage_dict():
    """Return a fresh mapping from each sleep stage name to an empty list.

    Each value is a distinct list so stages can be filled independently.
    """
    return {stage: [] for stage in ("S1", "S2", "S3", "S4", "REM")}
def split_cycles(data, sub, duree=1200):
    """Split *data* into sleep cycles using subject *sub*'s hypnogram.

    Each hypnogram entry maps to 1000 consecutive samples of *data*, which
    are appended to the current cycle's stage list. A new cycle starts when
    a REM period ends (no stage-5 code within the next *duree* entries) and
    at least 300 REM observations were collected in the current cycle.
    """
    stage_names = ["S1", "S2", "S3", "S4", "REM"]
    stage_codes = "12345"
    cycles = [empty_stage_dict()]
    hypnogram = load_hypno(sub)
    for sec, code in enumerate(hypnogram):
        upcoming = hypnogram[sec + 1 : sec + 1 + duree]
        sample = data[sec * 1000 : (sec + 1) * 1000]
        if code in stage_codes:
            cycles[-1][stage_names[stage_codes.index(code)]].append(sample)
        rem_over = code == "5" and "5" not in upcoming
        if rem_over and len(cycles[-1]["REM"]) >= 300:
            cycles.append(dict(empty_stage_dict()))
    return cycles
# def convert_sleep_data(data_path, sub_i, elec=None):
# """Load the samples of a subject for a sleepstate."""
# tempFileName = data_path / "s%i_sleep.mat" % (sub_i)
# try:
# if elec is None:
# dataset = np.asarray(h5py.File(tempFileName, "r")["m_data"])[:, :19]
# else:
# dataset = np.asarray(h5py.File(tempFileName, "r")["m_data"])[:, elec]
# except IOError:
# print(tempFileName, "not found")
# cycles = split_cycles(dataset, sub_i)
# dataset = []
# for i, cycle in enumerate(cycles):
# for stage, secs in cycle.items():
# if len(secs) != 0:
# secs = np.array(secs)
# save = np.concatenate(
# [secs[i : i + 30] for i in range(0, len(secs), 30)]
# )
# savemat(
# data_path / "{}_s{}_cycle{}".format(stage, sub_i, i + 1),
# {stage: save},
# )
def merge_S3_S4(data_path, sub_i, cycle):
    """Merge a subject's S3 and S4 files for one cycle into a single SWS file.

    The original S3/S4 files are removed once the merged file is written;
    missing files are reported and skipped.
    """
    try:
        s3_path = data_path / "S3_s{}_cycle{}.mat".format(sub_i, cycle)
        s3 = loadmat(s3_path)["S3"]
        s4_path = data_path / "S4_s{}_cycle{}.mat".format(sub_i, cycle)
        s4 = loadmat(s4_path)["S4"]
        merged = {"SWS": np.concatenate((s3, s4), axis=0)}
        savemat(data_path / "SWS_s{}_cycle{}.mat".format(sub_i, cycle), merged)
        s3_path.remove()
        s4_path.remove()
    except IOError:
        print("file not found for cycle", cycle)
def merge_SWS(data_path, sub_i, cycle=None):
    """Merge S3/S4 into SWS for one cycle, or for cycles 1-3 when cycle is None."""
    cycle_list = range(1, 4) if cycle is None else [cycle]
    for current in cycle_list:
        merge_S3_S4(data_path, sub_i, current)
def load_full_sleep(data_path, sub_i, state, cycle=None):
    """Load the samples of a subject for a sleepstate.

    Looks for '{state}_s{sub}.mat' (or the cycle-specific variant) in
    *data_path* and returns the matrix stored under the *state* key, or
    None when the file cannot be read.
    """
    if cycle is None:
        file_name = data_path / "{}_s{}.mat".format(state, sub_i)
    else:
        file_name = data_path / "{}_s{}_cycle{}.mat".format(state, sub_i, cycle)
    try:
        return loadmat(file_name)[state]
    except (IOError, TypeError):
        print(file_name, "not found")
        return None
def load_samples(data_path, sub_i, state, cycle=None, elec=None):
    """Load the samples of a subject for a sleepstate.

    Takes the first 19 electrodes (or the given *elec* selection) and
    reorders the axes; downstream code expects (trials, electrodes,
    samples) — presumably the raw layout is (elec, samples, trials),
    TODO confirm against the stored files.
    """
    selection = slice(19) if elec is None else elec
    dataset = load_full_sleep(data_path, sub_i, state, cycle)[selection]
    dataset = dataset.swapaxes(0, 2).swapaxes(1, 2)
    return dataset
def import_data(data_path, state, subject_list, label_path=None, full_trial=False):
    """Transform the data and generate labels.

    Takes the original files and puts them in a matrix of
    shape (Trials x 19 x 30000).

    Returns (X, y) when label_path is given; otherwise (X, None).
    """
    X = []
    print("Loading data...")
    for sub in subject_list:
        # Loading of the trials of the selected sleepstate
        dataset = load_samples(data_path, sub, state)
        if full_trial:
            # use if you want full trial
            dataset = np.concatenate((dataset[range(len(dataset))]), axis=1)
            dataset = dataset.reshape(1, dataset.shape[0], dataset.shape[1])
        X.append(dataset)
        del dataset
    # Bug fix: the original referenced `y` at the final return even when
    # label_path was None, raising NameError; return None labels instead.
    if label_path is not None:
        y = loadmat(label_path / state + "_labels.mat")["y"].ravel()
        return X, np.asarray(y)
    return X, None
def is_signif(pvalue, p=0.05):
    """Tell if condition with classifier is significative.

    Returns True when *pvalue* is at or below the significance level *p*.
    Idiom improvement: return the comparison directly instead of mutating
    a flag variable.
    """
    return pvalue <= p
class StratifiedShuffleGroupSplit(BaseEstimator):
    """Cross-validator leaving out `n_groups` groups, stratified by class.

    For each class label, a LeavePGroupsOut over that class's samples yields
    candidate splits; the cartesian product across classes produces splits
    that leave out an equal number of groups per class.
    NOTE(review): `counter` persists across calls to split(), so with a
    finite n_iter a second split() on the same instance yields nothing —
    instances appear to be single-use; confirm with callers.
    """

    def __init__(self, n_groups, n_iter=None):
        # n_groups: total groups to leave out (must be a multiple of the
        # number of classes); n_iter: optional cap on the number of splits.
        self.n_groups = n_groups
        self.n_iter = n_iter
        self.counter = 0
        self.labels_list = []
        self.n_each = None
        self.n_labs = None
        # Re-assigned to None, overriding the [] two lines above; the None
        # sentinel is what _init_atributes tests for lazy initialization.
        self.labels_list = None
        self.lpgos = None
        self.indexes = None

    def _init_atributes(self, y, groups):
        # Lazily build the per-class sample indexes and LeavePGroupsOut
        # splitters the first time y/groups are seen.
        # NOTE(review): len() is evaluated before the None checks below, so
        # a None y or groups raises TypeError before those branches run.
        if len(y) != len(groups):
            raise Exception("Error: y and groups need to have the same length")
        if y is None:
            raise Exception("Error: y cannot be None")
        if groups is None:
            raise Exception("Error: this function requires a groups parameter")
        if self.labels_list is None:
            self.labels_list = list(set(y))
        if self.n_labs is None:
            self.n_labs = len(self.labels_list)
        assert (
            self.n_groups % self.n_labs == 0
        ), "Error: The number of groups to leave out must be a multiple of the number of classes"
        if self.n_each is None:
            # Number of groups to leave out per class.
            self.n_each = int(self.n_groups / self.n_labs)
        if self.lpgos is None:
            lpgos, indexes = [], []
            for label in self.labels_list:
                # NOTE(review): assumes y supports element-wise comparison
                # (an ndarray); a plain list here yields an empty index.
                index = np.where(y == label)[0]
                indexes.append(index)
                lpgos.append(LeavePGroupsOut(self.n_each))
            self.lpgos = lpgos
            self.indexes = np.array(indexes)

    def split(self, X, y, groups):
        # Yield (train_idx, test_idx) pairs, one per combination of the
        # per-class LeavePGroupsOut splits, up to n_iter combinations.
        self._init_atributes(y, groups)
        y = np.asarray(y)
        groups = np.asarray(groups)
        iterators = []
        for lpgo, index in zip(self.lpgos, self.indexes):
            iterators.append(lpgo.split(index, y[index], groups[index]))
        for ite in product(*iterators):
            if self.counter == self.n_iter:
                break
            self.counter += 1
            # Map each class's relative split indices back to absolute
            # positions in the full dataset before concatenating.
            train_idx = np.concatenate(
                [index[it[0]] for it, index in zip(ite, self.indexes)]
            )
            test_idx = np.concatenate(
                [index[it[1]] for it, index in zip(ite, self.indexes)]
            )
            yield train_idx, test_idx

    def get_n_splits(self, X, y, groups):
        # Total split count: n_iter when capped, else the product of each
        # per-class LeavePGroupsOut's split count.
        self._init_atributes(y, groups)
        if self.n_iter is not None:
            return self.n_iter
        groups = np.asarray(groups)
        n = 1
        for index, lpgo in zip(self.indexes, self.lpgos):
            n *= lpgo.get_n_splits(None, None, groups[index])
        return n
| {"/classif_cosp_backward.py": ["/utils.py"], "/visu_piecharts_fselect.py": ["/utils.py"], "/classif_subcosp.py": ["/utils.py"], "/classif_all_bin_combinations.py": ["/utils.py"], "/visu_data_boxplot.py": ["/utils.py"], "/classif_cosp_multif.py": ["/utils.py"], "/EFS_fixed_elec.py": ["/utils.py"], "/visu_topomap.py": ["/utils.py"], "/ttest.py": ["/ttest_perm_indep.py"], "/classif_psd_bins.py": ["/utils.py"], "/permutations_EFS_fixed_elec.py": ["/utils.py"], "/classif_cov_testn153SWS.py": ["/utils.py"], "/visu_barplot_multifeature.py": ["/utils.py"], "/compute_psd.py": ["/utils.py"], "/compute_cosp.py": ["/utils.py"], "/classif_psd_multi_fixed_elec.py": ["/utils.py"], "/classif_cov_test_simplified.py": ["/utils.py"], "/compute_cov.py": ["/utils.py"], "/classif_cov.py": ["/utils.py"], "/classif_psd.py": ["/utils.py"], "/classif_psd_nremvsrem.py": ["/utils.py"], "/classif_SVM_STATE_ELEC.py": ["/utils.py"], "/classif_perm_subsamp.py": ["/utils.py"]} |
68,873 | arthurdehgan/sleep | refs/heads/master | /classif_cov_test_simplified.py | """Load covariance matrix, perform classif, perm test, saves results.
Outputs one file per freq x state
Author: Arthur Dehgan"""
from time import time
from scipy.io import savemat, loadmat
import pandas as pd
import numpy as np
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
from sklearn.model_selection import cross_val_score
from pyriemann.classification import TSclassifier
from utils import StratifiedLeave2GroupsOut, prepare_data, classification
from params import SAVE_PATH, STATE_LIST
# --- Experiment configuration ---
prefix = "classif_subsamp_"
name = "cov"
state = "SWS"
SAVE_PATH = SAVE_PATH / name
# Subsample every subject to the smallest trial count found in info_data.
info_data = pd.read_csv(SAVE_PATH.parent / "info_data.csv")[STATE_LIST]
info_data = info_data[state]
N_TRIALS = info_data.min().min()
N_SUBS = len(info_data) - 1
# One group id per subject, repeated once per trial.
groups = [i for i in range(N_SUBS) for _ in range(N_TRIALS)]
N_TOTAL = N_TRIALS * N_SUBS
# First half of the samples labelled 0, second half 1.
labels = [0 if i < N_TOTAL / 2 else 1 for i in range(N_TOTAL)]
file_name = prefix + name + "n153_{}.mat".format(state)
save_file_path = SAVE_PATH / "results" / file_name
# NOTE(review): `SAVE_PATH / name + "..."` relies on path.Path supporting
# str concatenation after `/` — confirm it builds the intended file name.
data_file_path = SAVE_PATH / name + "_{}.mat".format(state)
final_save = None
data = loadmat(data_file_path)
# NOTE(review): utils.prepare_data returns a (data, labels, groups) tuple;
# passing that tuple straight to cross_val_score below looks suspect.
data = prepare_data(data, n_trials=N_TRIALS, random_state=0)
sl2go = StratifiedLeave2GroupsOut()
lda = LDA()
clf = TSclassifier(clf=lda)
score = cross_val_score(clf, data, labels, groups, cv=sl2go, n_jobs=-1)
print(score)
# save['acc_bootstrap'] = [save['acc_score']]
# save['auc_bootstrap'] = [save['auc_score']]
# if final_save is None:
# final_save = save
# else:
# for key, value in final_save.items():
# final_save[key] = final_save[key] + save[key]
# savemat(save_file_path, final_save)
| {"/classif_cosp_backward.py": ["/utils.py"], "/visu_piecharts_fselect.py": ["/utils.py"], "/classif_subcosp.py": ["/utils.py"], "/classif_all_bin_combinations.py": ["/utils.py"], "/visu_data_boxplot.py": ["/utils.py"], "/classif_cosp_multif.py": ["/utils.py"], "/EFS_fixed_elec.py": ["/utils.py"], "/visu_topomap.py": ["/utils.py"], "/ttest.py": ["/ttest_perm_indep.py"], "/classif_psd_bins.py": ["/utils.py"], "/permutations_EFS_fixed_elec.py": ["/utils.py"], "/classif_cov_testn153SWS.py": ["/utils.py"], "/visu_barplot_multifeature.py": ["/utils.py"], "/compute_psd.py": ["/utils.py"], "/compute_cosp.py": ["/utils.py"], "/classif_psd_multi_fixed_elec.py": ["/utils.py"], "/classif_cov_test_simplified.py": ["/utils.py"], "/compute_cov.py": ["/utils.py"], "/classif_cov.py": ["/utils.py"], "/classif_psd.py": ["/utils.py"], "/classif_psd_nremvsrem.py": ["/utils.py"], "/classif_SVM_STATE_ELEC.py": ["/utils.py"], "/classif_perm_subsamp.py": ["/utils.py"]} |
68,874 | arthurdehgan/sleep | refs/heads/master | /compute_cov.py | """Computes Crosspectrum matrices and save them.
Author: Arthur Dehgan"""
import os
from time import time
from path import Path as path
from joblib import Parallel, delayed
import numpy as np
# from pyriemann.estimationmod import CospCovariances
from pyriemann.estimation import Covariances
from scipy.io import savemat, loadmat
from utils import elapsed_time, load_samples
from params import DATA_PATH, SAVE_PATH, SUBJECT_LIST, STATE_LIST
SAVE_PATH = SAVE_PATH / "covariance/"
# FULL_TRIAL: concatenate all trials into one long trial before computing
# the covariance (one matrix per subject instead of one per trial).
FULL_TRIAL = True
if FULL_TRIAL:
    prefix = "ft_cov"
else:
    prefix = "cov"
def combine_subjects(state):
    """Combines crosspectrum matrices from subjects into one.

    Loads every per-subject file for *state*, stacks the matrices into a
    single array saved as '<prefix>_<state>.mat', then removes the
    per-subject files that were successfully loaded.
    """
    dat, load_list = [], []
    print(state)
    # Bug fix: the original overwrote the per-subject pattern with the
    # combined-file pattern (dropping the subject id from every loaded
    # file name) and referenced an undefined `save_pattern` when saving.
    sub_pattern = prefix + "_s{}_{}.mat"
    save_pattern = prefix + "_{}.mat"
    for sub in SUBJECT_LIST:
        file_path = path(SAVE_PATH / sub_pattern.format(sub, state))
        try:
            data = loadmat(file_path)["data"]
            dat.append(data)
            load_list.append(str(file_path))
        except IOError:
            print(file_path, "not found")
    savemat(SAVE_PATH / save_pattern.format(state), {"data": np.asarray(dat)})
    for file in load_list:
        os.remove(file)
def compute_cov(state):
    """Computes the crosspectrum matrices per subjects."""
    pattern = prefix + "_s{}_{}.mat"
    for sub in SUBJECT_LIST:
        out_path = path(SAVE_PATH / pattern.format(sub, state))
        if out_path.isfile():
            continue
        # data must be of shape n_trials x n_elec x n_samples
        data = load_samples(DATA_PATH, sub, state)
        if FULL_TRIAL:
            data = np.concatenate(data, axis=1)
            data = data.reshape(1, data.shape[0], data.shape[1])
        mat = Covariances().fit_transform(data)
        savemat(out_path, {"data": mat})
if __name__ == "__main__":
    T_START = time()
    # One parallel job per sleep state: first compute the per-subject
    # covariance files, then merge them into one file per state.
    Parallel(n_jobs=-1)(delayed(compute_cov)(state) for state in STATE_LIST)
    print("combining subjects data")
    Parallel(n_jobs=-1)(delayed(combine_subjects)(state) for state in STATE_LIST)
    print("total time lapsed : %s" % elapsed_time(T_START, time()))
| {"/classif_cosp_backward.py": ["/utils.py"], "/visu_piecharts_fselect.py": ["/utils.py"], "/classif_subcosp.py": ["/utils.py"], "/classif_all_bin_combinations.py": ["/utils.py"], "/visu_data_boxplot.py": ["/utils.py"], "/classif_cosp_multif.py": ["/utils.py"], "/EFS_fixed_elec.py": ["/utils.py"], "/visu_topomap.py": ["/utils.py"], "/ttest.py": ["/ttest_perm_indep.py"], "/classif_psd_bins.py": ["/utils.py"], "/permutations_EFS_fixed_elec.py": ["/utils.py"], "/classif_cov_testn153SWS.py": ["/utils.py"], "/visu_barplot_multifeature.py": ["/utils.py"], "/compute_psd.py": ["/utils.py"], "/compute_cosp.py": ["/utils.py"], "/classif_psd_multi_fixed_elec.py": ["/utils.py"], "/classif_cov_test_simplified.py": ["/utils.py"], "/compute_cov.py": ["/utils.py"], "/classif_cov.py": ["/utils.py"], "/classif_psd.py": ["/utils.py"], "/classif_psd_nremvsrem.py": ["/utils.py"], "/classif_SVM_STATE_ELEC.py": ["/utils.py"], "/classif_perm_subsamp.py": ["/utils.py"]} |
68,875 | arthurdehgan/sleep | refs/heads/master | /visu_fbands_metric.py | """Generates the visu for a da matrix
Author: Arthur Dehgan"""
import matplotlib.pyplot as plt
from matplotlib import ticker
from scipy.io import loadmat
from params import WINDOW, OVERLAP, FBIN_LIST, CHANNEL_NAMES, SAVE_PATH, STATE_LIST
DATA_PATH = SAVE_PATH / "psd/results"
# One decoding-accuracy heatmap per (state, electrode) pair.
for state in STATE_LIST:
    for elec in CHANNEL_NAMES:
        file_name = "da_bin_{}_{}_{}_{:.2f}.mat".format(state, elec, WINDOW, OVERLAP)
        data = loadmat(DATA_PATH / file_name)["score"]
        n, m = data.shape
        # Mirror the upper triangle into the lower one so the matrix plot
        # shows a full symmetric decoding-accuracy matrix.
        for i in range(n):
            for j in range(i + 1, m):
                data[j, i] = data[i, j]
        fig, ax = plt.subplots(figsize=(15, 15))
        fig.suptitle(elec, fontsize=20)
        # mat = ax.matshow(data, vmin=.5, vmax=.65, interpolation=None)
        # Only the first 50x50 frequency bins are displayed.
        mat = ax.matshow(data[:50, :50], vmin=.5, vmax=.65, interpolation=None)
        ax.set_xticklabels(FBIN_LIST, rotation=45)
        ax.set_yticklabels(FBIN_LIST)
        ax.xaxis.set_major_locator(ticker.MultipleLocator(1))
        ax.yaxis.set_major_locator(ticker.MultipleLocator(1))
        co = fig.colorbar(mat)
        co.set_ticks([.5, .525, .55, .575, .6, .625, .65])
        co.set_label("Decoding Accuracies")
        plt.savefig(
            "figures/reduced_da_bands_{}_{}_{}_{}.png".format(
                state, elec, WINDOW, OVERLAP
            ),
            dpi=150,
        )
        plt.close("all")
        # plt.show()
| {"/classif_cosp_backward.py": ["/utils.py"], "/visu_piecharts_fselect.py": ["/utils.py"], "/classif_subcosp.py": ["/utils.py"], "/classif_all_bin_combinations.py": ["/utils.py"], "/visu_data_boxplot.py": ["/utils.py"], "/classif_cosp_multif.py": ["/utils.py"], "/EFS_fixed_elec.py": ["/utils.py"], "/visu_topomap.py": ["/utils.py"], "/ttest.py": ["/ttest_perm_indep.py"], "/classif_psd_bins.py": ["/utils.py"], "/permutations_EFS_fixed_elec.py": ["/utils.py"], "/classif_cov_testn153SWS.py": ["/utils.py"], "/visu_barplot_multifeature.py": ["/utils.py"], "/compute_psd.py": ["/utils.py"], "/compute_cosp.py": ["/utils.py"], "/classif_psd_multi_fixed_elec.py": ["/utils.py"], "/classif_cov_test_simplified.py": ["/utils.py"], "/compute_cov.py": ["/utils.py"], "/classif_cov.py": ["/utils.py"], "/classif_psd.py": ["/utils.py"], "/classif_psd_nremvsrem.py": ["/utils.py"], "/classif_SVM_STATE_ELEC.py": ["/utils.py"], "/classif_perm_subsamp.py": ["/utils.py"]} |
68,876 | arthurdehgan/sleep | refs/heads/master | /compute_psd_bins.py | """Computes PSD vectors and save them.
Computes PSD for each frequency from meg fif files and saves.
"""
from scipy.io import savemat, loadmat
from scipy.signal import welch
from path import Path as path
from joblib import Parallel, delayed
import numpy as np
from params import (
STATE_LIST,
SUBJECT_LIST,
SF,
N_ELEC,
WINDOW,
OVERLAP,
DATA_PATH,
SAVE_PATH,
CHANNEL_NAMES,
)
SAVE_PATH = SAVE_PATH / "psd/"
# Frequency band of interest in Hz; the NREM state is excluded in place.
freq_range = (1, 45)
del STATE_LIST[STATE_LIST.index("NREM")]
def computePSD(signal, fs, window, overlap, freq_range):
    """Welch PSD of *signal*, restricted to frequencies within *freq_range*."""
    freqs, power = welch(
        signal, fs=fs, window="hamming", nperseg=window, noverlap=overlap, nfft=None
    )
    in_band = (freqs >= freq_range[0]) & (freqs <= freq_range[1])
    return power[in_band]
def computeSavePSD(sleep_stages, subject, window, overlap):
    """Compute and save the per-electrode PSDs of one subject.

    For each sleep stage and electrode, computes the Welch PSD of every
    trial and stores the stacked result in one .mat file per electrode.
    """
    for stage in sleep_stages:
        print(stage)
        file_name = DATA_PATH / "{}_s{}.mat".format(stage, subject)
        X = loadmat(file_name)[stage][:N_ELEC].swapaxes(1, 2)
        for i, elec in enumerate(CHANNEL_NAMES):
            # save_name is trial-invariant, so it is built once per electrode
            save_name = SAVE_PATH / "PSDs_{}_s{}_{}_{}_{:.2f}.mat".format(
                stage, subject, elec, window, overlap
            )
            trials_psd = [
                computePSD(trial, SF, window, overlap, freq_range) for trial in X[i]
            ]
            savemat(save_name, {"data": np.asarray(trials_psd)})
if __name__ == "__main__":
    """Main function."""
    # n_jobs=1: per-subject jobs run sequentially here.
    Parallel(n_jobs=1)(
        delayed(computeSavePSD)(STATE_LIST, subject, window=WINDOW, overlap=OVERLAP)
        for subject in SUBJECT_LIST
    )
    # computeSavePSD(STATE_LIST, subject, window=WINDOW, overlap=OVERLAP)
| {"/classif_cosp_backward.py": ["/utils.py"], "/visu_piecharts_fselect.py": ["/utils.py"], "/classif_subcosp.py": ["/utils.py"], "/classif_all_bin_combinations.py": ["/utils.py"], "/visu_data_boxplot.py": ["/utils.py"], "/classif_cosp_multif.py": ["/utils.py"], "/EFS_fixed_elec.py": ["/utils.py"], "/visu_topomap.py": ["/utils.py"], "/ttest.py": ["/ttest_perm_indep.py"], "/classif_psd_bins.py": ["/utils.py"], "/permutations_EFS_fixed_elec.py": ["/utils.py"], "/classif_cov_testn153SWS.py": ["/utils.py"], "/visu_barplot_multifeature.py": ["/utils.py"], "/compute_psd.py": ["/utils.py"], "/compute_cosp.py": ["/utils.py"], "/classif_psd_multi_fixed_elec.py": ["/utils.py"], "/classif_cov_test_simplified.py": ["/utils.py"], "/compute_cov.py": ["/utils.py"], "/classif_cov.py": ["/utils.py"], "/classif_psd.py": ["/utils.py"], "/classif_psd_nremvsrem.py": ["/utils.py"], "/classif_SVM_STATE_ELEC.py": ["/utils.py"], "/classif_perm_subsamp.py": ["/utils.py"]} |
68,877 | arthurdehgan/sleep | refs/heads/master | /visu_topomap_multif.py | """Generate topomaps"""
from mne.viz import plot_topomap
from scipy.io import loadmat
from params import SAVE_PATH, STATE_LIST, CHANNEL_NAMES
import numpy as np
import matplotlib.pyplot as plt
# Headless backend: figures are only written to disk, never shown.
plt.switch_backend("agg")
DATA_PATH = SAVE_PATH / "psd"
RESULTS_PATH = DATA_PATH / "results"
POS_FILE = SAVE_PATH / "../Coord_EEG_1020.mat"
SENSORS_POS = loadmat(POS_FILE)["Cor"]
# FREQS = ['Delta', 'Theta', 'Alpha', 'Sigma', 'Beta', 'Gamma1', 'Gamma2']
WINDOW = 1000
OVERLAP = 0
# Significance threshold applied to the permutation p-values.
p = .001
for stage in STATE_LIST:
    fig = plt.figure(figsize=(5, 5))
    scores, pvalues = [], []
    for elec in CHANNEL_NAMES:
        file_name = "perm_PSDM_{}_{}_{}_{:.2f}.mat".format(stage, elec, WINDOW, OVERLAP)
        try:
            score = loadmat(RESULTS_PATH / file_name)["score"].ravel()
        except TypeError:
            print(file_name)
        # NOTE(review): if the load above fails, `score` silently keeps the
        # previous electrode's value (or is unbound on the first) — confirm.
        scores.append(score[0] * 100)
        pvalue = loadmat(RESULTS_PATH / file_name)["pvalue"].ravel()
        pvalues.append(pvalue[0])
    DA = np.asarray(scores)
    da_pvalues = np.asarray(pvalues)
    # Star markers highlight electrodes significant at threshold p.
    da_mask = np.full((len(CHANNEL_NAMES)), False, dtype=bool)
    da_mask[da_pvalues <= p] = True
    mask_params = dict(
        marker="*", markerfacecolor="white", markersize=9, markeredgecolor="white"
    )
    data = {
        "name": "Decoding Accuracies p<{}".format(p),
        "cmap": "viridis",
        "mask": da_mask,
        "cbarlim": [50, 65],
        "data": DA,
    }
    ch_show = True
    ax, _ = plot_topomap(
        data["data"],
        SENSORS_POS,
        res=128,
        cmap=data["cmap"],
        show=False,
        vmin=data["cbarlim"][0],
        vmax=data["cbarlim"][1],
        names=CHANNEL_NAMES,
        show_names=ch_show,
        mask=data["mask"],
        mask_params=mask_params,
        contours=0,
    )
    fig.colorbar(ax, shrink=.65)
    file_name = "topomap_all_multifeature_{}_p{}".format(stage, str(p)[2:])
    savename = SAVE_PATH / "../figures" / file_name
    plt.savefig(savename, dpi=200)
| {"/classif_cosp_backward.py": ["/utils.py"], "/visu_piecharts_fselect.py": ["/utils.py"], "/classif_subcosp.py": ["/utils.py"], "/classif_all_bin_combinations.py": ["/utils.py"], "/visu_data_boxplot.py": ["/utils.py"], "/classif_cosp_multif.py": ["/utils.py"], "/EFS_fixed_elec.py": ["/utils.py"], "/visu_topomap.py": ["/utils.py"], "/ttest.py": ["/ttest_perm_indep.py"], "/classif_psd_bins.py": ["/utils.py"], "/permutations_EFS_fixed_elec.py": ["/utils.py"], "/classif_cov_testn153SWS.py": ["/utils.py"], "/visu_barplot_multifeature.py": ["/utils.py"], "/compute_psd.py": ["/utils.py"], "/compute_cosp.py": ["/utils.py"], "/classif_psd_multi_fixed_elec.py": ["/utils.py"], "/classif_cov_test_simplified.py": ["/utils.py"], "/compute_cov.py": ["/utils.py"], "/classif_cov.py": ["/utils.py"], "/classif_psd.py": ["/utils.py"], "/classif_psd_nremvsrem.py": ["/utils.py"], "/classif_SVM_STATE_ELEC.py": ["/utils.py"], "/classif_perm_subsamp.py": ["/utils.py"]} |
68,878 | arthurdehgan/sleep | refs/heads/master | /classif_cov.py | """Load covariance matrix, perform classif, perm test, saves results.
Outputs one file per state
Author: Arthur Dehgan"""
import sys
from time import time
from scipy.io import savemat, loadmat
import pandas as pd
import numpy as np
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
from sklearn.model_selection import StratifiedShuffleSplit as SSS
from pyriemann.classification import TSclassifier
from utils import (
create_groups,
StratifiedLeave2GroupsOut,
elapsed_time,
prepare_data,
classification,
)
from params import SAVE_PATH, STATE_LIST, LABEL_PATH
# PREFIX = "perm_"
# PREFIX = "classif_"
# PREFIX = "reduced_classif_"
PREFIX = "bootstrapped_classif_"
NAME = "subsamp_cov"
# NAME = "cosp"
# NAME = 'ft_cosp'
# NAME = "moy_cosp"
# NAME = 'im_cosp'
# NAME = 'wpli'
# NAME = 'coh'
# NAME = 'imcoh'
# NAME = 'ft_wpli'
# NAME = 'ft_coh'
# NAME = 'ft_imcoh'
# Behavior flags are derived from the PREFIX/NAME naming convention above.
PREFIX_LIST = PREFIX.split("_")
BOOTSTRAP = "bootstrapped" in PREFIX_LIST
REDUCED = "reduced" in PREFIX_LIST
FULL_TRIAL = "ft" in NAME or "moy" in NAME.split("_")
SUBSAMPLE = "subsamp" in NAME.split("_")
PERM = "perm" in PREFIX_LIST
N_PERM = 999 if PERM else None
if BOOTSTRAP:
    N_BOOTSTRAPS = 100
elif REDUCED:
    # presumably one repetition per left-out subject — TODO confirm
    N_BOOTSTRAPS = 19
else:
    N_BOOTSTRAPS = 1
SAVE_PATH = SAVE_PATH / NAME
print(NAME, PREFIX)
def proper_loadmat(file_path):
    """Load a .mat file, dropping scipy metadata and squeezing every value."""
    data = loadmat(file_path)
    meta_keys = [key for key in data if key.startswith("__")]
    for key in meta_keys:
        del data[key]
    for key in data:
        data[key] = data[key].squeeze().tolist()
    return data
def classif_cov(state):
    """Run the covariance-based classification for one sleep state.

    Builds labels/groups according to the module configuration
    (FULL_TRIAL / SUBSAMPLE / raw labels), then runs a tangent-space LDA
    classifier over N_BOOTSTRAPS repetitions, checkpointing the
    accumulated scores to a .mat results file after every repetition so
    an interrupted run can resume.

    Parameters
    ----------
    state : str
        Sleep state identifier (one of STATE_LIST); selects the data and
        label files to use.
    """
    print(state)
    if FULL_TRIAL:
        # One averaged trial per subject: 18 dreamers vs 18 non-dreamers.
        labels = np.concatenate((np.ones(18), np.zeros(18)))
        groups = range(36)
    elif SUBSAMPLE:
        # Subsample every subject to the smallest trial count found
        # across all states so the classes stay balanced.
        info_data = pd.read_csv(SAVE_PATH.parent / "info_data.csv")[STATE_LIST]
        n_trials = info_data.min().min()
        n_subs = len(info_data) - 1
        groups = [i for i in range(n_subs) for _ in range(n_trials)]
        n_total = n_trials * n_subs
        labels = [0 if i < n_total / 2 else 1 for i in range(n_total)]
    else:
        labels = loadmat(LABEL_PATH / state + "_labels.mat")["y"].ravel()
        labels, groups = create_groups(labels)
    file_path = SAVE_PATH / "results" / PREFIX + NAME + "_{}.mat".format(state)
    if not file_path.isfile():
        n_rep = 0
    else:
        # Resume: a previous partial run left its repetition counter behind.
        final_save = proper_loadmat(file_path)
        n_rep = final_save["n_rep"]
        print("starting from i={}".format(n_rep))
    file_name = NAME + "_{}.mat".format(state)
    data_file_path = SAVE_PATH / file_name
    if data_file_path.isfile():
        data_og = loadmat(data_file_path)
        for i in range(n_rep, N_BOOTSTRAPS):
            if FULL_TRIAL:
                data = data_og["data"]
            elif SUBSAMPLE:
                # Fresh random subsample of trials at each bootstrap iteration.
                data = prepare_data(data_og, n_trials=n_trials, random_state=i)
            else:
                data = prepare_data(data_og)
            if REDUCED:
                # Leave-one-electrode-out: drop row and column i of every
                # covariance matrix.
                reduced_data = []
                for submat in data:
                    temp_a = np.delete(submat, i, 0)
                    temp_b = np.delete(temp_a, i, 1)
                    reduced_data.append(temp_b)
                data = np.asarray(reduced_data)
            if FULL_TRIAL:
                crossval = SSS(9)
            else:
                crossval = StratifiedLeave2GroupsOut()
            lda = LDA()
            clf = TSclassifier(clf=lda)
            save = classification(
                clf, crossval, data, labels, groups, N_PERM, n_jobs=-1
            )
            print(save["acc_score"])
            if i == 0:
                final_save = save
            elif BOOTSTRAP or REDUCED:
                # Accumulate scores across repetitions.
                for key, value in save.items():
                    final_save[key] += value
            final_save["n_rep"] = i + 1
            # Checkpoint after every repetition so the run can resume.
            savemat(file_path, final_save)
        final_save["n_rep"] = N_BOOTSTRAPS
        if BOOTSTRAP:
            final_save["auc_score"] = np.mean(final_save["auc_score"])
            final_save["acc_score"] = np.mean(final_save["acc_score"])
        savemat(file_path, final_save)
        # Fix: the original format string had four placeholders
        # ("%s %s : %0.2f (+/- %0.2f)") for only three values, which
        # raised TypeError at runtime.
        print(
            "accuracy for %s : %0.2f (+/- %0.2f)"
            % (state, np.mean(save["acc_score"]), np.std(save["acc"]))
        )
        if PERM:
            print("pval = {}".format(save["acc_pvalue"]))
    else:
        print(data_file_path.name + " Not found")
if __name__ == "__main__":
    # Entry point: decode every sleep state, or only the one given on the CLI.
    TIMELAPSE_START = time()
    ARGS = sys.argv[1:] if len(sys.argv) > 1 else []
    if not ARGS:
        # No argument: process every state sequentially.
        for state in STATE_LIST:
            classif_cov(state)
    else:
        print(ARGS)
        classif_cov(ARGS[0])
    print("total time lapsed : %s" % elapsed_time(TIMELAPSE_START, time()))
| {"/classif_cosp_backward.py": ["/utils.py"], "/visu_piecharts_fselect.py": ["/utils.py"], "/classif_subcosp.py": ["/utils.py"], "/classif_all_bin_combinations.py": ["/utils.py"], "/visu_data_boxplot.py": ["/utils.py"], "/classif_cosp_multif.py": ["/utils.py"], "/EFS_fixed_elec.py": ["/utils.py"], "/visu_topomap.py": ["/utils.py"], "/ttest.py": ["/ttest_perm_indep.py"], "/classif_psd_bins.py": ["/utils.py"], "/permutations_EFS_fixed_elec.py": ["/utils.py"], "/classif_cov_testn153SWS.py": ["/utils.py"], "/visu_barplot_multifeature.py": ["/utils.py"], "/compute_psd.py": ["/utils.py"], "/compute_cosp.py": ["/utils.py"], "/classif_psd_multi_fixed_elec.py": ["/utils.py"], "/classif_cov_test_simplified.py": ["/utils.py"], "/compute_cov.py": ["/utils.py"], "/classif_cov.py": ["/utils.py"], "/classif_psd.py": ["/utils.py"], "/classif_psd_nremvsrem.py": ["/utils.py"], "/classif_SVM_STATE_ELEC.py": ["/utils.py"], "/classif_perm_subsamp.py": ["/utils.py"]} |
68,879 | arthurdehgan/sleep | refs/heads/master | /classif_psd.py | """Uses a classifier to decode PSD values.
Computes pvalues and saves them in a mat format with the decoding accuracies.
Author: Arthur Dehgan
"""
import sys
from time import time
from itertools import product
import pandas as pd
import numpy as np
from scipy.io import savemat, loadmat
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
from utils import (
StratifiedShuffleGroupSplit,
elapsed_time,
create_groups,
classification,
proper_loadmat,
)
from params import (
SAVE_PATH,
LABEL_PATH,
CHANNEL_NAMES,
WINDOW,
OVERLAP,
STATE_LIST,
FREQ_DICT,
)
NAME = "psd"
# NAME = "zscore_psd"
# PREFIX = "perm_"
# PREFIX = "bootstrapped_perm_subsamp_"
# PREFIX = "bootstrapped_subsamp_outl_"
PREFIX = "bootstrapped_subsamp_"
# PREFIX = "bootstrapped_adapt_"
SOLVER = "svd" # 'svd' 'lsqr'
PREF_LIST = PREFIX.split("_")
BOOTSTRAP = "bootstrapped" in PREF_LIST
SUBSAMPLE = "subsamp" in PREF_LIST
ADAPT = "adapt" in PREF_LIST
PERM = "perm" in PREF_LIST
N_PERM = 999 if PERM else None
N_BOOTSTRAPS = 1000 if BOOTSTRAP else 1
INIT_LABELS = [0] * 18 + [1] * 18
SAVE_PATH /= NAME
def prepare_data(
    dico, labels=None, rm_outl=None, key="data", n_trials=None, random_state=0
):
    """Flatten the per-subject data stored in *dico* into one trial matrix.

    Parameters
    ----------
    dico : dict
        Dict loaded from a .mat file; ``dico[key]`` holds one matrix per
        subject.
    labels : list, optional
        One label per subject; repeated once per trial of that subject.
    rm_outl : int, optional
        Outlier-rejection parameter forwarded to ``rm_outliers``.
    key : str
        Key of the data entry inside *dico*.
    n_trials : int, optional
        If given, subsample that many trials per subject (capped at the
        smallest subject's trial count).
    random_state : int
        Seed for the per-subject trial subsampling.

    Returns
    -------
    tuple
        ``(data, labels, groups)`` ready for group-aware cross-validation.
    """
    data = dico[key].ravel()
    data = np.asarray([sub.squeeze() for sub in data])
    final_data = None
    if rm_outl is not None:
        # NOTE(review): ``rm_outliers`` is neither defined nor imported in
        # this module -- this branch raises NameError; confirm where it lives.
        data = np.asarray([rm_outliers(sub, rm_outl) for sub in data])
    sizes = [len(sub) for sub in data]
    if n_trials is not None:
        n_sub_min = min(sizes)
        if n_trials > n_sub_min:
            print(
                "can't take {} trials, will take the minimum amout {} instead".format(
                    n_trials, n_sub_min
                )
            )
            n_trials = n_sub_min
        labels = np.asarray([[lab] * n_trials for lab in labels])
    elif labels is not None:
        # Fix: repeat each subject's label once per trial. The original
        # ``labels[i] * size`` multiplied integer labels arithmetically
        # instead of repeating them like the ``[lab] * n_trials`` branch.
        labels = np.asarray([[labels[i]] * size for i, size in enumerate(sizes)])
    else:
        raise Exception(
            "Error: either specify a number of trials and the "
            + "labels will be generated or give the original labels"
        )
    labels, groups = create_groups(labels)
    for submat in data:
        if submat.shape[0] == 1:
            submat = submat.ravel()
        if n_trials is not None:
            # Deterministic subsample of n_trials rows for this subject.
            index = np.random.RandomState(random_state).choice(
                range(len(submat)), n_trials, replace=False
            )
            prep_submat = submat[index]
        else:
            prep_submat = submat
        final_data = (
            prep_submat
            if final_data is None
            else np.concatenate((prep_submat, final_data))
        )
    return np.asarray(final_data), labels, groups
def classif_psd(state, elec, n_jobs=-1):
    """Decode dreamers vs non-dreamers from PSD values for one
    (state, electrode) pair, looping over every frequency band.

    Scores are accumulated over N_BOOTSTRAPS repetitions and written to a
    .mat file after each one so an interrupted run can resume.

    Parameters
    ----------
    state : str
        Sleep state identifier.
    elec : str
        Electrode (channel) name.
    n_jobs : int
        Parallelism forwarded to the classification routine; also gates
        per-iteration checkpointing (only when n_jobs == -1).
    """
    if SUBSAMPLE or ADAPT:
        info_data = pd.read_csv(SAVE_PATH.parent / "info_data.csv")[STATE_LIST]
        if SUBSAMPLE:
            # Same trial count everywhere: global minimum across states.
            n_trials = info_data.min().min()
            # n_trials = 30
        elif ADAPT:
            # Per-state minimum trial count.
            n_trials = info_data.min()[state]
    labels_og = INIT_LABELS
    for freq in FREQ_DICT:
        print(state, elec, freq)
        data_file_name = NAME + "_{}_{}_{}_{}_{:.2f}.mat".format(
            state, freq, elec, WINDOW, OVERLAP
        )
        save_file_name = PREFIX + data_file_name
        data_file_path = SAVE_PATH / data_file_name
        save_file_path = SAVE_PATH / "results" / save_file_name
        if not save_file_path.isfile():
            n_rep = 0
        else:
            # Resume from the repetition counter of a previous partial run.
            final_save = proper_loadmat(save_file_path)
            n_rep = int(final_save["n_rep"])
            # n_splits = int(final_save["n_splits"])
        CHANGES = False
        print("Starting from i={}".format(n_rep))
        og_data = loadmat(data_file_path)
        crossval = StratifiedShuffleGroupSplit(2)
        clf = LDA(solver=SOLVER)
        for i in range(n_rep, N_BOOTSTRAPS):
            CHANGES = True
            if SUBSAMPLE or ADAPT:
                # Fresh random subsample of trials at each bootstrap iteration.
                data, labels, groups = prepare_data(
                    og_data, labels_og, n_trials=n_trials, random_state=i
                )
            else:
                data, labels, groups = prepare_data(og_data, labels_og)
            # n_splits = crossval.get_n_splits(None, labels, groups)
            data = np.array(data).reshape(-1, 1)
            save = classification(
                clf, crossval, data, labels, groups, N_PERM, n_jobs=n_jobs
            )
            # save["n_splits"] = n_splits
            if i == 0:
                final_save = save
            elif BOOTSTRAP:
                # Accumulate scores across repetitions.
                for key, value in save.items():
                    # if key != "n_splits":
                    final_save[key] += value
            final_save["n_rep"] = i + 1
            if n_jobs == -1:
                # Checkpoint after each repetition (sequential mode only,
                # to avoid concurrent writes from parallel workers).
                savemat(save_file_path, final_save)
        if BOOTSTRAP:
            final_save["auc_score"] = np.mean(final_save["auc_score"])
            final_save["acc_score"] = np.mean(final_save["acc_score"])
        if CHANGES:
            savemat(save_file_path, final_save)
        # Standard deviation of the per-repetition mean accuracies.
        standev = np.std(
            [
                np.mean(final_save["acc"][i : i + N_BOOTSTRAPS])
                for i in range(0, len(final_save["acc"]), N_BOOTSTRAPS)
            ]
        )
        print(
            "accuracy for {} {} : {:.2f} (+/- {:.2f})".format(
                state, elec, final_save["acc_score"], standev
            )
        )
        if PERM:
            print("pval = {:.4f}".format(final_save["acc_pvalue"]))
if __name__ == "__main__":
    # CLI: "state elec" as two arguments, "state_elec" as one argument, or
    # nothing to sweep every (state, electrode) pair in parallel.
    TIMELAPSE_START = time()
    n_cli = len(sys.argv)
    if n_cli > 2:
        ARGS = sys.argv[1:]
    elif n_cli == 2:
        ARGS = sys.argv[1].split("_")
    else:
        ARGS = []
    if not ARGS:
        from joblib import delayed, Parallel

        Parallel(n_jobs=-1)(
            delayed(classif_psd)(st, el, n_jobs=1)
            for st, el in product(STATE_LIST, CHANNEL_NAMES)
        )
    else:
        print(ARGS)
        classif_psd(ARGS[0], ARGS[1])
    print("total time lapsed : %s" % (elapsed_time(TIMELAPSE_START, time())))
| {"/classif_cosp_backward.py": ["/utils.py"], "/visu_piecharts_fselect.py": ["/utils.py"], "/classif_subcosp.py": ["/utils.py"], "/classif_all_bin_combinations.py": ["/utils.py"], "/visu_data_boxplot.py": ["/utils.py"], "/classif_cosp_multif.py": ["/utils.py"], "/EFS_fixed_elec.py": ["/utils.py"], "/visu_topomap.py": ["/utils.py"], "/ttest.py": ["/ttest_perm_indep.py"], "/classif_psd_bins.py": ["/utils.py"], "/permutations_EFS_fixed_elec.py": ["/utils.py"], "/classif_cov_testn153SWS.py": ["/utils.py"], "/visu_barplot_multifeature.py": ["/utils.py"], "/compute_psd.py": ["/utils.py"], "/compute_cosp.py": ["/utils.py"], "/classif_psd_multi_fixed_elec.py": ["/utils.py"], "/classif_cov_test_simplified.py": ["/utils.py"], "/compute_cov.py": ["/utils.py"], "/classif_cov.py": ["/utils.py"], "/classif_psd.py": ["/utils.py"], "/classif_psd_nremvsrem.py": ["/utils.py"], "/classif_SVM_STATE_ELEC.py": ["/utils.py"], "/classif_perm_subsamp.py": ["/utils.py"]} |
68,880 | arthurdehgan/sleep | refs/heads/master | /visu_psd.py | from scipy.io import loadmat
import numpy as np
import math
import matplotlib.pyplot as plt
from params import SUBJECT_LIST, CHANNEL_NAMES, SAVE_PATH, STATE_LIST
SAVE_PATH = SAVE_PATH / "psd"
HR_labels = ("HR", "HR mean")
LR_labels = ("LR", "LR mean")
plt.figure()
ax = plt.axes()
def compute(val, k):
    """Return the natural log of *val* divided by (k + 1)."""
    ratio = val / (k + 1)
    return math.log(ratio)
# Average the PSDs per subject: over trials, then states, then electrodes.
all_subs = []
for sub in SUBJECT_LIST:
    all_elecs = []
    for elec in CHANNEL_NAMES:
        data = []
        for state in STATE_LIST:
            filename = SAVE_PATH / "PSDs_{}_s{}_{}_1000_0.00.mat".format(
                state, sub, elec
            )
            data.append(loadmat(filename)["data"].mean(axis=0))
        all_states = np.asarray(data).mean(axis=0)
        all_elecs.append(all_states)
    all_subs.append(np.asarray(all_elecs).mean(axis=0))
all_subs = np.asarray(all_subs)
# Split into high (last 18 subjects) and low (first 18) recallers.
hdr = all_subs[18:]
# NOTE(review): `i` is the subject index here, so each subject's spectrum is
# scaled by log(val / (i + 1)) -- confirm this normalisation is intended.
hdr = [[compute(a, i) for a in dat] for i, dat in enumerate(hdr)]
ldr = all_subs[:18]
ldr = [[compute(a, i) for a in dat] for i, dat in enumerate(ldr)]
for i in range(len(hdr)):
    # Individual curves; only the first of each group enters the legend.
    plt.plot(
        range(1, 46), hdr[i], color="peachpuff", label="_nolegend_" if i > 0 else "HR"
    )
    plt.plot(
        range(1, 46), ldr[i], color="skyblue", label="_nolegend_" if i > 0 else "LR"
    )
    # plt.plot([math.log(i) for i in range(1, 46)], hdr[i], color='peachpuff')
    # plt.plot([math.log(i) for i in range(1, 46)], ldr[i], color='skyblue')
    # plt.plot([math.log(i) for i in range(1, 46)], np.mean(hdr, axis=0), color='red')
    # plt.plot([math.log(i) for i in range(1, 46)], np.mean(ldr, axis=0), color='blue')
# Group means drawn on top of the individual curves.
plt.plot(range(1, 46), np.mean(hdr, axis=0), color="red", label="HR mean")
plt.plot(range(1, 46), np.mean(ldr, axis=0), color="blue", label="LR mean")
plt.legend()
plt.xlim(1, 45)
plt.ylabel("Power Spectral Density")
plt.xlabel("Frequency")
plt.show()
| {"/classif_cosp_backward.py": ["/utils.py"], "/visu_piecharts_fselect.py": ["/utils.py"], "/classif_subcosp.py": ["/utils.py"], "/classif_all_bin_combinations.py": ["/utils.py"], "/visu_data_boxplot.py": ["/utils.py"], "/classif_cosp_multif.py": ["/utils.py"], "/EFS_fixed_elec.py": ["/utils.py"], "/visu_topomap.py": ["/utils.py"], "/ttest.py": ["/ttest_perm_indep.py"], "/classif_psd_bins.py": ["/utils.py"], "/permutations_EFS_fixed_elec.py": ["/utils.py"], "/classif_cov_testn153SWS.py": ["/utils.py"], "/visu_barplot_multifeature.py": ["/utils.py"], "/compute_psd.py": ["/utils.py"], "/compute_cosp.py": ["/utils.py"], "/classif_psd_multi_fixed_elec.py": ["/utils.py"], "/classif_cov_test_simplified.py": ["/utils.py"], "/compute_cov.py": ["/utils.py"], "/classif_cov.py": ["/utils.py"], "/classif_psd.py": ["/utils.py"], "/classif_psd_nremvsrem.py": ["/utils.py"], "/classif_SVM_STATE_ELEC.py": ["/utils.py"], "/classif_perm_subsamp.py": ["/utils.py"]} |
68,881 | arthurdehgan/sleep | refs/heads/master | /classif_psd_nremvsrem.py | from utils import StratifiedLeave2GroupsOut, elapsed_time, create_groups
from scipy.io import savemat, loadmat
from time import time
import numpy as np
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
from sklearn.model_selection import cross_val_score
from numpy.random import permutation
from params import data_path, save_path, n_elec, label_path, freq_dict, path
def load_data(data_path, sleep_state, key, elec, window, overlap):
    """Load and stack every subject's PSD values for one condition.

    Parameters
    ----------
    data_path : path
        Directory containing the per-condition PSD .mat files.
    sleep_state : str
        Sleep state identifier used in the file name.
    key : str
        Frequency-band name used in the file name.
    elec : int
        Electrode index.
    window, overlap : int, float
        PSD computation parameters used in the file name.

    Returns
    -------
    numpy.ndarray or int
        Column vector (n_samples, 1) of all subjects' PSD values, or 0 if
        the file is missing.
    """
    # NOTE(review): "/" and "%" share precedence, so this formats the result
    # of (data_path / format-string); works because `path` subclasses str.
    data_file_path = (
        data_path
        / "PSD_sleepState_%s_%s_%i_%i_%.2f.mat"
        % (sleep_state, key, elec, window, overlap)
    )
    # data_file_path = data_path / 'PSD_EOG_sleepState_%s_%s_%i_%i_%.2f.mat' % (sleep_state, key, elec, window, overlap) # EOG
    if path(data_file_path).isfile():
        X = loadmat(data_file_path)["data"].ravel()
        # Flatten and concatenate every subject's trials into one vector.
        data = X[0].ravel()
        for submat in X[1:]:
            data = np.concatenate((submat.ravel(), data))
        X = data.reshape(len(data), 1)
        del data
        return X
    else:
        print(path(data_file_path).name + " Not found")
        print(
            'please run "computePSD.py" and "group_PSD_per_subjects.py" before running this script'
        )
        return 0
if __name__ == "__main__":
    # Permutation-tested LDA decoding of dreamers vs non-dreamers from PSD,
    # one result file per (sleep state, electrode, frequency band).
    t0 = time()
    # Window length (in samples) and overlap used when the PSDs were computed.
    window = 1000
    overlap = 0  # overlap for computation of cross-spectrum matrices (0 = no overlap)
    n_permutations = 1000
    # n_elec=2 # EOG
    sleep_list = ["AWA", "NREM", "Rem"]
    save_path = save_path / "PSD"
    classifier = "LDA"
    print("\nClassification of dreamers vs non dreamers")
    print("features : PSD")
    print("parameters : window = %i overlap = %0.2f" % (window, overlap))
    print("Classifier : " + classifier)
    for sleep_state in sleep_list:
        print("\nProcessing state %s" % sleep_state)
        for elec in range(n_elec):
            print("electrode : %i/%i" % (elec, n_elec))
            t1 = time()
            y = loadmat(label_path / sleep_state + "_labels.mat")["y"].ravel()
            y, groups = create_groups(y)
            for key in freq_dict:
                results_file_path = (
                    save_path
                    / "results"
                    / "perm_PSD_%s_%s_%i_%i_%0.2f.mat"
                    % (sleep_state, key, elec, window, overlap)
                )
                # results_file_path = save_path / 'results' / 'perm_PSD_EOG_%s_%s_%i_%i_%0.2f.mat' % (sleep_state, key, elec, window, overlap) # EOG
                if not path(results_file_path).isfile():
                    X = None
                    if sleep_state == "NREM":
                        # NREM is the stack of the S1, S2 and SWS trials.
                        for n in ["S1", "S2", "SWS"]:
                            loaded = load_data(save_path, n, key, elec, window, overlap)
                            # Fix: ndarrays have no .vstack method and the
                            # original call dropped the data_path argument.
                            X = loaded if X is None else np.vstack((X, loaded))
                    else:
                        X = load_data(
                            save_path, sleep_state, key, elec, window, overlap
                        )
                    t3 = time()
                    # Reference score on the true labels.
                    sl2go = StratifiedLeave2GroupsOut()
                    clf = LDA()
                    scores = []
                    pvalue = 0
                    good_score = cross_val_score(
                        cv=sl2go, estimator=clf, X=X, y=y, groups=groups, n_jobs=-1
                    ).mean()
                    # Null distribution: same pipeline on shuffled labels.
                    for perm in range(n_permutations):
                        clf = LDA()
                        perm_set = permutation(len(y))
                        y_perm = y[perm_set]
                        groups_perm = groups[perm_set]
                        scores.append(
                            cross_val_score(
                                cv=sl2go,
                                estimator=clf,
                                X=X,
                                y=y_perm,
                                groups=groups_perm,
                                n_jobs=-1,
                            ).mean()
                        )
                    for score in scores:
                        if good_score <= score:
                            pvalue += 1 / n_permutations
                    data = {"score": good_score, "pscore": scores, "pvalue": pvalue}
                    # Fix: report the real decoding accuracy instead of the
                    # last permutation score that leaked out of the loop.
                    print(
                        "%s : %0.2f significatif a p=%0.4f"
                        % (key, good_score, pvalue)
                    )
                    savemat(results_file_path, data)
    print("total time lapsed : %s" % elapsed_time(t0, time()))
| {"/classif_cosp_backward.py": ["/utils.py"], "/visu_piecharts_fselect.py": ["/utils.py"], "/classif_subcosp.py": ["/utils.py"], "/classif_all_bin_combinations.py": ["/utils.py"], "/visu_data_boxplot.py": ["/utils.py"], "/classif_cosp_multif.py": ["/utils.py"], "/EFS_fixed_elec.py": ["/utils.py"], "/visu_topomap.py": ["/utils.py"], "/ttest.py": ["/ttest_perm_indep.py"], "/classif_psd_bins.py": ["/utils.py"], "/permutations_EFS_fixed_elec.py": ["/utils.py"], "/classif_cov_testn153SWS.py": ["/utils.py"], "/visu_barplot_multifeature.py": ["/utils.py"], "/compute_psd.py": ["/utils.py"], "/compute_cosp.py": ["/utils.py"], "/classif_psd_multi_fixed_elec.py": ["/utils.py"], "/classif_cov_test_simplified.py": ["/utils.py"], "/compute_cov.py": ["/utils.py"], "/classif_cov.py": ["/utils.py"], "/classif_psd.py": ["/utils.py"], "/classif_psd_nremvsrem.py": ["/utils.py"], "/classif_SVM_STATE_ELEC.py": ["/utils.py"], "/classif_perm_subsamp.py": ["/utils.py"]} |
68,882 | arthurdehgan/sleep | refs/heads/master | /create_NREM.py | from scipy.io import loadmat, savemat
from params import CHANNEL_NAMES, FREQ_DICT, SUBJECT_LIST
import numpy as np
# DP = '/home/arthur/Documents/sleep/features/crosspectre/'
DP = '/home/arthur/Documents/sleep/features/psd/'
# DP = '/home/arthur/Documents/sleep/features/covariance/'
LP = '/home/arthur/Documents/sleep/labels/'
STATE_LIST = ['S1', 'S2', 'SWS']
FULL_TRIAL = False
# labels = []
# for state in STATE_LIST:
# label_path = LP + '{}_labels.mat'.format(state)
# temp_label = loadmat(label_path)['y'].ravel()
# temp_label = [lab.ravel() for lab in temp_label]
# labels.append(temp_label)
#
# labelsf = []
# for i in range(len(labels[0])):
# temp = []
# for k in range(len(labels)):
# temp.append(labels[k][i])
# temp = np.concatenate(temp)
# labelsf.append(temp)
#
# labelsf = np.array(labelsf)
# savemat(LP + 'NREM_labels.mat', {'y': labelsf})
# Merge the S1/S2/SWS feature files into a single "NREM" file, per
# frequency band and electrode.
for key in FREQ_DICT:
    print(key)
    for elec in CHANNEL_NAMES:
        print(elec)
        data = []
        for state in STATE_LIST:
            file_name = DP + 'PSD_{}_{}_{}_1000_0.00.mat'.format(state, key, elec)
            # file_name = DP + 'ft_cosp_{}_{}_1000_0.00.mat'.format(state, key)
            # file_name = DP + 'cov_{}.mat'.format(state)
            try:
                if FULL_TRIAL:
                    temp_data = loadmat(file_name)['data']
                else:
                    temp_data = loadmat(file_name)['data'].ravel()
            except IOError:
                # NOTE(review): after a load failure, the append below reuses
                # the previous iteration's temp_data (or an undefined name).
                print('error loading:', file_name)
            data.append(temp_data)
        if FULL_TRIAL:
            data = np.array(data)
            data = data.mean(axis=0)
            # NOTE(review): three {} placeholders but only two arguments --
            # this .format call raises IndexError if FULL_TRIAL is True.
            savemat(DP + 'PSDs_{}_{}_{}_1000_0.00.mat'.format(state, elec), {'data': data})
        else:
            # Concatenate each subject's trials across the three NREM stages.
            dataf = []
            for i in range(len(SUBJECT_LIST)):
                dataf.append(np.concatenate(([data[k][i].ravel() for k in range(len(STATE_LIST))])))
            dataf = np.array(dataf)
            # savemat(DP + 'ft_cosp_NREM_{}_1000_0.00.mat'.format(key), {'data': dataf})
            savemat(DP + 'PSD_NREM_{}_{}_1000_0.00.mat'.format(key, elec), {'data': dataf})
            # savemat(DP + 'cov_NREM.mat', {'data': dataf})
| {"/classif_cosp_backward.py": ["/utils.py"], "/visu_piecharts_fselect.py": ["/utils.py"], "/classif_subcosp.py": ["/utils.py"], "/classif_all_bin_combinations.py": ["/utils.py"], "/visu_data_boxplot.py": ["/utils.py"], "/classif_cosp_multif.py": ["/utils.py"], "/EFS_fixed_elec.py": ["/utils.py"], "/visu_topomap.py": ["/utils.py"], "/ttest.py": ["/ttest_perm_indep.py"], "/classif_psd_bins.py": ["/utils.py"], "/permutations_EFS_fixed_elec.py": ["/utils.py"], "/classif_cov_testn153SWS.py": ["/utils.py"], "/visu_barplot_multifeature.py": ["/utils.py"], "/compute_psd.py": ["/utils.py"], "/compute_cosp.py": ["/utils.py"], "/classif_psd_multi_fixed_elec.py": ["/utils.py"], "/classif_cov_test_simplified.py": ["/utils.py"], "/compute_cov.py": ["/utils.py"], "/classif_cov.py": ["/utils.py"], "/classif_psd.py": ["/utils.py"], "/classif_psd_nremvsrem.py": ["/utils.py"], "/classif_SVM_STATE_ELEC.py": ["/utils.py"], "/classif_perm_subsamp.py": ["/utils.py"]} |
68,883 | arthurdehgan/sleep | refs/heads/master | /classif_SVM_STATE_ELEC.py | """Uses a classifier to decode PSD values.
Computes pvalues and saves them in a mat format with the decoding accuracies.
Author: Arthur Dehgan
"""
import numpy as np
import sys
from scipy.io import savemat, loadmat
from sklearn.svm import SVC
from sklearn.model_selection import RandomizedSearchCV as RS
from utils import StratifiedLeave2GroupsOut, create_groups, prepare_data
from params import SAVE_PATH, LABEL_PATH, WINDOW, OVERLAP, FREQ_DICT
SAVE_PATH = SAVE_PATH / "psd"
PREFIX = "classif_svm_"
N_PERM = None
STATE, ELEC = sys.argv[1], sys.argv[2][:-1]
LABELS = loadmat(LABEL_PATH / STATE + "_labels.mat")["y"].ravel()
LABELS, GROUPS = create_groups(LABELS)
# Nested cross-validation per frequency band: an outer leave-2-subjects-out
# fold with an inner randomized search over the SVM hyper-parameters.
for freq in FREQ_DICT:
    print(STATE, ELEC, freq)
    data_file_name = "PSD_{}_{}_{}_{}_{:.2f}.mat".format(
        STATE, freq, ELEC, WINDOW, OVERLAP
    )
    save_file_name = PREFIX + data_file_name
    data_file_path = SAVE_PATH / data_file_name
    save_file_path = SAVE_PATH / "results" / save_file_name
    if not save_file_path.isfile():
        data = loadmat(data_file_path)
        data = prepare_data(data)
        data = np.array(data).reshape(len(data), 1)
        cross_val = StratifiedLeave2GroupsOut()
        save = {"score": [], "cv_results": [], "best_score": [], "best_params": []}
        for train_index, test_index in cross_val.split(data, LABELS, GROUPS):
            train_set, validation_set = data[train_index], data[test_index]
            train_labs, validation_labs = LABELS[train_index], LABELS[test_index]
            train_groups, validation_groups = GROUPS[train_index], GROUPS[test_index]
            nested_cv = StratifiedLeave2GroupsOut()
            clf = SVC(kernel="rbf")
            parameters = {"C": np.logspace(-3, 2, 6), "gamma": np.logspace(-3, 2, 6)}
            random_search = RS(clf, parameters, n_iter=1, n_jobs=-1, cv=nested_cv)
            random_search.fit(X=train_set, y=train_labs, groups=train_groups)
            score = random_search.score(validation_set, validation_labs)
            save["score"].append(score)
            save["cv_results"].append(random_search.cv_results_)
            save["best_score"].append(random_search.best_score_)
            save["best_params"].append(random_search.best_params_)
            # Only the first outer fold is evaluated.
            break
        savemat(save_file_path, save)
        # Fix: save["score"] is a list -- formatting it with "{:.2f}" raised
        # ValueError; report the mean of the collected fold scores instead.
        print("{} : {:.2f}".format(freq, np.mean(save["score"])))
| {"/classif_cosp_backward.py": ["/utils.py"], "/visu_piecharts_fselect.py": ["/utils.py"], "/classif_subcosp.py": ["/utils.py"], "/classif_all_bin_combinations.py": ["/utils.py"], "/visu_data_boxplot.py": ["/utils.py"], "/classif_cosp_multif.py": ["/utils.py"], "/EFS_fixed_elec.py": ["/utils.py"], "/visu_topomap.py": ["/utils.py"], "/ttest.py": ["/ttest_perm_indep.py"], "/classif_psd_bins.py": ["/utils.py"], "/permutations_EFS_fixed_elec.py": ["/utils.py"], "/classif_cov_testn153SWS.py": ["/utils.py"], "/visu_barplot_multifeature.py": ["/utils.py"], "/compute_psd.py": ["/utils.py"], "/compute_cosp.py": ["/utils.py"], "/classif_psd_multi_fixed_elec.py": ["/utils.py"], "/classif_cov_test_simplified.py": ["/utils.py"], "/compute_cov.py": ["/utils.py"], "/classif_cov.py": ["/utils.py"], "/classif_psd.py": ["/utils.py"], "/classif_psd_nremvsrem.py": ["/utils.py"], "/classif_SVM_STATE_ELEC.py": ["/utils.py"], "/classif_perm_subsamp.py": ["/utils.py"]} |
68,884 | arthurdehgan/sleep | refs/heads/master | /visu_barplot_cosp.py | """Generate barplot and saves it."""
from math import ceil
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
import numpy as np
from scipy.io import loadmat
from params import FREQ_DICT, STATE_LIST, SAVE_PATH, WINDOW, OVERLAP
FIG_PATH = SAVE_PATH.dirname() / "figures"
# NAME_COSP = "moy_cosp"
# NAME_COV = "moy_cov"
# NAME_COSP = "cosp"
# NAME_COV = "cov"
# PREFIX = "classif_"
NAME_COSP = "cosp"
NAME_COV = "cov"
PREFIX = "bootstrapped_subsamp_"
MOY = "moy" in NAME_COSP
SUBSAMP = "subsamp" in NAME_COSP.split("_")
COSP_PATH = SAVE_PATH / NAME_COSP / "results/"
COV_PATH = SAVE_PATH / NAME_COV / "results"
PERM = True
PVAL = 0.001
MINMAX = [40, 80]
Y_LABEL = "Decoding accuracies (%)"
COLORS = ["#C2C2C2"] + list(sns.color_palette("deep"))
WIDTH = .90
GRAPH_TITLE = "Riemannian classifications"
RESOLUTION = 300
def autolabel(ax, rects, thresh):
    """Attach a text label above each bar displaying its height.

    Bars above *thresh* are labelled in green, others in black; bars with
    zero height are left unlabelled.

    Parameters
    ----------
    ax : matplotlib Axes
        Axes to draw the labels on.
    rects : iterable
        Bar containers/patches as returned by ``ax.bar``.
    thresh : float
        Significance threshold used to pick the label color.

    Returns
    -------
    The axes, with the labels added.
    """
    for rect in rects:
        height = rect.get_height()
        width = rect.get_width()
        color = "green" if height > thresh else "black"
        if height != 0:
            ax.text(
                # Center the label horizontally on the bar and place it at
                # the bar's top. (The original used ``width + 1. * height``
                # for the y position, offsetting every label by the bar
                # width.)
                rect.get_x() + width / 2.,
                height,
                "%d" % int(height),
                ha="center",
                va="bottom",
                color=color,
                size=14,
            )
    return ax
# barplot parameters
def visualisation(pval):
    """Build and save the barplot of decoding accuracies per sleep state.

    One group of bars per state: the covariance score followed by one
    cospectrum score per frequency band, with bootstrap error bars and the
    permutation-test significance threshold drawn as a dashed line.

    Parameters
    ----------
    pval : float
        P-value threshold used in the output file name (the threshold
        lines themselves use the module-level PVAL).
    """
    scoring = "acc"
    labels = list(FREQ_DICT.keys())
    labels = ["Covariance"] + labels
    groups = STATE_LIST
    nb_labels = len(labels)
    dat, stds = [], []
    thresholds = []
    for state in groups:
        temp_std, temp, temp_thresh = [], [], []
        for lab in labels:
            if lab == "Covariance":
                file_name = COV_PATH / f"{PREFIX}{NAME_COV}_{state}.mat"
                perm_fname = COV_PATH / f"perm_{NAME_COV}_{state}.mat"
            else:
                file_name = (
                    COSP_PATH
                    / f"{PREFIX}{NAME_COSP}_{state}_{lab}_{WINDOW}_{OVERLAP:.2f}.mat"
                )
                perm_fname = (
                    COSP_PATH
                    / f"perm_{NAME_COSP}_{state}_{lab}_{WINDOW}_{OVERLAP:.2f}.mat"
                )
            try:
                data = loadmat(file_name)
                n_rep = int(data["n_rep"])
                data = np.asarray(data[scoring][0]) * 100
                n_cv = int(len(data) / n_rep)
                if PERM:
                    # Significance threshold: the (1 - PVAL) quantile of the
                    # permutation-score distribution.
                    data_perm = loadmat(perm_fname)
                    pscores = np.asarray(data_perm["acc_pscores"][0]) * 100
                    ind = int(PVAL * len(pscores))
                    threshold = sorted(pscores)[-ind]
            except IOError:
                print(file_name, "not found.")
            except KeyError:
                print(file_name, "key error")
            temp.append(np.mean(data))
            # Std across bootstrap repetitions of per-repetition mean scores.
            std_value = np.std(
                [np.mean(data[i * n_cv : (i + 1) * n_cv]) for i in range(n_rep)]
            )
            temp_std.append(std_value)
            if PERM:
                # Permutation scores may be stored as fractions; rescale.
                if threshold < 1:
                    threshold *= 100
                temp_thresh.append(threshold)
        dat.append(temp)
        stds.append(temp_std)
        if PERM:
            thresholds.append(temp_thresh)
    fig = plt.figure(figsize=(10, 5))  # size of the figure
    # Generating the barplot (do not change)
    ax = plt.axes()
    temp = 0
    offset = .4
    for group in range(len(groups)):
        bars = []
        # NOTE(review): with PERM False, `thresholds` is empty and this
        # indexing would raise -- confirm the intended non-PERM behavior.
        if not PERM:
            t = thresholds[group]
        data = dat[group]
        std_val = stds[group]
        for i, val in enumerate(data):
            if PERM:
                t = thresholds[group][i]
            pos = i + 1
            if i == 1:
                temp += offset  # offset for the first bar
            color = COLORS[i]
            bars.append(ax.bar(temp + pos, val, WIDTH, color=color, yerr=std_val[i]))
            # Dashed significance line spanning this bar.
            start = (
                (temp + pos * WIDTH) / 2 + 1 - WIDTH
                if pos == 1 and temp == 0
                else temp + pos - len(data) / (2 * len(data) + 1)
            )
            end = start + WIDTH
            ax.plot([start, end], [t, t], "k--", label="p < {}".format(PVAL))
            # ax = autolabel(ax, bars[i], t)
        temp += pos + 1
    ax.set_ylabel(Y_LABEL)
    ax.set_ylim(bottom=MINMAX[0], top=MINMAX[1])
    ax.set_title(GRAPH_TITLE)
    ax.set_xticklabels(groups)
    ax.set_xticks(
        [
            ceil(nb_labels / 2) + offset + i * (1 + offset + nb_labels)
            for i in range(len(groups))
        ]
    )
    # labels[-1] = labels[-1][:-1]
    labels = ["Covariance"] + [elem + " cospec" for elem in FREQ_DICT]
    # ax.legend(bars, labels, frameon=False)
    ax.legend(
        bars,
        labels,
        # loc="upper center",
        # bbox_to_anchor=(0.5, -0.05),
        fancybox=False,
        shadow=False,
        # ncol=len(labels),
    )
    file_name = PREFIX + f"{NAME_COSP}_{scoring}_{pval}_1000_0.png"
    print(FIG_PATH / file_name)
    save_path = str(FIG_PATH / file_name)
    fig.savefig(save_path, dpi=RESOLUTION)
    plt.close()
if __name__ == "__main__":
    # Build the barplot figure at the module-level p-value threshold.
    visualisation(PVAL)
| {"/classif_cosp_backward.py": ["/utils.py"], "/visu_piecharts_fselect.py": ["/utils.py"], "/classif_subcosp.py": ["/utils.py"], "/classif_all_bin_combinations.py": ["/utils.py"], "/visu_data_boxplot.py": ["/utils.py"], "/classif_cosp_multif.py": ["/utils.py"], "/EFS_fixed_elec.py": ["/utils.py"], "/visu_topomap.py": ["/utils.py"], "/ttest.py": ["/ttest_perm_indep.py"], "/classif_psd_bins.py": ["/utils.py"], "/permutations_EFS_fixed_elec.py": ["/utils.py"], "/classif_cov_testn153SWS.py": ["/utils.py"], "/visu_barplot_multifeature.py": ["/utils.py"], "/compute_psd.py": ["/utils.py"], "/compute_cosp.py": ["/utils.py"], "/classif_psd_multi_fixed_elec.py": ["/utils.py"], "/classif_cov_test_simplified.py": ["/utils.py"], "/compute_cov.py": ["/utils.py"], "/classif_cov.py": ["/utils.py"], "/classif_psd.py": ["/utils.py"], "/classif_psd_nremvsrem.py": ["/utils.py"], "/classif_SVM_STATE_ELEC.py": ["/utils.py"], "/classif_perm_subsamp.py": ["/utils.py"]} |
68,885 | arthurdehgan/sleep | refs/heads/master | /visu_fselect_topomap.py | """Generate topomaps"""
from mne.viz import plot_topomap
from scipy.io import loadmat
import numpy as np
import matplotlib.pyplot as plt
from params import SAVE_PATH, STATE_LIST, CHANNEL_NAMES
plt.switch_backend("agg")
DATA_PATH = SAVE_PATH / "psd"
TTEST_RESULTS_PATH = DATA_PATH / "results"
RESULTS_PATH = DATA_PATH / "results/"
POS_FILE = SAVE_PATH / "../Coord_EEG_1020.mat"
SENSORS_POS = loadmat(POS_FILE)["Cor"]
# FREQS = ['Delta', 'Theta', 'Alpha', 'Sigma', 'Beta', 'Gamma1', 'Gamma2']
FREQS = ["Delta", "Theta", "Alpha", "Sigma", "Beta", "Gamma1"]
WINDOW = 1000
OVERLAP = 0
PVAL = .01
# One topomap of EFS decoding accuracies per (stage, frequency), with
# permutation-significant electrodes starred.
for stage in STATE_LIST:
    for freq in FREQS:
        efs_scores, pvalues = [], []
        og_scores = []
        for elec in CHANNEL_NAMES:
            # EFS (feature-selection) decoding score for this electrode.
            file_name = "EFS_NoGamma_{}_{}_1000_0.00.mat".format(stage, elec)
            try:
                score = loadmat(RESULTS_PATH / file_name)["score"].ravel()
            except TypeError:
                print(file_name)
            except KeyError:
                print("wrong key")
            # NOTE(review): if the load above failed, `score` is stale or
            # undefined here -- this append silently reuses the old value.
            efs_scores.append(score[0] * 100)
            # Single-electrode permutation-tested decoding score.
            file_name = "perm_PSD_{}_{}_{}_{}_{:.2f}.mat".format(
                stage, freq, elec, WINDOW, OVERLAP
            )
            try:
                score = loadmat(RESULTS_PATH / file_name)["score"].ravel()
            except TypeError:
                print(file_name)
            og_scores.append(score[0] * 100)
            pvalue = loadmat(RESULTS_PATH / file_name)["pvalue"].ravel()
            pvalues.append(pvalue[0])
        EFS_DA = np.asarray(efs_scores)
        OG_DA = np.asarray(og_scores)
        da_pvalues = np.asarray(pvalues)
        efs_mask = np.full((len(CHANNEL_NAMES)), False, dtype=bool)
        # if freq == 'Delta':
        # Star the electrodes whose decoding is significant at PVAL.
        da_mask = np.full((len(CHANNEL_NAMES)), False, dtype=bool)
        da_mask[da_pvalues <= PVAL] = True
        mask_params = dict(
            marker="*", markerfacecolor="white", markersize=9, markeredgecolor="white"
        )
        # data = [{'name': 'Decoding accuracies', 'cmap': 'viridis',
        #          'mask': da_mask, 'cbarlim': [50, 65], 'data': OG_DA}]
        data = [
            {
                "name": "EFS decoding accuracies",
                "cmap": "viridis",
                "mask": da_mask,
                "cbarlim": [50, 65],
                "data": EFS_DA,
            }
        ]
        for i, subset in enumerate(data):
            plt.subplot(1, len(data), i + 1)
            ch_show = True
            ax, _ = plot_topomap(
                subset["data"],
                SENSORS_POS,
                res=128,
                cmap=subset["cmap"],
                show=False,
                vmin=subset["cbarlim"][0],
                vmax=subset["cbarlim"][1],
                names=CHANNEL_NAMES,
                show_names=ch_show,
                mask=subset["mask"],
                mask_params=mask_params,
                contours=0,
            )
            plt.colorbar(ax, shrink=.45)
        # file_name = 'topomap_{}_mean_scores_EFS_{}'.format(stage, freq)
        # NOTE(review): the file name below ignores `freq`, so each frequency
        # iteration overwrites the same figure -- confirm which name is wanted.
        file_name = "topomap_mean_scores_EFS_{}".format(stage)
        plt.savefig(SAVE_PATH / "../figures" / file_name, dpi=200)
        plt.close()
        del ax, data
| {"/classif_cosp_backward.py": ["/utils.py"], "/visu_piecharts_fselect.py": ["/utils.py"], "/classif_subcosp.py": ["/utils.py"], "/classif_all_bin_combinations.py": ["/utils.py"], "/visu_data_boxplot.py": ["/utils.py"], "/classif_cosp_multif.py": ["/utils.py"], "/EFS_fixed_elec.py": ["/utils.py"], "/visu_topomap.py": ["/utils.py"], "/ttest.py": ["/ttest_perm_indep.py"], "/classif_psd_bins.py": ["/utils.py"], "/permutations_EFS_fixed_elec.py": ["/utils.py"], "/classif_cov_testn153SWS.py": ["/utils.py"], "/visu_barplot_multifeature.py": ["/utils.py"], "/compute_psd.py": ["/utils.py"], "/compute_cosp.py": ["/utils.py"], "/classif_psd_multi_fixed_elec.py": ["/utils.py"], "/classif_cov_test_simplified.py": ["/utils.py"], "/compute_cov.py": ["/utils.py"], "/classif_cov.py": ["/utils.py"], "/classif_psd.py": ["/utils.py"], "/classif_psd_nremvsrem.py": ["/utils.py"], "/classif_SVM_STATE_ELEC.py": ["/utils.py"], "/classif_perm_subsamp.py": ["/utils.py"]} |
68,886 | arthurdehgan/sleep | refs/heads/master | /classif_perm_subsamp.py | """Uses a classifier to decode PSD values.
Computes pvalues and saves them in a mat format with the decoding accuracies.
Author: Arthur Dehgan
"""
from itertools import product
import pandas as pd
import numpy as np
from scipy.io import savemat, loadmat
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
from joblib import delayed, Parallel
from utils import (
StratifiedLeave2GroupsOut,
prepare_data,
classification,
proper_loadmat,
)
from params import SAVE_PATH, CHANNEL_NAMES, WINDOW, OVERLAP, STATE_LIST, FREQ_DICT
NAME = "psd"
PREFIX = "perm_subsamp_"
SOLVER = "svd" # 'svd' 'lsqr'
PREF_LIST = PREFIX.split("_")
SUBSAMPLE = "subsamp" in PREF_LIST
PERM = "perm" in PREF_LIST
N_PERM = 999 if PERM else None
SAVE_PATH /= NAME
def classif_psd(state, elec, freq, n_jobs=-1):
    """Classify PSD values for one (sleep state, electrode, frequency) triple.

    Loads the matching PSD ``.mat`` file, balances trial counts across
    subjects, runs a stratified leave-2-subjects-out LDA classification
    (optionally with a permutation test) and saves the result to disk.
    """
    info_data = pd.read_csv(SAVE_PATH.parent / "info_data.csv")[STATE_LIST]
    # Balance the design: every subject contributes the same trial count.
    n_trials = info_data.min().min()
    n_subs = len(info_data) - 1
    n_total = n_trials * n_subs
    # One group id per subject, repeated once per trial.
    groups = []
    for sub in range(n_subs):
        groups.extend([sub] * n_trials)
    # First half of the trials is class 0, second half class 1.
    labels = [int(i >= n_total / 2) for i in range(n_total)]
    print(state, elec, freq)
    data_file_name = NAME + "_{}_{}_{}_{}_{:.2f}.mat".format(
        state, freq, elec, WINDOW, OVERLAP
    )
    save_file_name = PREFIX + data_file_name
    data_file_path = SAVE_PATH / data_file_name
    save_file_path = SAVE_PATH / "results" / save_file_name
    raw = loadmat(data_file_path)
    prepared = prepare_data(raw, n_trials=n_trials, random_state=666)
    features = np.array(prepared).reshape(len(prepared), 1)
    cross_val = StratifiedLeave2GroupsOut()
    classifier = LDA(solver=SOLVER)
    results = classification(
        classifier, cross_val, features, labels, groups, N_PERM, n_jobs=n_jobs
    )
    savemat(save_file_path, results)
if __name__ == "__main__":
    # Fan out one classification job per (state, electrode, frequency) triple.
    # NOTE(review): both the outer Parallel and classif_psd request n_jobs=-1;
    # joblib serialises nested parallelism, so the inner level effectively runs
    # sequentially — confirm this is intended.
    Parallel(n_jobs=-1)(
        delayed(classif_psd)(st, el, fr, n_jobs=-1)
        for st, el, fr in product(STATE_LIST, CHANNEL_NAMES, list(FREQ_DICT.keys()))
    )
| {"/classif_cosp_backward.py": ["/utils.py"], "/visu_piecharts_fselect.py": ["/utils.py"], "/classif_subcosp.py": ["/utils.py"], "/classif_all_bin_combinations.py": ["/utils.py"], "/visu_data_boxplot.py": ["/utils.py"], "/classif_cosp_multif.py": ["/utils.py"], "/EFS_fixed_elec.py": ["/utils.py"], "/visu_topomap.py": ["/utils.py"], "/ttest.py": ["/ttest_perm_indep.py"], "/classif_psd_bins.py": ["/utils.py"], "/permutations_EFS_fixed_elec.py": ["/utils.py"], "/classif_cov_testn153SWS.py": ["/utils.py"], "/visu_barplot_multifeature.py": ["/utils.py"], "/compute_psd.py": ["/utils.py"], "/compute_cosp.py": ["/utils.py"], "/classif_psd_multi_fixed_elec.py": ["/utils.py"], "/classif_cov_test_simplified.py": ["/utils.py"], "/compute_cov.py": ["/utils.py"], "/classif_cov.py": ["/utils.py"], "/classif_psd.py": ["/utils.py"], "/classif_psd_nremvsrem.py": ["/utils.py"], "/classif_SVM_STATE_ELEC.py": ["/utils.py"], "/classif_perm_subsamp.py": ["/utils.py"]} |
68,907 | Paulfuther/Work-Website | refs/heads/master | /flaskblog/routes.py | from flask import Flask, render_template, jsonify, request, send_file, url_for, redirect, flash, abort
from flaskblog import app, db, Bcrypt
from flaskblog.forms import EmployeeForm, LoginForm, PostForm, RegistrationForm, UpdateAccountForm, EmployeeUpdateForm, whmisForm, ppeForm, fireextinguishersForm, emergencyproceduresForm, firstaidForm, foodhandlingForm, propaneForm, healthandsafetyForm, fuelpumpshutoffForm, workingaloneForm, workplaceviolenceForm, jointhealthandsafetyForm, giantform
from flaskblog.models import User, Post, Employee, whmis, ppe, fireextinguishers, emergencyresponseprocedures,firstaid, foodhandling,propane,healthandsafety,fuelpumpshutoff,workingalone,workplaceviolence,jointhealthandsafety
from io import BytesIO
import os
from werkzeug.utils import secure_filename
import pandas as pd
import numpy
import openpyxl
import xlrd
import xlwt
import xlsxwriter
from flaskblog import datetime
from flaskblog import MySQL
from flaskblog import bcrypt
from flask_login import login_user, current_user, logout_user, login_required
import secrets
from PIL import Image
import re
import mysql
from sqlalchemy.sql import text, select
from sqlalchemy import *
from sqlalchemy import extract
from wtforms.validators import DataRequired, Length, Email, EqualTo, ValidationError
# Store number used by the dashboard/chart JSON routes below.
chartstore = 48314
# NOTE(review): DB credentials are hard-coded here; load them from
# config/environment (see flaskblog.config) instead.
engine = create_engine('mysql://root:root@localhost/work')
@app.route("/")
@app.route("/home")
def home():
    """Render the public landing page."""
    return render_template('home.html')
@app.route("/hrhome")
def hrhome():
    """Render the HR landing page."""
    return render_template('hrhome.html')
@app.route("/ert")
def ert():
    """Render the Emergency Response Team page."""
    return render_template('ERT.html')
@app.route("/hrfile<int:staff_id>")
def hrfile(staff_id):
    """Show the HR file page for one employee.

    NOTE(review): Employee.query.get returns None for an unknown id;
    get_or_404 would be safer — confirm the template handles gsa=None.
    """
    gsa = Employee.query.get(staff_id)
    return render_template('hrfile.html', gsa=gsa)
@app.route("/hrlist", methods =['GET', 'POST'])
def hrlist():
    """Render the (initially empty) employee list page; /search fills it."""
    return render_template('hrlist.html')
@app.route("/search", methods=['GET', 'POST'])
def search():
    """Filter the HR list by store number.

    The posted ``search_string`` is either a store number or the literal
    ``"all"`` to list every employee.  Results are ordered by store.
    """
    search_value = request.form['search_string']
    if search_value == "all":
        gsa = Employee.query.order_by(Employee.store).all()
    else:
        gsa = (
            Employee.query.filter_by(store=search_value)
            .order_by(Employee.store)
            .all()
        )
    return render_template('hrlist.html', gsa=gsa)
def save_hrpicture(form_hrpicture):
    """Save an uploaded employee picture as two resized copies.

    Writes a 150x150 thumbnail to ``static/empfiles/thumb`` and a 250x250
    copy to ``static/empfiles/mobile``, both under a random hex filename
    (original extension preserved) to avoid collisions.

    :param form_hrpicture: the uploaded ``FileStorage`` from the form.
    :return: the generated filename (stored on the Employee row).
    """
    random_hex = secrets.token_hex(8)
    _, f_ext = os.path.splitext(form_hrpicture.filename)
    hrpicture_fn = random_hex + f_ext
    # 150x150 thumbnail copy.
    picture_paththumb = os.path.join(
        app.root_path, 'static/empfiles/thumb', hrpicture_fn)
    i = Image.open(form_hrpicture)
    i.thumbnail((150, 150), Image.LANCZOS)
    i.save(picture_paththumb)
    # 250x250 "mobile" copy.
    picture_pathmobile = os.path.join(
        app.root_path, 'static/empfiles/mobile', hrpicture_fn)
    i2 = Image.open(form_hrpicture)
    i2.thumbnail((250, 250), Image.LANCZOS)
    i2.save(picture_pathmobile)
    return hrpicture_fn
@app.route("/updategsa<int:staff_id>", methods=['GET', 'POST'])
def updategsa(staff_id):
    """Display and process the update form for one employee record.

    Loads the Employee row for ``staff_id``, pre-populates the form from it,
    and enforces uniqueness of mobile phone, SIN, email, postal code,
    training id and training password before saving.  The submit button
    persists changes (optionally with a new picture); the delete button
    removes the employee.
    """
    # Fetch the existing row and pre-populate the form with it; comparing
    # form.<field>.data against gsa.<field> tells us whether a value changed.
    gsa = Employee.query.get(staff_id)
    form = EmployeeUpdateForm(obj=gsa)
    image_file = url_for(
        'static', filename='empfiles/mobile/' + gsa.image_file)
    # Current (pre-edit) values, used to skip uniqueness checks when unchanged.
    gsaphone = gsa.mobilephone
    gsasin = gsa.SIN
    gsaemail = gsa.email
    gsapostal = gsa.postal
    gsatrainingid = gsa.trainingid
    gsatrainingpassword = gsa.trainingpassword
    gsaiprism = gsa.iprismcode
    # Newly submitted values.  NOTE(review): SIN is cast to int here, so the
    # form field is assumed to always hold a numeric string — confirm.
    phone = form.mobilephone.data
    sin = int(form.SIN.data)
    postal = form.postal.data
    trainingid = form.trainingid.data
    trainingpassword = form.trainingpassword.data
    # Look up any row already using each submitted value.
    emp = Employee.query.filter_by(mobilephone=text(phone)).first()
    emailcheck = Employee.query.filter_by(email=form.email.data).first()
    sincheck = Employee.query.filter_by(SIN=sin).first()
    postalcheck = Employee.query.filter_by(postal=postal).first()
    trainingidcheck = Employee.query.filter_by(trainingid=trainingid).first()
    trainingpasswordcheck = Employee.query.filter_by(trainingpassword=trainingpassword).first()
    # For each field: unchanged values are fine; changed values must not
    # collide with an existing row, otherwise re-render with a flash message.
    if gsaphone == phone:
        print("same mobile")
    else:
        if emp:
            flash("mobile already used")
            return render_template('employeeupdate.html', form=form, gsa=gsa)
    if gsasin == sin:
        print("same sin")
    else:
        if sincheck:
            flash("sin already used")
            return render_template('employeeupdate.html', form=form, gsa=gsa)
    if gsa.email == form.email.data:
        print("same email")
    else:
        if emailcheck:
            flash("email already used")
            return render_template('employeeupdate.html', form=form, gsa=gsa)
    if gsa.postal == form.postal.data:
        print("same postal code")
    else:
        if postalcheck:
            flash("postal already exists")
            return render_template('employeeupdate.html', form=form, gsa=gsa)
    if gsa.trainingid == form.trainingid.data:
        print("same user id ")
    else:
        if trainingidcheck:
            flash("user id already exists")
            return render_template('employeeupdate.html', form=form, gsa=gsa)
    if gsa.trainingpassword == form.trainingpassword.data:
        print("same training password")
    else:
        if trainingpasswordcheck:
            flash("training password already exists")
            return render_template('employeeupdate.html', form=form, gsa=gsa)
    # All uniqueness checks passed: either save the edits or delete the row.
    if form.validate_on_submit():
        if form.submit.data:
            form.populate_obj(gsa)
            if form.hrpicture.data:
                picture_file = save_hrpicture(form.hrpicture.data)
                gsa.image_file = picture_file
            db.session.commit()
            flash("info updated")
            return render_template('hrhome.html')
        elif form.delete.data:
            Employee.query.filter_by(id=staff_id).delete()
            db.session.commit()
    return render_template('employeeupdate.html', image_file=image_file, form=form,gsa=gsa)
@app.route("/hr", methods=['GET', 'POST'])
def hr():
    """Create a new employee plus one row in each of the 12 training tables.

    On a valid POST the employee record is committed first (so its id can be
    used as the foreign key), an optional picture is saved, and then one
    training record per safety-training topic is added from the matching
    sub-form of ``giantform``.
    """
    form = giantform()
    if form.validate_on_submit():
        picture_file = None
        if form.about_you.hrpicture.data:
            # BUGFIX: originally this called save_hrpicture(form.about_you.picture.data)
            # — the upload field is ``hrpicture`` (checked on the line above) —
            # and the returned filename was discarded.
            picture_file = save_hrpicture(form.about_you.hrpicture.data)
        emp = Employee(firstname=form.about_you.firstname.data,
                       nickname=form.about_you.nickname.data,
                       store=form.about_you.store.data,
                       addressone=form.about_you.addressone.data,
                       addresstwo=form.about_you.addresstwo.data,
                       apt=form.about_you.apt.data,
                       city=form.about_you.city.data,
                       province=form.about_you.province.data,
                       country=form.about_you.country.data,
                       email=form.about_you.email.data,
                       mobilephone=form.about_you.mobilephone.data,
                       SIN=form.about_you.SIN.data,
                       Startdate=form.about_you.Startdate.data,
                       Enddate=form.about_you.Enddate.data,
                       lastname=form.about_you.lastname.data,
                       postal=form.about_you.postal.data,
                       trainingid=form.about_you.trainingid.data,
                       trainingpassword=form.about_you.trainingpassword.data,
                       manager=form.about_you.manager.data,
                       active=form.about_you.active.data,
                       iprismcode=form.about_you.iprismcode.data)
        if picture_file:
            # Employee.image_file is read by updategsa()/hrfile templates.
            emp.image_file = picture_file
        db.session.add(emp)
        # Commit first so emp.id exists for the training foreign keys.
        db.session.commit()
        # (model class, sub-form) pairs — one training record per topic.
        # The sub-form numbering is historical and intentionally uneven
        # (training12 belongs to fuel pump shutoff, etc.).
        training_specs = [
            (whmis, form.training),
            (ppe, form.training2),
            (fireextinguishers, form.training3),
            (emergencyresponseprocedures, form.training4),
            (firstaid, form.training5),
            (foodhandling, form.training6),
            (propane, form.training7),
            (healthandsafety, form.training8),
            (fuelpumpshutoff, form.training12),
            (workingalone, form.training9),
            (workplaceviolence, form.training10),
            (jointhealthandsafety, form.training11),
        ]
        for model_cls, subform in training_specs:
            db.session.add(model_cls(startdate=subform.startdate.data,
                                     completed=subform.completeddate.data,
                                     datequalified=subform.datequalified.data,
                                     expireydate=subform.expirationdate.data,
                                     compliant=subform.compliant.data,
                                     employee_id=emp.id))
        db.session.commit()
        flash('Employee has been added to the database', 'success')
        return redirect(url_for('hrhome'))
    # Surface validation errors in the console for debugging.
    print(form.errors.items())
    return render_template('employee.html', title='Employee Information', form=form)
@app.route("/blog")
def blog():
    """Paginated list of blog posts, newest first (3 per page)."""
    page = request.args.get('page', 1, type=int)
    posts = Post.query.order_by(
        Post.date_posted.desc()).paginate(page=page, per_page=3)
    return render_template('blog.html', posts=posts, title='Blog')
@app.route("/applications")
def Applications():
    """Landing page listing the file-converter tools.

    NOTE(review): PascalCase name gives endpoint 'Applications'; renaming
    would break any url_for('Applications') callers, so it is kept.
    """
    return render_template('applications.html', title='Applications')
@app.route("/kpiconvert")
def Kpiconvert():
    """Upload page for the growth-KPI workbook converter (/upload)."""
    return render_template('kpiconvert.html', title='KPI Converter')
@app.route("/carwashkpiconvert")
def CarwashKPIconvert():
    """Upload page for the car-wash KPI converter (/carwashkpiupload)."""
    return render_template('carwashkpiconvert.html', title='Carwash KPI Converter')
@app.route("/tpfileconvert")
def TPFileconvert():
    """Upload page for the team-performance converter (/tpfileupload)."""
    return render_template('teamperformanceconvert.html', title='Team Performance File Converter')
@app.route("/tpfileupload", methods=['POST'])
def tpfileupload():
    """Merge uploaded Team Performance workbooks into one tidy Excel download.

    Each workbook embeds the report date (sheet row 8) and the store number
    (row 6); per-GSA measures start at row 15.  All files are concatenated
    into one frame and streamed back as ``sfoutput.xlsx``.
    """
    if request.method == "POST":
        files = request.files.getlist('tpfileinputFile[]')
        frames = []
        for file in files:
            header_sheet = pd.read_excel(file)
            tp_date = header_sheet.iat[8, 0]
            tp_store = header_sheet.iat[6, 0]
            # Date cell looks like "... mm/dd/yyyy ..." — second token is the date;
            # store cell's third token starts with the 5-digit store number.
            _, date_token, _, _ = tp_date.split()
            _, _, store_token = tp_store.split()
            tp_storefinal = store_token[:5]
            month_label = datetime.strptime(date_token, "%m/%d/%Y").strftime("%b-%Y")
            df = pd.read_excel(file, skiprows=14)
            # Drop unused measure columns by position.
            dropcols = [2, 3, 6, 7, 8, 13, 14]
            df.drop(df.columns[dropcols], axis=1, inplace=True)
            df = df.rename(columns={'Performance Measure': 'one'})
            df.set_index('one', inplace=True)
            df = df.T
            df['Gsa'] = df.index
            df.reset_index(drop=True, inplace=True)
            # GSA names sit one row above their measures after the transpose.
            df.Gsa = df.Gsa.shift(1)
            df['Store'] = tp_storefinal
            df['date'] = month_label
            df['date'] = pd.to_datetime(df['date'], format="%b-%Y")
            df['date'] = df['date'].dt.date
            # Rows without a shift count are header/summary rows.
            df.dropna(subset=['Shift Count'], how='all', inplace=True)
            df = df[['date', 'Store', 'Gsa', 'Shift Count', 'Average Check',
                     '2 Pack Ratio', 'Season Pass', 'Wash & Go', 'In-Store Premium Ratio',
                     'Crind Ratio', 'Campaign Deals Total', 'Campaign Deals to In-Store Transaction Ratio',
                     'Campaign Deals by Confectionery', 'Campaign Deals by Salty Snacks',
                     'Campaign Deals by Alternative Beverages', 'Campaign Deals by Packaged Soft Drinks',
                     'Hot Beverages', 'FSR Redemptions', '$1 Snack Redemptions']]
            frames.append(df)
        merged = pd.concat(frames)
        output = BytesIO()
        writer = pd.ExcelWriter(output, engine='xlsxwriter')
        merged.to_excel(writer)
        writer.save()
        output.seek(0)
        return send_file(output, attachment_filename="sfoutput.xlsx", as_attachment=True)
@app.route("/securityfileconvert")
def SecurityFileconvert():
    """Upload page for the security-log converter (/securityfileupload)."""
    return render_template('securityfileconvert.html', title='Security File Converter')
@app.route("/securityfileupload", methods=['POST'])
def securityfileupload():
    """Extract 'Pump' events between 05:15 and 11:45 from uploaded security
    logs and return them as one Excel download.

    The store number is parsed from the uploaded filename; date and time are
    pulled out of each raw log line with regular expressions.
    """
    if request.method == "POST":
        start = datetime.strptime('05:15:00', '%H:%M:%S').time()
        end = datetime.strptime('11:45:00', '%H:%M:%S').time()
        files = request.files.getlist('securityfileinputFile[]')
        frames = []
        for file in files:
            # First run of digits in the filename is the store number.
            # (raw string fixes the invalid '\d' escape of the original)
            store = re.search(r'\d+', str(file)).group()
            df = pd.read_csv(file, sep='\t', header=None)
            df.columns = ['Text']
            # Pull "<day> <Month> <yy>" out of each raw log line.
            df['Date'] = df['Text'].str.extract(
                r"([\d]{1,2} [ADJFMNOS]\w* [\d]{2})").copy()
            pump_rows = df[df['Text'].str.contains('Pump', na=False)].copy()
            if pump_rows.empty:
                continue
            pump_rows['Store'] = store
            frames.append(pump_rows)
        newdf = pd.concat(frames)
        newdf['Date'] = pd.to_datetime(newdf['Date'], dayfirst=True)
        newdf['Time'] = newdf['Text'].str.extract(r"([\d]{1,2}\:[\d]{1,2}\:[\d]{1,2})")
        newdf['Time'] = pd.to_datetime(newdf['Time'], format='%H:%M:%S').dt.time
        # Keep only events inside the morning window.
        newdf = newdf[newdf['Time'].between(start, end)]
        newdf.set_index('Date', inplace=True)
        output = BytesIO()
        writer = pd.ExcelWriter(output, engine='xlsxwriter')
        newdf.to_excel(writer)
        writer.save()
        output.seek(0)
        return send_file(output, attachment_filename="sfoutput.xlsx", as_attachment=True)
@app.route("/securityfilenegconvert")
def SecurityFilenegconvert():
    """Upload page for the negative-sales log converter (/securityfilenegupload)."""
    return render_template('securityfilenegconvert.html', title='Security File Negative Sales Converter')
@app.route("/securityfilenegupload", methods=['POST'])
def securityfilenegupload():
    """Extract NEGATIVE-sale events from uploaded security logs and return
    them as one Excel download.

    The store number is parsed from the uploaded filename; date and time are
    pulled out of each raw log line with regular expressions.
    """
    if request.method == "POST":
        files = request.files.getlist('securityfileneginputFile[]')
        frames = []
        for file in files:
            # First run of digits in the filename is the store number.
            # (raw string fixes the invalid '\d' escape of the original)
            store = re.search(r'\d+', str(file)).group()
            df = pd.read_csv(file, sep='\t', header=None)
            df.columns = ['Text']
            # "dd Mon yy" sits at the start of each log line.
            df['Date'] = df['Text'].str.extract('(.. ... ..)', expand=False).copy()
            neg_rows = df[df['Text'].str.contains('NEGATIVE', na=False)].copy()
            if neg_rows.empty:
                continue
            neg_rows['Store'] = store
            frames.append(neg_rows)
        newdf = pd.concat(frames)
        newdf['Date'] = pd.to_datetime(
            newdf['Date'], dayfirst=True)
        newdf['Time'] = newdf['Text'].str.extract(r"([\d]{1,2}\:[\d]{1,2}\:[\d]{1,2})")
        newdf['Time'] = pd.to_datetime(newdf['Time'], format='%H:%M:%S').dt.time
        newdf.set_index('Date', inplace=True)
        output = BytesIO()
        writer = pd.ExcelWriter(output, engine='xlsxwriter')
        newdf.to_excel(writer)
        writer.save()
        output.seek(0)
        return send_file(output, attachment_filename="sfoutput.xlsx", as_attachment=True)
@app.route("/carwashkpiupload", methods=['POST'])
def carwashkpiupload():
    """Convert a multi-sheet car-wash KPI workbook into one tidy Excel sheet.

    The header row holds the current and previous reporting months as
    "YYYY/Mon"; every sheet except the last is one store, named "<x>_<store>".
    Fixed row offsets below slice each sheet into labelled KPI sections, so
    this view depends on the exact layout of the source workbook.
    """
    if request.method == "POST":
        file = request.files['cwinputFile']
        print(file)
        filename = secure_filename(file.filename)
        excel_file = file
        # Read just the header area to discover the two month columns.
        df = pd.read_excel(excel_file, skiprows=9, usecols=(3, 4, 5))
        columnheaders = (df.columns.tolist())
        current_cwdate = (columnheaders[1])
        x = datetime.strptime(current_cwdate, "%Y/%b").strftime("%b-%Y")
        previous_cwdate = (columnheaders[2])
        px = datetime.strptime(previous_cwdate, "%Y/%b").strftime("%b-%Y")
        #get list of sheets; the last tab is a summary and is skipped
        xls = pd.ExcelFile(excel_file)
        res = len(xls.sheet_names)
        nres = res-1
        tabs = (xls.sheet_names)
        newtabs = (tabs[0:-1])
        dffinal2 = []
        for line in newtabs:
            #first half of spreadsheet (absolute-amount columns 4 and 5)
            # NOTE(review): local name ``type`` shadows the builtin.
            type = line.split("_")[1]
            df = pd.read_excel(excel_file, sheet_name=line,
                               skiprows=9, usecols=(3, 4, 5))
            df.columns = ['a', x, px]
            df['store'] = type
            df['Date'] = x
            # Fixed row-offset sections of the per-store sheet.
            df1 = df.iloc[1:3].copy()
            df1['label'] = 'revenue'
            df2 = df.iloc[4:14].copy()
            df2['label'] = 'expense'
            df3 = df.iloc[17:26].copy()
            df3['label'] = 'operation performnce'
            df4 = df.iloc[30:37].copy()
            df4['label'] = 'sales performance'
            df5 = df.iloc[40:45].copy()
            df5['label'] = 'paid units %'
            df6 = df.iloc[46:52].copy()
            df6['label'] = 'paid units Instore and Crind'
            df7 = df.iloc[54:68].copy()
            df7['label'] = 'total units'
            dfpartone = pd.concat([df1, df2, df3, df4, df5, df6, df7])
            #second half of spreadsheet (per-car columns 8 and 9)
            dftwo = pd.read_excel(
                excel_file, sheet_name=line, skiprows=9, usecols=(3, 8, 9))
            dftwo.columns = ['a', x, px]
            dftwo['store'] = type
            dftwo['Date'] = x
            df8 = dftwo.iloc[1:3].copy()
            df8['label'] = 'revenue per car'
            df9 = dftwo.iloc[4:14].copy()
            df9['label'] = 'expense per car'
            df10 = dftwo.iloc[40:45].copy()
            df10['label'] = '% fullfillment per car'
            df11 = dftwo.iloc[46:52].copy()
            df11['label'] = 'paid fullfillments'
            df12 = dftwo.iloc[54:68].copy()
            df12['label'] = 'total fullfillments'
            dfparttwo = pd.concat([df8, df9, df10, df11, df12])
            dffinal = pd.concat([dfpartone, dfparttwo])
            #final table of data for this store
            dffinal2.append(dffinal)
        #reorganise columns
        dffinal2 = pd.concat(dffinal2)
        dffinal2.columns = ['Item', 'Amount', px,
                            'Store', 'Date', 'Classification']
        dffinal2 = dffinal2[['Date', 'Store',
                             'Classification', 'Item', 'Amount', px]]
        dffinal2['Amount'] = pd.to_numeric(
            dffinal2['Amount'], errors='coerce')
        dffinal2['Date'] = pd.to_datetime(dffinal2['Date'], format='%b-%Y')
        dffinal2['Date'] = dffinal2['Date'].dt.date
        # Stream the combined table back as an Excel download.
        output = BytesIO()
        writer = pd.ExcelWriter(output, engine='xlsxwriter')
        dffinal2.to_excel(writer)
        writer.save()
        output.seek(0)
        return send_file(output, attachment_filename="cwoutput.xlsx", as_attachment=True)
    # NOTE(review): unreachable in practice — the route only accepts POST.
    return render_template("applications.html")
@app.route("/upload", methods=['POST'])
def upload():
    """Convert a multi-tab growth-KPI workbook into one tidy Excel download.

    Each tab is one store (first 5 chars of the tab name); the current
    reporting month sits in the third column header as "YYYY-MM".
    """
    if request.method == "POST":
        file = request.files['inputFile']
        print(file)
        filename = secure_filename(file.filename)

        def convert_amount(val):
            """
            Convert the string number value to a float
            - Remove $
            - Remove commas
            - Convert to float type
            """
            new_val = val.replace(',', '').replace(
                '%', '').replace('/0', '')
            return pd.to_numeric(new_val)
        excel_file = file
        df = pd.read_excel(excel_file, header=3)
        xls = pd.ExcelFile(excel_file)
        res = len(xls.sheet_names)
        tabs = (xls.sheet_names)
        newtabs = (tabs)
        columnheaders = (df.columns.tolist())
        kpidate = (columnheaders[2])
        current_kpidate = datetime.strptime(
            kpidate, "%Y-%m").strftime("%b-%Y")
        newkpi = []
        finalkpi = []
        for x in newtabs:
            # Tab name starts with the 5-digit store number.
            type = x[:5]
            data = pd.read_excel(
                excel_file, sheet_name=x, skiprows=3, usecols=range(8))
            data['store'] = type
            data['Date'] = current_kpidate
            finalkpi.append(data)
        finalkpi = pd.concat(finalkpi)
        #name columns
        finalkpi.columns = ['Category1', 'Category2', kpidate, 'Value2', 'value3','value4','value5','Rolling','Store','Date']
        #reorder columns
        finalkpi = finalkpi[['Date', 'Store', 'Category1', 'Category2',kpidate,'Value2','value3','value4','value5','Rolling']]
        """combine two columns
        """
        finalkpi['Category'] = finalkpi.Category2.combine_first(
            finalkpi.Category1)
        finalkpi = finalkpi[['Date', 'Store', 'Category', kpidate,'Value2','value3','value4','value5','Rolling']]
        finalkpi['Date'] = pd.to_datetime((finalkpi['Date']), format='%b-%Y')
        # Normalise every numeric column from formatted strings to numbers.
        finalkpi[kpidate] = finalkpi[kpidate].apply(convert_amount)
        finalkpi['Value2'] = finalkpi['Value2'].apply(convert_amount)
        finalkpi['value3'] = finalkpi['value3'].apply(convert_amount)
        finalkpi['value4'] = finalkpi['value4'].apply(convert_amount)
        finalkpi['value5'] = finalkpi['value5'].apply(convert_amount)
        finalkpi['Rolling'] = finalkpi['Rolling'].apply(convert_amount)
        #create output stream and send the merged workbook back
        output = BytesIO()
        writer = pd.ExcelWriter(output, engine='xlsxwriter')
        finalkpi.to_excel(writer)
        writer.save()
        output.seek(0)
        return send_file(output, attachment_filename="output.xlsx", as_attachment=True)
    # NOTE(review): unreachable for POST-only route, and ``finalkpi`` would be
    # undefined here — dead-code candidates.
    print(finalkpi)
    return render_template("applications.html")
@app.route("/register", methods=['GET', 'POST'])
def register():
    """Create a new user account; authenticated users are sent home."""
    if current_user.is_authenticated:
        return redirect(url_for('home'))
    form = RegistrationForm()
    if form.validate_on_submit():
        # Store only the bcrypt hash, never the plain-text password.
        hashed_password = bcrypt.generate_password_hash(form.password.data).decode('utf-8')
        user = User(username=form.username.data,email=form.email.data, password=hashed_password)
        db.session.add(user)
        db.session.commit()
        flash('Your Account Has Been Created. You Can Now Login', 'success')
        return redirect(url_for('login'))
    return render_template('register.html', title='Register', form=form)
@app.route("/login", methods=['GET', 'POST'])
def login():
    """Authenticate a user and redirect to the requested page or home."""
    if current_user.is_authenticated:
        return redirect(url_for('home'))
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        if user and bcrypt.check_password_hash(user.password, form.password.data):
            login_user(user, remember=form.remember.data)
            # NOTE(review): ``next`` comes from the query string unvalidated —
            # open-redirect risk; verify it is a relative URL before redirecting.
            next_page = request.args.get('next')
            return redirect(next_page) if next_page else redirect(url_for('home'))
        else:
            flash('login unsuccessfull. Please check email and password', 'danger')
    return render_template('login.html', title='Login', form=form)
# JSON feed routes below query the MySQL 'work' database via SQLAlchemy core.
@app.route("/cstoresales")
def data():
    """JSON feed of C-Store sales for store 48314 since April 2017."""
    metadata = MetaData(engine)
    table = Table('growthkpi', metadata, autoload=True)
    query = select([table.c.Amount,
                    table.c.Date,
                    ])\
        .where(and_(table.c.Store == '48314',
                    table.c.Category == 'Total C-Store Sales ($)',
                    table.c.Date >= "2017-04-01"))
    payload = [
        {'date': row[1], 'sales': row[0]}
        for row in query.execute()
    ]
    return jsonify(payload)
@app.route("/cstoremargin")
def thirddata():
    """JSON feed of monthly C-Store margin for every store since 2019."""
    metadata = MetaData(engine)
    table = Table('growthkpi', metadata, autoload=True)
    query = select([table.c.Amount,
                    table.c.Store,
                    extract("month", table.c.Date,)])\
        .where(and_(table.c.Category == 'Total C-Store Margin ($)',
                    table.c.Date >= "2019-01-01"))
    payload = [
        {'date': row[2], 'store': row[1], 'margin': row[0]}
        for row in query.execute()
    ]
    print(payload)
    return jsonify(payload)
@app.route("/data")
def seconddata():
    """JSON feed of monthly fuel volume for store 48314 since 2019."""
    metadata = MetaData(engine)
    table = Table('growthkpi', metadata, autoload=True)
    query = select([table.c.Amount,
                    extract("month", table.c.Date,
                            )])\
        .where(and_(table.c.Store == '48314',
                    table.c.Category == 'Total Fuel Volume',
                    table.c.Date >= "2019-01-01"))
    payload = [
        {'date': row[1], 'volume': row[0]}
        for row in query.execute()
    ]
    return jsonify(payload)
@app.route("/carwashmargin")
def carwashmargin():
    """JSON feed of car-wash commission revenue for store 48314 since Apr 2019."""
    metadata = MetaData(engine)
    table = Table('car wash', metadata, autoload=True)
    query = select([table.c.Amount,
                    table.c.Date,
                    ])\
        .where(and_(table.c.Store == '48314',
                    table.c.Classification == 'revenue',
                    table.c.Item == 'CW Commission Revenue (before crop)',
                    table.c.Date >= "2019-04-01"))
    payload = [
        {'date': row[1], 'commissions': row[0]}
        for row in query.execute()
    ]
    return jsonify(payload)
@app.route("/charts")
def charts():
    """Render the KPI charts page (data comes from the JSON feed routes)."""
    return render_template('charts.html')
@app.route("/dashboard")
def dashboard():
    """Render the dashboard page."""
    return render_template('dashboard.html')
@app.route("/logout")
def logout():
    """Log the current user out and return to the landing page."""
    logout_user()
    return redirect(url_for('home'))
def save_picture(form_picture):
    """Save an uploaded profile picture as two resized copies.

    Mirrors save_hrpicture() but writes under ``static/profile_pics``.
    A random hex filename (original extension preserved) avoids collisions.

    :param form_picture: the uploaded ``FileStorage`` from the form.
    :return: the generated filename (stored on the user row).
    """
    random_hex = secrets.token_hex(8)
    _, f_ext = os.path.splitext(form_picture.filename)
    picture_fn = random_hex + f_ext
    # 150x150 thumbnail copy.
    picture_paththumb = os.path.join(
        app.root_path, 'static/profile_pics/thumb', picture_fn)
    i = Image.open(form_picture)
    i.thumbnail((150, 150), Image.LANCZOS)
    i.save(picture_paththumb)
    # 250x250 "mobile" copy.
    picture_pathmobile = os.path.join(
        app.root_path, 'static/profile_pics/mobile', picture_fn)
    i2 = Image.open(form_picture)
    i2.thumbnail((250, 250), Image.LANCZOS)
    i2.save(picture_pathmobile)
    return picture_fn
@app.route("/account", methods=['GET', 'POST'])
@login_required
def account():
    """View/update the logged-in user's profile (username, email, picture)."""
    form = UpdateAccountForm()
    if form.validate_on_submit():
        if form.picture.data:
            picture_file = save_picture(form.picture.data)
            current_user.image_file = picture_file
            print(form.picture.data)
        current_user.username= form.username.data
        current_user.email= form.email.data
        db.session.commit()
        flash('Your Account Has Been Update', 'success')
        return redirect(url_for('account'))
    elif request.method == 'GET':
        # Pre-fill the form with the current values on first load.
        form.username.data = current_user.username
        form.email.data = current_user.email
    image_file=url_for('static', filename='profile_pics/mobile/' + current_user.image_file)
    return render_template('account.html', title = 'Account',
                           image_file=image_file, form=form)
@app.route("/post/new", methods=['GET','POST'])
@login_required
def new_post():
    """Create a blog post authored by the current user."""
    form = PostForm()
    if form.validate_on_submit():
        post = Post(title=form.title.data, content=form.content.data, author=current_user)
        db.session.add(post)
        db.session.commit()
        flash('Your Post Has Been Created!', 'success')
        return redirect(url_for('blog'))
    return render_template('create_post.html', title='New Post',
                           form=form, legend='New Post')
@app.route("/post/<int:post_id>")
def post(post_id):
    """Display a single blog post (404 if the id is unknown)."""
    post=Post.query.get_or_404(post_id)
    return render_template('post.html', title=post.title, post=post)
@app.route("/post/<int:post_id>/update", methods=['GET', 'POST'])
@login_required
def update_post(post_id):
    """Edit an existing post; only its author may do so (403 otherwise)."""
    post = Post.query.get_or_404(post_id)
    if post.author != current_user:
        abort(403)
    form = PostForm()
    if form.validate_on_submit():
        post.title = form.title.data
        post.content = form.content.data
        db.session.commit()
        flash('Your Post Has Been Update', 'success')
        return redirect(url_for('post', post_id=post.id))
    elif request.method == 'GET':
        # Pre-fill the form with the post's current content.
        form.title.data = post.title
        form.content.data= post.content
    return render_template('create_post.html', title='Update Post',
                           form=form, legend='Update Post')
@app.route("/post/<int:post_id>/delete", methods=['POST'])
@login_required
def delete_post(post_id):
    """Delete a post; only its author may do so (403 otherwise)."""
    post = Post.query.get_or_404(post_id)
    if post.author != current_user:
        abort(403)
    db.session.delete(post)
    db.session.commit()
    flash('Your Post Has Been Deleted', 'success')
    return redirect(url_for('blog'))
@app.route("/user/<string:username>")
def user_posts(username):
    """Paginated posts by one user, newest first (3 per page)."""
    page = request.args.get('page', 1, type=int)
    user = User.query.filter_by(username=username).first_or_404()
    posts = Post.query.filter_by(author=user)\
        .order_by(Post.date_posted.desc())\
        .paginate(page=page, per_page=3)
    return render_template('user_posts.html', posts=posts, user=user)
| {"/flaskblog/routes.py": ["/flaskblog/__init__.py", "/flaskblog/forms.py", "/flaskblog/models.py"], "/flaskblog/forms.py": ["/flaskblog/models.py"], "/flaskblog/models.py": ["/flaskblog/__init__.py"]} |
68,908 | Paulfuther/Work-Website | refs/heads/master | /flaskblog/__init__.py | from flask import Flask, jsonify, request, send_file, flash
from random import sample
from flask_mysqldb import MySQL
from flask_moment import Moment
from datetime import time, datetime
import os
from werkzeug.utils import secure_filename
import pandas as pd
import numpy
import openpyxl
import xlrd
import xlwt
import xlsxwriter
from datetime import datetime
from io import BytesIO
from openpyxl.reader.excel import load_workbook
from os import environ
import re
import datetime as dt
import glob
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, SubmitField, BooleanField
from wtforms.validators import DataRequired, Length, Email, EqualTo
from flask_bootstrap import Bootstrap
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import create_engine
from flaskblog import config
from flaskblog import MySQL
from flask_bcrypt import Bcrypt
from flask_login import LoginManager
from sqlalchemy.sql import text, select
from sqlalchemy import *
from flask_moment import Moment
# Application factory-less setup: build the Flask app and its extensions
# at import time so sibling modules can do `from flaskblog import app, db`.
APP_ROOT = os.path.dirname(os.path.abspath(__file__))
UPLOAD_FOLDER = os.path.join(APP_ROOT, 'Files')
#print(UPLOAD_FOLDER)
app = Flask(__name__)
# NOTE(review): secret key is committed to source control -- load it from
# an environment variable instead.
app.config['SECRET_KEY'] = '302176f4723b5282ef5fbdfd77eccc50'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///site.db'
#app.config.from_object("config.ProductionConfig")
db = SQLAlchemy(app)
bcrypt = Bcrypt(app)
moment = Moment(app)
login_manager=LoginManager(app)
# Unauthenticated requests to @login_required views redirect to 'login'.
login_manager.login_view = 'login'
login_manager.login_message_category = 'info'
#engine = create_engine('mysql://root:root@localhost/work')
#meta=MetaData(engine).reflect()
#metadata = MetaData(engine)
#db2 = engine
#print(engine.table_names())
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
mysql = MySQL(app)
Bootstrap(app)
# NOTE(review): Moment(app) was already instantiated above; this second
# instantiation looks redundant -- confirm before removing.
moment = Moment(app)
from flaskblog import routes | {"/flaskblog/routes.py": ["/flaskblog/__init__.py", "/flaskblog/forms.py", "/flaskblog/models.py"], "/flaskblog/forms.py": ["/flaskblog/models.py"], "/flaskblog/models.py": ["/flaskblog/__init__.py"]} |
68,909 | Paulfuther/Work-Website | refs/heads/master | /flaskblog/config.py |
class Config(object):
    """Base configuration shared by all environments: debugging off."""
    DEBUG = False
class ProductionConfig(Config):
    """MySQL connection settings for production.

    NOTE(review): credentials are hard-coded in source control; load
    them from environment variables instead.
    """
    MYSQL_PASSWORD = 'root'
    MYSQL_USER = 'root'
    MYSQL_HOST = 'localhost'
    MYSQL_DB = 'work'
    MYSQL_PORT = 3306
68,910 | Paulfuther/Work-Website | refs/heads/master | /flaskblog/forms.py | from flask_wtf import FlaskForm
from flask_wtf.file import FileField, FileAllowed
from wtforms import StringField, PasswordField, SubmitField, BooleanField, TextAreaField, FormField, DateField, SelectField, IntegerField, DecimalField
from wtforms.fields.html5 import DateField, TelField
from wtforms.validators import DataRequired, Length, Email, EqualTo, ValidationError, Optional, InputRequired, NumberRange
from flaskblog.models import User, Employee
from flask_login import current_user
import wtforms
class RegistrationForm(FlaskForm):
    """Sign-up form; rejects usernames and emails already registered."""
    username = StringField('Username', validators=[DataRequired(), Length(min=2, max=20)])
    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    confirm_password = PasswordField('Confirm Password', validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Sign-Up')

    # WTForms calls validate_<fieldname> hooks automatically on submit.
    def validate_username(self, username):
        user = User.query.filter_by(username=username.data).first()
        if user:
            raise ValidationError('That Username is Taken. Please choose a different one')

    def validate_email(self, email):
        user = User.query.filter_by(email=email.data).first()
        if user:
            raise ValidationError('That Email is Taken. Please choose a different one')
class LoginForm(FlaskForm):
    """Email/password login form with a remember-me checkbox."""
    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember = BooleanField('Remember Me')
    submit = SubmitField('Login')
class UpdateAccountForm(FlaskForm):
    """Account-settings form; uniqueness is only re-checked when the
    value actually changed from the logged-in user's current one."""
    username = StringField('Username', validators=[
        DataRequired(), Length(min=2, max=20)])
    email = StringField('Email', validators=[DataRequired(), Email()])
    picture = FileField('Update Profile Picture', validators=[FileAllowed(['jpg','png'])])
    submit = SubmitField('Update')

    def validate_username(self, username):
        # Skip the lookup when the user kept their existing username.
        if username.data != current_user.username:
            user = User.query.filter_by(username=username.data).first()
            if user:
                raise ValidationError(
                    'That Username is Taken. Please choose a different one')

    def validate_email(self, email):
        if email.data != current_user.email:
            user = User.query.filter_by(email=email.data).first()
            if user:
                raise ValidationError(
                    'That Email is Taken. Please choose a different one')
class PostForm(FlaskForm):
    """Create/edit form for a blog post (title + body)."""
    title = StringField('Title', validators=[DataRequired()])
    content = TextAreaField('Content', validators=[DataRequired()])
    submit = SubmitField('Post')
class TelephoneForm(FlaskForm):
    """Sub-form for a North-American style phone number (area code + 7-digit number)."""
    area_code = IntegerField('Area Code', validators=[DataRequired()])
    # BUG FIX: Length() only applies to string data and raises a TypeError
    # when handed an IntegerField's int; NumberRange enforces the intended
    # "exactly 7 digits" constraint on integer data.
    number = IntegerField('Number', validators=[DataRequired(), NumberRange(min=1000000, max=9999999)])
class EmployeeForm(FlaskForm):
    """New-employee intake form.

    Uniqueness of phone / email / SIN / iPrism code is enforced against
    the Employee table; the 'store', 'active' and 'manager' selects use a
    placeholder first choice that the matching validators reject.
    """
    firstname = StringField('Firstname', validators=[
        DataRequired(), Length(min=2, max=20)])
    nickname = StringField('Nickname', validators=[Optional()])
    lastname = StringField('Lastname', validators=[
        DataRequired(), Length(min=2, max=20)])
    store = SelectField('Store', choices=[('Home Store', 'HomeStore'), ("396", "396"), ('398', '398'),
                                          ('402', '402'), ('414', '414'), ('1616', '1616'), ('8156', '8156'),
                                          ('8435', '8435'), ('33410', '33410'),
                                          ('33485', '33485'), ('48314', '48314'),
                                          ('65077', '65077'), ('65231', '65231')])
    addressone = StringField('Address Line 1', validators=[
        DataRequired(), Length(min=2, max=100)])
    addresstwo = StringField('Address Line 2', validators=[
        Optional(), Length(min=2, max=100)])
    apt = StringField('Unit/Apt', validators=[Optional()])
    city = StringField('City', validators=[
        DataRequired(), Length(min=2, max=20)])
    province = StringField('Province', validators=[
        DataRequired(), Length(min=2, max=20)])
    country = StringField('Country', validators=[
        DataRequired(), Length(min=2, max=20)])
    email = StringField('Email', validators=[
        DataRequired(), Length(min=10, max=100), Email()])
    #mobilephone = TelField(validators=[DataRequired(), Length(min=10, max=10)])
    mobilephone = StringField('mobile', validators=[
        DataRequired(), Length(min=9, max=12)])
    SIN = StringField('sin', validators=[DataRequired(), Length(min=9, max=9)])
    Startdate = DateField('Start Date', format='%Y-%m-%d',
                          validators=[DataRequired()])
    # NOTE(review): Enddate uses %m/%d/%Y while every other date field in
    # this module uses %Y-%m-%d -- confirm this is intended.
    Enddate = DateField('End Date', format='%m/%d/%Y', validators=[Optional()])
    postal = StringField('Postal Code', validators=[
        DataRequired(), Length(min=6, max=6)])
    trainingid = StringField('Training ID', validators=[DataRequired()])
    trainingpassword = StringField(
        'Training Password', validators=[DataRequired()])
    manager = SelectField('manager', choices=[(
        'Manager Name', 'Manager Name'), ('Terry', "Terry"),
        ('Steph', 'Steph'), ('Wanda', 'Wanda'), ('Sahib', 'Sahib'),
        ('Paul', 'Paul')])
    hrpicture = FileField(validators=[
        FileAllowed(['jpg', 'png'])])
    active = SelectField('Active', choices=[
        ('Active', 'Active'), ('Y', 'Y'), ('N', 'N')])
    iprismcode = StringField('Iprism Code', validators=[
        DataRequired(), Length(min=1, max=9)])
    submit = SubmitField('Add Employee')

    def validate_mobilephone(self, mobilephone):
        user = Employee.query.filter_by(mobilephone=mobilephone.data).first()
        if user:
            raise ValidationError(
                'That mobile is Taken')

    def validate_email(self, email):
        emp = Employee.query.filter_by(email=email.data).first()
        if emp:
            raise ValidationError('That email is Taken')

    def validate_SIN(self, SIN):
        user = Employee.query.filter_by(SIN=SIN.data).first()
        if user:
            raise ValidationError('That SIN is Taken')

    def validate_iprismcode(self, iprismcode):
        user = Employee.query.filter_by(iprismcode=iprismcode.data).first()
        if user:
            raise ValidationError('That code is Taken')

    def validate_store(self, store):
        # "Home Store" is the placeholder first choice.
        if store.data == "Home Store":
            raise ValidationError('Please Enter a Store')

    # FIX: removed leftover debug print() calls from the two validators
    # below (one printed the misleading label "homestore").
    def validate_active(self, active):
        if active.data == "Active":
            raise ValidationError('Must indicate active or not')

    def validate_manager(self, active):
        if active.data == "Manager Name":
            raise ValidationError('Must Select a Manager')
# DRY consolidation: the twelve training-section forms below were
# column-for-column identical.  They now inherit the shared fields and
# validator from one base class; each subclass keeps its original name
# so FormField references elsewhere (giantform) are unaffected.
#
# BUG FIX folded in: foodhandlingForm's placeholder choice value had a
# stray 'e' ('Compliant ?e'), which meant its validate_compliant check
# could never match and the placeholder was silently accepted.
class _ComplianceSectionForm(FlaskForm):
    """Shared shape of a training-compliance section: four optional
    dates plus a Y/N 'compliant' selector with a placeholder choice."""
    startdate = DateField('Start Date', format='%Y-%m-%d',
                          validators=[Optional()])
    completeddate = DateField('Completed Date', format='%Y-%m-%d',
                              validators=[Optional()])
    datequalified = DateField('Date Qualified', format='%Y-%m-%d',
                              validators=[Optional()])
    expirationdate = DateField('Expiration Date', format='%Y-%m-%d',
                               validators=[Optional()])
    compliant = SelectField('Compliant ?', choices=[
        ('Compliant ?', 'Compliant ?'), ('Y', 'Y'), ('N', 'N')])

    def validate_compliant(self, compliant):
        # Reject the placeholder so the user must pick Y or N.
        if compliant.data == "Compliant ?":
            raise ValidationError('Must indicate compliant or not')


class whmisForm(_ComplianceSectionForm):
    """WHMIS training section."""


class ppeForm(_ComplianceSectionForm):
    """Personal protective equipment training section."""


class fireextinguishersForm(_ComplianceSectionForm):
    """Fire extinguishers training section."""


class emergencyproceduresForm(_ComplianceSectionForm):
    """Emergency procedures training section."""


class firstaidForm(_ComplianceSectionForm):
    """First aid training section."""


class foodhandlingForm(_ComplianceSectionForm):
    """Food handling training section."""


class propaneForm(_ComplianceSectionForm):
    """Propane training section."""


class healthandsafetyForm(_ComplianceSectionForm):
    """Health and safety training section."""


class fuelpumpshutoffForm(_ComplianceSectionForm):
    """Fuel pump shutoff training section."""


class workingaloneForm(_ComplianceSectionForm):
    """Working alone training section."""


class workplaceviolenceForm(_ComplianceSectionForm):
    """Workplace violence training section."""


class jointhealthandsafetyForm(_ComplianceSectionForm):
    """Joint health and safety training section."""
class giantform(FlaskForm):
    """Aggregate onboarding form: employee details plus every training section."""
    about_you = FormField(EmployeeForm)
    training = FormField(whmisForm)
    training2 = FormField(ppeForm)
    training3 = FormField(fireextinguishersForm)
    training4 = FormField(emergencyproceduresForm)
    training5 = FormField(firstaidForm)
    training6 = FormField(foodhandlingForm)
    training7 = FormField(propaneForm)
    training8 = FormField(healthandsafetyForm)
    training9 = FormField(workingaloneForm)
    training10 = FormField(workplaceviolenceForm)
    training11 = FormField(jointhealthandsafetyForm)
    training12 = FormField(fuelpumpshutoffForm)
class EmployeeUpdateForm(FlaskForm):
    """Edit/delete form for an existing employee.

    NOTE(review): unlike EmployeeForm, this form has no uniqueness
    validators for phone/email/SIN, and addresstwo is DataRequired here
    but Optional on EmployeeForm -- confirm both differences are intended.
    """
    firstname = StringField('Firstname', validators=[
        DataRequired(), Length(min=2, max=20)])
    nickname = StringField('Nickname', validators=[Optional()])
    lastname = StringField('Lastname', validators=[
        DataRequired(), Length(min=2, max=20)])
    store = SelectField('Store', choices=[('Home Store', 'HomeStore'), ("396", "396"), ('398', '398'),
                                          ('402', '402'), ('414', '414'), ('1616',
                                          '1616'), ('8156', '8156'),
                                          ('8435', '8435'), ('33410', '33410'),
                                          ('33485', '33485'), ('48314', '48314'),
                                          ('65077', '65077'), ('65231', '65231')])
    addressone = StringField('Address Line 1', validators=[
        DataRequired(), Length(min=2, max=100)])
    addresstwo = StringField('Address Line 2', validators=[
        DataRequired(), Length(min=2, max=100)])
    apt = StringField('Unit/Apt', validators=[Optional()])
    city = StringField('City', validators=[
        DataRequired(), Length(min=2, max=20)])
    province = StringField('Province', validators=[
        DataRequired(), Length(min=2, max=20)])
    country = StringField('Country', validators=[
        DataRequired(), Length(min=2, max=20)])
    email = StringField('Email', validators=[
        DataRequired(), Length(min=10, max=100), Email()])
    mobilephone = StringField('mobile', validators=[
        DataRequired(), Length(min=9, max=12)])
    SIN = StringField('sin', validators=[DataRequired(), Length(min=9, max=9)])
    Startdate = DateField('Start Date', format='%Y-%m-%d',
                          validators=[DataRequired()])
    Enddate = DateField('End Date', format='%m/%d/%Y', validators=[Optional()])
    postal = StringField('Postal Code', validators=[
        DataRequired(), Length(min=6, max=6)])
    manager = SelectField('manager', choices=[(
        'Manager Name', 'Manager Name'), ('Terry', "Terry"),
        ('Steph', 'Steph'), ('Wanda', 'Wanda'), ('Sahib', 'Sahib'),
        ('Paul', 'Paul')])
    delete = SubmitField('Delete Employee')
    submit = SubmitField('Edit Employee')
    trainingid = StringField('Training ID', validators=[DataRequired()])
    trainingpassword = StringField(
        'Training Password', validators=[DataRequired()])
    hrpicture = FileField(validators=[
        FileAllowed(['jpg', 'png'])])
    active = SelectField('Active', choices=[
        ('Active', 'Active'), ('Y', 'Y'), ('N', 'N')])
    iprismcode = StringField('Iprism Code', validators=[
        DataRequired(), Length(min=1, max=9)])
| {"/flaskblog/routes.py": ["/flaskblog/__init__.py", "/flaskblog/forms.py", "/flaskblog/models.py"], "/flaskblog/forms.py": ["/flaskblog/models.py"], "/flaskblog/models.py": ["/flaskblog/__init__.py"]} |
68,911 | Paulfuther/Work-Website | refs/heads/master | /flaskblog/models.py | from flaskblog import db, login_manager
from flaskblog import datetime
from flask_login import UserMixin
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: resolve a session's user id to a User row."""
    return User.query.get(int(user_id))
class User(db.Model, UserMixin):
    """Application account; UserMixin supplies the Flask-Login hooks."""
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(20), unique=True, nullable=False)
    email = db.Column(db.String(120), unique=True, nullable=False)
    # Avatar filename (not a path).
    image_file = db.Column(db.String(20), nullable=False, default='default.jpg')
    # Presumably a bcrypt hash (60 chars; app initialises Bcrypt) -- never plaintext.
    password = db.Column(db.String(60), nullable=False)
    # One-to-many: each Post gets an 'author' backref to its User.
    posts = db.relationship('Post', backref='author', lazy=True)

    def __repr__(self):
        return f"User('{self.username}', '{self.email}')"
class Post(db.Model):
    """A blog post authored by a User."""
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(100), nullable=False)
    # BUG FIX: pass the callable, not its result.  `datetime.utcnow()`
    # was evaluated once at import time, so every post would have been
    # stamped with the server start time instead of its creation time.
    date_posted = db.Column(db.DateTime, nullable=False,
                            default=datetime.utcnow)
    content = db.Column(db.Text, nullable=False)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)

    def __repr__(self):
        return f"Post('{self.title}', '{self.date_posted}')"
class Employee(db.Model):
    """HR record for a store employee plus one-to-one training records."""
    id=db.Column(db.Integer, primary_key=True)
    firstname = db.Column(db.String(20), nullable=False)
    nickname = db.Column(db.String(20), nullable=True)
    lastname = db.Column(db.String(20), nullable=False)
    store = db.Column(db.Integer)
    addressone = db.Column(db.String(20), nullable=False)
    addresstwo = db.Column(db.String(20), nullable=True)
    apt = db.Column(db.String(20), nullable=True)
    city = db.Column(db.String(20), nullable=False)
    province = db.Column(db.String(20), nullable=False)
    country = db.Column(db.String(20), nullable=False)
    # NOTE(review): forms allow 9-12 chars for mobilephone but the column
    # is String(10) -- confirm the intended length.
    mobilephone = db.Column(db.String(10), unique=True, nullable=False)
    email = db.Column(db.String(120), unique=True, nullable=False)
    SIN = db.Column(db.Integer, unique=True, nullable=False)
    # Audit timestamps; callables so they evaluate per-row.
    created_on = db.Column(db.DateTime(), default=datetime.utcnow)
    updated_on = db.Column(db.DateTime(), default=datetime.utcnow, onupdate=datetime.utcnow)
    Startdate = db.Column(db.DateTime(), nullable=True)
    Enddate = db.Column(db.DateTime(), nullable=True)
    postal = db.Column(db.String(6), nullable=False)
    trainingid = db.Column(db.String(), nullable=False)
    trainingpassword = db.Column(db.String(), nullable=False)
    manager = db.Column(db.String)
    image_file = db.Column(db.String(20), nullable=False,default='default.jpg')
    active = db.Column(db.String)
    iprismcode = db.Column(db.String(9), nullable=False)
    # Training relationships: one-to-one (uselist=False); each training
    # row gets an 'employee' backref.
    whmis = db.relationship(
        'whmis', backref='employee', uselist=False)
    ppe = db.relationship(
        'ppe', backref='employee', uselist=False)
    fireextinguishers = db.relationship(
        'fireextinguishers', backref='employee', uselist=False)
    emergencyresponseprocedures = db.relationship(
        'emergencyresponseprocedures', backref='employee', uselist=False)
    firstaid = db.relationship(
        'firstaid', backref='employee', uselist=False)
    foodhandling = db.relationship(
        'foodhandling', backref='employee', uselist=False)
    propane = db.relationship(
        'propane', backref='employee', uselist=False)
    healthandsafety = db.relationship(
        'healthandsafety', backref='employee', uselist=False)
    fuelpumpshutoff = db.relationship(
        'fuelpumpshutoff', backref='employee', uselist=False)
    workingalone = db.relationship(
        'workingalone', backref='employee', uselist=False)
    workplaceviolence = db.relationship(
        'workplaceviolence', backref='employee', uselist=False)
    jointhealthandsafety = db.relationship(
        'jointhealthandsafety', backref='employee', uselist=False)
    #def __repr__(self):
    #    return f"Employee('{self.firstname}', '{self.SIN}')"
# DRY consolidation: the twelve training-record tables below were
# column-for-column identical.  SQLAlchemy's declarative system copies
# plain (non-ForeignKey) Column objects declared on a mixin onto every
# mapped subclass, so each table keeps its own independent columns and
# the resulting schema is unchanged.  The ForeignKey column stays on
# the subclasses because FK columns cannot be shared from a plain mixin.
class _TrainingRecordMixin(object):
    """Columns shared by every training-record table."""
    id = db.Column(db.Integer, primary_key=True)
    # Audit timestamps; callables so they evaluate per-row.
    created_on = db.Column(db.DateTime(), default=datetime.utcnow)
    updated_on = db.Column(
        db.DateTime(), default=datetime.utcnow, onupdate=datetime.utcnow)
    startdate = db.Column(db.DateTime(), nullable=True)
    completed = db.Column(db.String)
    datequalified = db.Column(db.DateTime(), nullable=True)
    # (sic) column name kept misspelled -- it is part of the existing schema.
    expireydate = db.Column(db.DateTime(), nullable=True)
    compliant = db.Column(db.String)


class whmis(_TrainingRecordMixin, db.Model):
    """WHMIS training record (one per employee)."""
    employee_id = db.Column(db.Integer, db.ForeignKey('employee.id'))


class ppe(_TrainingRecordMixin, db.Model):
    """Personal protective equipment training record."""
    employee_id = db.Column(db.Integer, db.ForeignKey('employee.id'))


class fireextinguishers(_TrainingRecordMixin, db.Model):
    """Fire extinguishers training record."""
    employee_id = db.Column(db.Integer, db.ForeignKey('employee.id'))


class emergencyresponseprocedures(_TrainingRecordMixin, db.Model):
    """Emergency response procedures training record."""
    employee_id = db.Column(db.Integer, db.ForeignKey('employee.id'))


class firstaid(_TrainingRecordMixin, db.Model):
    """First aid training record."""
    employee_id = db.Column(db.Integer, db.ForeignKey('employee.id'))


class foodhandling(_TrainingRecordMixin, db.Model):
    """Food handling training record."""
    employee_id = db.Column(db.Integer, db.ForeignKey('employee.id'))


class propane(_TrainingRecordMixin, db.Model):
    """Propane training record."""
    employee_id = db.Column(db.Integer, db.ForeignKey('employee.id'))


class healthandsafety(_TrainingRecordMixin, db.Model):
    """Health and safety training record."""
    employee_id = db.Column(db.Integer, db.ForeignKey('employee.id'))


class fuelpumpshutoff(_TrainingRecordMixin, db.Model):
    """Fuel pump shutoff training record."""
    employee_id = db.Column(db.Integer, db.ForeignKey('employee.id'))


class workingalone(_TrainingRecordMixin, db.Model):
    """Working alone training record."""
    employee_id = db.Column(db.Integer, db.ForeignKey('employee.id'))


class workplaceviolence(_TrainingRecordMixin, db.Model):
    """Workplace violence training record."""
    employee_id = db.Column(db.Integer, db.ForeignKey('employee.id'))


class jointhealthandsafety(_TrainingRecordMixin, db.Model):
    """Joint health and safety training record."""
    employee_id = db.Column(db.Integer, db.ForeignKey('employee.id'))
| {"/flaskblog/routes.py": ["/flaskblog/__init__.py", "/flaskblog/forms.py", "/flaskblog/models.py"], "/flaskblog/forms.py": ["/flaskblog/models.py"], "/flaskblog/models.py": ["/flaskblog/__init__.py"]} |
68,912 | Paulfuther/Work-Website | refs/heads/master | /flaskblog/applications.py | import pandas as pd
import numpy
import openpyxl
import xlrd
import xlwt
import xlsxwriter
from flaskblog import datetime
def securityapp(file):
    """Parse uploaded security log files into one Excel sheet of 'Pump' events.

    NOTE(review): several names used here (`request`, `re`, `BytesIO`)
    are not imported by this module, so as written this would raise
    NameError -- confirm where this code is meant to execute.
    """
    # Only keep events between 05:15 and 11:45.
    start = datetime.strptime('05:15:00', '%H:%M:%S').time()
    end = datetime.strptime('11:45:00', '%H:%M:%S').time()
    files = request.files.getlist('securityfileinputFile[]')
    #os.chdir('/Users/mobile/Dropbox/BACK OFFICE SECURITY FILE/')
    #print(os.getcwd())
    #FileList = glob.glob('*.rtf')
    #print(FileList)
    newdf = []
    # NOTE(review): the loop variable shadows the `file` parameter, which
    # is otherwise unused.
    for file in files:
        inputfilename = file
        excel_file = inputfilename
        store_number = file
        a = str(store_number)
        print(a)
        # Store number = first run of digits in the upload's name.
        b = re.search('\d+', a).group()
        print(b)
        print(store_number)
        # Each upload is read as a single tab-separated text column.
        df = pd.read_csv(excel_file, sep='\t', header=None)
        df.columns = ['Text']
        print(df.dtypes)
        print(df.head)
        # Keep only lines mentioning 'Pump'.
        df2 = df[df['Text'].str.contains('Pump', na=False)].copy()
        print(df2)
        if df2.empty:
            continue
        # Time is the first hh:mm:ss-shaped token in the line.
        df2['Time'] = df2['Text'].str.extract('(..:..:..)', expand=True)
        df2['Time'] = pd.to_datetime(
            df2['Time'], format='%H:%M:%S').dt.time
        df2 = df2[df2['Time'].between(start, end)]
        # Date is the first 9 characters, formatted like '01 Jan 20'.
        df2['Date'] = df2['Text'].str.slice(start=0, stop=9)
        df2['Date'] = pd.to_datetime(
            df2['Date'], format='%d %b %y').dt.date
        #df2['Date']=df2['Date'].datetime.strptime(b1, "%d %m %y").strftime("%b-%Y")
        df2['Store'] = b
        print(df2)
        newdf.append(df2)
    newdf = pd.concat(newdf)
    #newdf.to_excel("Pumps to Prepay" + ".xlsx", engine='xlsxwriter')
    #print(newdf.dtypes)
    # Write the combined frame to an in-memory Excel workbook.
    output = BytesIO()
    writer = pd.ExcelWriter(output, engine='xlsxwriter')
    newdf.to_excel(writer)
    writer.save()
    output.seek(0)
    # NOTE(review): nothing is returned; `output` is discarded -- confirm
    # whether this should `return output`.
| {"/flaskblog/routes.py": ["/flaskblog/__init__.py", "/flaskblog/forms.py", "/flaskblog/models.py"], "/flaskblog/forms.py": ["/flaskblog/models.py"], "/flaskblog/models.py": ["/flaskblog/__init__.py"]} |
68,922 | shivagarg35/TSV_parser | refs/heads/main | /records/literals.py | STAT_ONE_CHOICE_ONE = '01'
STAT_ONE_CHOICE_TWO = '02'
STAT_ONE_CHOICE_THREE = '03'
STAT_ONE_CHOICE_FOUR = '04'
STAT_ONE_CHOICE_FIVE = '11'
STAT_ONE_CHOICE_NOT_SPECIFIED = '99'
STAT_TWO_CHOICE_ONE = '1'
STAT_TWO_CHOICE_TWO = '2'
STAT_TWO_CHOICE_THREE = '3'
STAT_TWO_CHOICE_NOT_SPECIFIED = '9'
| {"/records/views.py": ["/records/models.py", "/records/forms.py"], "/records/forms.py": ["/records/models.py"]} |
68,923 | shivagarg35/TSV_parser | refs/heads/main | /records/views.py | from django.shortcuts import render,redirect
from records.models import Thing,Item, Tsv
from .forms import TsvModelForm
from django.contrib import messages
from datetime import datetime
from dateutil import parser
import csv
from . import literals
def upload_file_view(request):
    """Handle a TSV upload: validate each row and persist Thing/Item records.

    Renders ``tsv_upload.html`` with the upload form and, after a valid
    upload, the lists of Thing and Item rows created from the file.
    Per-row validation failures are reported via the messages framework
    and the offending row is skipped.
    """
    form = TsvModelForm(request.POST or None, request.FILES or None)
    # Allowed codes for the two status columns (see records.literals).
    stat_one_val = [
        literals.STAT_ONE_CHOICE_ONE,
        literals.STAT_ONE_CHOICE_TWO,
        literals.STAT_ONE_CHOICE_THREE,
        literals.STAT_ONE_CHOICE_FOUR,
        literals.STAT_ONE_CHOICE_FIVE,
        literals.STAT_ONE_CHOICE_NOT_SPECIFIED,
    ]
    stat_two_val = [
        literals.STAT_TWO_CHOICE_ONE,
        literals.STAT_TWO_CHOICE_TWO,
        literals.STAT_TWO_CHOICE_THREE,
        literals.STAT_TWO_CHOICE_NOT_SPECIFIED,
    ]
    if form.is_valid():
        p = form.save(commit=False)
        form = TsvModelForm()  # fresh form for re-rendering
        with open(p.file_name.path, 'r') as fh:
            reader = csv.reader(fh, delimiter="\t")
            Thing_list = []
            Item_list = []
            # NOTE(review): rows shorter than 8 columns raise IndexError — confirm inputs.
            for i, row in enumerate(reader):
                if i == 0:
                    # Header row: the column names must match exactly.
                    if row[0] != "code" or row[1] != "description" or row[2] != 'date' or row[3] != 'stat_one' or row[4] != 'stat_two' or row[5] != 'name' or row[6] != 'rating' or row[7] != 'score':
                        messages.error(request, "Table schema invalid")
                        break
                    # Persist the upload record only once the schema is valid.
                    p.save()
                else:
                    code = row[0]
                    description = row[1]
                    # date validation: accepts any parseable format, normalized to ISO.
                    date = parser.parse(row[2])
                    date = datetime.strftime(date, '%Y-%m-%d')
                    # stat_one validation
                    stat_one = row[3]
                    if stat_one not in stat_one_val:
                        messages.error(request, "Value to Stat_one is not valid in line number: " + str(i + 1))
                        continue
                    # stat_two validation
                    stat_two = row[4]
                    if stat_two not in stat_two_val:
                        messages.error(request, 'Value of stat_two is not valid in line number: ' + str(i + 1))
                        continue
                    name = row[5]
                    score = row[7]
                    # rating validation: numeric, one decimal place, within [1.0, 5.0]
                    try:
                        rating = round(float(row[6]), 1)
                        if rating < 1.0 or rating > 5.0:
                            messages.error(request, "Rating not in range in line number: " + str(i + 1))
                            continue
                        rating = str(rating)
                    except ValueError:
                        messages.error(request, "Rating not number in line number: " + str(i + 1))
                        # BUG FIX: skip this row; previously execution fell
                        # through with `rating` unbound, raising NameError
                        # when building the Item below.
                        continue
                    # Save the valid row.
                    thingInfo = Thing(
                        code=code,
                        description=description,
                        date=date,
                        stat_one=stat_one,
                        stat_two=stat_two,
                    )
                    thingInfo.save()
                    Thing_list.append(thingInfo)
                    itemInfo = Item(
                        thing=thingInfo,
                        name=name,
                        rating=rating,
                        score=score,
                    )
                    itemInfo.save()
                    Item_list.append(itemInfo)
        context = {
            'form': form,
            'things': Thing_list,
            'Items': Item_list,
        }
        return render(request, 'tsv_upload.html', context)
    context = {
        'form': form,
    }
    return render(request, 'tsv_upload.html', context)
| {"/records/views.py": ["/records/models.py", "/records/forms.py"], "/records/forms.py": ["/records/models.py"]} |
68,924 | shivagarg35/TSV_parser | refs/heads/main | /records/urls.py | from django.conf.urls import url
from django.urls import path
from django.contrib import admin
from django.urls.resolvers import URLPattern
from records import views
from django.conf import settings
from django.conf.urls.static import static
# URL routes for the records app: the site root serves the TSV upload view.
urlpatterns = [
    path('', views.upload_file_view, name="index"),
] | {"/records/views.py": ["/records/models.py", "/records/forms.py"], "/records/forms.py": ["/records/models.py"]} |
68,925 | shivagarg35/TSV_parser | refs/heads/main | /records/tests.py | from django.test import TestCase
from . import literals

# Create your tests here.

# BUG FIX: `import literals` fails under the app's package layout (the
# module lives inside the `records` package — views.py and models.py both
# use `from . import literals`), and `literals.STAT_ONE_` does not exist
# (AttributeError at import time).  Iterate the actual STAT_ONE_* choice
# constants instead.
for _name in dir(literals):
    if _name.startswith("STAT_ONE_"):
        print(getattr(literals, _name))
| {"/records/views.py": ["/records/models.py", "/records/forms.py"], "/records/forms.py": ["/records/models.py"]} |
68,926 | shivagarg35/TSV_parser | refs/heads/main | /records/admin.py | from django.contrib import admin
from . import models

# Expose the TSV upload models in the Django admin site.
admin.site.register(models.Tsv)
admin.site.register(models.Thing)
admin.site.register(models.Item)
| {"/records/views.py": ["/records/models.py", "/records/forms.py"], "/records/forms.py": ["/records/models.py"]} |
68,927 | shivagarg35/TSV_parser | refs/heads/main | /records/forms.py | from django import forms
from django.db.models import fields
from .models import Tsv
import os
class TsvModelForm(forms.ModelForm):
    """Upload form exposing only the ``Tsv.file_name`` file field."""

    class Meta:
        model = Tsv
        fields = ('file_name',)
| {"/records/views.py": ["/records/models.py", "/records/forms.py"], "/records/forms.py": ["/records/models.py"]} |
68,928 | shivagarg35/TSV_parser | refs/heads/main | /records/models.py | from django.db import models
from django.core.validators import FileExtensionValidator
from django.core.validators import MaxValueValidator, MinValueValidator
from . import literals
class Tsv (models.Model):
    """An uploaded TSV file; only the path and upload timestamp are stored."""

    # Only files with a .tsv extension are accepted.
    # NOTE(review): upload dir 'Tssv' looks like a typo for 'Tsv' — confirm before changing.
    file_name = models.FileField(upload_to='Tssv',validators=[FileExtensionValidator(allowed_extensions=['tsv'])],)
    uploaded = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return f"File id: {self.id}"
class Thing (models.Model):
    """A parsed TSV row: code, description, date plus two status codes."""

    code = models.CharField(max_length=10, primary_key=True)
    description = models.CharField(max_length=50, null=True, blank=True)
    date = models.DateField(null=True, blank=True)
    # Status codes default to the "not specified" literals ('99' / '9').
    stat_one = models.CharField(max_length=2, default=literals.STAT_ONE_CHOICE_NOT_SPECIFIED)
    stat_two = models.CharField(max_length=1, default=literals.STAT_TWO_CHOICE_NOT_SPECIFIED)
class Item (models.Model):
    """Per-Thing item holding a name, a rating (stored as text) and a score."""

    thing = models.ForeignKey(Thing, on_delete=models.CASCADE)
    name = models.CharField(max_length=10)
    # Rating is persisted as a short string such as "4.5" (the upload view
    # rounds it to one decimal before saving).
    rating = models.CharField(max_length=3)
    # NOTE(review): default=0 conflicts with MinValueValidator(1) — confirm intended.
    score = models.IntegerField(default=0, blank=True, validators=[MaxValueValidator(100), MinValueValidator(1)])
| {"/records/views.py": ["/records/models.py", "/records/forms.py"], "/records/forms.py": ["/records/models.py"]} |
68,933 | gooofy/aqb | refs/heads/master | /src/tools/os2aqb/sfd2aqb.py | #!/bin/env python3
#
# _very_ crude converter amiga os include -> AQB
#
import re
import sys
import io
# Exact-match mapping of C type spellings to their AQB equivalents.
# Unknown types raise KeyError in c2py() below.
C2PY = {"ULONG": "ULONG",
        "CONST ULONG *": "ULONG PTR",
        "ULONG *": "ULONG PTR",
        "LONG": "LONG",
        "CONST struct BitMap *": "BitMap PTR",
        "PLANEPTR": "VOID PTR",
        "CONST PLANEPTR": "VOID PTR",
        "CONST_STRPTR": "String",
        "WORD": "INTEGER",
        "CONST UWORD *": "UINTEGER PTR",
        "UWORD *": "UINTEGER PTR",
        "UBYTE *": "UBYTE PTR",
        "CONST UBYTE *": "UBYTE PTR",
        "UWORD": "UINTEGER",
        "CONST struct TextFont *": "TextFont PTR",
        "APTR": "APTR",
        "CONST APTR": "VOID PTR",
        "struct AnimOb **": "AnimOb PTR PTR",
        "BOOL": "BOOLEAN",
        "Tag": "ULONG",
        "DisplayInfoHandle": "VOID PTR",
        "CONST DisplayInfoHandle": "VOID PTR",
        "CONST WORD *": "INTEGER PTR",
        }

# Pointer-to-struct spellings, e.g. "struct BitMap *" -> "BitMap PTR",
# with and without a CONST/const qualifier.
PTRPATTERN = re.compile(r"^struct\W+(\w+)\W+\*")
CPTRPATTERN = re.compile(r"^CONST struct\W+(\w+)\W+\*")
CPTRPATTERN2 = re.compile(r"^const struct\W+(\w+)\W+\*")
def c2py (ty):
    """Translate a C type string into its AQB equivalent.

    Pointer-to-struct spellings become ``<Struct> PTR``; anything else is
    looked up in the C2PY table (KeyError for unknown types).  An empty or
    None type yields the placeholder "???".
    """
    if not ty:
        return "???"
    for pointer_pat in (PTRPATTERN, CPTRPATTERN, CPTRPATTERN2):
        hit = pointer_pat.match(ty)
        if hit:
            return "%s PTR" % hit.group(1)
    return C2PY[ty]
# Example prototype line as found in an SFD file:
# LONG BltBitMap(CONST struct BitMap * srcBitMap, LONG xSrc, LONG ySrc, struct BitMap * destBitMap, LONG xDest, LONG yDest, LONG xSize, LONG ySize, ULONG minterm, ULONG mask, PLANEPTR tempA) (a0,d0,d1,a1,d2,d3,d4,d5,d6,d7,a2)
# Groups: 1 = "<returntype> <name>", 2 = argument list, 3 = register list.
SIGPATTERN = re.compile(r"^([^(]+)\(([^)]*)\)\W*\(([^)]*)\)")

# Decode stdin as latin1 — it never fails on arbitrary byte values.
input_stream = io.TextIOWrapper(sys.stdin.buffer, encoding='latin1')

libBase = '???'
baseType = '???'
# First function offset; decremented by 6 per prototype (presumably Amiga
# library vector spacing — TODO confirm).
offset = -30
# Main loop: translate SFD directives and prototypes from stdin into AQB
# DECLARE statements on stdout.
for line in input_stream:
    if not line:
        continue
    line = line.strip()
    if line.startswith('=='):
        # SFD directives; only ==base and ==basetype are handled.
        if line.startswith('==base '):
            libBase = line[7:]
            print("REM libBase: %s" % libBase)
        elif line.startswith('==basetype '):
            baseType = line[11:]
            print("REM baseType: %s" % baseType)
    else:
        m = SIGPATTERN.match(line)
        if m:
            # group(1) is "<returntype> <name>"; split at the last space.
            ri = m.group(1).rfind(' ')
            returntype = m.group(1)[:ri]
            fname = m.group(1)[ri:]
            args = m.group(2).split(',')
            regs = m.group(3)
            # BUG FIX: the original tested `m.group(1) == "VOID"`, but
            # group(1) still contains the function name ("VOID Foo"), so
            # the DECLARE SUB branch was unreachable.  Compare the parsed
            # return type instead (consistent with the check further down).
            if returntype == "VOID":
                sys.stdout.write("DECLARE SUB %s (" % fname)
            else:
                sys.stdout.write("DECLARE FUNCTION %s (" % fname)
            first = True
            for arg in args:
                if not arg:
                    continue
                if first:
                    first = False
                else:
                    sys.stdout.write(", ")
                # Each argument is "<c-type> <name>"; split at the last space.
                ti = arg.rfind(' ')
                argname = arg[ti:].strip()
                sys.stdout.write("BYVAL %s AS %s" % (argname, c2py(arg[:ti].strip())))
            if returntype == "VOID":
                sys.stdout.write(")")
            else:
                sys.stdout.write(") AS %s" % c2py(returntype))
            print(" LIB %d %s (%s)" % (offset, libBase, regs))
            offset -= 6  # next library vector offset
        else:
            print("REM %s" % line)
            # Crash loudly on any line the prototype pattern cannot parse.
            assert False
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,934 | gooofy/aqb | refs/heads/master | /src/tools/md2amiga/marko/ext/codehilite.py | r"""
Code highlight extension
~~~~~~~~~~~~~~~~~~~~~~~~
Enable code highlight using ``pygments``. This requires to install `codehilite` extras::
pip install marko[codehilite]
Arguments:
All arguments are passed to ``pygments.formatters.html.HtmlFormatter``.
Usage::
from marko import Markdown
markdown = Markdown(extensions=['codehilite'])
markdown.convert(```python my_script.py\nprint('hello world')\n```)
"""
import json
from pygments import highlight
from pygments.lexers import get_lexer_by_name, guess_lexer
from pygments.formatters import html
from pygments.util import ClassNotFound
def _parse_extras(line):
if not line:
return {}
return {k: json.loads(v) for part in line.split(",") for k, v in [part.split("=")]}
class CodeHiliteRendererMixin:
    """Renderer mixin that renders fenced code blocks through pygments."""

    # HtmlFormatter options; populated by the CodeHilite extension object.
    options = {}  # type: dict

    def render_fenced_code(self, element):
        """Highlight the fenced block's text and return the pygments HTML."""
        code = element.children[0].children
        options = CodeHiliteRendererMixin.options.copy()
        # Per-block extras (parsed from the fence info string) override globals.
        options.update(_parse_extras(getattr(element, "extra", None)))
        if element.lang:
            try:
                lexer = get_lexer_by_name(element.lang, stripall=True)
            except ClassNotFound:
                # Unknown language name: fall back to content-based guessing.
                lexer = guess_lexer(code)
        else:
            lexer = guess_lexer(code)
        formatter = html.HtmlFormatter(**options)
        return highlight(code, lexer, formatter)
class CodeHilite:
    """Extension object wiring CodeHiliteRendererMixin into the renderer."""

    def __init__(self, **options):
        # Options are shared with the mixin via its class attribute.
        CodeHiliteRendererMixin.options = options
        self.renderer_mixins = [CodeHiliteRendererMixin]
def make_extension(**options):
    """Factory entry point used when the extension is loaded by name."""
    return CodeHilite(**options)
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,935 | gooofy/aqb | refs/heads/master | /src/tools/md2amiga/marko/__init__.py | r"""
_ _ _ ___ _ _ ___
| \ / | /_\ | _ \ | |/ / / _ \
| |\/| | / _ \ | / | ' < | (_) |
|_| |_| /_/ \_\ |_|_\ |_|\_\ \___/
A markdown parser with high extensibility.
Licensed under MIT.
Created by Frost Ming<mianghong@gmail.com>
"""
from .html_renderer import HTMLRenderer
from .renderer import Renderer
from .parser import Parser
from .helpers import is_type_check, load_extension_object
if is_type_check():
from typing import Type, List, Any, Optional
from .block import Document
from .parser import ElementType
__version__ = "1.1.0"
class SetupDone(Exception):
    """Raised by ``Markdown.use()`` when extensions are added after setup."""

    def __str__(self):
        return "Unable to register more extensions after setup done."
class Markdown:
    """The main class to convert markdown documents.

    Attributes:

    * parser: an instance of :class:`marko.parser.Parser`
    * renderer: an instance of :class:`marko.renderer.Renderer`

    :param parser: a subclass :class:`marko.parser.Parser`.
    :param renderer: a subclass :class:`marko.renderer.Renderer`.
    :param extensions: a list of extensions to register on the object.
        See document of :meth:`Markdown.use()`.
    """

    def __init__(self, parser=Parser, renderer=HTMLRenderer, extensions=None):
        # type: (Type[Parser], Type[Renderer], Optional[Any]) -> None
        assert issubclass(parser, Parser)
        self._base_parser = parser
        self._parser_mixins = []  # type: List[Any]

        assert issubclass(renderer, Renderer)
        self._base_renderer = renderer
        self._renderer_mixins = []  # type: List[Any]

        self._extra_elements = []  # type: List[ElementType]
        # Once _setup_extensions() runs, registration is frozen (SetupDone).
        self._setup_done = False
        if extensions:
            self.use(*extensions)

    def use(self, *extensions):  # type: (Any) -> None
        r"""Register extensions to Markdown object.
        An extension should be either an object providing ``elements``, `parser_mixins`
        , ``renderer_mixins`` or all attributes, or a string representing the
        corresponding extension in ``marko.ext`` module.

        :param \*extensions: extension object or string.

        .. note:: Marko uses a mixin based extension system, the order of extensions
            matters: An extension preceding in order will have higher priorty.
        """
        if self._setup_done:
            raise SetupDone()
        for extension in extensions:
            if isinstance(extension, str):
                extension = load_extension_object(extension)()
            # Prepend, so later-registered extensions get higher MRO priority.
            self._parser_mixins = (
                getattr(extension, "parser_mixins", []) + self._parser_mixins
            )
            self._renderer_mixins = (
                getattr(extension, "renderer_mixins", []) + self._renderer_mixins
            )
            self._extra_elements.extend(getattr(extension, "elements", []))

    def _setup_extensions(self):  # type: () -> None
        """Install all extensions and set things up."""
        if self._setup_done:
            return
        # Build concrete classes by mixing the registered mixins in front of
        # the base parser/renderer, then instantiate them.
        self.parser = type(
            "MarkdownParser", tuple(self._parser_mixins) + (self._base_parser,), {}
        )()
        for e in self._extra_elements:
            self.parser.add_element(e)
        self.renderer = type(
            "MarkdownRenderer",
            tuple(self._renderer_mixins) + (self._base_renderer,),
            {},
        )()
        self._setup_done = True

    def convert(self, text):  # type: (str) -> str
        """Parse and render the given text."""
        return self.render(self.parse(text))

    def __call__(self, text):  # type: (str) -> str
        return self.convert(text)

    def parse(self, text):  # type: (str) -> Document
        """Call ``self.parser.parse(text)``.

        Override this to preprocess text or handle parsed result.
        """
        self._setup_extensions()
        return self.parser.parse(text)

    def render(self, parsed):  # type: (Document) -> str
        """Call ``self.renderer.render(text)``.

        Override this to handle parsed result.
        """
        # The renderer gets a reference to the document root before rendering
        # (presumably for document-wide lookups — confirm in Renderer).
        self.renderer.root_node = parsed
        with self.renderer as r:
            return r.render(parsed)
# Inner instance, use the bare convert/parse/render function instead
# (module-level convenience API backed by a default Markdown object).
_markdown = Markdown()


def convert(text):  # type: (str) -> str
    """Parse and render the given text.

    :param text: text to convert.
    :returns: The rendered result.
    """
    return _markdown.convert(text)


def parse(text):  # type: (str) -> Document
    """Parse the text to a structured data object.

    :param text: text to parse.
    :returns: the parsed object
    """
    return _markdown.parse(text)


def render(parsed):  # type: (Document) -> str
    """Render the parsed object to text.

    :param parsed: the parsed object
    :returns: the rendered result.
    """
    return _markdown.render(parsed)
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,936 | gooofy/aqb | refs/heads/master | /src/tools/md2amiga/tests/test_spec.py | from tests import SpecTestSuite
from marko import Markdown
from marko.ext.gfm import gfm
class TestCommonMark(SpecTestSuite):
    """Run the CommonMark spec cases against the default Markdown object."""

    @classmethod
    def setup_class(cls):
        cls.markdown = Markdown()

    def test_greedy_consume_prefix(self):
        # Regression case: the "> " prefix must not be greedily consumed
        # inside a fenced code block within a blockquote list item.
        md = "> 1. Item 1\n> ```code\n> indented code\n> ```"
        html = (
            '<blockquote><ol><li>Item 1<pre><code class="language-code">'
            " indented code\n</code></pre></li></ol></blockquote>"
        )
        self.assert_case(md, html)
# Attach the generated CommonMark spec cases to the class.
TestCommonMark.load_spec("commonmark")

# GFM spec case ids skipped by TestGFM.ignore_case (presumably known
# failures — confirm against upstream).
GFM_IGNORE = ["autolinks_015", "autolinks_018", "autolinks_019"]
class TestGFM(SpecTestSuite):
    """Run the GitHub-Flavored-Markdown spec cases against the gfm object."""

    @classmethod
    def setup_class(cls):
        cls.markdown = gfm

    @classmethod
    def ignore_case(cls, n):
        # Skip the case ids listed in GFM_IGNORE.
        return n in GFM_IGNORE

    def test_parse_table_with_backslashes(self):
        # Regression case: backslashes must survive table parsing.
        md = "\\\n\n| \\ |\n| - |\n| \\ |"
        html = "<p>\\</p><table><thead><tr><th>\\</th></tr></thead><tbody><tr><td>\\</td></tr></tbody></table>"
        self.assert_case(md, html)


# Attach the generated GFM spec cases to the class.
TestGFM.load_spec("gfm")
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,937 | gooofy/aqb | refs/heads/master | /src/tools/os2aqb/os2aqb.py | #!/bin/env python3
#
# _very_ crude converter amiga os include -> AQB
#
import re
import sys
import io
# Parser state: are we currently inside a `struct { ... }` definition?
in_type = False

# Decode stdin as latin1 — it never fails on arbitrary byte values.
input_stream = io.TextIOWrapper(sys.stdin.buffer, encoding='latin1')

# Main loop: translate C struct/#define lines from stdin into AQB TYPE/CONST
# statements on stdout; anything unrecognized is echoed as a REM comment.
for line in input_stream:
    if not line:
        continue
    if not in_type:
        # struct Menu
        g = re.match(r'^\s*struct\s+([A-Za-z_]+)', line)
        if g:
            print("TYPE %s" % g.group(1))
            in_type = True
            continue
        # #define MENUENABLED 0x0001   (hex constant)
        # BUG FIX: the identifier class now allows digits after the first
        # character — C names such as DMB_DEPTH2 were silently skipped by
        # the old `[A-Za-z_]+` pattern.
        g = re.match(r'^\#define\s*([A-Za-z_][A-Za-z0-9_]*)\s+0x([0-9A-Fa-f]+)', line)
        if g:
            print("CONST AS ? %s = &H%s" % (g.group(1), g.group(2)))
            continue
        # #define foo 0001   (decimal constant)
        g = re.match(r'^\#define\s*([A-Za-z_][A-Za-z0-9_]*)\s+([0-9]+)', line)
        if g:
            print("CONST AS ? %s = %s" % (g.group(1), g.group(2)))
            continue
    if in_type:
        # };  -> end of the struct definition
        g = re.match(r'^\s*}\s*;', line)
        if g:
            print("END TYPE")
            in_type = False
            continue
        # struct Menu *NextMenu;   (struct-typed member, optionally a pointer)
        g = re.match(r'^\s*struct\s+([A-Za-z_]+)\s+(\*?)\s*([^;]+)', line)
        if g:
            print(" AS %s %s %s" % (g.group(1), g.group(2).replace('*', 'PTR'), g.group(3)))
            continue
        # WORD LeftEdge, TopEdge;   (scalar member; WORD maps to INTEGER)
        g = re.match(r'^\s+([A-Za-z_]+)\s+(\*?)\s*([^;]+)', line)
        if g:
            print(" AS %s %s %s" % (g.group(1).replace('WORD', 'INTEGER'), g.group(2).replace('*', 'PTR'), g.group(3)))
            continue
    print("REM %s" % line.strip())
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,938 | gooofy/aqb | refs/heads/master | /src/tools/md2amiga/marko/inline_parser.py | """
Parse inline elements
"""
import re
import string
from .helpers import is_paired, normalize_label, is_type_check
from . import patterns
if is_type_check():
from typing import Type, List, Optional, Match, Tuple, Union
from .inline import InlineElement
from .block import Document
ElementType = Type[InlineElement]
Group = Tuple[int, int, Optional[str]]
def parse(text, elements, fallback):
    # type: (str, List[ElementType], ElementType) -> List[InlineElement]
    """Parse given text and produce a list of inline elements.

    :param text: the text to be parsed.
    :param elements: the element types to be included in parsing
    :param fallback: fallback class when no other element type is matched.
    """
    # Collect every raw match for every element type; spans may overlap here.
    candidates = []  # type: List[Token]
    for element_type in elements:
        candidates.extend(
            Token(element_type, found, text, fallback)
            for found in element_type.find(text)
        )
    candidates.sort()
    # De-overlap, then turn the surviving tokens (plus gaps) into elements.
    return make_elements(_resolve_overlap(candidates), text, fallback=fallback)
def _resolve_overlap(tokens):
    # type: (List[Token]) -> List[Token]
    """Reduce a sorted token list to non-overlapping tokens, nesting children."""
    if not tokens:
        return tokens
    kept = []
    current = tokens[0]
    for candidate in tokens[1:]:
        rel = current.relation(candidate)
        if rel == Token.PRECEDE:
            # Disjoint: commit the current token and advance.
            kept.append(current)
            current = candidate
        elif rel == Token.CONTAIN:
            current.append_child(candidate)
        elif rel == Token.INTERSECT and current.etype.priority < candidate.etype.priority:
            # Partial overlap: the higher-priority token wins.
            current = candidate
        # SHADE tokens (and lower-priority intersections) are dropped.
    kept.append(current)
    return kept
def make_elements(tokens, text, start=0, end=None, fallback=None):
    # type: (List[Token], str, int, Optional[int], ElementType) -> List[InlineElement]
    """Make elements from a list of parsed tokens.

    It will turn all unmatched holes into fallback elements.

    :param tokens: a list of parsed tokens.
    :param text: the original tet.
    :param start: the offset of where parsing starts. Defaults to the start of text.
    :param end: the offset of where parsing ends. Defauls to the end of text.
    :param fallback: fallback element type.
    :returns: a list of inline elements.
    """
    end = end or len(text)
    elements = []  # type: List[InlineElement]
    cursor = start
    for token in tokens:
        # Wrap any gap before this token in a fallback element.
        if cursor < token.start:
            elements.append(fallback(text[cursor : token.start]))  # type: ignore
        elements.append(token.as_element())
        cursor = token.end
    # Trailing gap after the last token.
    if cursor < end:
        elements.append(fallback(text[cursor:end]))  # type: ignore
    return elements
class Token:
    """An intermediate class to wrap the match object.

    It can be converted to element by :meth:`as_element()`
    """

    # Relation constants returned by :meth:`relation`.
    PRECEDE = 0    # self ends at or before `other` starts
    INTERSECT = 1  # spans overlap with neither containment nor shading
    CONTAIN = 2    # `other` lies entirely inside self's parse group
    SHADE = 3      # `other` overlaps self but starts after self's inner span

    def __init__(self, etype, match, text, fallback):
        # type: (ElementType, _Match, str, ElementType) -> None
        self.etype = etype
        self.match = match
        self.start = match.start()
        self.end = match.end()
        # Inner span: the match group whose content is parsed as children.
        self.inner_start = match.start(etype.parse_group)
        self.inner_end = match.end(etype.parse_group)
        self.text = text
        self.fallback = fallback
        self.children = []  # type: List[Token]

    def relation(self, other):  # type: (Token) -> int
        """Classify how `other` relates to this token positionally."""
        if self.end <= other.start:
            return Token.PRECEDE
        if self.end >= other.end:
            if (
                self.etype.parse_children
                and other.start >= self.inner_start
                and other.end <= self.inner_end
            ):
                return Token.CONTAIN
            if self.etype.parse_children and self.inner_end <= other.start:
                return Token.SHADE
        return Token.INTERSECT

    def append_child(self, child):  # type: (Token) -> None
        # Children are only tracked when the element type parses its content.
        if not self.etype.parse_children:
            return
        self.children.append(child)

    def as_element(self):  # type: () -> InlineElement
        """Instantiate the element, recursively building child elements."""
        e = self.etype(self.match)
        if e.parse_children:
            self.children = _resolve_overlap(self.children)
            e.children = make_elements(
                self.children,
                self.text,
                self.inner_start,
                self.inner_end,
                self.fallback,
            )
        return e

    def __repr__(self):  # type: () -> str
        return "<{}: {} start={} end={}>".format(
            self.__class__.__name__, self.etype.__name__, self.start, self.end
        )

    def __lt__(self, o):  # type: (Token) -> bool
        # Tokens sort by start offset (used by parse()).
        return self.start < o.start
def find_links_or_emphs(text, root_node):  # type: (str, Document) -> List[MatchObj]
    """Find links/images or emphasis from text.

    :param text: the original text.
    :param root_node: a reference to the root node of the AST.
    :returns: an iterable of match object.
    """
    delimiters_re = re.compile(r"(?:!?\[|\*+|_+)")
    i = 0
    delimiters = []  # type: List[Delimiter]
    escape = False
    matches = []  # type: List[MatchObj]
    code_pattern = re.compile(r"(?<!`)(`+)(?!`)([\s\S]+?)(?<!`)\1(?!`)")
    while i < len(text):
        if escape:
            # Previous character was a backslash: take this one verbatim.
            escape = False
            i += 1
        elif text[i] == "\\":
            escape = True
            i += 1
        elif code_pattern.match(text, i):
            # Skip inline code spans entirely.
            i = code_pattern.match(text, i).end()  # type: ignore
        elif text[i] == "]":
            # A "]" may close a link/image against an opener on the stack.
            node = look_for_image_or_link(text, delimiters, i, root_node, matches)
            if node:
                i = node.end()
                matches.append(node)
            else:
                i += 1
        else:
            # Collect potential delimiters: "[", "![", or runs of "*" / "_".
            m = delimiters_re.match(text, i)
            if m:
                delimiters.append(Delimiter(m, text))
                i = m.end()
            else:
                i += 1
    # Resolve any emphasis delimiters that remain after the scan.
    process_emphasis(text, delimiters, None, matches)
    return matches
def look_for_image_or_link(text, delimiters, close, root_node, matches):
    # type: (str, List[Delimiter], int, Document, List[MatchObj]) -> Optional[MatchObj]
    """Try to complete a link/image whose ``]`` sits at offset ``close``.

    Scans the delimiter stack top-down for the nearest ``[`` / ``![`` opener;
    returns a :class:`MatchObj` on success, otherwise None.
    """
    for i, d in list(enumerate(delimiters))[::-1]:
        if d.content not in ("[", "!["):
            continue  # not an opener candidate
        if not d.active:
            break  # deactivated opener: drop it below and give up
        if not _is_legal_link_text(text[d.end : close]):
            break
        link_text = (d.end, close, text[d.end : close])
        etype = "Image" if d.content == "![" else "Link"
        # Either an inline "(dest "title")" or a reference label must follow.
        match = _expect_inline_link(text, close + 1) or _expect_reference_link(
            text, close + 1, link_text[2], root_node
        )
        if not match:  # not a link
            break
        rv = MatchObj(etype, text, d.start, match[2], link_text, match[0], match[1])
        # Resolve emphasis inside the link text before committing.
        process_emphasis(text, delimiters, i, matches)
        if etype == "Link":
            # Links cannot nest: deactivate earlier "[" openers.
            for d in delimiters[:i]:
                if d.content == "[":
                    d.active = False
        del delimiters[i]
        return rv
    else:
        # no matching opener is found
        return None
    # Reached via `break`: discard the unusable opener.
    del delimiters[i]
    return None
def _is_legal_link_text(text):  # type: (str) -> bool
    # Link text must have balanced square brackets.
    return is_paired(text, "[", "]")
def _expect_inline_link(text, start):
    # type: (str, int) -> Optional[Tuple[Group, Group, int]]
    """Parse an inline link tail ``(link_dest "link_title")`` at ``start``.

    Returns ``(link_dest, link_title, consumed_end)`` where the first two
    items are (start, end, text) triples, or None if no inline link follows.
    """
    if start >= len(text) - 1 or text[start] != "(":
        return None
    i = start + 1
    m = patterns.whitespace.match(text, i)
    if m:
        i = m.end()
    # First try the angle-bracketed destination form.
    m = patterns.link_dest_1.match(text, i)
    if m:
        link_dest = m.start(), m.end(), m.group()
        i = m.end()
    else:
        if text[i] == "<":
            return None
        # Bare destination: scan forward tracking backslash escapes and
        # balanced parentheses; stop at whitespace or the closing ")".
        open_num = 0
        escaped = False
        prev = i
        while i < len(text):
            c = text[i]
            if escaped:
                escaped = False
            elif c == "\\":
                escaped = True
            elif c == "(":
                open_num += 1
            elif c in string.whitespace:
                break
            elif c == ")":
                if open_num > 0:
                    open_num -= 1
                else:
                    break
            i += 1
        if open_num != 0:
            return None
        link_dest = prev, i, text[prev:i]
    # Title defaults to an empty placeholder at the current offset.
    link_title = i, i, None
    tail_re = re.compile(r"(?:\s+%s)?\s*\)" % patterns.link_title, flags=re.UNICODE)
    m = tail_re.match(text, i)
    if not m:
        return None
    if m.group("title"):
        link_title = m.start("title"), m.end("title"), m.group("title")  # type: ignore
    return (link_dest, link_title, m.end())
def _expect_reference_link(text, start, link_text, root_node):
    # type: (str, int, str, Document) -> Optional[Tuple[Group, Group, int]]
    """Parse a reference link tail (full, collapsed or shortcut) at ``start``.

    :param text: the full source text.
    :param start: offset just past the closing ``]`` of the link text.
    :param link_text: the bracketed link text, used as the label when no
        explicit non-empty ``[label]`` follows.
    :param root_node: document root holding ``link_ref_defs``.
    :returns: ``(link_dest, link_title, consumed_end)`` or None when the
        label has no reference definition.
    """
    match = patterns.optional_label.match(text, start)
    link_label = link_text
    if match and match.group()[1:-1]:
        # An explicit non-empty "[label]" overrides the link text.
        link_label = match.group()[1:-1]
    result = _get_reference_link(link_label, root_node)
    if not result:
        return None
    link_dest = start, start, result[0]
    link_title = start, start, result[1]
    # Idiom fix: replaced the fragile `match and match.end() or start`
    # (which falls through when match.end() is 0) with an explicit
    # conditional expression.
    return (link_dest, link_title, match.end() if match else start)
def _get_reference_link(link_label, root_node):
    # type: (str, Document) -> Optional[Tuple[str, str]]
    # Look up the (dest, title) pair for a normalized label among the
    # document's link reference definitions; None when undefined.
    normalized_label = normalize_label(link_label)
    return root_node.link_ref_defs.get(normalized_label, None)
def process_emphasis(text, delimiters, stack_bottom, matches):
    # type: (str, List[Delimiter], Optional[int], List[MatchObj]) -> None
    """Pair emphasis delimiters above ``stack_bottom`` into matches.

    Appends Emphasis/StrongEmphasis MatchObj entries to ``matches`` and
    removes consumed delimiters from ``delimiters`` in place.
    """
    # Separate bottoms for "*" and "_" avoid rescanning unpairable regions.
    star_bottom = underscore_bottom = stack_bottom
    cur = _next_closer(delimiters, stack_bottom)
    while cur is not None:
        d_closer = delimiters[cur]
        bottom = star_bottom if d_closer.content[0] == "*" else underscore_bottom
        opener = _nearest_opener(delimiters, cur, bottom)
        if opener is not None:
            d_opener = delimiters[opener]
            # Strong emphasis needs at least 2 delimiter chars on both sides.
            n = 2 if len(d_opener.content) >= 2 and len(d_closer.content) >= 2 else 1
            match = MatchObj(
                "StrongEmphasis" if n == 2 else "Emphasis",
                text,
                d_opener.end - n,
                d_closer.start + n,
                (d_opener.end, d_closer.start, text[d_opener.end : d_closer.start]),
            )
            matches.append(match)
            # Delimiters strictly between the pair can never match anymore.
            del delimiters[opener + 1 : cur]
            cur -= cur - opener - 1
            # remove() returns True when the delimiter run is fully consumed.
            if d_opener.remove(n):
                delimiters.remove(d_opener)
                cur -= 1
            if d_closer.remove(n, True):
                delimiters.remove(d_closer)
                cur = cur - 1 if cur > 0 else None
        else:
            # No opener for this closer: lower the bottom for its char kind.
            bottom = cur - 1 if cur > 1 else None
            if d_closer.content[0] == "*":
                star_bottom = bottom
            else:
                underscore_bottom = bottom
            if not d_closer.can_open:
                delimiters.remove(d_closer)
            cur = _next_closer(delimiters, cur)
    # Everything above the original bottom has been fully processed.
    lower = stack_bottom + 1 if stack_bottom is not None else 0
    del delimiters[lower:]
def _next_closer(delimiters, bound):
# type: (List[Delimiter], Optional[int]) -> Optional[int]
i = bound + 1 if bound is not None else 0
while i < len(delimiters):
d = delimiters[i]
if getattr(d, "can_close", False):
return i
i += 1
return None
def _nearest_opener(delimiters, higher, lower):
# type: (List[Delimiter], int, Optional[int]) -> Optional[int]
i = higher - 1
lower = lower if lower is not None else -1
while i > lower:
d = delimiters[i]
if getattr(d, "can_open", False) and d.closed_by(delimiters[higher]):
return i
i -= 1
return None
class Delimiter:
    """A potential delimiter run: "[", "![", or a run of "*" / "_"."""

    whitespace_re = re.compile(r"\s", flags=re.UNICODE)

    def __init__(self, match, text):  # type: (_Match, str) -> None
        self.start = match.start()
        self.end = match.end()
        self.content = match.group()
        self.text = text
        # Openers are deactivated when enclosed by a completed link.
        self.active = True
        # Open/close flags only apply to emphasis runs.
        if self.content[0] in ("*", "_"):
            self.can_open = self._can_open()
            self.can_close = self._can_close()

    def _can_open(self):  # type: () -> bool
        # "_" carries extra restrictions to suppress intraword emphasis.
        if self.content[0] == "*":
            return self.is_left_flanking()
        return self.is_left_flanking() and (
            not self.is_right_flanking() or self.preceded_by_punc()
        )

    def _can_close(self):  # type: () -> bool
        if self.content[0] == "*":
            return self.is_right_flanking()
        return self.is_right_flanking() and (
            not self.is_left_flanking() or self.followed_by_punc()
        )

    def is_left_flanking(self):  # type: () -> bool
        # Not followed by whitespace, and not followed by punctuation
        # unless at the text start or preceded by whitespace/punctuation
        # (cf. the CommonMark flanking rules).
        return (
            self.end < len(self.text)
            and self.whitespace_re.match(self.text, self.end) is None
        ) and (
            not self.followed_by_punc()
            or self.start == 0
            or self.preceded_by_punc()
            or self.whitespace_re.match(self.text, self.start - 1) is not None
        )

    def is_right_flanking(self):  # type: () -> bool
        # Mirror image of is_left_flanking().
        return (
            self.start > 0
            and self.whitespace_re.match(self.text, self.start - 1) is None
        ) and (
            not self.preceded_by_punc()
            or self.end == len(self.text)
            or self.followed_by_punc()
            or self.whitespace_re.match(self.text, self.end) is not None
        )

    def followed_by_punc(self):  # type: () -> bool
        return (
            self.end < len(self.text)
            and patterns.punctuation.match(self.text, self.end) is not None
        )

    def preceded_by_punc(self):  # type: () -> bool
        return (
            self.start > 0
            and patterns.punctuation.match(self.text[self.start - 1]) is not None
        )

    def closed_by(self, other):  # type: (Delimiter) -> bool
        # Same delimiter character required; the combined-length
        # "multiple of 3" restriction applies when either side can both
        # open and close (cf. CommonMark emphasis rules 9/10).
        return not (
            self.content[0] != other.content[0]
            or (self.can_open and self.can_close or other.can_open and other.can_close)
            and len(self.content + other.content) % 3 == 0
            and not all(len(d.content) % 3 == 0 for d in [self, other])
        )

    def remove(self, n, left=False):  # type: (int, bool) -> bool
        """Consume ``n`` delimiter chars; return True when fully consumed."""
        if len(self.content) <= n:
            return True
        if left:
            self.start += n
        else:
            self.end -= n
        self.content = self.content[n:]
        return False

    def __repr__(self):  # type: () -> str
        return "<Delimiter {!r} start={} end={}>".format(
            self.content, self.start, self.end
        )
class MatchObj:
    """A hand-built stand-in that mimics the ``re.Match`` interface.

    Group data is supplied as ``(start, end, text)`` triples; group 0
    always denotes the whole matched span of *text*.
    """

    def __init__(self, etype, text, start, end, *groups):
        # type: (str, str, int, int, Group) -> None
        self._source = text
        self._span = (start, end)
        self._group_info = groups
        self.etype = etype

    def group(self, n=0): # type: (int) -> str
        """Return the text of group *n* (the full span for ``n == 0``)."""
        if n:
            return self._group_info[n - 1][2]  # type: ignore
        lo, hi = self._span
        return self._source[lo:hi]

    def start(self, n=0): # type: (int) -> int
        """Start offset of group *n* in the source text."""
        return self._span[0] if n == 0 else self._group_info[n - 1][0]

    def end(self, n=0): # type: (int) -> int
        """End offset of group *n* in the source text."""
        return self._span[1] if n == 0 else self._group_info[n - 1][1]
# Materialized only under static type checking: at runtime is_type_check()
# returns False, so Union/Match need not be importable here.
if is_type_check():
    _Match = Union[Match, MatchObj]
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,939 | gooofy/aqb | refs/heads/master | /src/tools/md2amiga/marko/helpers.py | """
Helper functions and data structures
"""
import functools
import re
from contextlib import contextmanager
from importlib import import_module
def camel_to_snake_case(name): # type: (str) -> str
    """Convert a CamelCased *name* into its snake_case equivalent.

    Runs of capitals (acronyms) are kept together, e.g. HTTPResponse
    becomes http_response.
    """
    words = re.findall(r"[A-Z][a-z]+|[A-Z]+(?![a-z])", name)
    return "_".join(word.lower() for word in words)
def is_paired(text, open="(", close=")"): # type: (str, str, str) -> bool
    """Check that *text* contains only balanced or backslash-escaped
    *open*/*close* characters.

    A closer with no matching opener fails immediately; unclosed openers
    fail at the end.
    """
    depth = 0
    chars = iter(text)
    for ch in chars:
        if ch == "\\":
            # Skip the escaped character (a lone trailing backslash is fine).
            next(chars, None)
        elif ch == open:
            depth += 1
        elif ch == close:
            if not depth:
                return False
            depth -= 1
    return depth == 0
def _preprocess_text(text): # type: (str) -> str
return text.replace("\r\n", "\n")
class Source:
    """Wrapper class on content to be parsed.

    Tracks a read position (``pos``) inside the normalized text buffer and a
    stack of open block elements (``_states``) whose concatenated ``_prefix``
    regexes must match at the start of each consumed line.
    """

    def __init__(self, text): # type: (str) -> None
        self._buffer = _preprocess_text(text)
        self.pos = 0
        # Position saved by anchor() and restored by reset().
        self._anchor = 0
        self._states = [] # type: List[BlockElement]
        # The last successful (not yet consumed) match, if any.
        self.match = None # type: Optional[Match]

    @property
    def state(self): # type: () -> BlockElement
        """Returns the current element state."""
        if not self._states:
            raise RuntimeError("Need to push a state first.")
        return self._states[-1]

    @property
    def root(self): # type: () -> BlockElement
        """Returns the root element, which is at the bottom of self._states."""
        if not self._states:
            raise RuntimeError("Need to push a state first.")
        return self._states[0]

    def push_state(self, element): # type: (BlockElement) -> None
        """Push a new state to the state stack."""
        self._states.append(element)

    def pop_state(self): # type: () -> BlockElement
        """Pop the top most state."""
        return self._states.pop()

    @contextmanager
    def under_state(self, element):
        # type: (BlockElement) -> Generator[Source, None, None]
        """A context manager to enable a new state temporarily."""
        self.push_state(element)
        yield self
        self.pop_state()

    @property
    def exhausted(self): # type: () -> bool
        """Indicates whether the source reaches the end."""
        return self.pos >= len(self._buffer)

    @property
    def prefix(self): # type: () -> str
        """The prefix of each line when parsing."""
        # Concatenation of the per-element prefix regexes, outermost first.
        return "".join(s._prefix for s in self._states)

    def _expect_re(self, regexp, pos):
        # type: (Union[Pattern, str], int) -> Optional[Match]
        # Low-level match at an absolute buffer position, bypassing any
        # prefix handling.
        if isinstance(regexp, str):
            regexp = re.compile(regexp)
        return regexp.match(self._buffer, pos)

    @staticmethod
    @functools.lru_cache()
    def match_prefix(prefix, line): # type: (str, str) -> int
        """Check if the line starts with given prefix and
        return the position of the end of prefix.
        If the prefix is not matched, return -1.
        """
        m = re.match(prefix, line.expandtabs(4))
        if not m:
            # A blank/short line may still "belong" to the open elements:
            # pad the line with spaces before its newline and retry; on
            # success, point at the newline character itself.
            if re.match(prefix, line.expandtabs(4).replace("\n", " " * 99 + "\n")):
                return len(line) - 1
            return -1
        pos = m.end()
        if pos == 0:
            return 0
        # Map the match end in the tab-expanded line back to an index into
        # the raw (unexpanded) line.
        for i in range(1, len(line) + 1):
            if len(line[:i].expandtabs(4)) >= pos:
                return i
        return -1 # pragma: no cover

    def expect_re(self, regexp): # type: (Union[Pattern, str]) -> Optional[Match]
        """Test against the given regular expression and returns the match object.
        :param regexp: the expression to be tested.
        :returns: the match object.
        """
        prefix_len = self.match_prefix(
            self.prefix, self.next_line(require_prefix=False) # type: ignore
        )
        if prefix_len >= 0:
            # Match starts right after the prefix of the current line.
            match = self._expect_re(regexp, self.pos + prefix_len)
            self.match = match
            return match
        else:
            return None

    def next_line(self, require_prefix=True): # type: (bool) -> Optional[str]
        """Return the next line in the source.
        :param require_prefix: if False, the whole line will be returned.
        otherwise, return the line with prefix stripped or None if the prefix
        is not matched.
        """
        if require_prefix:
            m = self.expect_re(r"(?m)[^\n]*?$\n?")
        else:
            m = self._expect_re(r"(?m)[^\n]*$\n?", self.pos)
        self.match = m
        if m:
            return m.group()
        return None

    def consume(self): # type: () -> None
        """Consume the body of source. ``pos`` will move forward."""
        if self.match:
            self.pos = self.match.end()
            # When a full line (including its newline) was consumed, switch
            # the open elements to their continuation prefixes.
            if self.match.group()[-1:] == "\n":
                self._update_prefix()
            self.match = None

    def anchor(self): # type: () -> None
        """Pin the current parsing position."""
        self._anchor = self.pos

    def reset(self): # type: () -> None
        """Reset the position to the last anchor."""
        self.pos = self._anchor

    def _update_prefix(self): # type: () -> None
        # After an element's first line is consumed, subsequent lines must
        # match its "second" (continuation) prefix instead.
        for s in self._states:
            if hasattr(s, "_second_prefix"):
                s._prefix = s._second_prefix # type: ignore
def normalize_label(label): # type: (str) -> str
    """Return the normalized form of a link reference label.

    Internal whitespace runs collapse to single spaces; the result is
    trimmed and casefolded for case-insensitive comparison.
    """
    squashed = re.sub(r"\s+", " ", label)
    return squashed.strip().casefold()
def load_extension_object(name):
    """Resolve an extension's ``make_extension`` factory from a name string.

    Bare names are first looked up under ``marko.ext``; dotted names (and
    bare names missing there) are imported directly.
    """
    module = None
    if "." not in name:
        try:
            module = import_module(f"marko.ext.{name}")
        except ImportError:
            module = None
    if module is None:
        try:
            module = import_module(name)
        except ImportError:
            raise ImportError(
                f"Extension {name} cannot be found. Please check the name."
            )
    try:
        return getattr(module, "make_extension")
    except AttributeError:
        raise AttributeError(
            f"Module {name} does not have 'make_extension' attributte."
        )
def is_type_check() -> bool:  # pragma: no cover
    """Report whether we are running under a static type checker."""
    try:
        from typing import TYPE_CHECKING
    except ImportError:
        return False
    return TYPE_CHECKING
# Type-check-only imports: at runtime is_type_check() is False, so the
# circular import of .block is never executed.
if is_type_check():
    from .block import BlockElement
    from typing import Optional, List, Generator, Union, Pattern, Match
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,940 | gooofy/aqb | refs/heads/master | /src/lib/minbrt/as2mot.py | #!/usr/bin/env python3
import os
import sys
import re
# Maps GNU as (gcc "jump") mnemonics to the Motorola branch mnemonics
# expected by classic Amiga assemblers, e.g. jra -> bra, jne -> bne.
BRANCH_TABLE = {
    'jra' : 'bra',
    'jhi' : 'bhi',
    'jls' : 'bls',
    'jhs' : 'bhs',
    'jlo' : 'blo',
    'jne' : 'bne',
    'jeq' : 'beq',
    'jvc' : 'bvc',
    'jvs' : 'bvs',
    'jpl' : 'bpl',
    'jmi' : 'bmi',
    'jge' : 'bge',
    'jlt' : 'blt',
    'jgt' : 'bgt',
    'jle' : 'ble',
    'jcc' : 'bcc',
    'jcs' : 'bcs',
    }
# Ordered (compiled-pattern, replacement-template) pairs; for each input line
# the FIRST matching pattern wins (see the main loop below).  Templates use
# re.Match.expand() syntax; two magic markers are post-processed afterwards:
#   HEXHEX      -> stripped, and '0x' immediates rewritten to '$'
#   SECTIONNAME -> replaced by a generated unique section name s%05d
CONVERSION_PATTERNS = [
    # move.l d0,(-368,a5)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+(?P<r0>[ad0-7]+),\((?P<off>[+\-0-9]+),(?P<r1>[ad0-7]+)\)$'), '\t\g<mn>\t\g<r0>, \g<off>(\g<r1>)'),
    # link.w a5,#-12
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+(?P<r0>[ad0-7]+),#(?P<imm>[+\-0-9]+)$'), '\t\g<mn>\t\g<r0>, #\g<imm>'),
    # clr.l (-4,a5)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+\((?P<off>[+\-0-9]+),(?P<r0>[ad0-7]+)\)$'), '\t\g<mn>\t\g<off>(\g<r0>)'),
    # subq.l #1,d0
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+#(?P<imm>[+\-0-9]+),(?P<r1>[ad0-7sp]+)$'), '\t\g<mn>\t#\g<imm>, \g<r1>'),
    # and.l #0xFFFF,d0
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+#(?P<imm>0x[+\-a-fA-F0-9]+),(?P<r1>[ad0-7sp]+)$'), '\t\g<mn>\t#\g<imm>, \g<r1>HEXHEX'),
    # jra .L2
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+(?P<label>[a-zA-Z0-9_\.]+)$'), '\t\g<mn>\t\g<label>'),
    # add.l d0,a0
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+(?P<r0>[ad0-7]+),(?P<r1>[ad0-7]+)$'), '\t\g<mn>\t\g<r0>, \g<r1>'),
    # move.l (12,a5),d0
    # movem.l (-28,a5),d2/d3/a6
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+\((?P<off>[+\-0-9]+),(?P<r0>[ad0-7sp]+)\),(?P<r1>[ad0-7sp/]+)$'), '\t\g<mn>\t\g<off>(\g<r0>), \g<r1>'),
    # move.b (a0),(-9,a5)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+\((?P<r0>[ad0-7]+)\),\((?P<off>[+\-0-9]+),(?P<r1>[ad0-7]+)\)$'), '\t\g<mn>\t(\g<r0>), \g<off>(\g<r1>)'),
    # move.l _g_positiveExpThreshold,(-10,a5)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+(?P<label>[a-zA-Z0-9_\.]+),\((?P<off>[+\-0-9]+),(?P<r1>[ad0-7]+)\)$'), '\t\g<mn>\t\g<label>, \g<off>(\g<r1>)'),
    # move.l #.LC0,d0
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+#(?P<label>[a-zA-Z0-9_\.]+),(?P<r1>[ad0-7sp]+)$'), '\t\g<mn>\t#\g<label>, \g<r1>'),
    # move.l #.LC0,(-4,a5)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+#(?P<label>[a-zA-Z0-9_\.]+),\((?P<off>[+\-0-9]+),(?P<r1>[ad0-7]+)\)$'), '\t\g<mn>\t#\g<label>, \g<off>(\g<r1>)'),
    # pea .LC1
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+(?P<label>[a-zA-Z0-9_\.]+)$'), '\t\g<mn>\t\g<label>'),
    # move.b (a1),d0
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+\((?P<r0>[ad0-7sp]+)\),(?P<r1>[ad0-7sp]+)$'), '\t\g<mn>\t(\g<r0>), \g<r1>'),
    # move.b d0,(a0)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+(?P<r0>[ad0-7]+),\((?P<r1>[ad0-7]+)\)$'), '\t\g<mn>\t\g<r0>, (\g<r1>)'),
    # move.b (-9,a5),(a0)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+\((?P<off>[+\-0-9]+),(?P<r0>[ad0-7sp]+)\),\((?P<r1>[ad0-7sp]+)\)$'), '\t\g<mn>\t\g<off>(\g<r0>), (\g<r1>)'),
    # move.l (-4,a5),_g_mem
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+\((?P<off>[+\-0-9]+),(?P<r0>[ad0-7sp]+)\),(?P<label>[a-zA-Z0-9_\.]+)$'), '\t\g<mn>\t\g<off>(\g<r0>), \g<label>'),
    # addq.l #1,(-4,a5)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+#(?P<imm>[+\-0-9]+),\((?P<off>[+\-0-9]+),(?P<r1>[ad0-7]+)\)$'), '\t\g<mn>\t#\g<imm>, \g<off>(\g<r1>)'),
    # nop
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)$'), '\t\g<mn>'),
    # unlk a5
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+(?P<r1>[ad0-7]+)$'), '\t\g<mn>\t\g<r1>'),
    # move.b #32,(a0)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+#(?P<imm>[+\-0-9]+),\((?P<r1>[ad0-7]+)\)$'), '\t\g<mn>\t#\g<imm>, (\g<r1>)'),
    # move.w #1,_do_resume
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+#(?P<imm>[+\-0-9]+),(?P<label>[a-zA-Z0-9_\.]+)$'), '\t\g<mn>\t#\g<imm>, \g<label>'),
    # clr.b (a0)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+\((?P<r1>[ad0-7]+)\)$'), '\t\g<mn>\t(\g<r1>)'),
    # move.l (16,a5),-(sp)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+\((?P<off>[+\-0-9]+),(?P<r0>[ad0-7]+)\),-\((?P<r1>[ad0-7sp]+)\)$'), '\t\g<mn>\t\g<off>(\g<r0>), -(\g<r1>)'),
    # move.l d0,-(sp)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+(?P<r0>[ad0-7]+),-\((?P<r1>[ad0-7sp]+)\)$'), '\t\g<mn>\t\g<r0>, -(\g<r1>)'),
    # pea 1.w
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+(?P<addr>[0-7]+)\.(?P<w>[wlWL])$'), '\t\g<mn>\t\g<addr>.\g<w>'),
    # clr.l -(sp)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+-\((?P<r1>[ad0-7sp]+)\)$'), '\t\g<mn>\t-(\g<r1>)'),
    # move.l (8,a5),(-12,a5)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+\((?P<off>[+\-0-9]+),(?P<r0>[ad0-7sp]+)\),\((?P<off2>[+\-0-9]+),(?P<r1>[ad0-7sp]+)\)$'), '\t\g<mn>\t\g<off>(\g<r0>), \g<off2>(\g<r1>)'),
    # move.l _SysBase,d0
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+(?P<label>[a-zA-Z0-9_\.]+),(?P<r1>[ad0-7]+)$'), '\t\g<mn>\t\g<label>, \g<r1>'),
    # move.l d0,_SysBase
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+(?P<r1>[ad0-7]+),(?P<label>[a-zA-Z0-9_\.]+)$'), '\t\g<mn>\t\g<r1>, \g<label>'),
    # jsr a6@(-0x270:W)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+(?P<r0>[ad0-7sp]+)@\((?P<off>[+\-0-9xa-fA-F]+):(?P<w>[wlWL])\)$'), '\t\g<mn>\t\g<off>(\g<r0>)HEXHEX'),
    # jmp %pc@(2,d0:w)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+%pc@\((?P<off>[+\-0-9xa-fA-F]+),(?P<r0>[ad0-7sp]+):(?P<w>[wlWL])\)$'), '\t\g<mn>\t\g<off>(pc, \g<r0>)'),
    # move.l (sp)+,d2
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+\((?P<r0>[ad0-7sp]+)\)\+,(?P<r1>[ad0-7sp]+)$'), '\t\g<mn>\t(\g<r0>)+, \g<r1>'),
    # movem.l a6/d3/d2,-(sp)
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+(?P<regs>[ad0-7/]+),-\((?P<r1>[ad0-7sp]+)\)$'), '\t\g<mn>\t\g<regs>, -(\g<r1>)'),
    # movem.l (sp)+,d2/d3/a6
    (re.compile(r'^\s+(?P<mn>[a-zA-Z][a-zA-Z.]+)\s+\((?P<r1>[ad0-7sp]+)\)\+,(?P<regs>[ad0-7/]+)$'), '\t\g<mn>\t(\g<r1>)+, \g<regs>'),
    # .lcomm _g_positiveExpThreshold,4
    (re.compile(r'^\.lcomm\s+(?P<label>[a-zA-Z_.0-9]+),(?P<size>[0-9]+)$'), '\g<label>:\tDS.B\t\g<size>'),
    # .skip 4
    (re.compile(r'^\s+\.skip\s+(?P<size>[0-9]+)$'), '\tDS.B\t\g<size>'),
    # .globl __astr_itoa_ext
    (re.compile(r'^\s+\.globl\s+(?P<label>[a-zA-Z_.0-9]+)$'), '\tXDEF\t\g<label>'),
    # .ascii "TRUE\0"
    (re.compile(r'^\s+\.ascii\s+"(?P<str>[A-Za-z0-9 ,.?!*]*)\\0"$'), '\tDC.B\t\'\g<str>\', 0'),
    # .ascii "*** error: failed to open dos.library!\12\0"
    (re.compile(r'^\s+\.ascii\s+"(?P<str>[A-Za-z0-9 ,.?:;!\*]+)\\12\\0"$'), '\tDC.B\t\"\g<str>\", 10, 0'),
    # .ascii "\12\0"
    (re.compile(r'^\s+\.ascii\s+"\\12\\0"$'), '\tDC.B\t 10, 0'),
    # .ascii "\14\0"
    (re.compile(r'^\s+\.ascii\s+"\\14\\0"$'), '\tDC.B\t 12, 0'),
    # .ascii "*** unhandled runtime error code: \0"'
    (re.compile(r'^\s+\.ascii\s+"(?P<str>[A-Za-z0-9 ,.?:;!\*]+)\\0"$'), '\tDC.B\t\"\g<str>\", 0'),
    (re.compile(r'^\#NO_APP'), ''),
    (re.compile(r'^\#APP'), ''),
    # | 136 "astr.c" 1
    (re.compile(r'^\|\s+[0-9]+\s+"'), '; |'),
    # .text
    (re.compile(r'^\s+.text'), '\tSECTION SECTIONNAME, CODE'),
    # .bss
    (re.compile(r'^\s+.bss'), '\tSECTION SECTIONNAME, BSS'),
    (re.compile(r'^\s+.align\s+2'), '\tEVEN'),
    (re.compile(r'^(?P<label>[a-zA-Z_.0-9]+):$'), '\g<label>:'),
    # (re.compile(), ''),
    ]
# ---- main: translate one gas .asm file to Motorola/Amiga syntax on stdout ----
if len(sys.argv) != 2:
    # FIX: the original passed sys.argv[0] as a second print() argument
    # ("usage: %s <foo.asm>", argv0) instead of formatting it in.
    print("usage: %s <foo.asm>" % sys.argv[0])
    sys.exit(1)
asmfn = sys.argv[1]
print("%s: processing %s..." % (sys.argv[0], asmfn), file=sys.stderr)
scnt = 0
with open(asmfn, 'r') as asmf:
    for line in asmf:
        l = line.rstrip()
        # FIXME: crude local label transformation
        l = re.sub(r"\.LC([0-9]+)", r"_gccLC\1", l)
        l = re.sub(r"\.L([0-9]+)", r"_gccL\1", l)
        matched = False
        # first matching conversion pattern wins
        for p, f in CONVERSION_PATTERNS:
            m = p.match(l)
            if m:
                s = m.expand(f)
                # expand hex immediates tagged with the HEXHEX marker
                if ('HEXHEX' in s):
                    s = s.replace('HEXHEX', '').replace('0x', '$')
                # give every .text/.bss section a unique generated name
                if ('SECTIONNAME' in s):
                    s = s.replace('SECTIONNAME', 's%05d' % scnt)
                    scnt += 1
                # translate gas jump mnemonics to Motorola branches.
                # NOTE(review): plain substring replace — could corrupt a
                # label that happens to contain a jump mnemonic.
                for j, b in BRANCH_TABLE.items():
                    s = s.replace(j, b)
                print(s)
                matched = True
                break
        if not matched:
            # unknown construct: bail out so the pattern table can be extended
            print("*** FAIL: r'%s'" % l, file=sys.stderr)
            sys.exit(1)
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,941 | gooofy/aqb | refs/heads/master | /src/tools/md2amiga/tests/test_helpers.py | import pytest
from marko import helpers
@pytest.mark.parametrize(
"raw_string",
[
"(hello(to)world)",
r"(hello\)world)",
r"he\(llo(world)",
"",
"hello world",
"(hello), (world)",
],
)
def test_is_paired(raw_string):
assert helpers.is_paired(raw_string)
@pytest.mark.parametrize(
    "raw_string",
    [
        "(hello(toworld)",
        "(hello)world)",
        "(",
    ],
)
def test_is_not_paired(raw_string):
    """Unbalanced parentheses (unclosed or unopened) must fail."""
    assert not helpers.is_paired(raw_string)
def test_source_no_state():
    """Accessing root/state on a Source with an empty state stack raises."""
    source = helpers.Source("hello world")
    with pytest.raises(RuntimeError, match="Need to push a state first"):
        source.root
    with pytest.raises(RuntimeError, match="Need to push a state first"):
        source.state
def test_load_extension_object():
    """Bare and dotted names both resolve the bundled pangu extension;
    unknown names raise ImportError."""
    ext = helpers.load_extension_object("pangu")()
    assert len(ext.renderer_mixins) == 1
    ext = helpers.load_extension_object("marko.ext.pangu")()
    assert len(ext.renderer_mixins) == 1
    with pytest.raises(ImportError, match="Extension foobar cannot be found"):
        helpers.load_extension_object("foobar")()
def test_load_illegal_extension_object():
    """Importable modules without make_extension raise AttributeError.

    The expected message deliberately matches the 'attributte' misspelling
    in helpers.load_extension_object.
    """
    with pytest.raises(
        AttributeError,
        match="Module marko.block does not have 'make_extension' attributte",
    ):
        helpers.load_extension_object("marko.block")()
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,942 | gooofy/aqb | refs/heads/master | /src/lib/GadToolsSupport/genprops.py | #!/usr/bin/env python3
#PROPS = { 'name' : 'GTTEXT',
# 'kind' : 'TEXT_KIND',
# 'idcmp': 'TEXTIDCMP',
# # name , OS, type , getter , default
# 'props': [ ( 'GTTX_Text' , 36, 'STRING' , True , None ),
# ( 'GTTX_CopyText' , 37, 'BOOLEAN', False , 'FALSE' ),
# ( 'GTTX_Border' , 36, 'BOOLEAN', False , 'TRUE' ),
# ( 'GTTX_FrontPen' , 39, 'UBYTE', False , '1' ),
# ( 'GTTX_BackPen' , 39, 'UBYTE', False , '0' ),
# ( 'GTTX_Justification', 39, 'UBYTE' , False , 'GTJ_LEFT' ),
# ( 'GTTX_Clipped' , 39, 'BOOLEAN', False , 'TRUE' )
# ]
# }
#PROPS = { 'name' : 'GTSCROLLER',
# 'kind' : 'SCROLLER_KIND',
# 'idcmp': 'SCROLLERIDCMP',
# # name , OS, type , getter , default
# 'props': [
# ( 'GA_Disabled' , 36, 'BOOLEAN' , True , 'FALSE' ),
# ( 'GA_RelVerify' , 36, 'BOOLEAN' , False , 'FALSE' ),
# ( 'GA_Immediate' , 36, 'BOOLEAN' , False , 'FALSE' ),
# ( 'GTSC_Top' , 36, 'INTEGER' , True , None ),
# ( 'GTSC_Total' , 36, 'INTEGER' , True , None ),
# ( 'GTSC_Visible' , 36, 'INTEGER' , True , None ),
# ( 'GTSC_Arrows' , 36, 'UINTEGER', False , 18 ),
# ( 'PGA_Freedom' , 36, 'ULONG' , False , None ),
# ]
# }
#PROPS = { 'name' : 'GTSTRING',
# 'kind' : 'STRING_KIND',
# 'idcmp': 'STRINGIDCMP',
# 'label': True,
# # name , OS, type , getter , default
# 'props': [
# ( 'GA_Disabled' , 36, 'BOOLEAN' , True , 'FALSE' ),
# ( 'GA_Immediate' , 36, 'BOOLEAN' , False , 'FALSE' ),
# ( 'GA_TabCycle' , 37, 'BOOLEAN' , False , 'TRUE' ),
# ( 'GTST_String' , 36, 'STRING' , False , 'NULL' ),
# ( 'GTST_MaxChars' , 36, 'UINTEGER', False , '256' ),
# ( 'STRINGA_ExitHelp' , 37, 'BOOLEAN' , False , 'FALSE' ),
# ( 'STRINGA_Justification', 37, 'STRING' , False , 'GACT_STRINGLEFT'),
# ( 'STRINGA_ReplaceMode' , 37, 'BOOLEAN' , False , 'FALSE' )
# ]
# }
#PROPS = { 'name' : 'GTINTEGER',
# 'kind' : 'INTEGER_KIND',
# 'idcmp': 'INTEGERIDCMP',
# 'label': True,
# # name , OS, type , getter , default
# 'props': [
# ( 'GA_Disabled' , 36, 'BOOLEAN' , True , 'FALSE' ),
# ( 'GA_Immediate' , 39, 'BOOLEAN' , False , 'FALSE' ),
# ( 'GA_TabCycle' , 37, 'BOOLEAN' , False , 'TRUE' ),
# ( 'GTIN_Number' , 36, 'LONG' , True , '0' ),
# ( 'GTIN_MaxChars' , 36, 'UINTEGER', False , '10' ),
# ( 'STRINGA_ExitHelp' , 37, 'BOOLEAN' , False , 'FALSE' ),
# ( 'STRINGA_Justification', 37, 'STRING' , False , 'GACT_STRINGLEFT'),
# ( 'STRINGA_ReplaceMode' , 37, 'BOOLEAN' , False , 'FALSE' )
# ]
# }
#PROPS = { 'name' : 'GTNUMBER',
# 'kind' : 'NUMBER_KIND',
# 'idcmp': 'NUMBERIDCMP',
# 'label': True,
# # name , OS, type , getter , default
# 'props': [ ( 'GTNM_Number' , 36, 'LONG' , True , None ),
# ( 'GTNM_Border' , 36, 'BOOLEAN', False , 'TRUE' ),
# ( 'GTNM_FrontPen' , 39, 'UBYTE', False , '1' ),
# ( 'GTNM_BackPen' , 39, 'UBYTE', False , '0' ),
# ( 'GTNM_Justification', 39, 'UBYTE' , False , 'GTJ_LEFT' ),
# ( 'GTNM_Format' , 39, 'STRING' , False , '%ld' ),
# ( 'GTNM_MaxNumberLen' , 39, 'ULONG' , False , '10' ),
# ( 'GTNM_Clipped' , 39, 'BOOLEAN', False , 'TRUE' )
# ]
# }
#PROPS = { 'name' : 'GTMX',
# 'kind' : 'MX_KIND',
# 'idcmp': 'MXIDCMP',
# 'label': True,
# # name , OS, type , getter , default
# 'props': [
# ( 'GA_Disabled' , 36, 'BOOLEAN' , True , 'FALSE' ),
# ( 'GTMX_Labels' , 36, 'STRING PTR', False , None ),
# ( 'GTMX_Active' , 36, 'UINTEGER' , True , '0' ),
# ( 'GTMX_Spacing' , 36, 'UINTEGER' , False , '1' ),
# ( 'GTMX_Scaled' , 39, 'BOOLEAN' , False , 'FALSE' ),
# ( 'GTMX_TitlePlace' , 39, 'ULONG' , False , 'PLACETEXT_LEFT'),
# ]
# }
#PROPS = { 'name' : 'GTCYCLE',
# 'kind' : 'CYCLE_KIND',
# 'idcmp': 'CYCLEIDCMP',
# 'label': True,
# # name , OS, type , getter , default
# 'props': [
# ( 'GA_Disabled' , 36, 'BOOLEAN' , True , 'FALSE' ),
# ( 'GTCY_Labels' , 36, 'STRING PTR', True , None ),
# ( 'GTCY_Active' , 36, 'UINTEGER' , True , '0' ),
# ]
# }
#PROPS = { 'name' : 'GTPALETTE',
# 'kind' : 'PALETTE_KIND',
# 'idcmp': 'PALETTEIDCMP',
# 'label': True,
# # name , OS, type , getter , default
# 'props': [
# ( 'GA_Disabled' , 39, 'BOOLEAN' , True , 'FALSE' ),
# ( 'GTPA_Depth' , 36, 'UINTEGER' , False , None ),
# ( 'GTPA_Color' , 39, 'UBYTE' , True , '1' ),
# ( 'GTPA_ColorOffset' , 39, 'UBYTE' , True , '0' ),
# ( 'GTPA_IndicatorWidth' , 36, 'UINTEGER' , False , '0' ),
# ( 'GTPA_IndicatorHeight', 36, 'UINTEGER' , False , '0' ),
# ( 'GTPA_ColorTable' , 39, 'UBYTE PTR' , True , 'NULL' ),
# ( 'GTPA_NumColors' , 39, 'UINTEGER' , False , '2' ),
# ]
# }
# Active gadget description driving the generators below; swap in one of the
# commented-out variants above to regenerate a different GadTools kind.
# Each property tuple is (tag name, minimum OS version, BASIC type,
# has-live-getter, default value or None).
PROPS = { 'name' : 'GTLISTVIEW',
          'kind' : 'LISTVIEW_KIND',
          'idcmp': 'LISTVIEWIDCMP',
          'label': True,
          #            name                  , OS, type          , getter , default
          'props': [
                     ( 'GA_Disabled'         , 39, 'BOOLEAN'     , True   , 'FALSE'  ),
                     ( 'GTLV_MakeVisible'    , 39, 'INTEGER'     , True   , '0'      ),
                     ( 'GTLV_Labels'         , 36, 'ExecList PTR', True   , None     ),
                     ( 'GTLV_ReadOnly'       , 36, 'BOOLEAN'     , False  , 'FALSE'  ),
                     ( 'GTLV_ScrollWidth'    , 36, 'UINTEGER'    , False  , '16'     ),
                     ( 'GTLV_Selected'       , 36, 'UINTEGER'    , True   , '~0'     ),
                     ( 'LAYOUTA_Spacing'     , 36, 'UINTEGER'    , False  , '0'      ),
                   ]
        }
def genline (f, line):
    """Echo *line* to stdout and append it, newline-terminated, to *f*."""
    print(line)
    f.write("{}\n".format(line))
def propname (pn):
    """Derive the struct-member name from a GadTools tag name.

    ``GTLV_ScrollWidth`` -> ``scrollWidth``: drop everything up to the first
    underscore and lower-case the first remaining character.
    """
    suffix = pn[pn.index('_') + 1:]
    return suffix[0].lower() + suffix[1:]
def gen_basic (f, props):
    """Emit the AQB BASIC TYPE declaration for the gadget in *props*.

    Writes (via genline) the TYPE header, a CONSTRUCTOR declaration whose
    parameter list contains every property without a default, PROPERTY
    getter/setter declarations for all properties, and the private backing
    fields.
    """
    genline (f, 'TYPE %s EXTENDS GTGADGET' % props['name'])
    genline (f, '')
    genline (f, ' PUBLIC:')
    cpars = ""
    # FIX: use .get() — several gadget descriptions (see the commented-out
    # PROPS variants above) define no 'label' key at all.
    if props.get('label'):
        cpars = 'BYVAL label AS STRING, '
    for pn, vers, t, get, default in props['props']:
        # properties with a default are set via PROPERTY, not the constructor
        if default:
            continue
        cpars = cpars + 'BYVAL %s AS %s, ' % (propname(pn), t)
    genline (f, ' DECLARE CONSTRUCTOR ( %s _' % cpars)
    genline (f, ' _COORD2(BYVAL s1 AS BOOLEAN=FALSE, BYVAL x1 AS INTEGER, BYVAL y1 AS INTEGER, BYVAL s2 AS BOOLEAN=FALSE, BYVAL x2 AS INTEGER, BYVAL y2 AS INTEGER), _')
    genline (f, ' BYVAL user_data AS VOID PTR=NULL, BYVAL flags AS ULONG=0, BYVAL underscore AS ULONG=95)')
    genline (f, '')
    # getter + setter declaration per property
    for pn, vers, t, get, default in props['props']:
        genline (f, ' DECLARE PROPERTY %s AS %s' % (propname(pn), t))
        genline (f, ' DECLARE PROPERTY %s (BYVAL value AS %s)' % (propname(pn), t))
    genline (f, '')
    genline (f, ' PRIVATE:')
    genline (f, '')
    # private backing field per property
    for pn, vers, t, get, default in props['props']:
        genline (f, ' AS %-12s _%s' % (t, propname(pn)))
    genline (f, '')
    genline (f, 'END TYPE')
# BASIC type name -> C type name
BT2CT = {
    'STRING': 'CONST_STRPTR',
    'STRING PTR': 'CONST_STRPTR *',
    'ExecList PTR': 'struct List *',
    'UBYTE PTR': 'UBYTE *',
    'BOOLEAN': 'BOOL',
    'UBYTE': 'UBYTE',
    'BYTE': 'BYTE',
    'INTEGER': 'SHORT',
    'UINTEGER': 'USHORT',
    'LONG': 'LONG',
    'ULONG': 'ULONG',
}

def ct (t):
    """Map an AQB BASIC type name to its C equivalent (KeyError if unknown)."""
    return BT2CT[t]
def gen_h (f, props):
    """Emit the C header declarations for the gadget in *props*.

    Writes the banner comment, the gadget struct (GTGADGET_t base plus one
    member per property), the constructor prototype and a getter/setter
    prototype pair per property.
    """
    genline (f, '/***********************************************************************************')
    genline (f, ' *')
    genline (f, ' * %s' % props['name'])
    genline (f, ' *')
    genline (f, ' ***********************************************************************************/')
    genline (f, '')
    genline (f, 'typedef struct %s_ %s_t;' % (props['name'], props['name']))
    genline (f, '')
    genline (f, 'struct %s_' % props['name'])
    genline (f, '{')
    genline (f, ' GTGADGET_t gadget;')
    for pn, vers, t, get, default in props['props']:
        genline (f, ' %-15s %s;' % (ct(t), propname(pn)))
    genline (f, '};')
    genline (f, '')
    genline (f, 'void _%s_CONSTRUCTOR (%s_t *this,' % (props['name'], props['name']))
    cpars = ""
    # FIX: use .get() — several gadget descriptions define no 'label' key
    if props.get('label'):
        cpars = 'CONST_STRPTR label, '
    for pn, vers, t, get, default in props['props']:
        # properties with a default are set via the setter, not the constructor
        if default:
            continue
        cpars = cpars + '%s %s, ' % (ct(t), propname(pn))
    genline (f, ' %s' % cpars)
    genline (f, ' BOOL s1, SHORT x1, SHORT y1, BOOL s2, SHORT x2, SHORT y2,')
    genline (f, ' void *user_data, ULONG flags, ULONG underscore);')
    genline (f, '')
    for pn, vers, t, get, default in props['props']:
        genline (f, '%-15s _%s_%s_ (%s_t *this);' % (ct(t), props['name'], propname(pn), props['name']))
        genline (f, '%-15s _%s_%s (%s_t *this, %s value);' % ('void', props['name'], propname(pn), props['name'], ct(t)))
    genline (f, '')
def gen_c (f, props):
    """Emit the C implementation file for the gadget in *props*.

    Generates the #include block, a deploy callback that creates the
    GadTools gadget and wires up IDCMP flags, the constructor, and one
    getter/setter pair per property.

    Fixes vs. the previous version:
      * uses the *props* parameter consistently (the old code mixed in the
        module-level PROPS global, which only worked by coincidence);
      * the label argument of _GTGADGET_CONSTRUCTOR is selected with
        properly parenthesized conditional formatting — the old precedence
        bug made the whole line collapse to "" when props had no label;
      * restores the '&gtg->ng' argument of CreateGadget (was mangled).
    """
    genline (f, '#include "../_aqb/_aqb.h"')
    genline (f, '#include "../_brt/_brt.h"')
    genline (f, '')
    genline (f, '#include "GadToolsSupport.h"')
    genline (f, '')
    genline (f, '#include <exec/types.h>')
    genline (f, '#include <exec/memory.h>')
    genline (f, '#include <clib/exec_protos.h>')
    genline (f, '#include <inline/exec.h>')
    genline (f, '')
    genline (f, '#include <intuition/intuition.h>')
    genline (f, '#include <intuition/intuitionbase.h>')
    genline (f, '#include <clib/intuition_protos.h>')
    genline (f, '#include <inline/intuition.h>')
    genline (f, '')
    genline (f, '#include <clib/graphics_protos.h>')
    genline (f, '#include <inline/graphics.h>')
    genline (f, '')
    genline (f, '#include <clib/gadtools_protos.h>')
    genline (f, '#include <inline/gadtools.h>')
    genline (f, '')
    genline (f, 'extern struct Library *GadToolsBase ;')
    genline (f, '')
    # deploy callback: creates the gadget when the window lays out its gadgets
    genline (f, 'static struct Gadget *_%s_deploy_cb (GTGADGET_t *gtg, struct Gadget *gad, APTR vinfo, struct TextAttr *ta)' % props['name'].lower())
    genline (f, '{')
    genline (f, ' %s_t *gt = (%s_t *)gtg;' % (props['name'], props['name']))
    genline (f, '')
    genline (f, ' gtg->ng.ng_VisualInfo = vinfo;')
    genline (f, ' gtg->ng.ng_TextAttr = ta;')
    genline (f, '')
    genline (f, ' gtg->gad = CreateGadget (%s, gad, &gtg->ng,' % props['kind'])
    genline (f, ' GT_Underscore , gtg->underscore,')
    for pn, vers, t, get, default in props['props']:
        if t != 'STRING':
            genline (f, ' %-15s , gt->%s,' % (pn, propname(pn)))
        else:
            # string tags are passed as pointers, cast via intptr_t
            genline (f, ' %-15s , (intptr_t) gt->%s,' % (pn, propname(pn)))
    genline (f, ' TAG_DONE);')
    genline (f, '')
    genline (f, ' if (!gtg->gad)')
    genline (f, ' {')
    genline (f, ' DPRINTF ("_%s_deploy_cb: CreateGadget() failed.\\n");' % props['name'].lower())
    genline (f, ' ERROR(AE_GTG_CREATE);')
    genline (f, ' return gad;')
    genline (f, ' }')
    genline (f, '')
    genline (f, ' // take care of IDCMP flags')
    genline (f, ' ULONG gidcmp = %s;' % props['idcmp'])
    genline (f, '')
    genline (f, ' DPRINTF("_%s_deploy_cb: gtg->win->IDCMPFlags=0x%%08lx, gidcmp=0x%%08lx\\n", gtg->win->IDCMPFlags, gidcmp);' % props['name'].lower())
    genline (f, '')
    genline (f, ' if (gidcmp && ( (gtg->win->IDCMPFlags & gidcmp) != gidcmp ) )')
    genline (f, ' ModifyIDCMP (gtg->win, gtg->win->IDCMPFlags | gidcmp);')
    genline (f, '')
    genline (f, ' return gtg->gad;')
    genline (f, '}')
    genline (f, '')
    genline (f, 'void _%s_CONSTRUCTOR (%s_t *this,' % (props['name'], props['name']))
    cpars = ""
    # FIX: .get() — several gadget descriptions define no 'label' key
    if props.get('label'):
        cpars = 'CONST_STRPTR label, '
    for pn, vers, t, get, default in props['props']:
        if default:
            continue
        cpars = cpars + '%s %s, ' % (ct(t), propname(pn))
    genline (f, ' %s' % cpars)
    genline (f, ' BOOL s1, SHORT x1, SHORT y1, BOOL s2, SHORT x2, SHORT y2,')
    genline (f, ' void *user_data, ULONG flags, ULONG underscore)')
    genline (f, '{')
    genline (f, ' DPRINTF("_%s_CONSTRUCTOR: this=0x%%08lx, x1=%%d, y1=%%d, x2=%%d, y2=%%d\\n", this, x1, y1, x2, y2);' % props['name'])
    # FIX: parenthesize the conditional so it selects the format ARGUMENT,
    # not the whole formatted string; pass NULL when there is no label.
    genline (f, ' _GTGADGET_CONSTRUCTOR (&this->gadget, %s, s1, x1, y1, s2, x2, y2, user_data, flags, underscore);' % ("label" if props.get('label') else "NULL"))
    genline (f, ' this->gadget.deploy_cb = _%s_deploy_cb;' % (props['name'].lower()))
    for pn, vers, t, get, default in props['props']:
        if not default:
            continue
        genline (f, ' this->%-15s = %s;' % (propname(pn), default))
    genline (f, '}')
    for pn, vers, t, get, default in props['props']:
        genline (f, '')
        genline (f, '%s _%s_%s_ (%s_t *this)' % (ct(t), props['name'], propname(pn), props['name']))
        genline (f, '{')
        if get:
            # deployed gadget + new-enough gadtools.library -> query live value
            genline (f, ' if (_GTGADGET_deployed_ (&this->gadget) && (GadToolsBase->lib_Version>=%d))' % vers)
            genline (f, ' {')
            genline (f, ' ULONG u;')
            genline (f, ' LONG n = GT_GetGadgetAttrs(this->gadget.gad, this->gadget.win, NULL, %s, (intptr_t)&u, TAG_DONE);' % pn)
            genline (f, ' if (n==1)')
            if t != 'STRING':
                genline (f, ' return u;')
            else:
                genline (f, ' return (CONST_STRPTR) (intptr_t) u;')
            genline (f, ' }')
        # fall back to the cached member value
        genline (f, ' return this->%s;' % propname(pn))
        genline (f, '}')
        genline (f, 'void _%s_%s (%s_t *this, %s %s)' % (props['name'], propname(pn), props['name'], ct(t), propname(pn)))
        genline (f, '{')
        genline (f, ' if (_GTGADGET_deployed_ (&this->gadget))')
        genline (f, ' {')
        if t != 'STRING':
            genline (f, ' GT_SetGadgetAttrs (this->gadget.gad, this->gadget.win, NULL, %s, %s, TAG_DONE);' % (pn, propname(pn)))
        else:
            genline (f, ' GT_SetGadgetAttrs (this->gadget.gad, this->gadget.win, NULL, %s, (intptr_t) %s, TAG_DONE);' % (pn, propname(pn)))
        genline (f, ' }')
        genline (f, ' this->%s = %s;' % (propname(pn), propname(pn)))
        genline (f, '}')
# Drive the three generators: BASIC TYPE declaration, C header, and the C
# implementation file named after the active gadget (e.g. gtlistview.c).
with open ('foo.bas', 'w') as f:
    gen_basic (f, PROPS)
with open ('foo.h', 'w') as f:
    gen_h (f, PROPS)
with open ('%s.c' % PROPS['name'].lower(), 'w') as f:
    gen_c (f, PROPS)
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,943 | gooofy/aqb | refs/heads/master | /src/tools/md2amiga/marko/md_renderer.py | """
Markdown renderer
"""
from contextlib import contextmanager
from .renderer import Renderer
class MarkdownRenderer(Renderer):
    """Render the AST back to a markdown document.

    It is useful for, e.g. merging sections and formatting documents.
    For convenience, the markdown renderer provides render functions for all
    basic elements and those from common extensions.

    Block-level render functions cooperate through two instance fields:
    ``_prefix`` is emitted before the *first* line an element produces and
    ``_second_prefix`` before every continuation line (list indentation,
    ``> `` quote markers, ...).
    """

    def __enter__(self):  # type: () -> Renderer
        self._prefix = ""
        self._second_prefix = ""
        return super().__enter__()

    @contextmanager
    def container(self, prefix, second_prefix=""):
        """Extend both line prefixes while rendering nested children.

        The previous prefixes are restored afterwards -- now also when a
        render function raises, via try/finally.
        """
        old_prefix = self._prefix
        old_second_prefix = self._second_prefix
        self._prefix += prefix
        self._second_prefix += second_prefix
        try:
            yield
        finally:
            self._prefix = old_prefix
            self._second_prefix = old_second_prefix

    def render_paragraph(self, element):
        children = self.render_children(element)
        line = self._prefix + children + "\n"
        # The first-line prefix has been consumed; following sibling lines
        # start with the continuation prefix.
        self._prefix = self._second_prefix
        return line

    def render_list(self, element):
        result = []
        if element.ordered:
            # BUG FIX: the item number was never incremented, so every item
            # of an ordered list rendered with the list's start number.
            for num, child in enumerate(element.children, element.start):
                with self.container(f"{num}. ", " " * (len(str(num)) + 2)):
                    result.append(self.render(child))
        else:
            for child in element.children:
                with self.container(f"{element.bullet} ", " "):
                    result.append(self.render(child))
        self._prefix = self._second_prefix
        return "".join(result)

    def render_list_item(self, element):
        return self.render_children(element)

    def render_quote(self, element):
        with self.container("> ", "> "):
            result = self.render_children(element)
        self._prefix = self._second_prefix
        return result + "\n"

    def render_fenced_code(self, element):
        extra = f" {element.extra}" if element.extra else ""
        lines = [self._prefix + f"```{element.lang}{extra}"]
        lines.extend(
            self._second_prefix + line
            for line in self.render_children(element).splitlines()
        )
        lines.append(self._second_prefix + "```")
        self._prefix = self._second_prefix
        return "\n".join(lines) + "\n"

    def render_code_block(self, element):
        # Indented code blocks are emitted with the classic 4-space indent.
        indent = " " * 4
        lines = self.render_children(element).splitlines()
        lines = [self._prefix + indent + lines[0]] + [
            self._second_prefix + indent + line for line in lines[1:]
        ]
        self._prefix = self._second_prefix
        return "\n".join(lines) + "\n"

    def render_html_block(self, element):
        result = self._prefix + element.children + "\n"
        self._prefix = self._second_prefix
        return result

    def render_thematic_break(self, element):
        result = self._prefix + "* * *\n"
        self._prefix = self._second_prefix
        return result

    def render_heading(self, element):
        result = (
            self._prefix
            + "#" * element.level
            + " "
            + self.render_children(element)
            + "\n"
        )
        self._prefix = self._second_prefix
        return result

    def render_setext_heading(self, element):
        # Setext headings are normalized to ATX style on output.
        return self.render_heading(element)

    def render_blank_line(self, element):
        result = self._prefix + "\n"
        self._prefix = self._second_prefix
        return result

    def render_link_ref_def(self, element):
        # Definitions are resolved during parsing; nothing to emit here.
        return ""

    def render_emphasis(self, element):
        return f"*{self.render_children(element)}*"

    def render_strong_emphasis(self, element):
        return f"**{self.render_children(element)}**"

    def render_inline_html(self, element):
        return element.children

    def render_link(self, element):
        title = (
            ' "{}"'.format(element.title.replace('"', '\\"')) if element.title else ""
        )
        return f"[{self.render_children(element)}]({element.dest}{title})"

    def render_auto_link(self, element):
        return f"<{element.dest}>"

    def render_image(self, element):
        # BUG FIX: the template was the empty string, so every image rendered
        # as "" and the computed title was discarded.  Use the markdown image
        # syntax: ![alt](dest "title").
        template = "![{}]({}{})"
        title = (
            ' "{}"'.format(element.title.replace('"', '\\"')) if element.title else ""
        )
        return template.format(self.render_children(element), element.dest, title)

    def render_literal(self, element):
        return "\\" + element.children

    def render_raw_text(self, element):
        return element.children

    def render_line_break(self, element):
        return "\n" if element.soft else "\\\n"

    def render_code_span(self, element):
        text = element.children
        # BUG FIX: the old guard `text and text[0] == "`" or text[-1] == "`"`
        # raised IndexError on empty text because `or` binds looser than
        # `and`.  Parenthesize so both backtick checks are guarded by the
        # non-empty test.
        if text and (text[0] == "`" or text[-1] == "`"):
            return f"`` {text} ``"
        return f"`{element.children}`"
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,944 | gooofy/aqb | refs/heads/master | /src/tools/md2amiga/marko/renderer.py | """
Base renderer class
"""
import html
import itertools
import re
from .helpers import camel_to_snake_case, is_type_check
if is_type_check():
from typing import Any, Union
from .inline import InlineElement
from .block import BlockElement
from .parser import ElementType
Element = Union[BlockElement, InlineElement]
class Renderer:
    """Base class of all renderers.

    Subclass it and add your own render functions.  A render function:

    * is named ``render_<element_name>``, where ``element_name`` is the
      snake-case form of the element class name -- the renderer resolves
      the function by that name;
    * takes the element instance and returns whatever output you want.

    When no matching render function exists, the renderer falls back to
    :meth:`Renderer.render_children`.
    """

    #: Whether to delegate rendering to specific render functions
    delegate: bool = True

    # Replacement for html._charref while rendering (installed in __enter__).
    _charref = re.compile(
        r"&(#[0-9]{1,7};" r"|#[xX][0-9a-fA-F]{1,6};" r"|[^\t\n\f <&#;]{1,32};)"
    )

    def __init__(self):  # type: () -> None
        self.root_node = None

    def __enter__(self):  # type: () -> Renderer
        """Provide a context so that root_node can be reset after render."""
        # Swap in our charref pattern; the original is restored in __exit__.
        self._charref_bak = html._charref
        html._charref = self._charref
        return self

    def __exit__(self, *args):  # type: (Any) -> None
        html._charref = self._charref_bak

    def render(self, element):  # type: (Element) -> str
        """Render *element* to a string (or any value the function returns).

        :param element: the element to be rendered.
        """
        # Remember the first element seen so render functions have context.
        if not self.root_node:
            self.root_node = element  # type: ignore
        func = getattr(self, self._cls_to_func_name(element.__class__), None)
        if func is None:
            return self.render_children(element)
        if self.delegate or getattr(func, "_force_delegate", False):
            return func(element)
        return self.render_children(element)

    def render_children(self, element):  # type: (Element) -> str
        """Recursively render child elements, joined with no separator.

        If newlines / spaces are needed between elements, add them in their
        respective templates, or override this function in a subclass, so
        whitespace never appears "magically".

        :param element: a branch node with a ``children`` attribute.
        """
        return "".join(self.render(child) for child in element.children)  # type: ignore

    def _cls_to_func_name(self, klass):  # type: (ElementType) -> str
        # Imported lazily to avoid a circular import at module load time.
        from .block import parser

        registered = itertools.chain(
            parser.block_elements.items(),  # type: ignore
            parser.inline_elements.items(),  # type: ignore
        )
        name = next((n for n, cls in registered if cls is klass), None)
        if name is None:
            return "render_children"
        return "render_" + camel_to_snake_case(name)
def force_delegate(func):
    """Mark *func* so the renderer delegates to it even when ``cls.delegate`` is False."""
    setattr(func, "_force_delegate", True)
    return func
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,945 | gooofy/aqb | refs/heads/master | /src/tools/md2amiga/marko/parser.py | """
Base parser
"""
import itertools
from .helpers import is_type_check, Source
class Parser:
    r"""
    All elements defined in CommonMark's spec are included in the parser
    by default.

    Attributes:
        block_elements(dict): a dict of name: block_element pairs
        inline_elements(dict): a dict of name: inline_element pairs

    :param \*extras: extra elements to be included in parsing process.
    """

    def __init__(self):  # type: () -> None
        self.block_elements = {}  # type: Dict[str, BlockElementType]
        self.inline_elements = {}  # type: Dict[str, InlineElementType]
        # Register every element exported by the block and inline modules
        # (those modules are imported at the bottom of this file to break
        # the circular dependency).
        for element in itertools.chain(
            (getattr(block, name) for name in block.__all__),
            (getattr(inline, name) for name in inline.__all__),
        ):
            self.add_element(element)

    def add_element(self, element):  # type: (ElementType) -> None
        """Add an element to the parser.

        :param element: the element class.

        .. note:: If one needs to call it inside ``__init__()``, please call it after
            ``super().__init__()`` is called.
        """
        dest = {}  # type: Dict[str, ElementType]
        if issubclass(element, inline.InlineElement):
            dest = self.inline_elements  # type: ignore
        elif issubclass(element, block.BlockElement):
            dest = self.block_elements  # type: ignore
        else:
            raise TypeError(
                "The element should be a subclass of either `BlockElement` or "
                "`InlineElement`."
            )
        if not element.override:
            dest[element.__name__] = element
        else:
            # An overriding element replaces the first of its base classes
            # that is already registered; if none is registered it is stored
            # under its own name.
            for cls in element.__bases__:
                if cls.__name__ in dest:
                    dest[cls.__name__] = element
                    break
            else:
                dest[element.__name__] = element

    def parse(self, source_or_text):
        # type: (Union[Source, AnyStr]) -> Union[List[block.BlockElement], block.BlockElement]
        """Do the actual parsing and returns an AST or parsed element.

        :param source_or_text: the text or source object.
            Based on the type, it will do following:
            - text: returns the parsed Document element.
            - source: parse the source and returns the parsed children as a list.
        """
        if isinstance(source_or_text, str):
            # Entry point: publish this parser instance as the module-global
            # ``parser`` that block/inline elements consult while parsing.
            block.parser = self  # type: ignore
            inline.parser = self  # type: ignore
            return self.block_elements["Document"](source_or_text)  # type: ignore
        element_list = self._build_block_element_list()
        ast = []  # type: List[block.BlockElement]
        assert isinstance(source_or_text, Source)
        while not source_or_text.exhausted:
            # Try each element in priority order; the first match wins.
            for ele_type in element_list:
                if ele_type.match(source_or_text):
                    result = ele_type.parse(source_or_text)
                    if not hasattr(result, "priority"):
                        # In some cases ``parse()`` won't return the element, but
                        # instead some information to create one, which will be passed
                        # to ``__init__()``.
                        result = ele_type(result)  # type: ignore
                    ast.append(result)
                    break
            else:
                # Quit the current parsing and go back to the last level.
                break
        return ast

    def parse_inline(self, text):  # type: (str) -> List[inline.InlineElement]
        """Parses text into inline elements.

        RawText is not considered in parsing but created as a wrapper of holes
        that don't match any other elements.

        :param text: the text to be parsed.
        :returns: a list of inline elements.
        """
        element_list = self._build_inline_element_list()
        return inline_parser.parse(
            text, element_list, fallback=self.inline_elements["RawText"]
        )

    def _build_block_element_list(self):  # type: () -> List[BlockElementType]
        """Return a list of block elements, ordered from highest priority to lowest."""
        return sorted(
            (e for e in self.block_elements.values() if not e.virtual),
            key=lambda e: e.priority,
            reverse=True,
        )

    def _build_inline_element_list(self):  # type: () -> List[InlineElementType]
        """Return a list of non-virtual inline elements, in registration order."""
        return [e for e in self.inline_elements.values() if not e.virtual]
from . import block, inline, inline_parser # noqa
if is_type_check():
from typing import Type, Union, Dict, AnyStr, List
BlockElementType = Type[block.BlockElement]
InlineElementType = Type[inline.InlineElement]
ElementType = Union[BlockElementType, InlineElementType]
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,946 | gooofy/aqb | refs/heads/master | /src/tools/md2amiga/marko/block.py | """
Block level elements
"""
import re
from . import inline, patterns
from .helpers import Source, is_paired, normalize_label, is_type_check
from .parser import Parser
if is_type_check():
from typing import Any, Optional, Match, Dict, Union, Tuple, List as _List
__all__ = (
"Document",
"CodeBlock",
"Heading",
"List",
"ListItem",
"BlankLine",
"Quote",
"FencedCode",
"ThematicBreak",
"HTMLBlock",
"LinkRefDef",
"SetextHeading",
"Paragraph",
)
class BlockElement:
    """Base class: any block element should inherit it."""

    #: Use to denote the precedence in parsing
    priority = 5
    #: If True, it won't be included in the parsing process but is produced
    #: by other elements instead.
    virtual = False
    #: Whether children are parsed as inline elements.
    inline_children = False
    #: If true, will replace the element which it derives from.
    override = False
    # Regex prefix every nested line must match while this element is open.
    _prefix = ""

    # FIX: @classmethod first parameters were named ``self``; renamed to
    # ``cls`` per convention (callers are unaffected -- positional only).
    @classmethod
    def match(cls, source):  # type: (Source) -> Any
        """Test if the source matches the element at current position.

        The source should not be consumed in the method unless you have to.

        :param source: the ``Source`` object of the content to be parsed
        """
        raise NotImplementedError()

    @classmethod
    def parse(cls, source):  # type: (Source) -> Any
        """Parses the source. This is a proper place to consume the source body and
        return an element or information to build one. The information tuple will be
        passed to ``__init__`` method afterwards. Inline parsing, if any, should also
        be performed here.

        :param source: the ``Source`` object of the content to be parsed
        """
        raise NotImplementedError()

    def parse_inline(self):  # type: () -> None
        """Inline parsing is postponed so that all link references
        are seen before that.
        """
        if self.inline_children:
            self.children = parser.parse_inline(self.children)  # type: ignore
        elif isinstance(getattr(self, "children", None), list):
            for child in self.children:
                if isinstance(child, BlockElement):
                    child.parse_inline()

    def __lt__(self, o):  # type: (BlockElement) -> bool
        return self.priority < o.priority
class Document(BlockElement):
    """Document node element: the root of the parsed AST."""

    _prefix = ""
    virtual = True

    def __init__(self, text):  # type: (str) -> None
        # Link reference definitions collected during block parsing; inline
        # parsing later resolves [label] references against this mapping.
        self.link_ref_defs = {}  # type: Dict[str, Tuple[str, str]]
        source = Source(text)
        # Give inline elements access to the root (e.g. for link_ref_defs).
        inline._root_node = self  # type: ignore
        with source.under_state(self):
            self.children = parser.parse(source)  # type: ignore
        # Inline parsing runs last, once all link definitions are known.
        self.parse_inline()
class BlankLine(BlockElement):
    """One or more consecutive blank lines."""

    priority = 5

    def __init__(self, start):  # type: (int) -> None
        # Source position of the first blank line; list parsing rewinds here.
        self._anchor = start

    @classmethod
    def match(cls, source):  # type: (Source) -> bool
        upcoming = source.next_line()
        if upcoming is None:
            return False
        return not upcoming.strip()

    @classmethod
    def parse(cls, source):  # type: (Source) -> int
        begin = source.pos
        # Swallow every consecutive blank line into a single element.
        while not source.exhausted:
            if not cls.match(source):
                break
            source.consume()
        return begin
class Heading(BlockElement):
    """ATX heading element: (### Hello\n)"""

    priority = 6
    pattern = re.compile(
        r" {0,3}(#{1,6})((?=\s)[^\n]*?|[^\n\S]*)(?:(?<=\s)(?<!\\)#+)?[^\n\S]*$\n?",
        flags=re.M,
    )
    inline_children = True

    def __init__(self, match):  # type: (Match) -> None
        # Heading level equals the number of leading '#' characters.
        self.level = len(match.group(1))
        self.children = match.group(2).strip()

    @classmethod
    def match(cls, source):  # type: (Source) -> Optional[Match]
        return source.expect_re(cls.pattern)

    @classmethod
    def parse(cls, source):  # type: (Source) -> Optional[Match]
        matched = source.match
        source.consume()
        return matched
class SetextHeading(BlockElement):
    """Setext heading: (Hello\n===\n)

    It can only be created by Paragraph.parse.
    """

    virtual = True
    inline_children = True

    def __init__(self, lines):  # type: (_List[str]) -> None
        # The final line is the underline: '=' gives level 1, '-' level 2.
        underline = lines.pop()
        self.level = 1 if underline.strip()[0] == "=" else 2
        self.children = "".join(line.lstrip() for line in lines).strip()
class CodeBlock(BlockElement):
    """Indented code block: (    this is a code block\n)"""

    priority = 4

    def __init__(self, lines):  # type: (str) -> None
        self.children = [inline.RawText(lines, False)]
        self.lang = ""
        self.extra = ""

    @classmethod
    def match(cls, source):  # type: (Source) -> str
        # Returns the line with its indent prefix stripped ("" when there is
        # no match), so the truthiness of the result doubles as the verdict.
        line = source.next_line(False)
        prefix = source.prefix + " {4}"
        if isinstance(source.state, Quote):
            # requires five spaces to prefix
            prefix = source.prefix[:-1] + " {4}"
        return cls.strip_prefix(line, prefix)  # type: ignore

    @classmethod
    def parse(cls, source):  # type: (Source) -> str
        prefix = source.prefix + " {4}"
        lines = [cls.match(source)]
        source.consume()
        source.anchor()
        while not source.exhausted:
            line = source.next_line()
            if line is not None and not line.strip():
                # Blank lines are kept for now; interior ones stay in the
                # block, trailing ones are removed by the final rstrip.
                source.consume()
                stripped_line = cls.strip_prefix(line, prefix)
                if stripped_line:
                    lines.append(stripped_line)
                else:
                    lines.append("\n")
            elif cls.match(source):
                lines.append(cls.match(source))
                source.consume()
                source.anchor()
            else:
                # Rewind past any blank lines consumed since the last anchor.
                source.reset()
                break
        return "".join(lines).rstrip("\n") + "\n"

    @staticmethod
    def strip_prefix(line, prefix):  # type: (str, str) -> str
        """Strip the indent ``prefix`` regex from ``line``, honoring tabs.

        The prefix is matched against the tab-expanded line; the returned
        tail re-uses the original (unexpanded) characters where possible.
        """
        match = re.match(prefix, line.expandtabs(4))
        if not match:
            return ""
        end = match.end()
        # Walk the original line until its expansion reaches the match end,
        # then splice back any overshoot (a tab spanning the boundary).
        for i in range(len(line)):
            expanded = line[: i + 1].expandtabs(4)
            if len(expanded) < end:
                continue
            d = len(expanded) - end
            if d == 0:
                return line[i + 1 :]
            return expanded[-d:] + line[i + 1 :]
        return ""
class FencedCode(BlockElement):
    """Fenced code block: (```python\nhello\n```\n)"""

    priority = 7
    pattern = re.compile(r"( {,3})(`{3,}|~{3,})[^\n\S]*(.*?)$", re.M)
    # Scratch tuple (indent prefix, fence string, lang, extra) handed from
    # match() to parse().
    _parse_info = ("", "", "", "")  # type: Tuple[str, str, str, str]

    def __init__(self, match):  # type: (Tuple[str, str, str]) -> None
        self.lang = inline.Literal.strip_backslash(match[0])
        self.extra = match[1]
        self.children = [inline.RawText(match[2], False)]

    @classmethod
    def match(cls, source):  # type: (Source) -> Optional[Match]
        m = source.expect_re(cls.pattern)
        if not m:
            return None
        prefix, leading, info = m.groups()
        # CommonMark: the info string of a backtick fence may not contain `.
        if leading[0] == "`" and "`" in info:
            return None
        # Split the info string into language + extra, padding with "".
        lang, extra = (info.split(None, 1) + [""] * 2)[:2]
        cls._parse_info = prefix, leading, lang, extra
        return m

    @classmethod
    def parse(cls, source):  # type: (Source) -> Tuple[str, str, str]
        source.next_line()
        source.consume()
        lines = []
        while not source.exhausted:
            line = source.next_line()
            if line is None:
                break
            source.consume()
            # A closing fence: the same character, at least as long as the
            # opener (the opener must be a substring of the closer).
            m = re.match(r" {,3}(~+|`+)[^\n\S]*$", line, flags=re.M)
            if m and cls._parse_info[1] in m.group(1):
                break
            # Strip at most the opening fence's indentation from each line.
            prefix_len = source.match_prefix(cls._parse_info[0], line)
            if prefix_len >= 0:
                line = line[prefix_len:]
            else:
                line = line.lstrip()
            lines.append(line)
        return cls._parse_info[2], cls._parse_info[3], "".join(lines)
class ThematicBreak(BlockElement):
    """Horizontal rules: (----\n)"""

    priority = 8
    pattern = re.compile(r" {,3}([-_*][^\n\S]*){3,}$\n?", flags=re.M)

    @classmethod
    def match(cls, source):  # type: (Source) -> bool
        matched = source.expect_re(cls.pattern)
        if not matched:
            return False
        # Every marker character must be the same ("***", never "*-*").
        marker_chars = set(re.sub(r"\s+", "", matched.group()))
        return len(marker_chars) == 1

    @classmethod
    def parse(cls, source):  # type: (Source) -> ThematicBreak
        source.consume()
        return cls()
class HTMLBlock(BlockElement):
    """HTML blocks, parsed as it is"""

    priority = 5
    # Compiled terminator regex for the current block, or None for the
    # blank-line terminated kinds (CommonMark HTML block types 6 and 7).
    _end_cond = None  # Optional[Pattern]

    def __init__(self, lines):  # type: (str) -> None
        self.children = lines

    @classmethod
    def match(cls, source):  # type: (Source) -> Union[int, bool]
        # The numbered return values correspond to CommonMark's seven
        # "HTML block" start conditions.
        if source.expect_re(r"(?i) {,3}<(script|pre|style|textarea)[>\s]"):
            cls._end_cond = re.compile(rf"(?i)</{source.match.group(1)}>")
            return 1
        if source.expect_re(r" {,3}<!--"):
            cls._end_cond = re.compile(r"-->")
            return 2
        if source.expect_re(r" {,3}<\?"):
            cls._end_cond = re.compile(r"\?>")
            return 3
        if source.expect_re(r" {,3}<!"):
            cls._end_cond = re.compile(r">")
            return 4
        if source.expect_re(r" {,3}<!\[CDATA\["):
            cls._end_cond = re.compile(r"\]\]>")
            return 5
        block_tag = r"(?:{})".format("|".join(patterns.tags))
        if source.expect_re(r"(?im) {,3}</?%s(?: +|/?>|$)" % block_tag):
            cls._end_cond = None
            return 6
        if source.expect_re(
            r"(?m) {,3}(<%(tag)s(?:%(attr)s)*[^\n\S]*/?>|</%(tag)s[^\n\S]*>)[^\n\S]*$"
            % {"tag": patterns.tag_name, "attr": patterns.attribute_no_lf}
        ):
            cls._end_cond = None
            return 7
        return False

    @classmethod
    def parse(cls, source):  # type: (Source) -> str
        lines = []
        while not source.exhausted:
            line = source.next_line()
            if line is None:
                break
            lines.append(line)
            if cls._end_cond is not None:
                # Types 1-5 end on (and include) the line matching _end_cond.
                if cls._end_cond.search(line):
                    source.consume()
                    break
            elif line.strip() == "":
                # Types 6/7 end at the first blank line, which is excluded.
                lines.pop()
                break
            source.consume()
        return "".join(lines)
class Paragraph(BlockElement):
    """A paragraph element"""

    priority = 1
    pattern = re.compile(r"[^\n]+$\n?", flags=re.M)
    inline_children = True

    def __init__(self, lines):  # type: (_List[str]) -> None
        str_lines = "".join(line.lstrip() for line in lines).rstrip("\n")
        self.children = str_lines
        # Set by List.parse when this paragraph belongs to a tight list.
        self._tight = False

    @classmethod
    def match(cls, source):  # type: (Source) -> bool
        return source.expect_re(cls.pattern) is not None

    @staticmethod
    def is_setext_heading(line):  # type: (str) -> bool
        return re.match(r" {,3}(=+|-+)[^\n\S]*$", line) is not None

    @classmethod
    def break_paragraph(cls, source, lazy=False):  # type: (Source, bool) -> bool
        """Return True if the upcoming line interrupts this paragraph.

        ``lazy`` is True while checking lazy-continuation lines, which are
        interrupted by more constructs than ordinary continuation lines.
        """
        assert isinstance(parser, Parser)
        if (
            parser.block_elements["Quote"].match(source)
            or parser.block_elements["Heading"].match(source)
            or parser.block_elements["BlankLine"].match(source)
            or parser.block_elements["FencedCode"].match(source)
        ):
            return True
        if (
            lazy
            and isinstance(source.state, List)
            and parser.block_elements["ListItem"].match(source)
        ):
            return True
        if parser.block_elements["List"].match(source):
            result = parser.block_elements["ListItem"].parse_leading(source.next_line())
            # Only a list starting with 1. / * / - / + and carrying actual
            # content may interrupt a paragraph (CommonMark rule).
            if lazy or (result[1][:-1] == "1" or result[1] in "*-+") and result[3]:
                return True
        html_type = parser.block_elements["HTMLBlock"].match(source)
        if html_type and html_type != 7:
            # HTML block type 7 may not interrupt a paragraph.
            return True
        if parser.block_elements["ThematicBreak"].match(source):
            # "---" right after paragraph text is a setext underline,
            # not a thematic break.
            if not lazy and cls.is_setext_heading(source.next_line()):
                return False
            return True
        return False

    @classmethod
    def parse(cls, source):  # type: (Source) -> _List[str]
        lines = [source.next_line()]
        source.consume()
        end_parse = False
        while not source.exhausted and not end_parse:
            if cls.break_paragraph(source):
                break
            line = source.next_line()
            # the prefix is matched and not breakers
            if line:
                lines.append(line)
                source.consume()
                if cls.is_setext_heading(line):
                    # The accumulated lines turn out to be a setext heading.
                    return parser.block_elements["SetextHeading"](lines)  # type: ignore
            else:
                # check lazy continuation, store the previous state stack
                states = source._states[:]
                while len(source._states) > 1:
                    source.pop_state()
                    if source.next_line():
                        # matches the prefix, quit the loop
                        if cls.break_paragraph(source, True):
                            # stop the whole parsing
                            end_parse = True
                        else:
                            lines.append(source.next_line())
                            source.consume()
                        break
                # Restore the saved state stack regardless of the outcome.
                source._states = states
        return lines  # type: ignore
class Quote(BlockElement):
    """block quote element: (> hello world)"""

    priority = 6
    _prefix = r" {,3}>[^\n\S]?"

    @classmethod
    def match(cls, source):  # type: (Source) -> Optional[Match]
        return source.expect_re(r" {,3}>")

    @classmethod
    def parse(cls, source):  # type: (Source) -> Quote
        quote = cls()
        # Parse children with the "> " prefix pushed onto the source state.
        with source.under_state(quote):
            quote.children = parser.parse(source)  # type: ignore
        return quote
class List(BlockElement):
    """List block element"""

    priority = 6
    _prefix = ""
    pattern = re.compile(r" {,3}(\d{1,9}[.)]|[*\-+])[ \t\n\r\f]")
    # (bullet, ordered, start) handed from match() to __init__ via parse().
    _parse_info = ("", False, 0)

    def __init__(self):  # type: () -> None
        self.bullet, self.ordered, self.start = self._parse_info
        self.start = int(self.start)
        # Tight list: no blank lines between/inside items (affects rendering).
        self.tight = True

    @classmethod
    def match(cls, source):  # type: (Source) -> bool
        m = source.expect_re(cls.pattern)
        if not m:
            return False
        bullet, ordered, start = m.group(1), False, 1
        if bullet[:-1].isdigit():
            # An ordered marker like "3." also carries the start number.
            ordered = True
            start = bullet[:-1]
        cls._parse_info = (bullet, ordered, start)
        return m is not None

    @classmethod
    def parse(cls, source):  # type: (Source) -> List
        state = cls()
        children = []
        tight = True
        has_blank_line = False
        with source.under_state(state):
            while not source.exhausted:
                if parser.block_elements["ListItem"].match(source):  # type: ignore
                    el = parser.block_elements["ListItem"].parse(source)  # type: ignore
                    if not isinstance(el, BlockElement):
                        el = parser.block_elements["ListItem"](el)  # type: ignore
                    children.append(el)
                    source.anchor()
                    if has_blank_line:
                        # A blank line between items makes the list loose.
                        tight = False
                elif BlankLine.match(source):
                    BlankLine.parse(source)
                    has_blank_line = True
                else:
                    # Rewind any trailing blank lines and end the list.
                    source.reset()
                    break
        # A blank line inside any item also makes the list loose.
        tight = tight and not any(
            isinstance(e, BlankLine) for item in children for e in item.children
        )
        if tight:
            # Propagate tightness to items and their paragraphs.
            for item in children:
                item._tight = tight
                for child in item.children:
                    if isinstance(child, Paragraph):
                        child._tight = tight
        state.children = children
        state.tight = tight
        return state
class ListItem(BlockElement):
    """List item element. It can only be created by List.parse"""

    # (indent, bullet, mid, tail) handed from match() to __init__.
    _parse_info = (0, "", 0, "")
    virtual = True
    _tight = False
    pattern = re.compile(r" {,3}(\d{1,9}[.)]|[*\-+])[ \t\n\r\f]")

    def __init__(self):  # type: () -> None
        indent, bullet, mid, tail = self._parse_info
        # The first line must match bullet + spacing; continuation lines only
        # need an equivalent amount of indentation.
        self._prefix = " " * indent + re.escape(bullet) + " " * mid
        self._second_prefix = " " * (len(bullet) + indent + (mid or 1))

    @classmethod
    def parse_leading(cls, line):  # type: (str) -> Tuple[int, str, int, str]
        """Split an item's first line into (indent, bullet, mid, tail)."""
        line = line.expandtabs(4)
        stripped_line = line.lstrip()
        indent = len(line) - len(stripped_line)
        temp = stripped_line.split(None, 1)
        bullet = temp[0]
        if len(temp) == 1:
            # Bullet with no content on the same line.
            mid = 0
            tail = ""
        else:
            mid = len(stripped_line) - len("".join(temp))
            if mid > 4:
                # More than 4 spaces after the bullet would start an indented
                # code block, so only one space counts as the separator.
                mid = 1
            tail = temp[1]
        return indent, bullet, mid, tail

    @classmethod
    def match(cls, source):  # type: (Source) -> bool
        if parser.block_elements["ThematicBreak"].match(source):  # type: ignore
            # "* * *" and friends are thematic breaks, not list items.
            return False
        if not source.expect_re(cls.pattern):
            return False
        next_line = source.next_line(False)
        assert next_line is not None
        # Strip the enclosing containers' prefix (quote markers, parent list
        # indentation), taking tab expansion into account.
        prefix_pos = 0
        stripped_line = next_line
        for i in range(1, len(next_line) + 1):
            m = re.match(source.prefix, next_line[:i].expandtabs(4))
            if not m:
                continue
            if m.end() > prefix_pos:
                prefix_pos = m.end()
            stripped_line = next_line[:i].expandtabs(4)[prefix_pos:] + next_line[i:]
        indent, bullet, mid, tail = cls.parse_leading(stripped_line)  # type: ignore
        parent = source.state
        assert isinstance(parent, List)
        # The bullet must be compatible with the parent list's marker style.
        if (
            parent.ordered
            and not bullet[:-1].isdigit()
            or bullet[-1] != parent.bullet[-1]
        ):
            return False
        if not parent.ordered and bullet != parent.bullet:
            return False
        cls._parse_info = (indent, bullet, mid, tail)
        return True

    @classmethod
    def parse(cls, source):  # type: (Source) -> ListItem
        state = cls()
        state.children = []
        with source.under_state(state):
            if not source.next_line().strip():  # type: ignore
                # Bullet followed by a blank line: the item stays empty
                # unless real content follows on the very next line.
                source.consume()
                if not source.next_line() or not source.next_line().strip():  # type: ignore
                    return state
            state.children = parser.parse(source)  # type: ignore
        if isinstance(state.children[-1], BlankLine):
            # Remove the last blank line from list item
            blankline = state.children.pop()
            if state.children:
                # Rewind so the blank line is re-parsed at the list level.
                source.pos = blankline._anchor
        return state
class LinkRefDef(BlockElement):
    """Link reference definition:

    [label]: destination "title"
    """

    pattern = re.compile(
        r" {,3}%s:(?P<s1>\s*)%s(?P<s2>\s*)(?:(?<=\s)%s)?[^\n\S]*$\n?"
        % (patterns.link_label, patterns.link_dest, patterns.link_title),
        flags=re.M,
    )
    # (label, dest, title) handed from match() to parse().
    _parse_info = ("", "", "")

    @classmethod
    def match(cls, source):  # type: (Source) -> bool
        m = source.expect_re(cls.pattern)
        if not m:
            return False
        rv = m.groupdict()
        # BUG FIX: the second condition used to re-test s1 (copy/paste), so a
        # multi-line gap between destination and title was never rejected.
        if rv["s1"].count("\n") > 1 or rv["s2"].count("\n") > 1:
            return False
        label = rv["label"]
        # The destination must be <...>-wrapped or have balanced parentheses.
        if rv["dest"][0] == "<" and rv["dest"][-1] == ">":
            dest = rv["dest"]
        elif is_paired(rv["dest"], "(", ")"):
            dest = rv["dest"]
        else:
            return False
        title = rv["title"]
        # A title may not contain a blank line.
        if title and re.search(r"^$", title, re.M):
            return False
        cls._parse_info = label, dest, title
        return m is not None

    @classmethod
    def parse(cls, source):  # type: (Source) -> LinkRefDef
        label, dest, title = cls._parse_info
        normalized_label = normalize_label(label[1:-1])
        assert isinstance(source.root, Document)
        # The first definition of a label wins; duplicates are ignored.
        if normalized_label not in source.root.link_ref_defs:
            source.root.link_ref_defs[normalized_label] = (dest, title)
        source.consume()
        return cls()
# Module-global Parser instance; assigned by Parser.parse, which sets
# ``block.parser = self`` before parsing starts.
parser = None
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,947 | gooofy/aqb | refs/heads/master | /src/tools/md2amiga/noxfile.py | import nox
import os
os.environ.update(PDM_IGNORE_SAVED_PYTHON="1")
@nox.session(python=["3.6", "3.8", "3.9"])
def tests(session):
    """Install dev dependencies with PDM and run the test suite."""
    session.run("pdm", "install", "-d", external=True)
    session.run("pytest", "tests/")
@nox.session
def benchmark(session):
    """Install the benchmark dependency section and run the benchmarks."""
    session.run("pdm", "install", "-s", "benchmark", external=True)
    session.run("python", "-m", "tests.benchmark")
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,948 | gooofy/aqb | refs/heads/master | /src/tools/md2amiga/setup.py | # This a dummy setup.py to enable GitHub "Used By" stats
from setuptools import setup

# Minimal metadata only; this file exists just so GitHub can index the package.
setup(name="marko")
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,949 | gooofy/aqb | refs/heads/master | /src/tools/md2amiga/md2aguide.py | #!/usr/bin/env python3
import sys
import re
from marko import Markdown, block, inline, helpers
#OUTPUT_FN = "/home/guenter/media/emu/amiga/FS-UAE/hdd/system/x/foo.guide"
def centerline(s):
    """Center *s* on a ~74-column line by left-padding with spaces.

    Strings longer than 80 characters are returned unchanged.
    """
    width = len(s)
    if width > 80:
        return s
    pad = 37 - int(width / 2)
    # A negative pad multiplies to the empty string, so long-ish lines pass through.
    return " " * pad + s
def mangleNode(s):
    """Normalize a heading title into an AmigaGuide node name.

    Letters are lowercased, spaces become hyphens, '(' ')' '#' are kept,
    and every other character (digits included) is dropped.
    """
    out = []
    for ch in s:
        if ch.isalpha():
            out.append(ch.lower())
        elif ch == ' ':
            out.append('-')
        elif ch in '()#':
            out.append(ch)
    return ''.join(out)
def aguideEscape(s):
    """Escape text for AmigaGuide output.

    First undoes markdown escapes (\\_ and \\#), then doubles remaining
    backslashes and escapes '@' — in that order, so the backslash added
    for '@' is not itself doubled.
    """
    replacements = (
        ('\\_', '_'),
        ('\\#', '#'),
        ('\\', '\\\\'),
        ('@', '\\@'),
    )
    for old, new in replacements:
        s = s.replace(old, new)
    return s
class Document(block.Document):
    """Root document that additionally collects level-2 heading titles.

    `tocnodes` is filled by HeadingTOC during parsing and consumed by the
    renderer for node generation and the :toc: listing.
    """
    def __init__(self, text):
        self.tocnodes = []
        super().__init__(text)
class ExternalLinkRef(inline.InlineElement):
    """Inline element matching markdown links/images: `[label](target)`.

    Captures the label and the raw target so the renderer can rewrite
    cross-document references into AmigaGuide link buttons.
    """
    pattern = re.compile(r"!?\[([^\]]+)\]\(([^)]+)\)")
    priority = 6

    def __init__(self, match):
        self.label = match.group(1)   # link text
        self.refdoc = match.group(2)  # raw target (URL, file path, or #anchor)

    @classmethod
    def find(cls, text):
        # Yields every regex match; the commented filter below was never enabled.
        for match in super().find(text):
            # label = helpers.normalize_label(match.group(1))
            # if label in inline._root_node.footnotes:
            yield match
class TableOfContents(block.BlockElement):
    """Block element triggered by a literal `:toc:` line.

    Renders as a list of links to all level-2 heading nodes collected on
    the root Document (see HeadingTOC).
    """
    priority = 6
    pattern = re.compile(r"(:toc:)")
    inline_children = True

    def __init__(self, match):  # type: (Match) -> None
        self.level = len(match.group(1))
        self.children = ""

    @classmethod
    def match(cls, source):  # type: (Source) -> Optional[Match]
        return source.expect_re(cls.pattern)

    @classmethod
    def parse(cls, source):  # type: (Source) -> Optional[Match]
        # Consume the :toc: line; rendering happens later from root.tocnodes.
        m = source.match
        source.consume()
        return m
class HeadingTOC(block.Heading):
    """Heading parser that records every level-2 heading on the document.

    The collected titles become AmigaGuide nodes and feed the :toc: listing.
    """
    override = True

    @classmethod
    def parse(cls, source):  # type: (Source) -> Optional[Match]
        m = source.match
        level = len(m.group(1))
        # Only level-2 headings (##) start a new guide node.
        if level == 2:
            source.root.tocnodes.append(m.group(2).strip())
        source.consume()
        return m
class AmigaGuideMixin(object):
    """marko renderer mixin that emits AmigaGuide markup instead of HTML.

    Tracks list nesting depth and whether a @node is currently open so that
    each level-2 heading can close the previous node and open a new one.
    """

    def __init__(self):
        self.indent = 0     # current list nesting depth
        self.inNode = True  # True while a @node is open (MAIN is open from the start)

    def render_external_link_ref(self, element):
        """Render [label](target): drop web links, map #anchors to node links,
        and map *.md files to <name>.guide/main links."""
        if "http" in element.refdoc:
            return ""
        if '#' in element.refdoc:
            return '@{"%s" link "%s"}' % (element.label, element.refdoc[1:])
        return '@{"%s" link "%s/main"}' % (element.label, element.refdoc.replace('.md', '.guide'))

    def render_link(self, element):
        return element.dest

    def render_paragraph(self, element):
        children = self.render_children(element)
        if element._tight:
            return children
        else:
            return f"\n{children}\n"

    def _make_node (self, label):
        """Close the currently open node (if any) and open a new one for *label*."""
        # NOTE(review): inNode is initialized True and always reset to True,
        # so this early-out branch appears unreachable with the current flow.
        if not self.inNode:
            self.inNode = True
            return ''
        prf = "\n@endnode\n"
        self.inNode = True
        return prf + '@node %s "%s"\n' % (mangleNode(label), label)

    def render_heading(self, element):
        """H1 -> centered bold title; H2 -> new node + bold; H3 -> bold; else plain."""
        if element.level == 1:
            b = self.render_children(element)
            return "\n\n@{b}%s@{ub}\n" % centerline(b)
        elif element.level == 2:
            b = self.render_children(element)
            return self._make_node (b) + "\n\n@{b}%s@{ub}\n" % b
        elif element.level == 3:
            return "\n\n@{b}%s@{ub}\n" % self.render_children(element)
        else:
            return "\n\n" + self.render_children(element) + "\n"

    def render_list(self, element):
        # FIXME: if element.ordered:
        self.indent += 1
        b = self.render_children(element)
        self.indent -= 1
        return b

    def render_list_item(self, element):
        return "\n" + " " * self.indent + "* " + self.render_children(element)

    def render_fenced_code(self, element):
        # Indent each code line and escape AmigaGuide metacharacters.
        b = '\n' + element.children[0].children
        lines = b.split('\n')
        return aguideEscape('\n '.join(lines)+'\n')

    def render_code_span(self, element):
        return aguideEscape(element.children)

    def render_table_of_contents(self, element):
        """Emit one guide link per collected level-2 heading title."""
        lines = "\n"
        for label in self.root_node.tocnodes:
            lines = lines + '@{"%s" link "%s"}\n' % (aguideEscape(label), mangleNode(label))
        return lines

    def render_children(self, element):
        """Render children; for the root document, wrap the body in the
        @DATABASE/MAIN header and a fixed navigation-index footer."""
        rendered = [self.render(child) for child in element.children]
        body = "".join(rendered)
        if element is self.root_node:
            suff = ("\n"
                    "@endnode\n"
                    "@node navidx \"Index\"\n"
                    "\n\n"
                    " * @{\"Start\" link \"README.guide/main\"}\n\n"
                    " * @{\"Reference: Core\" link \"help/RefCore.guide/main\"}\n"
                    " * @{\"Reference: Amiga specific commands\" link \"help/RefAmiga.guide/main\"}\n"
                    " * @{\"Reference: IFFSupport module\" link \"help/IFFSupport.guide/main\"}\n"
                    " * @{\"Reference: AnimSupport module\" link \"help/AnimSupport.guide/main\"}\n"
                    "@endnode\n"
                    "@index navidx\n")
            return "@DATABASE\n@node MAIN\n" + body + suff
        else:
            return body

    def render_plain_text(self, element):
        if isinstance(element.children, str):
            return aguideEscape(element.children)
        return self.render_children(element)

    def render_raw_text(self, element):
        return aguideEscape(element.children)
class AmigaGuide:
    """marko extension bundling the AmigaGuide parser elements and renderer mixin."""
    elements = [HeadingTOC, ExternalLinkRef, Document, TableOfContents]
    renderer_mixins = [AmigaGuideMixin]
# Convert the markdown file given on the command line to AmigaGuide on stdout.
markdown = Markdown(extensions=[AmigaGuide])

if len(sys.argv) != 2:
    print ("usage: %s <foo.md>" % sys.argv[0])
    sys.exit(1)

infn = sys.argv[1]
with open (infn, "r") as mdf:
    md = mdf.read()

aguide = markdown.convert(md)

#with open (OUTPUT_FN, "w", encoding="latin1") as outf:
#    outf.write(aguide)
print (aguide)
| {"/src/tools/md2amiga/marko/__init__.py": ["/src/tools/md2amiga/marko/renderer.py", "/src/tools/md2amiga/marko/parser.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/inline_parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/helpers.py": ["/src/tools/md2amiga/marko/block.py"], "/src/tools/md2amiga/marko/md_renderer.py": ["/src/tools/md2amiga/marko/renderer.py"], "/src/tools/md2amiga/marko/renderer.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/block.py", "/src/tools/md2amiga/marko/parser.py"], "/src/tools/md2amiga/marko/parser.py": ["/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/__init__.py"], "/src/tools/md2amiga/marko/block.py": ["/src/tools/md2amiga/marko/__init__.py", "/src/tools/md2amiga/marko/helpers.py", "/src/tools/md2amiga/marko/parser.py"]} |
68,956 | kajin41/Duckdelivery | refs/heads/master | /Models.py | import random
import Config
class Location:
    """A named delivery location at a random point inside the service area."""

    def __init__(self, name):
        # Bounding box roughly covering the demo service area — TODO confirm region.
        lat_bounds = (40.738178, 40.754353)
        lon_bounds = (-74.042851, -74.024468)
        self.name = name
        self.lat = random.uniform(*lat_bounds)
        self.lon = random.uniform(*lon_bounds)
class User:
    """A user of the delivery app; ids are allocated from Config.newUserIndex."""

    def __init__(self, name):
        # Allocate the next sequential user id and record it globally.
        Config.newUserIndex += 1
        self.id = Config.newUserIndex
        self.name = name
class Order:
    """A delivery order from *start* to *end*; ids come from Config.newOrderIndex."""

    def __init__(self, start, end, orderer, deliverer, item, store, cost, fee, paymentmethod):
        # Fixed: the old body re-imported `random` here, shadowing the
        # module-level import for no benefit; only `string` is needed locally.
        import string

        # Allocate the next sequential order id and record it globally.
        self.id = Config.newOrderIndex + 1
        Config.newOrderIndex = self.id
        self.start = start            # Location picked up from
        self.end = end                # Location delivered to
        self.orderer = orderer        # user id of the customer
        self.deliverer = deliverer    # user id of the deliverer ('null' user until claimed)
        self.item = item
        self.store = store
        self.cost = cost
        self.fee = fee
        self.paymentmethod = paymentmethod
        # Random 6-character confirmation code the customer gives the deliverer.
        self.confirmationId = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
        self.complete = False
def init_data():
    """Seed Config with demo users and orders (called from Duck.py's main)."""
    nullUser = User('null')  # placeholder meaning "no deliverer assigned yet"
    u2 = User('Greg Mercado')
    u3 = User('Orange Joe')
    # Keys are stringified ids so they match URL path parameters.
    Config.users = {str(nullUser.id): nullUser, str(u2.id): u2, str(u3.id): u3}
    o1 = Order(Location('Pizza Planet'), Location('Apartment A'), u2.id, nullUser.id, 'Pizza', 'Pizza Planet', 16, 2, 1)
    o2 = Order(Location('The Wash'), Location('Apartment B'), u2.id, nullUser.id, 'Dry Cleaning #5437', 'The Wash', 0, 2, 1)
    o3 = Order(Location('Jack Hardware'), Location('Apartment C'), u2.id, nullUser.id, 'Nails', 'Jack Hardware', 4, 2, 1)
    Config.orders = {str(o1.id): o1, str(o2.id): o2, str(o3.id): o3}
    # Print the confirmation codes so the demo order can be "completed" by hand.
    print('01', o1.confirmationId)
    print('02', o2.confirmationId)
    print('03', o3.confirmationId)
| {"/Models.py": ["/Config.py"], "/Duck.py": ["/Config.py", "/Models.py"]} |
68,957 | kajin41/Duckdelivery | refs/heads/master | /Config.py | newOrderIndex = 0
newUserIndex = 0
users = {}
orders = {} | {"/Models.py": ["/Config.py"], "/Duck.py": ["/Config.py", "/Models.py"]} |
68,958 | kajin41/Duckdelivery | refs/heads/master | /Duck.py | import Config
import Models
from flask import Flask, render_template, request, redirect
app = Flask(__name__)
@app.route('/')
def hello_world():
    """Deliverer dashboard: split orders into unclaimed vs. claimed-by-me."""
    currentUser = '3'  # hard-coded "logged in" user pk (no auth implemented)
    newOrders = {}
    mOrders = {}
    for order in Config.orders:
        print(Config.orders[order].deliverer)
        # Orders still assigned to the 'null' user (pk 1) are unclaimed.
        if Config.orders[order].deliverer == Config.users['1'].id:
            newOrders[order] = Config.orders[order]
        # NOTE(review): deliverer is an int id at seed time (Models.init_data)
        # but a str after being claimed via map(); this str comparison only
        # matches orders claimed through the URL route — confirm intended.
        elif Config.orders[order].deliverer == currentUser:
            mOrders[order] = Config.orders[order]
    print(mOrders, newOrders)
    return render_template('deliverView.html', mOrders=mOrders, newOrders=newOrders, currentUser=Config.users[currentUser])
@app.route('/<user>/<order>', methods=['GET', 'POST'])
def map(user, order):
    """Order detail / claim / complete page.

    GET renders the map (active=True when *user* is the assigned deliverer).
    POST either completes the order (deliverer submits the confirmation code)
    or claims an unassigned order for *user*.
    """
    if request.method == 'GET':
        if Config.orders[order].deliverer == user:
            return render_template('map.html', order=Config.orders[order], user=user, active=True)
        else:
            return render_template('map.html', order=Config.orders[order], user=user, active=False)
    else:
        if Config.orders[order].deliverer == user:
            # Correct confirmation code completes (removes) the order.
            if request.form['confirm'] == Config.orders[order].confirmationId:
                del Config.orders[order]
                return redirect('/')
            else:
                return render_template('map.html', order=Config.orders[order], user=user, active=True)
        else:
            # Claim the order; note this stores the URL string, not an int id.
            Config.orders[order].deliverer = user
            return redirect('/')
if __name__ == '__main__':
    # Seed the in-memory demo data before serving (dev server only).
    Models.init_data()
    print(Config.users)
    print(Config.users['1'].name)
    app.run()
| {"/Models.py": ["/Config.py"], "/Duck.py": ["/Config.py", "/Models.py"]} |
68,962 | yangsej/board_game | refs/heads/master | /Unit.py | from tkinter import *
class Unit:
    """A board-game unit: owner, grid position, combat stats, and sprite.

    Stats live in the `stat` dict; position defaults to (-1, -1) meaning
    "not placed on the board yet".
    """

    def __init__(self, name='Unnamed', player=None, x=-1, y=-1,
                 action=0, speed=0, attack=0, defence=0,
                 range=0, accuracy=0, image=None):
        self.name = name
        self.player = player
        self.x = x
        self.y = y
        self.image = image  # filename until Board.set_unit swaps in a PhotoImage
        self.stat = dict(
            attack=attack,
            defence=defence,
            range=range,
            accuracy=accuracy,
            action=action,
            speed=speed,
        )

    def __str__(self):
        return self.name
# Demo units placed on the board in Board.py (identifiers are runtime names; keep as-is).
핫산 = Unit('핫산', action=2, speed=5, attack=3, defence=1,
          range=7, accuracy=70, image='hot.png')
예거 = Unit('예거', action=3, speed=8, attack=4, defence=2,
          range=10, accuracy=80, image='예거형.png')
| {"/Board.py": ["/Unit.py"]} |
68,963 | yangsej/board_game | refs/heads/master | /Board.py | import pyVulkan as Vk
from tkinter import *
from Unit import *
from win32api import GetSystemMetrics
class Board:
    """tkinter game board: a scrollable tile-grid canvas plus side panels.

    A blue cursor rectangle (pos_rect) is moved with the arrow keys; 'z'
    confirms (select unit / pick action / confirm move target) and 'x'
    cancels.  Each tile is a dict holding its canvas rectangle 'ID' and,
    when occupied, the Unit under the 'unit' key.
    """

    def __init__(self, rows, cols):
        # Map dimensions (in tiles)
        self.rows = rows
        self.cols = cols
        # Defaults: cursor tile and tile size in pixels
        self.row_pos = 4
        self.col_pos = 4
        self.rect_size = 100
        # Frame holding the board and the side (selection/user) panels
        b_u_frame = Frame(root, )
        b_u_frame.pack(expand=True, fill=BOTH)
        # Board frame
        board_frame = Frame(b_u_frame)
        board_frame.grid_rowconfigure(0, weight=1)
        board_frame.grid_columnconfigure(0, weight=1)
        board_frame.pack(side=LEFT, expand=True, fill=BOTH)
        # Board canvas; scroll increments equal one tile so arrows move tile-by-tile
        self.board_canvas = Canvas(board_frame, height=900, width=900, bg='black', confine=False,
                                   xscrollincrement=self.rect_size, yscrollincrement=self.rect_size)
        self.board_canvas.grid(row=0, column=0)  # , sticky='nsew')
        # Create the tiles
        self.canvas = []
        for r in range(rows):
            self.canvas.append([])
            for c in range(cols):
                self.canvas[r].append(
                    {
                        'ID': self.board_canvas.create_rectangle((c*self.rect_size, r*self.rect_size),
                                                                 ((c+1)*self.rect_size, (r+1)*self.rect_size),
                                                                 fill='white', ),
                    })
        # Blue cursor rectangle and the (initially hidden) yellow selection marker
        self.pos_rect = self.board_canvas.create_rectangle(
            (self.col_pos*self.rect_size, self.row_pos*self.rect_size),
            ((self.col_pos+1)*self.rect_size, (self.row_pos+1)*self.rect_size),
            outline='blue', width=5)  # , fill='#8AC6ff')
        self.sel_rect = self.board_canvas.create_rectangle(
            (0, 0), (self.rect_size, self.rect_size),
            outline='yellow', width=3, state=HIDDEN,)  # , fill='#8AC6ff')
        # Bind arrow keys to board scrolling
        root.bind('<Left>', self._scroll)
        root.bind('<Right>', self._scroll)
        root.bind('<Up>', self._scroll)
        root.bind('<Down>', self._scroll)
        # Limit the canvas scroll region
        board_bbox = list(self.board_canvas.bbox(ALL))
        board_bbox[0] -= 49
        board_bbox[1] -= 49
        board_bbox[2] -= 1
        board_bbox[3] += 49
        self.board_canvas.config(scrollregion=board_bbox)
        # Selection and user panel frame
        s_u_frame = Frame(b_u_frame,)
        s_u_frame.pack(side=RIGHT, fill=Y)
        # Selection (action) list
        sel_Lab = Label(s_u_frame, text='선택지')
        sel_Lab.pack()
        self.sel_Lb = Listbox(s_u_frame, height=20, width=30, state=DISABLED)
        self.sel_Lb.pack()
        root.bind('z', self._select)
        ## root.bind('Z', self._select)
        root.bind('x', self._cancel)
        ## root.bind('X', self._select)
        # User list
        users_Lab = Label(s_u_frame, text='사용자')
        users_Lab.pack()
        users_Lb = Listbox(s_u_frame, width=30, activestyle=NONE, highlightcolor='white',
                           selectforeground='black', selectbackground='white')
        users_Lb.pack(fill=Y, expand=True)
        users_Lb.insert(END, 'KSK')
        users_Lb.insert(END, 'KSC')
        users_Lb.insert(END, 'Computer1')
        users_Lb.insert(END, 'Computer2')
        # Info frame
        i_frame = LabelFrame(root, text='info')
        i_frame.pack(fill=X, side=BOTTOM)
        # Info label
        info_label = Label(i_frame, text='이름')
        info_label.pack(fill=X)
        ## info_label.place(bordermode=OUTSIDE, x=0, y=0, height=100, width=100)

    def _scroll(self, event):
        """Move the cursor one tile with the arrow keys while the board is active."""
        # Board input is only accepted while the option list is disabled.
        if self.sel_Lb.config('state')[-1] == DISABLED:
            if event.keysym == 'Left':
                if self.col_pos > 0:
                    self.board_canvas.xview_scroll(-1, UNITS)
                    self.col_pos -= 1
                    self.board_canvas.move(self.pos_rect, -self.rect_size, 0)
            elif event.keysym == 'Right':
                if self.col_pos < self.cols-1:
                    self.board_canvas.xview_scroll(1, UNITS)
                    self.col_pos += 1
                    self.board_canvas.move(self.pos_rect, self.rect_size, 0)
            elif event.keysym == 'Up':
                if self.row_pos > 0:
                    self.board_canvas.yview_scroll(-1, UNITS)
                    self.row_pos -= 1
                    self.board_canvas.move(self.pos_rect, 0, -self.rect_size)
            elif event.keysym == 'Down':
                if self.row_pos < self.rows-1:
                    self.board_canvas.yview_scroll(1, UNITS)
                    self.row_pos += 1
                    self.board_canvas.move(self.pos_rect, 0, self.rect_size)
        else:
            pass

    def _select(self, event):
        """Handle 'z': select a unit, choose an action, or confirm a move target."""
        if self.sel_Lb.config('state')[-1] == DISABLED:  # board is active
            if self.board_canvas.itemcget(self.sel_rect, 'state') == NORMAL:  # a unit is selected
                # Confirm: move the previously selected unit to the cursor tile.
                sel_coords = self.board_canvas.coords(self.sel_rect)
                sel_x = int(sel_coords[0])//self.rect_size
                sel_y = int(sel_coords[1])//self.rect_size
                unit = self.canvas[sel_y][sel_x].get('unit')
                self._move(unit)
            elif self.canvas[self.row_pos][self.col_pos].get('unit'):  # a unit is on the cursor tile
                # Deactivate the board
                self.board_canvas.itemconfig(self.sel_rect, state=NORMAL)
                self.board_canvas.coords(self.sel_rect, self.board_canvas.coords(self.pos_rect))
                self.board_canvas.lift(self.sel_rect)
                self.board_canvas.config(state=DISABLED)
                # Activate the option list
                self.sel_Lb.config(state=NORMAL)
                self.sel_Lb.insert(END, '이동', '공격', '기술')
                self.sel_Lb.focus()
                self.sel_Lb.select_set(0)
                self.sel_Lb.activate(0)
        else:
            # Option list is active: dispatch the chosen action.
            sel = self.sel_Lb.selection_get()
            if sel == '이동': self._move_range(self.canvas[self.row_pos][self.col_pos].get('unit'))
            elif sel == '공격': self._attack_range(self.canvas[self.row_pos][self.col_pos].get('unit'))
            elif sel == '기술': self._skill()

    def _cancel(self, event):
        """Handle 'x': cancel the current selection (not implemented yet)."""
        pass

    def _move_range(self, unit):
        """Highlight every empty tile within the unit's movement (Manhattan) range."""
        self.move_rects = []
        for m in range(-unit.stat['speed'], unit.stat['speed']+1):
            for n in range(-(unit.stat['speed']-abs(m)), unit.stat['speed']-abs(m)+1):
                # NOTE(review): x is compared against rows and y against cols —
                # looks swapped for non-square maps; harmless here (20x20).
                if unit.x+m < 0 or unit.x+m > self.rows-1 or unit.y+n < 0 or unit.y+n > self.cols-1: pass
                elif self.canvas[unit.y+n][unit.x+m].get('unit', None): pass
                else:
                    self.move_rects.append(self.board_canvas.create_rectangle(
                        ((unit.x+m)*self.rect_size, (unit.y+n)*self.rect_size),
                        ((unit.x+m+1)*self.rect_size, (unit.y+n+1)*self.rect_size),
                        fill='skyblue'))
        # Re-enable the board
        self.board_canvas.config(state=NORMAL)
        self.board_canvas.lift(self.sel_rect)
        self.board_canvas.lift(self.pos_rect)
        # Disable the option list
        self.sel_Lb.delete(0, END)
        self.sel_Lb.config(state=DISABLED)

    def _move(self, unit):
        """Move *unit* from the selected tile to the cursor tile if reachable and empty."""
        sel_coords = self.board_canvas.coords(self.sel_rect)
        sel_x = int(sel_coords[0])//self.rect_size
        sel_y = int(sel_coords[1])//self.rect_size
        # Manhattan distance between cursor and selection.
        dist = abs(self.row_pos - sel_y) + abs(self.col_pos - sel_x)
        if 0 < dist <= unit.stat['speed'] and not self.canvas[self.row_pos][self.col_pos].get('unit', False):
            self.canvas[self.row_pos][self.col_pos]['unit'] = self.canvas[unit.y][unit.x].pop('unit')
            # Center the sprite on the destination tile.
            self.board_canvas.coords(unit.image_ID,
                                     self.board_canvas.coords(self.pos_rect)[0] + self.rect_size//2,
                                     self.board_canvas.coords(self.pos_rect)[1] + self.rect_size//2)
            for R in self.move_rects:
                self.board_canvas.delete(R)
            del(self.move_rects)
            self.board_canvas.itemconfig(self.sel_rect, state=HIDDEN)
            unit.x = self.col_pos
            unit.y = self.row_pos

    def _attack_range(self, unit):
        """Highlight attackable tiles and outline any units inside attack range."""
        self.attack_rects = []
        self.target_rects = []
        for m in range(-unit.stat['range'], unit.stat['range']+1):
            for n in range(-(unit.stat['range']-abs(m)), unit.stat['range']-abs(m)+1):
                if unit.x+m < 0 or unit.x+m > self.rows-1 or unit.y+n < 0 or unit.y+n > self.cols-1: pass
                elif not m and not n: pass  # skip the unit's own tile
                else:
                    self.attack_rects.append(self.board_canvas.create_rectangle(
                        ((unit.x+m)*self.rect_size, (unit.y+n)*self.rect_size),
                        ((unit.x+m+1)*self.rect_size, (unit.y+n+1)*self.rect_size),
                        fill='orange'))
                    other_unit = self.canvas[unit.y+n][unit.x+m].get('unit', None)
                    if other_unit:
                        # Keep the sprite above the highlight and mark it as a target.
                        self.board_canvas.lift(other_unit.image_ID)
                        self.target_rects.append(self.board_canvas.create_rectangle(
                            (other_unit.x*self.rect_size, other_unit.y*self.rect_size),
                            ((other_unit.x+1)*self.rect_size, (other_unit.y+1)*self.rect_size),
                            outline='red', width=3))
        # Re-enable the board
        self.board_canvas.config(state=NORMAL)
        for T in self.target_rects:
            self.board_canvas.lift(T)
        self.board_canvas.lift(self.sel_rect)
        self.board_canvas.lift(self.pos_rect)
        # Disable the option list
        self.sel_Lb.delete(0, END)
        self.sel_Lb.config(state=DISABLED)

    def _skill(self):
        """Use a skill (not implemented yet)."""
        pass

    def set_unit(self, unit, x, y):
        """Place *unit* on tile (x, y), loading its image file onto the canvas."""
        unit.x = x
        unit.y = y
        # Replace the filename with the loaded PhotoImage (a reference must be
        # kept on the unit or tkinter would garbage-collect the image).
        unit.image = PhotoImage(file = unit.image)
        unit.image_ID = self.board_canvas.create_image(x*self.rect_size+unit.image.width()//2,
                                                       y*self.rect_size+unit.image.height()//2,
                                                       image=unit.image)
        self.board_canvas.lift(self.pos_rect)
        self.canvas[y][x]['unit'] = unit
##a.set_unit(1,3,Unit(speed = 5))
##a.set_unit(40,30,Unit())
##print(a)
##
##a.move(1,3)
# Build the UI, place the demo units from Unit.py, and start the event loop.
root = Tk()
root.geometry('1200x1000+0+0')
root.title('턴제 게임')
a = Board(20,20)
a.set_unit(핫산, 0, 1)
a.set_unit(예거, 10,7)
root.mainloop()
| {"/Board.py": ["/Unit.py"]} |
69,026 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/orders/serializers.py | from django.db import transaction
from rest_framework import serializers
from members.serializers import UserSerializer
from store.models.food import Food, SideDishes
from store.models.store import Store
from store.serializers import FoodSerializer, SideDishSerializer
from .models.cart import Cart, CartItem
from .models.order import Order
class CartItemSerializer(serializers.ModelSerializer):
    """Serialize a cart line item with its food, options, and computed total."""
    food = FoodSerializer(read_only=True)
    total_price = serializers.SerializerMethodField()

    class Meta:
        model = CartItem
        fields = (
            'pk',
            'cart',
            'food',
            'quantity',
            'is_ordered',
            'options',
            'total_price',
        )
        read_only_fields = ('options',)

    @transaction.atomic
    def create(self, validate_data):
        """Create a cart item; food and side dishes come via serializer context."""
        cart = validate_data['cart']
        food = self.context['food']
        # Only one open (not yet ordered) line per food per cart is allowed.
        if CartItem.objects.filter(cart=cart, food=food, is_ordered=False).exists():
            raise serializers.ValidationError('이미 존재하는 아이템입니다.')
        item = CartItem.objects.create(
            cart=cart,
            food=food,
            quantity=validate_data['quantity'],
        )
        if food.has_side_dishes:
            side_dishes = self.context['side_dishes']
            for side_dish in side_dishes:
                item.options.add(side_dish)
            item.save()
        return item

    def get_total_price(self, obj):
        # Delegates to the model's total_price property.
        return obj.total_price
class OrderSerializer(serializers.ModelSerializer):
    """Serialize an order; on create, attaches all open cart items to it."""
    store = serializers.SerializerMethodField()

    class Meta:
        model = Order
        fields = (
            'pk',
            'user',
            'phone',
            'shipping',
            'comment',
            'payment_option',
            'payment_status',
            'store',
            'payment',
            'created_at',
        )
        read_only_fields = ('user',)

    @classmethod
    def setup_eager_loading(cls, qs):
        """Prefetch relations used during serialization to avoid N+1 queries."""
        queryset = qs.select_related('user').prefetch_related('cartitem_set')
        return queryset

    def get_store(self, obj):
        """Per-item store name and total price for every cart item on this order."""
        data = []
        for i in obj.cartitem_set.select_related('cart', 'food', 'order').prefetch_related('options'):
            # NOTE(review): .get() assumes exactly one Store matches each food;
            # raises if a food maps to zero or multiple stores — confirm data model.
            store = Store.objects.get(foodcategory__food__pk=i.food.pk)
            total_price = i.total_price
            info = {'store': store.name, 'total_price': total_price}
            data.append(info)
        return data

    @transaction.atomic
    def create(self, validate_data):
        """Create the order for the requesting user and claim their open cart items."""
        user = self.context['request'].user
        order = Order.objects.create(
            **validate_data,
            user=user,
        )
        cart = Cart.objects.get(user=user)
        for item in cart.item.filter(is_ordered=False).select_related('cart', 'food', 'order').prefetch_related('options'):
            item.order = order
            item.is_ordered = True
            item.save()
        return order
class OptionSerializer(serializers.RelatedField):
    """Render a related side-dish option as its bare name string."""

    def to_representation(self, value):
        return value.name
# class CartItemInfoSerializer(serializers.ModelSerializer):
# store = serializers.SerializerMethodField()
# food = serializers.SerializerMethodField()
# options = OptionSerializer(many=True, read_only=True)
#
# class Meta:
# model = CartItem
# fields = (
# 'pk',
# 'store',
# 'food',
# 'is_ordered',
# 'options',
# 'quantity',
# 'total_price',
# )
#
# def get_store(self, obj):
# return Store.objects.get(foodcategory__food__pk=obj.food.pk).name
#
# def get_food(self, obj):
# return Food.objects.get(pk=obj.food.pk).name
#
# def to_representation(self, instance):
# if not instance.is_ordered:
# return super().to_representation(instance)
class CartSerializer(serializers.ModelSerializer):
    """Serialize a cart with a summary of its open (not yet ordered) items."""
    # item = CartItemInfoSerializer(many=True, read_only=True)
    item = serializers.SerializerMethodField()

    class Meta:
        model = Cart
        fields = (
            'user',
            'item',
            'payment',
        )

    def get_item(self, obj):
        """Summaries (store/food/quantity/total) for items not yet ordered."""
        data = []
        for i in obj.item.filter(is_ordered=False):
            # NOTE(review): two extra queries per item; consider
            # select_related/prefetch_related if carts grow large.
            store = Store.objects.get(foodcategory__food__pk=i.food.pk)
            food = Food.objects.get(pk=i.food.pk)
            quantity = i.quantity
            total_price = i.total_price
            info = {
                'pk': i.pk,
                'store': store.name,
                'food': food.name,
                'quantity': quantity,
                'total_price': total_price
            }
            data.append(info)
        return data
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,027 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/members/urls.py | from django.urls import path
from members.apis import AuthToken, UserRegister, Profile, FacebookAuthToken
# Member (auth/profile) endpoints.
urlpatterns = [
    path('login/', AuthToken.as_view()),
    # Fixed: was `UserRegister().as_view()` — as_view() must be called on the
    # view class, not an instance (Django's View.as_view is class-only), and
    # this now matches the other routes.
    path('register/', UserRegister.as_view()),
    path('profile/', Profile.as_view()),
    path('facebook/', FacebookAuthToken.as_view()),
]
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,028 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/address/serializers.py | from rest_framework import serializers
from address.models import Address
class AddressSerializer(serializers.ModelSerializer):
    """Full address representation; create() binds it to the requesting user."""

    class Meta:
        model = Address
        fields = (
            'pk',
            'old_address',
            'address',
            'detail_address',
            'lat',
            'lng',
            'user',
            'store',
            'created_at',
        )
        read_only_fields = ('user', 'store')

    def create(self, validate_data):
        """Create an address owned by the authenticated request user."""
        address = Address.objects.create(
            **validate_data,
            user=self.context['request'].user,
        )
        return address
class AddressInfoSerializer(serializers.ModelSerializer):
    """Compact address representation for embedding in other serializers."""

    class Meta:
        model = Address
        fields = (
            'pk',
            'old_address',
            'address',
            'detail_address',
            'lat',
            'lng'
        )
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,029 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/review/apis.py | from rest_framework import permissions, status, serializers
from rest_framework.generics import get_object_or_404
from rest_framework.response import Response
from rest_framework.views import APIView
from review.models import Review
from review.serializers import ReviewSerializer, ReviewCreateSerializer, ReviewImageSerializer, \
ReviewImageCreateSerializer
from store.models.store import Store
from store.serializers import StoreImageSerializer
class ReviewList(APIView):
    """List, create, update, and delete store reviews."""
    permission_classes = (permissions.IsAuthenticatedOrReadOnly, )

    def get(self, request):
        """Return every review."""
        review = Review.objects.all()
        serializer = ReviewSerializer(review, many=True)
        return Response(serializer.data)

    def post(self, request, format=None):
        """Create a review for the store referenced by request.data['store']."""
        # store = Store.objects.get(pk=request.data.pop('store'))
        store = Store.objects.get(pk=request.data['store'])
        serializer = ReviewSerializer(
            data=request.data,
            context={
                'request': request,
                'store': store,
            }
        )
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def patch(self, request, partial=True):
        """Update the caller's own review identified by request.data['pk']."""
        review_pk = request.data.get('pk')
        if not review_pk:
            raise serializers.ValidationError({'detail': 'pk값이 주어지지 않았습니다.'})
        review = Review.objects.get(pk=review_pk)
        # Only the author may edit their review.
        if review.user != request.user:
            raise serializers.ValidationError({'detail': '해당 유저가 아닙니다.'})
        serializer = ReviewSerializer(review, data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request):
        """Delete the caller's review identified by request.data['review_pk']."""
        review = get_object_or_404(Review, user=request.user, pk=request.data.get('review_pk'))
        # Redundant with the user= filter above, kept as a defensive guard.
        if review.user != request.user:
            raise serializers.ValidationError({'detail': '해당 유저가 아닙니다.'})
        review.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
class ReviewImageCreate(APIView):
    """Upload an image attached to a review."""

    def post(self, request, format=None):
        serializer = ReviewImageCreateSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,030 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/store/serializers.py | from rest_framework import serializers
from address.models import Address
from address.serializers import AddressInfoSerializer
from members.serializers import UserSerializer
from .models.food import Food, FoodCategory, FoodImage, SideDishes
from .models.store import Store, StoreCategory, StoreImage
class SideDishSerializer(serializers.ModelSerializer):
    """Side-dish option attached to a food (name, price, required flag)."""

    class Meta:
        model = SideDishes
        fields = (
            'pk',
            'name',
            'price',
            'is_required',
        )
class FoodImageSerializer(serializers.ModelSerializer):
    """Image attached to a food item."""

    class Meta:
        model = FoodImage
        fields = (
            'location',
            'created_at',
        )
# Serializer for food data.
class FoodSerializer(serializers.ModelSerializer):
    """Food representation including its images and side-dish options."""
    foodimage_set = FoodImageSerializer(many=True)
    sidedishes_set = SideDishSerializer(many=True)

    def get_foodimage_set(self, obj):
        # NOTE(review): dead code — foodimage_set is declared as a nested
        # serializer above (not a SerializerMethodField), so DRF never calls
        # this; it also returns the serializer instance rather than .data.
        images = FoodImage.objects.select_related('food').filter(food=obj)
        return FoodImageSerializer(images, many=True)

    class Meta:
        model = Food
        fields = (
            'pk',
            'name',
            'price',
            'stock',
            'has_side_dishes',
            'food_info',
            'foodimage_set',
            'sidedishes_set',
        )
# Serializer for a store's menu categories.
class FoodCategorySerializer(serializers.ModelSerializer):
    """A menu category together with all foods it contains."""
    food_set = FoodSerializer(many=True)

    class Meta:
        model = FoodCategory
        fields = (
            'name',
            'store',
            'food_set',
        )
# Serializer for store categories.
class StoreCategorySerializer(serializers.ModelSerializer):
    """Store category rendered as its bare name."""

    class Meta:
        model = StoreCategory
        fields = (
            'name',
        )
class StoreImageSerializer(serializers.ModelSerializer):
    """Image attached to a store."""

    class Meta:
        model = StoreImage
        fields = (
            'location',
            'created_at',
        )
# Serializer for store data.
class StoreSerializer(serializers.ModelSerializer):
    """Store listing representation with category, images, and address."""
    # owner = UserSerializer()
    category = StoreCategorySerializer()
    storeimage_set = StoreImageSerializer(many=True)
    address = serializers.SerializerMethodField()

    class Meta:
        model = Store
        fields = (
            'pk',
            'name',
            'store_info',
            'origin_info',
            'owner',
            'least_cost',
            'takeout',
            'fee',
            'storeimage_set',
            'category',
            'address',
            'rating_average',
        )
        read_only_fields = ('owner', 'category',)

    def get_address(self, obj):
        """The store's address, or None when no address is registered."""
        if not Address.objects.filter(store=obj).exists():
            return
        else:
            return AddressInfoSerializer(Address.objects.get(store=obj)).data
# Serializer adding the store's full menu (categories + foods) to StoreSerializer.
class StoreDetailSerializer(StoreSerializer):
    menu = serializers.SerializerMethodField(read_only=True)
    # Fetches the store's menu categories (with their foods prefetched)
    # to populate the read-only `menu` field.
    def get_menu(self, obj):
        category = FoodCategory.objects.filter(store=obj).prefetch_related('food_set')
        return FoodCategorySerializer(category, many=True).data
    class Meta(StoreSerializer.Meta):
        fields = StoreSerializer.Meta.fields + (
            'menu',
        )
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,031 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/address/urls.py | from django.urls import path
from .apis import UserAddressAPIView
# Routes the address app root to the user-address API view.
urlpatterns = [
    path('', UserAddressAPIView.as_view()),
]
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,032 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/members/apis.py | from django.contrib.auth import get_user_model
from rest_framework import status, generics, permissions
from rest_framework.exceptions import AuthenticationFailed
from rest_framework.response import Response
from rest_framework.views import APIView
from .serializers import AuthTokenSerializer, UserRegisterSerializer, UserSerializer, FacebookSerializer
User = get_user_model()
class UserRegister(APIView):
    """Create a new user account from the posted registration data."""

    permission_classes = (permissions.AllowAny,)

    def post(self, request):
        serializer = UserRegisterSerializer(data=request.data)
        # Guard clause: reject invalid payloads up front.
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        return Response(status=status.HTTP_201_CREATED)
class AuthToken(APIView):
    """Exchange credentials for an auth token; 401 on invalid input."""

    permission_classes = (permissions.AllowAny,)

    def post(self, request):
        serializer = AuthTokenSerializer(data=request.data)
        if not serializer.is_valid():
            raise AuthenticationFailed()
        return Response(serializer.data, status=status.HTTP_200_OK)
class Profile(APIView):
    """Return the requesting user's own profile data."""
    permission_classes = (
        permissions.IsAuthenticatedOrReadOnly,
    )
    def get(self, request):
        # NOTE(review): IsAuthenticatedOrReadOnly lets anonymous GETs reach
        # here, where request.user would be AnonymousUser — confirm intent.
        user = request.user
        serializer = UserSerializer(user)
        return Response(serializer.data, status=status.HTTP_200_OK)
class FacebookAuthToken(APIView):
    """Issue an auth token from Facebook OAuth credentials."""

    permission_classes = (
        permissions.AllowAny,
    )

    def post(self, request):
        serializer = FacebookSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        # Bug fix: the exception was *returned* (handed to DRF as if it were
        # a response) instead of raised. Raise it so DRF renders a proper
        # 401, consistent with AuthToken above.
        raise AuthenticationFailed()
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,033 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/store/models/food.py | from django.db import models
from .store import Store
class FoodCategory(models.Model):
    """A menu section of one store; deleting the store deletes its sections."""
    name = models.CharField(verbose_name='음식분류', max_length=100)
    store = models.ForeignKey(
        Store,
        on_delete=models.CASCADE,
        verbose_name='상점',
    )
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = '메뉴'
        verbose_name_plural = f'{verbose_name} 목록'
class Food(models.Model):
    """A single food item belonging to one menu category of a store."""

    category = models.ForeignKey(
        FoodCategory,
        on_delete=models.CASCADE,
        verbose_name='메뉴'
    )
    name = models.CharField(verbose_name='음식이름', max_length=100)
    price = models.PositiveIntegerField(verbose_name='가격', default=0)
    stock = models.PositiveIntegerField(verbose_name='수량', default=0)
    created_at = models.DateTimeField(verbose_name='등록일', auto_now_add=True)
    modified_at = models.DateTimeField(verbose_name='수정일', auto_now=True)
    has_side_dishes = models.BooleanField(verbose_name='사이드메뉴', default=False)
    food_info = models.TextField(verbose_name='음식정보', blank=True)

    def __str__(self):
        # Same "<name>::<price>원" rendering as before, via an f-string.
        return f'{self.name}::{self.price}원'

    class Meta:
        verbose_name = '음식'
        verbose_name_plural = f'{verbose_name} 목록'
class FoodImage(models.Model):
    """A photo attached to a food item; removed together with the food."""
    location = models.ImageField(verbose_name='음식사진', upload_to='food', blank=True)
    food = models.ForeignKey(
        Food,
        verbose_name='음식',
        on_delete=models.CASCADE,
    )
    created_at = models.DateTimeField(verbose_name='등록일', auto_now_add=True)
    class Meta:
        verbose_name = '음식사진'
        verbose_name_plural = f'{verbose_name} 목록'
class SideDishes(models.Model):
    """An add-on option of a food item; may be marked as a required choice."""
    name = models.CharField(verbose_name='사이드메뉴이름', max_length=50)
    price = models.PositiveIntegerField(verbose_name='가격', default=0)
    is_required = models.BooleanField(verbose_name='필수선택', default=False)
    food = models.ForeignKey(
        Food,
        on_delete=models.CASCADE,
        verbose_name='음식'
    )
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = '사이드메뉴'
        verbose_name_plural = f'{verbose_name} 목록'
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,034 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/orders/urls/cart_urls.py | from django.urls import path
from ..apis import CartItemList, CartItemDetail
# Cart item collection and detail endpoints.
urlpatterns = [
    path('items/', CartItemList.as_view()),
    path('items/<int:pk>/', CartItemDetail.as_view()),
]
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,035 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/store/migrations/0001_initial.py | # Generated by Django 2.1.4 on 2018-12-20 08:58
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema for the store app — do not hand-edit."""
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Food',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100, verbose_name='음식이름')),
                ('price', models.PositiveIntegerField(default=0, verbose_name='가격')),
                ('stock', models.PositiveIntegerField(default=0, verbose_name='수량')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='등록일')),
                ('modified_at', models.DateTimeField(auto_now=True, verbose_name='수정일')),
                ('has_side_dishes', models.BooleanField(default=False, verbose_name='사이드메뉴')),
                ('food_info', models.TextField(blank=True, verbose_name='음식정보')),
            ],
            options={
                'verbose_name': '음식',
                'verbose_name_plural': '음식 목록',
            },
        ),
        migrations.CreateModel(
            name='FoodCategory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100, verbose_name='음식분류')),
            ],
            options={
                'verbose_name': '메뉴',
                'verbose_name_plural': '메뉴 목록',
            },
        ),
        migrations.CreateModel(
            name='FoodImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('location', models.ImageField(blank=True, upload_to='food', verbose_name='음식사진')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='등록일')),
                ('food', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='store.Food', verbose_name='음식')),
            ],
            options={
                'verbose_name': '음식사진',
                'verbose_name_plural': '음식사진 목록',
            },
        ),
        migrations.CreateModel(
            name='SideDishes',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50, verbose_name='사이드메뉴이름')),
                ('price', models.PositiveIntegerField(default=0, verbose_name='가격')),
                ('is_required', models.BooleanField(default=False, verbose_name='필수선택')),
                ('food', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='store.Food', verbose_name='음식')),
            ],
            options={
                'verbose_name': '사이드메뉴',
                'verbose_name_plural': '사이드메뉴 목록',
            },
        ),
        migrations.CreateModel(
            name='Store',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100, verbose_name='상점이름')),
                ('store_info', models.TextField(blank=True, verbose_name='상점소개')),
                ('origin_info', models.TextField(blank=True, verbose_name='원산지 정보')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='등록일')),
                ('least_cost', models.PositiveIntegerField(default=0, verbose_name='최소주문금액')),
                ('takeout', models.BooleanField(default=False, verbose_name='테이크아웃')),
                ('fee', models.PositiveIntegerField(default=0, verbose_name='배달팁')),
                ('rating_average', models.DecimalField(blank=True, decimal_places=1, max_digits=5, null=True, verbose_name='별점평균')),
            ],
            options={
                'verbose_name': '상점',
                'verbose_name_plural': '상점 목록',
            },
        ),
        migrations.CreateModel(
            name='StoreCategory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50, verbose_name='카테고리')),
            ],
            options={
                'verbose_name': '카테고리',
                'verbose_name_plural': '카테고리 목록',
            },
        ),
        migrations.CreateModel(
            name='StoreImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('location', models.ImageField(blank=True, upload_to='store', verbose_name='상점사진')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='등록일')),
                ('store', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='store.Store', verbose_name='상점')),
            ],
            options={
                'verbose_name': '상점이미지',
                'verbose_name_plural': '상점이미지 목록',
            },
        ),
        # FK fields added after both ends of each relation exist.
        migrations.AddField(
            model_name='store',
            name='category',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='store.StoreCategory', verbose_name='카테고리'),
        ),
        migrations.AddField(
            model_name='store',
            name='owner',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='상점주'),
        ),
        migrations.AddField(
            model_name='foodcategory',
            name='store',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='store.Store', verbose_name='상점'),
        ),
        migrations.AddField(
            model_name='food',
            name='category',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='store.FoodCategory', verbose_name='메뉴'),
        ),
    ]
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,036 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/store/apis.py | from rest_framework import generics, permissions
from rest_framework.pagination import PageNumberPagination
from .models.store import Store
from .serializers import StoreSerializer, StoreDetailSerializer
from rest_framework import filters
class CustomPaginator(PageNumberPagination):
    """Page-number pagination with 10 items per page."""
    page_size = 10
    # NOTE(review): max_page_size only takes effect when
    # page_size_query_param is also set — confirm intent.
    max_page_size = 1000
class StoreList(generics.ListCreateAPIView):
    """List (searchable by name, paginated) or create stores of one category."""

    queryset = Store.objects.all()
    serializer_class = StoreSerializer
    filter_backends = (filters.SearchFilter,)
    search_fields = ('name',)
    permission_classes = (permissions.AllowAny,)
    pagination_class = CustomPaginator
    lookup_url_kwarg = 'category_pk'

    def get_queryset(self):
        # Restrict to the category from the URL and eager-load the
        # relations the serializer renders.
        stores = Store.objects.filter(category=self.kwargs['category_pk'])
        stores = stores.select_related('category', 'owner')
        return stores.prefetch_related(
            'storeimage_set', 'foodcategory_set', 'is_store_address_set'
        )
class StoreDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve, update or delete one store, addressed by `store_pk`."""

    serializer_class = StoreDetailSerializer
    permission_classes = (permissions.AllowAny,)
    lookup_url_kwarg = 'store_pk'

    def get_queryset(self):
        # Narrow to the requested store and eager-load rendered relations.
        stores = Store.objects.filter(pk=self.kwargs['store_pk'])
        stores = stores.select_related('category', 'owner')
        return stores.prefetch_related(
            'storeimage_set', 'foodcategory_set', 'is_store_address_set'
        )
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,037 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/review/admin.py | from django.contrib import admin
from .models import Review, ReviewImage, Comment
# Expose the review models in the Django admin with default options.
admin.site.register(Review)
admin.site.register(ReviewImage)
admin.site.register(Comment)
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,038 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/orders/models/order.py | from django.contrib.auth import get_user_model
from django.db import models
User = get_user_model()
class Order(models.Model):
    """A user's order; its total is derived from the attached cart items."""

    user = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
    )
    shipping = models.CharField(verbose_name='배송지', max_length=50)
    # NOTE(review): auto_now rewrites this on every save; auto_now_add may
    # have been intended for an order date — confirm before changing.
    created_at = models.DateTimeField(verbose_name='주문일자', auto_now=True)
    payment_status = models.BooleanField(verbose_name='결제상태', default=False)
    payment_option = models.CharField(verbose_name='결제수단', max_length=50, blank=True)
    comment = models.TextField(verbose_name='요청사항', blank=True)
    phone = models.CharField(verbose_name='전화번호', max_length=13, blank=True)

    class Meta:
        verbose_name = '주문'
        verbose_name_plural = f'{verbose_name} 목록'

    @property
    def payment(self):
        """Sum of total_price over this order's already-ordered cart items."""
        return sum(
            item.total_price
            for item in self.cartitem_set.all()
            if item.is_ordered
        )
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,039 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/store/models/store.py | from django.contrib.auth import get_user_model
from django.db import models
User = get_user_model()
class StoreCategory(models.Model):
    """A top-level category a store can be filed under."""

    name = models.CharField(verbose_name='카테고리', max_length=50)

    class Meta:
        verbose_name = '카테고리'
        verbose_name_plural = f'{verbose_name} 목록'

    def __str__(self):
        return self.name
class Store(models.Model):
    """A store run by one owner; keeps a cached average of its review ratings."""

    category = models.ForeignKey(
        StoreCategory,
        on_delete=models.SET_NULL,
        verbose_name='카테고리',
        null=True
    )
    name = models.CharField(verbose_name='상점이름', max_length=100)
    store_info = models.TextField(verbose_name='상점소개', blank=True)
    origin_info = models.TextField(verbose_name='원산지 정보', blank=True)
    created_at = models.DateTimeField(verbose_name='등록일', auto_now_add=True)
    owner = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        verbose_name='상점주',
    )
    least_cost = models.PositiveIntegerField(verbose_name='최소주문금액', default=0)
    takeout = models.BooleanField(verbose_name='테이크아웃', default=False)
    fee = models.PositiveIntegerField(verbose_name='배달팁', default=0)
    rating_average = models.DecimalField(verbose_name='별점평균', blank=True, null=True, max_digits=5, decimal_places=1)

    def __str__(self):
        # Same "<category>::<name>" rendering as before, via an f-string.
        return f'{self.category}::{self.name}'

    class Meta:
        verbose_name = '상점'
        verbose_name_plural = f'{verbose_name} 목록'
class StoreImage(models.Model):
    """A photo attached to a store; removed together with the store."""
    location = models.ImageField(verbose_name='상점사진', upload_to='store', blank=True)
    store = models.ForeignKey(
        Store,
        on_delete=models.CASCADE,
        verbose_name='상점',
    )
    created_at = models.DateTimeField(verbose_name='등록일', auto_now_add=True)
    class Meta:
        verbose_name = '상점이미지'
        verbose_name_plural = f'{verbose_name} 목록'
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,040 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/review/models.py | from django.core.validators import MaxValueValidator
from django.db import models
from django.db.models import Avg
from members.models import User
from store.models.store import Store
class Review(models.Model):
    """A user's rating/review of a store; saving refreshes the store average."""

    content = models.TextField(verbose_name='리뷰내용', blank=True)
    # NOTE(review): validators run via full_clean()/forms only, not on save().
    rating = models.PositiveIntegerField(
        verbose_name='별점',
        default=0,
        validators=[MaxValueValidator(5)]
    )
    user = models.ForeignKey(
        User,
        verbose_name='사용자',
        on_delete=models.CASCADE,
    )
    store = models.ForeignKey(
        Store,
        verbose_name='상점',
        on_delete=models.CASCADE,
    )
    created_at = models.DateTimeField(verbose_name='등록일', auto_now_add=True)
    modified_at = models.DateTimeField(verbose_name='수정일', auto_now=True)

    class Meta:
        verbose_name = '리뷰'
        verbose_name_plural = f'{verbose_name} 목록'

    def save(self, *args, **kwargs):
        """Persist the review, then recompute the store's cached rating average."""
        super().save(*args, **kwargs)
        # Update self.store in place instead of re-fetching the same Store
        # row with Store.objects.get(pk=...): one query fewer, same result.
        star_rating = Review.objects.filter(store=self.store).aggregate(Avg('rating'))
        self.store.rating_average = star_rating['rating__avg']
        self.store.save()
class ReviewImage(models.Model):
    """A photo attached to a review; survives review deletion (SET_NULL)."""
    location = models.ImageField(verbose_name='리뷰사진', upload_to='review', blank=True, null=True)
    review = models.ForeignKey(
        Review,
        verbose_name='리뷰',
        on_delete=models.SET_NULL,
        blank=True,
        null=True,
    )
    created_at = models.DateTimeField(verbose_name='등록일', auto_now_add=True)
    class Meta:
        verbose_name = '리뷰이미지'
        verbose_name_plural = f'{verbose_name} 목록'
class Comment(models.Model):
    """A store owner's reply attached to one review."""
    content = models.TextField(verbose_name='사장님댓글', blank=True)
    user = models.ForeignKey(
        User,
        verbose_name='사장님',
        on_delete=models.CASCADE,
    )
    review = models.ForeignKey(
        Review,
        verbose_name='리뷰',
        on_delete=models.CASCADE,
    )
    created_at = models.DateTimeField(verbose_name='등록일', auto_now_add=True)
    modified_at = models.DateTimeField(verbose_name='수정일', auto_now=True)
    class Meta:
        verbose_name = '사장님댓글'
        verbose_name_plural = f'{verbose_name} 목록'
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,041 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/orders/models/cart.py | from django.contrib.auth import get_user_model
from django.db import models
from store.models.food import Food, SideDishes
from .order import Order
User = get_user_model()
class Cart(models.Model):
    """One shopping cart per user (the user is also the primary key)."""

    user = models.OneToOneField(
        User,
        primary_key=True,
        on_delete=models.CASCADE,
    )

    def __str__(self):
        return f'{self.user.username}::cart'

    @property
    def payment(self):
        """Total price of all not-yet-ordered items in this cart."""
        return sum(
            entry.total_price
            for entry in self.item.all()
            if not entry.is_ordered
        )
class CartItem(models.Model):
    """One food (plus chosen side-dish options) with a quantity in a cart."""

    cart = models.ForeignKey(
        Cart,
        verbose_name='장바구니',
        on_delete=models.CASCADE,
        related_name='item',
        related_query_name='items',
    )
    food = models.ForeignKey(
        Food,
        verbose_name='음식',
        on_delete=models.CASCADE,
        related_name='in_cart',
        related_query_name='in_carts',
    )
    quantity = models.PositiveIntegerField(verbose_name='수량', default=0)
    is_ordered = models.BooleanField(
        verbose_name='주문상태',
        default=False
    )
    order = models.ForeignKey(
        Order,
        on_delete=models.CASCADE,
        verbose_name='주문번호',
        blank=True,
        null=True,
    )
    options = models.ManyToManyField(
        SideDishes,
        related_name='option',
        related_query_name='options',
    )

    class Meta:
        verbose_name = '아이템'
        verbose_name_plural = f'{verbose_name} 목록'

    @property
    def total_price(self):
        """(food price + sum of chosen option prices) × quantity."""
        unit_price = self.food.price + sum(o.price for o in self.options.all())
        return self.quantity * unit_price
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,042 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/members/models.py | from django.contrib.auth.models import AbstractUser
from django.db import models
class User(AbstractUser):
    """Project user model extending Django's AbstractUser."""
    # Optional profile image, contact number and display nickname.
    img_profile = models.ImageField(upload_to='user', blank=True)
    phone = models.CharField(max_length=13, blank=True)
    # presumably marks store-owner accounts — confirm against usage
    is_host = models.BooleanField(default=False)
    nickname = models.CharField(max_length=50, blank=True)
    def __str__(self):
        return self.username
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,043 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/orders/apis.py | from django.contrib.auth import get_user_model
from rest_framework.generics import get_object_or_404
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import permissions, status, generics, filters
from store.models.food import Food, SideDishes
from .models.order import Order
from .models.cart import Cart, CartItem
from .serializers import CartItemSerializer, OrderSerializer, CartSerializer
User = get_user_model()
class CartItemList(APIView):
    """List the authenticated user's cart, or add an item to it."""
    permission_classes = (
        permissions.IsAuthenticatedOrReadOnly,
    )

    def get(self, request):
        """Return the user's cart contents.

        Uses get_or_create (as ``post`` already does) so a user whose cart was
        never initialised gets an empty cart instead of a 500 from
        Cart.DoesNotExist.
        """
        cart = Cart.objects.get_or_create(user=request.user)[0]
        serializer = CartSerializer(cart)
        return Response(serializer.data, status=status.HTTP_200_OK)

    def post(self, request):
        """Add a food item (optionally with side dishes) to the user's cart."""
        cart = Cart.objects.get_or_create(user=request.user)[0]
        # 404 instead of an unhandled DoesNotExist (500) for an unknown food pk.
        food = get_object_or_404(Food, pk=request.data.pop('food_pk'))
        context = {'food': food}
        # When the food offers side dishes, resolve the selected ones as well;
        # the 'side_dishes' context key is only present in that case, matching
        # what CartItemSerializer expects.
        if food.has_side_dishes:
            side_index_list = request.data.pop('side_dishes_pk')
            context['side_dishes'] = [
                get_object_or_404(SideDishes, pk=index)
                for index in side_index_list
            ]
        serializer = CartItemSerializer(
            data={
                **request.data,
                'cart': cart,
            },
            context=context,
        )
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class CartItemDetail(APIView):
    """Update or remove a single cart item."""

    def patch(self, request, pk):
        """Partially update a cart item; returns 404 (not 500) for unknown pk."""
        item = get_object_or_404(CartItem, pk=pk)
        serializer = CartItemSerializer(item, data=request.data, partial=True)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request, pk):
        """Remove a cart item; returns 404 (not 500) for unknown pk."""
        item = get_object_or_404(CartItem, pk=pk)
        item.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
class OrderList(generics.ListCreateAPIView):
    """List all orders (newest first) and create new ones."""
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
    serializer_class = OrderSerializer

    def get_queryset(self):
        # Newest orders first, with the serializer's eager loading applied to
        # avoid per-row related queries.
        base_qs = Order.objects.all().order_by('-created_at')
        return self.get_serializer_class().setup_eager_loading(base_qs)

    def get_serializer_context(self):
        # The serializer needs the request object in its context.
        return {'request': self.request}
class OrderDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve, update or delete a single order by pk."""
    queryset = Order.objects.all()
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
    serializer_class = OrderSerializer
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,044 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/store/admin.py | from django.contrib import admin
# Register your models here.
from .models.food import Food, FoodCategory, FoodImage, SideDishes
from .models.store import Store, StoreCategory, StoreImage
# Register every store-related model with the default admin site, in the same
# order as before.
for _model in (Store, StoreCategory, StoreImage,
               Food, FoodCategory, FoodImage, SideDishes):
    admin.site.register(_model)
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,045 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/store/urls.py | from django.urls import path, include
from .apis import StoreList, StoreDetail
urlpatterns = [
    # Store listing across all categories.
    path('', StoreList.as_view(),),
    # Store listing filtered to one category.
    path('<int:category_pk>/', StoreList.as_view(),),
    # Detail view of a single store within a category.
    path('<int:category_pk>/store/<int:store_pk>/', StoreDetail.as_view(),),
]
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,046 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/address/migrations/0001_initial.py | # Generated by Django 2.1.4 on 2018-12-20 08:58
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration for the address app; do not edit by hand.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Address',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('old_address', models.CharField(max_length=100, verbose_name='지번주소')),
                ('address', models.CharField(max_length=100, verbose_name='도로명 주소')),
                ('detail_address', models.CharField(blank=True, max_length=100, verbose_name='상세주소')),
                ('lat', models.DecimalField(decimal_places=14, max_digits=17, verbose_name='위도')),
                ('lng', models.DecimalField(decimal_places=14, max_digits=17, verbose_name='경도')),
                ('created_at', models.DateTimeField(auto_now=True, verbose_name='등록일')),
            ],
            options={
                'verbose_name': '주소',
                'verbose_name_plural': '주소 목록',
            },
        ),
    ]
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,047 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/members/serializers.py |
from django.contrib.auth import authenticate, get_user_model
from rest_framework import serializers
from rest_framework.authtoken.models import Token
from rest_framework.exceptions import AuthenticationFailed
from address.models import Address
from address.serializers import AddressInfoSerializer
from orders.models.cart import Cart
User = get_user_model()
class UserSerializer(serializers.ModelSerializer):
    """Public representation of a user, including their saved addresses."""
    address = serializers.SerializerMethodField()

    class Meta:
        model = User
        fields = (
            'pk',
            'username',
            'nickname',
            'first_name',
            'last_name',
            'img_profile',
            'phone',
            'address',
        )

    def get_address(self, obj):
        """Return the user's addresses serialized, or None if they have none.

        Evaluates a single queryset once, instead of the previous
        exists() + filter() pair, which hit the database twice.
        """
        addresses = Address.objects.filter(user=obj)
        if not addresses:
            return None
        return AddressInfoSerializer(addresses, many=True).data
class UserRegisterSerializer(UserSerializer):
    """Registration serializer: the UserSerializer fields plus a write-only password."""

    class Meta(UserSerializer.Meta):
        fields = UserSerializer.Meta.fields + (
            'password',
        )
        # SECURITY: without write_only, the (hashed) password field is echoed
        # back in API responses.
        extra_kwargs = {
            'password': {'write_only': True},
        }

    def create(self, validate_data):
        """Create the user (create_user hashes the password) plus an empty cart."""
        user = User.objects.create_user(
            **validate_data,
        )
        # Every new account gets a cart immediately so cart endpoints work.
        Cart.objects.create(user=user)
        return user
class AuthTokenSerializer(serializers.Serializer):
    """Validate username/password credentials and render a DRF auth token."""
    username = serializers.CharField(max_length=50)
    password = serializers.CharField(max_length=50)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Populated by validate() once the credentials check out.
        self.user = None

    def validate(self, data):
        authenticated = authenticate(
            username=data['username'],
            password=data['password'],
        )
        if authenticated is None:
            raise AuthenticationFailed('아이디 또는 비밀번호가 일치하지 않습니다.')
        self.user = authenticated
        return data

    def to_representation(self, instance):
        token, _ = Token.objects.get_or_create(user=self.user)
        return {
            'user': UserSerializer(self.user).data,
            'token': token.key,
        }
class FacebookSerializer(serializers.Serializer):
    """Log in (or lazily register) a user identified by their Facebook id."""
    facebook_id = serializers.CharField(max_length=50)
    name = serializers.CharField(max_length=50)
    email = serializers.EmailField(max_length=50)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.user = None

    def create(self, validate_data):
        """Fetch the matching local user, creating one on first login."""
        username = validate_data['facebook_id']
        # SECURITY: every Facebook-created account shares this fixed password,
        # so each is also reachable via normal login with 'foodfly'. Prefer
        # set_unusable_password(); kept as-is to avoid a behavior change.
        password = 'foodfly'
        name = validate_data['name']
        email = validate_data['email']
        try:
            # EAFP: one query instead of exists() followed by get(), and no
            # window for a race between the two lookups.
            user = User.objects.get(username=username)
        except User.DoesNotExist:
            user = User.objects.create_user(
                username=username,
                password=password,
                first_name=name,
                email=email,
            )
        self.user = user
        return self.user

    def to_representation(self, instance):
        token = Token.objects.get_or_create(user=self.user)[0]
        data = {
            'user': UserSerializer(self.user).data,
            'token': token.key,
        }
        return data
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,048 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/address/models.py | from django.contrib.auth import get_user_model
from django.db import models
from store.models.store import Store
User = get_user_model()
class Address(models.Model):
    """A delivery address owned either by a user or by a store (both optional)."""
    old_address = models.CharField(verbose_name='지번주소', max_length=100)
    address = models.CharField(verbose_name='도로명 주소', max_length=100)
    detail_address = models.CharField(verbose_name='상세주소', max_length=100, blank=True)
    # Latitude / longitude coordinates.
    # NOTE(review): the original comment claimed 4 integer digits and 6 decimal
    # places, but the fields allow 17 digits with 14 decimal places — confirm
    # the intended precision.
    lat = models.DecimalField(verbose_name='위도', max_digits=17, decimal_places=14)
    lng = models.DecimalField(verbose_name='경도', max_digits=17, decimal_places=14)
    # Owner when this is a user's address; SET_NULL keeps the row if the user is deleted.
    user = models.ForeignKey(
        User,
        on_delete=models.SET_NULL,
        verbose_name='사용자',
        related_name='is_host_address_set',
        related_query_name='is_host_address',
        blank=True,
        null=True,
    )
    # Owner when this is a store's address; same SET_NULL semantics.
    store = models.ForeignKey(
        Store,
        on_delete=models.SET_NULL,
        verbose_name='상점',
        related_name='is_store_address_set',
        related_query_name='is_store_address',
        blank=True,
        null=True,
    )
    # auto_now (not auto_now_add): refreshed on every save, despite the "registered" label.
    created_at = models.DateTimeField(verbose_name='등록일', auto_now=True)
    class Meta:
        verbose_name = '주소'
        verbose_name_plural = f'{verbose_name} 목록'
    def __str__(self):
        return '{present} {detail}'.format(
            present=self.address,
            detail=self.detail_address,
        )
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,049 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/orders/urls/order_urls.py | from django.urls import path
from ..apis import OrderList, OrderDetail
urlpatterns = [
    # Order list / creation endpoint.
    path('', OrderList.as_view()),
    # Retrieve / update / delete a single order.
    path('<int:pk>/', OrderDetail.as_view()),
]
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,050 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/orders/admin.py | from django.contrib import admin
# Register your models here.
from .models.order import Order
from .models.cart import Cart, CartItem
# Register the cart and order models with the default admin site (same order
# of registration as before).
for _model in (Cart, CartItem, Order):
    admin.site.register(_model)
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,051 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/review/urls.py | from django.urls import path
from review.apis import ReviewList, ReviewImageCreate
urlpatterns = [
    # Review listing / creation.
    path('', ReviewList.as_view()),
    # Upload an image attached to a review.
    path('image/', ReviewImageCreate.as_view())
]
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,052 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/address/apis.py | from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import permissions, status
from .models import Address
from .serializers import AddressSerializer, AddressInfoSerializer
class UserAddressAPIView(APIView):
    """List, add and delete the authenticated user's saved addresses."""
    permission_classes = (
        permissions.IsAuthenticatedOrReadOnly,
    )

    def get(self, request):
        """Return all addresses belonging to the requesting user."""
        user = request.user
        serializer = AddressInfoSerializer(Address.objects.filter(user=user), many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)

    def post(self, request):
        """Create a new address for the requesting user."""
        serializer = AddressSerializer(
            data={
                **request.data,
            },
            context={
                'request': request,
            }
        )
        if serializer.is_valid():
            serializer.save()
            return Response(status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request):
        """Delete one of the user's addresses, identified by 'address_pk' in the body."""
        address_pk = request.data.get('address_pk')
        # Single query instead of exists() followed by get(), which hit the
        # database twice and could race between the two lookups.
        address = request.user.is_host_address_set.filter(pk=address_pk).first()
        if address is None:
            return Response(status=status.HTTP_400_BAD_REQUEST)
        address.delete()
        # RFC 7231: a 204 response must not carry a body, so the previously
        # attached address list (which conforming clients drop anyway) is gone.
        return Response(status=status.HTTP_204_NO_CONTENT)
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,053 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/review/migrations/0001_initial.py | # Generated by Django 2.1.4 on 2018-12-20 08:58
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration for the review app; do not edit by hand.
    initial = True
    dependencies = [
        ('store', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content', models.TextField(blank=True, verbose_name='사장님댓글')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='등록일')),
                ('modified_at', models.DateTimeField(auto_now=True, verbose_name='수정일')),
            ],
            options={
                'verbose_name': '사장님댓글',
                'verbose_name_plural': '사장님댓글 목록',
            },
        ),
        migrations.CreateModel(
            name='Review',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content', models.TextField(blank=True, verbose_name='리뷰내용')),
                ('rating', models.PositiveIntegerField(default=0, validators=[django.core.validators.MaxValueValidator(5)], verbose_name='별점')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='등록일')),
                ('modified_at', models.DateTimeField(auto_now=True, verbose_name='수정일')),
                ('store', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='store.Store', verbose_name='상점')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='사용자')),
            ],
            options={
                'verbose_name': '리뷰',
                'verbose_name_plural': '리뷰 목록',
            },
        ),
        migrations.CreateModel(
            name='ReviewImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('location', models.ImageField(blank=True, null=True, upload_to='review', verbose_name='리뷰사진')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='등록일')),
                ('review', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='review.Review', verbose_name='리뷰')),
            ],
            options={
                'verbose_name': '리뷰이미지',
                'verbose_name_plural': '리뷰이미지 목록',
            },
        ),
        migrations.AddField(
            model_name='comment',
            name='review',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='review.Review', verbose_name='리뷰'),
        ),
        migrations.AddField(
            model_name='comment',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='사장님'),
        ),
    ]
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,054 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/review/serializers.py | from rest_framework import serializers
from rest_framework.compat import MaxValueValidator
from rest_framework.generics import get_object_or_404
from review.models import Review, ReviewImage, Comment
from store.models.store import Store
class ReviewImageCreateSerializer(serializers.ModelSerializer):
    """Write serializer: attach an uploaded image file to a review."""
    class Meta:
        model = ReviewImage
        fields = (
            'review',
            'location',
        )
class ReviewImageSerializer(serializers.ModelSerializer):
    """Read serializer: exposes only the image file/location of a review image."""
    class Meta:
        model = ReviewImage
        fields = (
            'location',
        )
class ReviewSerializer(serializers.ModelSerializer):
    """Full review representation with store name and nested image list."""
    # Render the store as its name instead of a pk.
    store = serializers.SlugRelatedField(
        read_only=True,
        slug_field='name',
    )
    # Read-only nested list of images attached to this review.
    reviewimage_set = ReviewImageSerializer(many=True, read_only=True)
    class Meta:
        model = Review
        fields = (
            'pk',
            'content',
            'rating',
            'user',
            'store',
            'reviewimage_set',
        )
        read_only_fields = (
            'user',
        )
    def create(self, validate_data):
        # User and store come from the serializer context, not the payload.
        user = self.context['request'].user
        store = self.context['store']
        review = Review.objects.create(
            **validate_data,
            user=user,
            store=store,
        )
        return review
class CommentSerializer(serializers.ModelSerializer):
    """Serializer for a store owner's reply comment on a review."""
    class Meta:
        model = Comment
        fields = (
            'pk',
            'content',
            'user',
            'review',
        )
class ReviewCreateSerializer(serializers.Serializer):
    """Create a review for a store (given by pk) and echo it back fully serialized."""
    content = serializers.CharField(max_length=100)
    # NOTE(review): MaxValueValidator is imported from rest_framework.compat at
    # the top of this module; modern DRF no longer re-exports it, so it should
    # come from django.core.validators instead.
    rating = serializers.IntegerField(default=0, validators=[MaxValueValidator(5)])
    store = serializers.IntegerField()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Kept for backward compatibility; to_representation now relies on the
        # instance DRF passes in rather than this mutable attribute.
        self.review = None

    def create(self, validate_data):
        """Persist the review for the given store and the requesting user."""
        store = Store.objects.get(pk=validate_data['store'])
        review = Review.objects.create(
            content=validate_data['content'],
            rating=validate_data['rating'],
            store=store,
            user=self.context['request'].user
        )
        self.review = review
        return review

    def to_representation(self, instance):
        # Use the instance argument (the object create() returned) instead of
        # self.review, so representation also works when the serializer is
        # constructed directly around an existing Review.
        return ReviewSerializer(instance).data
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,055 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/orders/migrations/0001_initial.py | # Generated by Django 2.1.4 on 2018-12-20 08:58
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration for the orders app; do not edit by hand.
    initial = True
    dependencies = [
        ('members', '0001_initial'),
        ('store', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Cart',
            fields=[
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='CartItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('quantity', models.PositiveIntegerField(default=0, verbose_name='수량')),
                ('is_ordered', models.BooleanField(default=False, verbose_name='주문상태')),
                ('cart', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='item', related_query_name='items', to='orders.Cart', verbose_name='장바구니')),
                ('food', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='in_cart', related_query_name='in_carts', to='store.Food', verbose_name='음식')),
                ('options', models.ManyToManyField(related_name='option', related_query_name='options', to='store.SideDishes')),
            ],
            options={
                'verbose_name': '아이템',
                'verbose_name_plural': '아이템 목록',
            },
        ),
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('shipping', models.CharField(max_length=50, verbose_name='배송지')),
                ('created_at', models.DateTimeField(auto_now=True, verbose_name='주문일자')),
                ('payment_status', models.BooleanField(default=False, verbose_name='결제상태')),
                ('payment_option', models.CharField(blank=True, max_length=50, verbose_name='결제수단')),
                ('comment', models.TextField(blank=True, verbose_name='요청사항')),
                ('phone', models.CharField(blank=True, max_length=13, verbose_name='전화번호')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': '주문',
                'verbose_name_plural': '주문 목록',
            },
        ),
        migrations.AddField(
            model_name='cartitem',
            name='order',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='orders.Order', verbose_name='주문번호'),
        ),
    ]
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,056 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/members/backends.py | from django.contrib.auth import get_user_model
from django.core.files.uploadedfile import SimpleUploadedFile
import imghdr
import requests
User = get_user_model()
API_ME = 'https://graph.facebook.com/v3.2'
class FacebookBackend:
    """Fetches (and lazily creates) a local User from a Facebook access token."""
    def get_user_by_access_token(self, access_token):
        """Return the User matching the token's Facebook profile.

        Queries the Graph API for the profile fields and picture, downloads
        the picture, then gets-or-creates the matching local account.
        """
        params = {
            'access_token': access_token,
            'fields': ','.join([
                'id',
                'first_name',
                'last_name',
                'picture.type(large)',
            ])
        }
        # NOTE(review): API_ME is the bare graph version root
        # ('https://graph.facebook.com/v3.2'); the '/me' endpoint is normally
        # required for these profile fields — confirm this URL actually works.
        # No raise_for_status/timeout: a failed request surfaces later as a
        # KeyError on the parsed JSON.
        response = requests.get(API_ME, params)
        data = response.json()
        facebook_id = data['id']
        first_name = data['first_name']
        last_name = data['last_name']
        url_img_profile = data['picture']['data']['url']
        img_response = requests.get(url_img_profile)
        img_data = img_response.content
        # imghdr sniffs the image type from the raw bytes to pick a file
        # extension (imghdr is deprecated in Python 3.11+).
        ext = imghdr.what('', h=img_data)
        f = SimpleUploadedFile(f'{facebook_id}.{ext}', img_response.content)
        try:
            # Existing account: refresh the name fields only (the stored
            # profile image is not updated).
            user = User.objects.get(username=facebook_id)
            user.last_name = last_name
            user.first_name = first_name
            user.save()
        except User.DoesNotExist:
            user = User.objects.create_user(
                username=facebook_id,
                first_name=first_name,
                last_name=last_name,
                img_profile=f,
            )
        return user
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,057 | hanyonghee9264/TeamProject_FoodFly | refs/heads/master | /app/address/migrations/0002_auto_20181220_1759.py | # Generated by Django 2.1.4 on 2018-12-20 08:59
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add nullable FKs linking an Address row to its owning Store and User.

    Both columns use on_delete=SET_NULL, so deleting the referenced store or
    user keeps the address row but clears the link.
    """

    dependencies = [
        # The user FK targets whatever AUTH_USER_MODEL resolves to.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('store', '0001_initial'),
        ('address', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='address',
            name='store',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='is_store_address_set', related_query_name='is_store_address', to='store.Store', verbose_name='상점'),
        ),
        migrations.AddField(
            model_name='address',
            name='user',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='is_host_address_set', related_query_name='is_host_address', to=settings.AUTH_USER_MODEL, verbose_name='사용자'),
        ),
    ]
| {"/app/orders/serializers.py": ["/app/orders/models/cart.py", "/app/orders/models/order.py"], "/app/store/serializers.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/address/urls.py": ["/app/address/apis.py"], "/app/members/apis.py": ["/app/members/serializers.py"], "/app/store/models/food.py": ["/app/store/models/store.py"], "/app/orders/urls/cart_urls.py": ["/app/orders/apis.py"], "/app/store/apis.py": ["/app/store/models/store.py", "/app/store/serializers.py"], "/app/review/admin.py": ["/app/review/models.py"], "/app/orders/models/cart.py": ["/app/orders/models/order.py"], "/app/orders/apis.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py", "/app/orders/serializers.py"], "/app/store/admin.py": ["/app/store/models/food.py", "/app/store/models/store.py"], "/app/store/urls.py": ["/app/store/apis.py"], "/app/orders/urls/order_urls.py": ["/app/orders/apis.py"], "/app/orders/admin.py": ["/app/orders/models/order.py", "/app/orders/models/cart.py"], "/app/address/apis.py": ["/app/address/models.py", "/app/address/serializers.py"]} |
69,071 | JakesCode/The-Saving-of-Chora | refs/heads/master | /itemLib.py | # Item Library for Python Adventure Game #
# Copyright Jake Stringer 2015 #
from termcolor import *
import colorama
import sys
import os
colorama.init()
def useItems(playerItems, itemDesc, specialItems, health, strength, mana):
    """Show the inventory menu, let the player use one item, apply its perk.

    Perk codes: "heal" (+20 hp), "attack" (+2 strength, -5 hp),
    "mana" (+10 mana), "heal+" (+50 hp), "heal++" (+100 hp, +6 strength).
    Consumable items are removed from ``playerItems``/``specialItems``.
    Returns the updated ``(health, timer, strength, mana)``.

    Fixes over the original:
    * ``timer`` was only assigned on some paths, so the return raised
      NameError when the player picked an item without a perk — it is now
      initialised up front;
    * the "attack" feedback printed "increased by 24" for a +2 bonus;
    * perk lookup used substring matching against the key list, so an item
      whose name is contained in a perk item's name matched incorrectly —
      replaced with exact dict membership.
    """
    timer = 0
    # Menu text shown for each perk code.
    perk_text = {
        "heal": ["Heals the player by 20 points."],
        "attack": ["Raises the strength of the player by 2, but deals 5 damage to the player."],
        "mana": ["Gives the player 10 mana."],
        "heal+": ["Heals the player by 50 points."],
        "heal++": ["Heals the player by 100 points.", "Increases strength by 6."],
    }
    for index, item in enumerate(playerItems):
        cprint(str(index) + ": " + item, "white", "on_magenta")
        cprint(itemDesc[item], "white", "on_magenta")
        if item in specialItems:
            cprint("This item has a perk!", "white", "on_blue")
            for line in perk_text.get(specialItems[item], []):
                cprint(line, "grey", "on_cyan")
        print("")
    if not playerItems:
        cprint("No items to show!", "blue", "on_white")
    else:
        cprint("Please enter the number of the item you wish to use: ", "magenta", "on_white")
        itemChoice = int(input("?: "))
        if itemChoice < len(playerItems):
            chosen = playerItems[itemChoice]
            perk = specialItems.get(chosen)
            if perk == "heal":
                health += 20
                cprint("Gained 20 health.", "magenta", "on_white")
                del specialItems[chosen]
                del playerItems[itemChoice]
            elif perk == "attack":
                strength += 2
                cprint("Strength increased by 2.", "magenta", "on_white")
                health -= 5
                cprint("You took 5 damage!", "magenta", "on_white")
                # NOTE(review): the attack item was not consumed in the
                # original either — presumably a reusable weapon; confirm.
            elif perk == "mana":
                mana += 10
                cprint("Gained 10 mana.", "magenta", "on_white")
                del specialItems[chosen]
                del playerItems[itemChoice]
            elif perk == "heal+":
                health += 50
                cprint("Gained 50 health.", "magenta", "on_white")
                del specialItems[chosen]
                del playerItems[itemChoice]
            elif perk == "heal++":
                health += 100
                cprint("Gained 100 health.", "magenta", "on_white")
                strength += 6
                cprint("Strength increased by 6.", "magenta", "on_white")
                del specialItems[chosen]
                del playerItems[itemChoice]
            else:
                # No perk: the item cannot be consumed.
                cprint("You can't use that!", "magenta", "on_white")
    return health, timer, strength, mana
def viewItems(playerItems, itemDesc):
    """Display the inventory: each item name followed by its description."""
    # Inventory #
    for owned in playerItems:
        cprint(owned, "white", "on_magenta")
        cprint(itemDesc[owned], "white", "on_magenta")
        print("")
    if len(playerItems) == 0:
        cprint("No items to show!", "blue", "on_white")
def itemInit():
    """Return fresh, empty inventory containers.

    Returns ``(itemDesc, playerItems, specialItems)`` — a description dict,
    an owned-item list, and a perk dict.
    """
    descriptions = {}
    owned = []
    perks = {}
    return descriptions, owned, perks
def addItem(itemName, itemDescription, playerItems, itemDesc, specialItems, perk):
    """Add an item to the inventory, record its description and optional perk.

    ``perk`` is a perk code string or None; only non-None perks are stored.
    Announces the pickup and waits for a keypress, then returns the three
    mutated containers.
    """
    playerItems.append(itemName)
    itemDesc[itemName] = itemDescription
    if perk is not None:
        specialItems[itemName] = perk
    cprint("Gained the " + itemName.upper() + ".", "white", "on_blue")
    input("")
    return playerItems, itemDesc, specialItems
def removeItem(itemName, playerItems, itemDesc):
    """Drop an item from the inventory and forget its description."""
    itemDesc.pop(itemName)
    playerItems.remove(itemName)
    # cprint((itemName.upper() + " has been removed from your inventory."), "white", "on_blue")
    # input("")
    return playerItems, itemDesc
| {"/questLib.py": ["/dialogueLib.py", "/itemLib.py"], "/game.py": ["/spellLib.py", "/itemLib.py", "/dialogueLib.py", "/saveLib.py", "/questLib.py"]} |
69,072 | JakesCode/The-Saving-of-Chora | refs/heads/master | /spellLib.py | # Spell Library for Python Adventure Game #
# Copyright Jake Stringer 2015 #
from termcolor import *
import colorama
import sys
import os
colorama.init()
def spells():
    """Populate the module-level spell tables and return them.

    The globals are (re)bound so sibling functions (addSpell, useSpell) can
    read them without arguments.  The returned 6-tuple repeats ``spellDict``
    and ``specialSpells`` to preserve the historical unpacking shape.
    """
    global spellDict
    global damage
    global specialSpellsKeys
    global specialSpells
    spellDict = {
        "Blink": "Casts a sharp, dark blanket of shadow over the enemy.",
        "Swipe": "Attack the enemy with a ferocious swipe.",
        "Flux": "Paralyzes the enemy for two turns.",
        "Barrage": "The user runs at full speed into the enemy, inflicting more damage than a basic attack.",
        "Cloak": "Cloaks the user in a veil of shadow, making them impossible to hit.",
    }
    damage = {"Blink": 2, "Swipe": 4, "Flux": 3, "Barrage": 5, "Cloak": 2}
    specialSpells = {"Flux": "paralyze", "Cloak": "invisible"}
    specialSpellsKeys = specialSpells.keys()
    return spellDict, damage, specialSpells, specialSpellsKeys, spellDict, specialSpells
def addSpell(toBeAdded, playerSpells):
    """Grant a new spell and announce it with its description from spellDict."""
    playerSpells.append(toBeAdded)
    cprint("You just gained the spell " + toBeAdded.upper(), "white", "on_red")
    cprint("(" + spellDict[toBeAdded] + ")", "white", "on_red")
    input("")
def useSpell(playerSpells, mana):
    """Battle menu for casting one of the player's known spells.

    Prints each spell with its damage and mana cost, reads a choice, and
    returns ``(playerDamageToEnemy, sendEffect, mana)`` where ``sendEffect``
    is "paralyze", "invisible" or "nothing".  Reads the module globals
    ``spellDict``, ``damage``, ``specialSpells`` and ``specialSpellsKeys``.

    Fixes over the original:
    * empty input returned an unbound ``sendEffect`` (NameError) — the
      default-Blink path now returns a complete tuple;
    * the mana charged was whatever ``manaCost`` was left at by the display
      loop (i.e. the cost of the *last listed* spell) — it is now the cost
      of the chosen spell;
    * having exactly enough mana fell through to the special-spell branch
      and crashed with KeyError for normal spells — equality now casts;
    * invalid input recursed but discarded the recursive result, raising
      NameError on return — the recursive call's result is now returned.
    """
    cprint(("----" * 8) + "USE A SPELL" + ("----" * 8), "magenta", "on_white")
    print("")
    for index, spell in enumerate(playerSpells):
        cprint(str(index) + ": " + spell, "white", "on_blue")
        cprint(" (" + spellDict[spell] + ")", "white", "on_blue")
        cprint(" Deals " + str(damage[spell]) + " damage", "white", "on_blue")
        # Mana cost mirrors the damage value (the original's halving was a
        # discarded no-op expression, so cost == damage in practice).
        cprint(" Costs " + str(damage[spell]) + " mana", "white", "on_blue")
        print("")
    cprint("Please enter the number of the spell you wish to cast: ", "magenta", "on_white")
    spellChoice = input("?: ")
    if not spellChoice:
        # Default cast: flat 4 damage, no status effect, no mana charged.
        print("Nothing was entered! Using default spell, Blink, instead.")
        return 4, "nothing", mana
    if int(spellChoice) < len(playerSpells):
        chosen = playerSpells[int(spellChoice)]
        cost = damage[chosen]
        if mana < cost:
            cprint("Not enough mana!", "white", "on_magenta")
            return 0, "nothing", mana
        if chosen in specialSpellsKeys:
            # Special spells apply a status effect; as in the original, no
            # mana is actually deducted for them.
            sendEffect = specialSpells[chosen]
            cprint("You cast a " + chosen + " spell!", "white", "on_magenta")
            cprint("Dealt " + str(damage[chosen]) + " damage!", "white", "on_magenta")
            return damage[chosen], sendEffect, mana
        cprint("You cast a " + chosen + " spell!", "white", "on_magenta")
        cprint("Dealt " + str(damage[chosen]) + " damage!", "white", "on_magenta")
        cprint("Used " + str(cost) + " mana.", "white", "on_magenta")
        mana -= cost
        return damage[chosen], "nothing", mana
    print("Please enter something valid.")
    input("")
    os.system("cls")
    return useSpell(playerSpells, mana)
69,073 | JakesCode/The-Saving-of-Chora | refs/heads/master | /questLib.py | # Quest Library for Python Adventure Game #
# Copyright Jake Stringer 2015 #
from termcolor import *
import colorama
import sys
import os
import dialogueLib
import itemLib
colorama.init()
def questInit():
    """Return empty quest-tracking containers.

    Returns ``(ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards,
    ongoingQuestsRequirements)``.
    """
    active = []
    blurbs = {}
    prizes = {}
    needs = {}
    return active, blurbs, prizes, needs
# def checkQuestCompletion(playerItems, ongoingQuestsRequirements, ongoingQuests, position, locations, ongoingQuestsRewards):
# reward = False
# print(ongoingQuests)
# input("")
# for z in range(0,len(ongoingQuests)):
# for w in range(0,len(ongoingQuestsRequirements[ongoingQuests[z]])):
# if ongoingQuestsRequirements[ongoingQuests[z]][w][:5] == "Item:":
# for v in range(0,len(playerItems)):
# if any(playerItems[v] in d for d in (ongoingQuestsRequirements[ongoingQuests[z]][w])[5:]):
# print("Item Quest Completed!")
# reward = True
# toRemove = ongoingQuestsRequirements[ongoingQuests[z]]
# toRemove2 = ongoingQuests.remove(ongoingQuests[z])
# elif ongoingQuestsRequirements[ongoingQuests[z]][w][:9] == "Location:":
# if locations[position] == (ongoingQuestsRequirements[ongoingQuests[z]][w])[9:]:
# print("Location Quest Completed!")
# toRemove = ongoingQuestsRequirements[ongoingQuests[z]]
# toRemove2 = ongoingQuests.remove(ongoingQuests[z])
# reward = True
# if reward:
# keysList = list(ongoingQuestsRequirements.keys())
# for x in range(0,len(keysList)):
# print(keysList[x])
# input("")
# # del ongoingQuestsRequirements[ongoingQuestsRequirements]
# # ongoingQuests.remove(toRemove2)
def events(eventID, playerItems, itemDesc, seenDialogues, specialItems, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements, health):
    """Play the scripted story event named *eventID*.

    Each branch runs dialogue and grants/removes items or quests.  The list
    and dict arguments are mutated in place and also returned; ``health`` is
    a plain int, so the +50 rest bonus in the third branch only reaches
    callers that use the returned value.
    """
    good, bad, alternate = dialogueLib.initPresets()
    # First arrival: introductions, Silver Sword + potions, boss quest.
    if eventID == "Reach Emelle Village":
        dialogueLib.say("Alcea", "Oh my! Did you just come from the Jaded Forest?", good)
        dialogueLib.say("Alcea", "Many have rested here recently due to wounds; it seems that \nwe're losing the forest to the beasts.", good)
        dialogueLib.say("Alcea", "It's a terrible shame; if we lose the forest to beasts, then \nwe'll lose access to Home Town....", good)
        dialogueLib.say("Alcea", "If you want to stay here and rest for a while, you're more than welcome to.", good)
        dialogueLib.say("Alcea", "My father, Zaor, will take you in. He'll even give you a potion\nfor half the price of the shops. He was once wounded from fighting, and the\nprices of potions made everything worse - selling them for half price is his\nway of giving something back.", good)
        dialogueLib.say("Zaor", "Ah! You must be from Home Town. The forest is getting worse;\njust yesterday I saw hideous beasts clambering in the treetops - why can't we\n just all group together and kill the damn things?", alternate)
        dialogueLib.say("Zaor", "But still - it seems as though there's hope. If you got through\nthe forest alive, no doubt you're a good fighter....", alternate)
        dialogueLib.say("Zaor", "You know what? My fighting days are over. Take this:", alternate)
        playerItems, itemDesc, specialItems = itemLib.addItem("Silver Sword", "A beautifully crafted blade. The name 'Zaor' is etched into the side.", playerItems, itemDesc, specialItems, "attack")
        dialogueLib.say("Zaor", "I'm sorry about the name in the side, but it'll do you good.\nI've had that sword for 38 years now, and it's never failed my once. Good luck!", alternate)
        dialogueLib.say("Alcea", "Wait, are you going to fight more beasts?", good)
        dialogueLib.say("Alcea", "You know what? I'll assign you a quest!", good)
        dialogueLib.say("Alcea", "It seems that all the monsters are following the actions of a\nleader - and we think we've seen it.", good)
        dialogueLib.say("Zaor", "Alcea - you can't possibly be asking this gentleman to fight\nthe Nightbody?", alternate)
        dialogueLib.say("Alcea", "I am, father - this man seems strong enough to take the beast down!", good)
        dialogueLib.say("Zaor", "(aside, to you) The Nightbody is too powerful.\nYou can't possibly be thinking of fighting it!", alternate)
        dialogueLib.say("Alcea", "Father - he is strong enough. And he shall be even stronger\nafter he uses one of these: ", good)
        playerItems, itemDesc, specialItems = itemLib.addItem("Powerful Potion", "A bottle of pulsing red liquid. It will heal you by 50 points.", playerItems, itemDesc, specialItems, "heal+")
        dialogueLib.say("Alcea", "I made this the other day. It replenishes your health\nwhilst you're in a battle. If you go and kill the Nightbody, grab some of its blood in this: ", good)
        playerItems, itemDesc, specialItems = itemLib.addItem("Vial", "A small tube made of shiny glass.", playerItems, itemDesc, specialItems, None)
        dialogueLib.say("Alcea", "With the blood of the Nightbody, I can make a potion twice\nas powerful as this health potion, with only a drop. You can do it!", good)
        ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements = addQuest("Kill the Nightbody", "A fearsome beast has taken residence in the Jaded Forest, preventing the locals from reaching the other town.", ["Item:Nightbody Blood"], ["EXP:200"], "event1", ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements)
    # Boss defeated: swap the empty Vial for a Filled Vial, chain the return quest.
    elif eventID == "Kill the Nightbody":
        cprint(("The beast stares at you with glowing red eyes. As it lifts its claws\nagain, you see that it is getting weaker. As it raises\nits head to try and attack you, a look of pain falls\nacross its face. Falling to the ground with\na huge crash, it is clear that the Nightbody has been slain."), "white", "on_red")
        input("")
        playerItems, itemDesc = itemLib.removeItem("Vial", playerItems, itemDesc)
        cprint(("Remembering the vial that Alcea gave you, you kneel down to the slain creature.\nLetting some of its blackened blood roll out of the wounds it has\nsustained, you wait until it has filled completely - before turning\nyour back to the beast and walking away."), "white", "on_red")
        playerItems, itemDesc, specialItems = itemLib.addItem("Filled Vial", "A vial completely filled with the blood of the Nightbody.", playerItems, itemDesc, specialItems, None)
        input("")
        cprint(("Now that you have the vial, you should return to Emelle Village."), "white", "on_red")
        ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements = addQuest("Return to Emelle Village", "Now that the beast has been slain, it's time to return to Emelle Village.", ["Location: Emelle Village"], ["EXP:50"], "event1", ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements)
        input("")
    # Homecoming: hand in the Filled Vial, rest (+50 hp), receive rewards.
    elif eventID == "Return to Emelle Village":
        dialogueLib.say("Alcea", "(surprised) I thought I'd sent you to your death! Oh, father, father!", good)
        dialogueLib.say("Zaor", "(running over) We all thought you'd be killed!", alternate)
        dialogueLib.say("Zaor", "But it's amazing to see you back - without a scratch!", alternate)
        # Extra line depending on whether the Powerful Potion was spent.
        if not("Powerful Potion" in playerItems):
            dialogueLib.say("Alcea", "And.... you didn't use the Powerful Potion?! Amazing!", good)
        else:
            dialogueLib.say("Alcea", "I hope the Powerful Potion worked its magic.... I was worried I'd\nmissed something out in the brewing....", good)
        dialogueLib.say("Alcea", "(handing her the filled vial) What? You actually obtained some of the blood?\nIs there no end to your achievements?!", good)
        playerItems, itemDesc = itemLib.removeItem("Filled Vial", playerItems, itemDesc)
        dialogueLib.say("Alcea", "This is amazing! With this, I can make something so powerful, we can take the forest back!", good)
        dialogueLib.say("Zaor", "She will be up all night now! If she begins brewing something, she\nwon't give up until it's perfect for her!", alternate)
        dialogueLib.say("Zaor", "Do stay the night. I'm sure it will heal any wounds you may have.", alternate)
        # Rest bonus; only visible to callers that use the returned health.
        health += 50
        dialogueLib.say("In the morning", "", bad)
        dialogueLib.say("Zaor", "I hope you rested well!", good)
        dialogueLib.say("Alcea", "We've asked too much of you. Have a gift, from all of us....", good)
        playerItems, itemDesc, specialItems = itemLib.addItem("Red Sigil", "A piece of fabric with a red symbol on it.", playerItems, itemDesc, specialItems, None)
        dialogueLib.say("Alcea", "That's a Sigil. If you collect all of them, they say something will\nhappen. Each village knows its own sigil, but nobody else's.", good)
        dialogueLib.say("Alcea", "I think you should go for them all!", good)
        dialogueLib.say("Zaor", "Yes! Oh, Alcea - shouldn't you be giving our friend here\nsomething else?", alternate)
        dialogueLib.say("Alcea", "Oh! How could I forget?", good)
        playerItems, itemDesc, specialItems = itemLib.addItem("Nightbody Potion", "A dark liquid with a black haze circling it.", playerItems, itemDesc, specialItems, "heal++")
    return ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements, playerItems, itemDesc, seenDialogues, specialItems, health
def completeQuest(questToBeCompleted, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRequirements, ongoingQuestsRewards, playerItems, itemDesc, seenDialogues, specialItems, exp, health):
    """Fire the completion event for a quest and drop its bookkeeping entries.

    NOTE(review): ``events(...)`` returns updated containers and a new
    ``health`` value, but its return is discarded here — list/dict arguments
    are mutated in place so most changes survive, yet any ``health`` bonus
    granted inside ``events`` is lost.  Confirm whether that is intended.
    NOTE(review): the quest is deleted from the three dicts but its name is
    never removed from the ``ongoingQuests`` list itself.
    """
    for x in range(0, len(ongoingQuests)):
        if ongoingQuests[x] == questToBeCompleted:
            # Events
            event = ongoingQuests[x]
            events(event, playerItems, itemDesc, seenDialogues, specialItems, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements, health)
            # # Rewards
            # input("")
            # reward = ongoingQuestsRewards[ongoingQuests[x]]
            # for x in range(0,len(ongoingQuestsRewards[ongoingQuests[x]])):
            # if ongoingQuestsRewards[ongoingQuests[x]] == None:
            # pass
            # elif ongoingQuestsRewards[ongoingQuests[x]][:4] == "EXP:":
            # exp += int(ongoingQuestsRewards[ongoingQuests[x]][4:])
            # Delete it
            del ongoingQuestsDescription[ongoingQuests[x]]
            del ongoingQuestsRewards[ongoingQuests[x]]
            del ongoingQuestsRequirements[ongoingQuests[x]]
    return ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements, playerItems, itemDesc, seenDialogues, specialItems, exp, health
def addQuest(questName, questDescription, requirements, rewards, event, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements):
    """Register a new quest and announce it to the player.

    ``event`` is accepted for interface compatibility but is not stored.
    Returns the four mutated quest containers.
    """
    ongoingQuests.append(questName)
    ongoingQuestsDescription[questName] = questDescription
    ongoingQuestsRequirements[questName] = requirements
    ongoingQuestsRewards[questName] = rewards
    cprint("Quest Added!", "grey", "on_white")
    cprint(" " + questName, "grey", "on_white")
    cprint(" " + questDescription, "grey", "on_white")
    print("")
    return ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements
def viewQuests(ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements):
    """Print every ongoing quest with its description, requirements, rewards.

    WARNING(review): this strips the "Item:"/"Location:"/"EXP:" type prefixes
    from the requirement/reward strings *in place*, so the stored quest data
    is permanently altered after the first viewing — later code that matches
    on those prefixes will no longer find them.  Confirm this is intended.
    """
    for x in range(0,len(ongoingQuests)):
        cprint((str(x) + ": " + ongoingQuests[x]), "grey", "on_white")
        cprint((" " + ongoingQuestsDescription[ongoingQuests[x]]), "grey", "on_white")
        cprint((" Requirements:"), "grey", "on_white")
        for z in range(0,len(ongoingQuestsRequirements[ongoingQuests[x]])):
            # "Item:<name>" requirement: show the bare item name.
            if (ongoingQuestsRequirements[ongoingQuests[x]][z])[:5] == "Item:":
                ongoingQuestsRequirements[ongoingQuests[x]][z] = ongoingQuestsRequirements[ongoingQuests[x]][z][5:]
                cprint((" " + ongoingQuestsRequirements[ongoingQuests[x]][z]), "grey", "on_white")
            # "Location:<name>" requirement: show as "Reach <name>".
            elif (ongoingQuestsRequirements[ongoingQuests[x]][z])[:9] == "Location:":
                ongoingQuestsRequirements[ongoingQuests[x]][z] = ongoingQuestsRequirements[ongoingQuests[x]][z][9:]
                cprint((" Reach " + ongoingQuestsRequirements[ongoingQuests[x]][z]), "grey", "on_white")
        cprint((" Rewards:"), "grey", "on_white")
        for y in range(0,len(ongoingQuestsRewards[ongoingQuests[x]])):
            # "EXP:<n>" reward: show as "<n> EXP".
            if (ongoingQuestsRewards[ongoingQuests[x]][y])[:4] == "EXP:":
                ongoingQuestsRewards[ongoingQuests[x]][y] = ongoingQuestsRewards[ongoingQuests[x]][y][4:]
                cprint((" " + ongoingQuestsRewards[ongoingQuests[x]][y] + " EXP"), "grey", "on_white")
# locations = ["Home Town",
# "Jaded Forest Entrance *---",
# "Jaded Forest Path -*--",
# "Jaded Forest Clearing --*-",
# "Jaded Forest Opening ---*",
# "Cobalt Beck *--",
# "Cobalt Beck Bridge -*-",
# "Cobalt Beck --*",
# "Emelle Village",
# "Shaded Path"]
# itemDesc, playerItems, specialItems = itemLib.itemInit()
# seenDialogues = 0
# health = 10
# exp = 10
# position = 8
# playerItems = ["Vial"]
# itemDesc = {"Vial": "It's a fucking vial."}
# ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements = questInit()
# ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements = addQuest("Kill the Nightbody", "A fearsome beast has taken residence in the Jaded Forest, preventing the locals from reaching the other town.", ["Item:Nightbody Blood"], ["EXP:200"], "event1", ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements)
# input("")
# ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements, playerItems, itemDesc, seenDialogues, specialItems, exp, health = completeQuest("Kill the Nightbody", ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements, playerItems, itemDesc, seenDialogues, specialItems, exp, health)
# input("") | {"/questLib.py": ["/dialogueLib.py", "/itemLib.py"], "/game.py": ["/spellLib.py", "/itemLib.py", "/dialogueLib.py", "/saveLib.py", "/questLib.py"]} |
69,074 | JakesCode/The-Saving-of-Chora | refs/heads/master | /dialogueLib.py | # Dialogue Library for Python Adventure Game #
# Copyright Jake Stringer 2015 #
from termcolor import *
import colorama
import sys
import os
colorama.init()
def initPresets():
    """Return the three dialogue colour presets ``(good, bad, alternate)``.

    Each preset maps a termcolor foreground name to a background name.
    """
    friendly = {"grey": "on_cyan"}
    hostile = {"grey": "on_red"}
    neutral = {"white": "on_blue"}
    return friendly, hostile, neutral
def say(name, line, preset):
    """Print one dialogue box: a coloured name banner, the line, a footer.

    ``preset`` maps a foreground colour to a background colour (see
    initPresets); only its first entry is used.  Waits for a keypress.
    """
    initPresets()  # return value unused; kept to preserve the original call
    fg = list(preset.keys())[0]
    bg = preset[fg]
    cprint(("****" * 9) + name + ("****" * 9), fg, bg)
    print("")
    print(line)
    print("")
    cprint(("****" * 18) + "****", fg, bg)
    input("")
    print("")
69,075 | JakesCode/The-Saving-of-Chora | refs/heads/master | /saveLib.py | # Save Library for Python Adventure Game #
# Copyright Jake Stringer 2015 #
from termcolor import *
import colorama
import sys
import os
import ast
colorama.init()
def newGame():
    """Create (or truncate) saveGame.dat so a fresh game starts with no state.

    Fix: the original opened the file without ever closing it; a context
    manager now releases the handle deterministically.
    """
    with open("saveGame.dat", "w") as save_file:
        save_file.write("")
def save(position, health, strength, exp, playerLevel, playerSpells, playerClass, seenDialogues, rank, playerItems, itemDesc, specialItems, mana, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements):
    """Serialise the game state to saveGame.dat, one str()-rendered field per line.

    ``playerClass`` is forced to None before writing (load() likewise ignores
    it), so its stored line is always "None".
    """
    playerClass = None
    state = [position, health, strength, exp, playerLevel, playerSpells,
             playerClass, seenDialogues, rank, playerItems, itemDesc,
             specialItems, mana, ongoingQuests, ongoingQuestsDescription,
             ongoingQuestsRewards, ongoingQuestsRequirements]
    with open("saveGame.dat", "w") as saveFile:
        saveFile.writelines(str(field) + "\n" for field in state)
def load():
    """Deserialise the game state from saveGame.dat (the inverse of save()).

    Returns the same 17-field tuple that save() writes, parsing each line
    with ``ast.literal_eval``.

    Fixes over the original: the file handle is now closed via a context
    manager, and the pointless outer ``for`` loop (which re-parsed every
    field once per line of the file) has been removed — one pass suffices.

    NOTE: ``rank`` is read verbatim and keeps its trailing newline, exactly
    as the original behaved; callers apparently tolerate it.
    """
    with open("saveGame.dat", "r") as save_file:
        importedData = save_file.readlines()
    position = ast.literal_eval(importedData[0])
    health = ast.literal_eval(importedData[1])
    strength = ast.literal_eval(importedData[2])
    exp = ast.literal_eval(importedData[3])
    playerLevel = ast.literal_eval(importedData[4])
    playerSpells = ast.literal_eval(importedData[5])
    # playerClass is stored on line 6 but deliberately ignored (see save()).
    playerClass = None
    seenDialogues = ast.literal_eval(importedData[7])
    rank = importedData[8]
    playerItems = ast.literal_eval(importedData[9])
    itemDesc = ast.literal_eval(importedData[10])
    specialItems = ast.literal_eval(importedData[11])
    mana = ast.literal_eval(importedData[12])
    ongoingQuests = ast.literal_eval(importedData[13])
    ongoingQuestsDescription = ast.literal_eval(importedData[14])
    ongoingQuestsRewards = ast.literal_eval(importedData[15])
    ongoingQuestsRequirements = ast.literal_eval(importedData[16])
    return position, health, strength, exp, playerLevel, playerSpells, playerClass, seenDialogues, rank, playerItems, itemDesc, specialItems, mana, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements
# position = 1
# health = 10
# strength = 6
# exp = 100
# playerLevel = 2
# playerSpells = ["Magic", "Shit"]
# playerClass = None
# seenDialogues = 2
# rank = "Frajan"
# playerItems = ["Pendant"]
# itemDesc = {"Pendant": "Shiny"}
# specialItems = []
# mana = 20
# ongoingQuests = ["Bob"]
# ongoingQuestsDescription = {"Bob": "Whoa dude"}
# ongoingQuestsRewards = {"Bob": ["EXP:400"]}
# ongoingQuestsRequirements = {"Bob": ["Location: Emelle Village"]}
# save(position, health, strength, exp, playerLevel, playerSpells, playerClass, seenDialogues, rank, playerItems, itemDesc, specialItems, mana, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements)
# print(playerItems)
# input("")
# position, health, strength, exp, playerLevel, playerSpells, playerClass, seenDialogues, rank, playerItems, itemDesc, specialItems, mana, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements = load()
# print(playerItems)
# input("") | {"/questLib.py": ["/dialogueLib.py", "/itemLib.py"], "/game.py": ["/spellLib.py", "/itemLib.py", "/dialogueLib.py", "/saveLib.py", "/questLib.py"]} |
def initLocations():
    """Return the world map: ``(locations, descriptions, hostileLocations)``.

    ``locations`` is the ordered list of area names, ``descriptions`` maps
    each name to its flavour text, and ``hostileLocations`` lists the areas
    where random battles occur.
    """
    descriptions = {
        "Home Town": "A peaceful town",
        "Jaded Forest Entrance *---": "An opening in the thick wall of tall flowers and robust logs.",
        "Jaded Forest Path -*--": "A pristine woodland, decorated with intricate flowers.",
        "Jaded Forest Clearing --*-": "A circle in the center of the forest. It is covered in strange markings.",
        "Jaded Forest Opening ---*": "The path leads out of the forest here. The sound of water can be heard.",
        "Cobalt Beck *--": "This crystal-clear stream of water gushes past rocks below you.",
        "Cobalt Beck Bridge -*-": "The bridge is sturdy, built with wood from the Jaded Forest.",
        "Cobalt Beck --*": "The path becomes wider here as you enter the next village.",
        "Emelle Village": "Wooden huts populate the lush green land. The village is famous for its fish.",
        "Shaded Path": "A small path in the shade.",
        "Lunar Woods Entrance *-----": "A stunning, blue-tinted forest entrance.",
        "Lunar Woods Path -*----": "Glowing flowers dance in the gentle breeze.",
        "Lunar Woods River --*---": "Turquoise water flows past light-coloured rocks.",
        "Lunar Woods Flower Patch ---*--": "Blue flowers sway with the wind.",
        "Lunar Woods Clearing ----*-": "Light from outside the forest penetrates the glow of the flowers.",
        "Lunar Woods Opening -----*": "The blue glow of the forest weakens as you get further away.",
    }
    # The travel order matches the insertion order of the description table.
    locations = list(descriptions)
    hostileLocations = [
        "Jaded Forest Path -*--",
        "Jaded Forest Clearing --*-",
        "Cobalt Beck Bridge -*-",
        "Lunar Woods Flower Patch ---*--",
    ]
    return locations, descriptions, hostileLocations
def enemies(position, ongoingQuests):
    """Return ``(names, stats, enemyDamage)`` for the area at *position*.

    The roster depends on the player's location and on whether the Nightbody
    boss quest is active at the Jaded Forest Clearing (position 3).
    """
    # Default early-game roster.
    names = ["Cinderman", "Thornfoot"]
    stats = {"Cinderman": 10, "Thornfoot": 15}
    enemyDamage = {"Cinderman": 2, "Thornfoot": 2}
    if int(position) > 5 and position < 10:
        # Mid-game areas add two tougher foes.
        names = ["Cinderman", "Thornfoot", "Boulderchild", "Vextooth"]
        stats = {"Cinderman": 10, "Thornfoot": 15, "Boulderchild": 20, "Vextooth": 20}
        enemyDamage = {"Cinderman": 2, "Thornfoot": 2, "Boulderchild": 4, "Vextooth": 4}
    elif "Kill the Nightbody" in ongoingQuests and position == 3:
        # Boss encounter while its quest is active.
        names = ["Nightbody"]
        stats = {"Nightbody": 40}
        enemyDamage = {"Nightbody": 6}
    elif int(position) > 10 and position < 16:
        # Lunar Woods: three identical nymph variants.
        names = ["Blue Flower Nymph", "Purple Flower Nymph", "Red Flower Nymph"]
        stats = {nymph: 5 for nymph in names}
        enemyDamage = {nymph: 2 for nymph in names}
    return names, stats, enemyDamage
def mainScreen(hostileLocations):
    """Print the current location banner: red for hostile areas, cyan otherwise.

    Reads the module globals ``locations``, ``descriptions`` and ``position``.
    """
    here = locations[int(position)]
    # NOTE(review): substring containment against each hostile name, as in
    # the original — a location whose name is contained in a hostile one
    # also counts as hostile.
    if any(here in s for s in hostileLocations):
        colours = ("white", "on_red")
    else:
        colours = ("white", "on_cyan")
    cprint(here.center(80), *colours)
    cprint(descriptions[here].center(80), *colours)
    print("")
def death():
    """Show the animated 'GAME OVER' screen and terminate the program.

    Windows-only: uses ``os.system("cls")`` to clear the console and
    ``msvcrt`` to drain the keyboard buffer so stray keypresses do not leak
    to the shell after exit.  Never returns (calls ``sys.exit()``).
    """
    os.system("cls")
    print("")
    print("")
    print("")
    print("")
    print("")
    # Reveal the banner one row at a time.
    time.sleep(0.25)
    cprint(" ╔═╗╔═╗╔╦╗╔═╗ ╔═╗╦ ╦╔═╗╦═╗ ", "red", "on_white")
    time.sleep(0.25)
    cprint(" ║ ╦╠═╣║║║║╣ ║ ║╚╗╔╝║╣ ╠╦╝ ", "red", "on_white")
    time.sleep(0.25)
    cprint(" ╚═╝╩ ╩╩ ╩╚═╝ ╚═╝ ╚╝ ╚═╝╩╚═ ", "red", "on_white")
    print("")
    print("")
    print("")
    print("")
    print("")
    time.sleep(0.25)
    time.sleep(0.25)
    print("")
    cprint(" ╔═╗╔═╗╔═╗╔═╗╔═╗ ╔═╗╔═╗╔╦╗╔═╗╔╦╗ ", "red", "on_white")
    time.sleep(0.25)
    cprint(" ╚═╗╠═╝╠═╣║ ║╣ ║ ╠═╣ ║║║╣ ║ ", "red", "on_white")
    time.sleep(0.25)
    cprint(" ╚═╝╩ ╩ ╩╚═╝╚═╝ ╚═╝╩ ╩═╩╝╚═╝ ╩ ", "red", "on_white")
    input("")
    # NOTE(review): local flag never read afterwards — apparently vestigial.
    leave = True
    # Clear the keyboard buffer #
    while msvcrt.kbhit():
        msvcrt.getch()
    sys.exit()
def effect(givenEffect):
    """Announce a battle status effect and echo it back to the caller.

    Recognised effects are "paralyze" and "invisible"; anything else is
    passed through silently.

    Fix: the original also set local flags ``paralyzed`` / ``invisible``
    that were never read or returned — dead stores, removed here.  The
    caller is expected to act on the returned effect name.
    """
    if givenEffect == "paralyze":
        cprint("Enemy is paralyzed for two turns!", "blue", "on_yellow")
    if givenEffect == "invisible":
        cprint("A dark haze surrounds you, blocking the enemy's view of you.", "blue", "on_yellow")
    return givenEffect
def battle(strength, mana, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements, playerItems, itemDesc, seenDialogues, specialItems, position):
    """Run one complete battle: player/enemy turn loop, then the EXP payout.

    Mutates the globals `enemyBaseHealth`, `health` and `exp`; reads the
    module globals `names`, `stats`, `enemyDamage`, `playerSpells` and
    `hostileLocations`. Calls `death()` (which exits the process) if the
    player's health reaches zero at the top of a turn.

    Fixes applied:
    - `timer` and `givenEffect` are now initialised before the loop; an
      invalid menu choice previously left them unbound and crashed with
      UnboundLocalError at the enemy-turn check.
    - `battleChoice.lower()` returned a new string that was discarded, so
      upper-case input never matched a menu option; the result is now kept.
    - The EXP count-up loop uses `<` instead of `!=` so it cannot spin
      forever if `strength*health` is non-positive.
    - Removed the scattered bare `int(x)` statements — expression
      statements whose result was discarded (no-ops).
    """
    global enemyBaseHealth
    global health
    global exp
    enemyGo = True
    timer = 0
    givenEffect = None
    enemyChoice = (randint(1,len(names))-1)
    print("")
    os.system("cls")
    mainScreen(hostileLocations)
    if "Kill the Nightbody" in ongoingQuests and position == 3:
        cprint(("The trees above you shake violently as a large creature runs through it."), "white", "on_red")
        cprint(("As you stand, rooted to the spot with fear, a mass of dark flesh and seething\nred eyes appears before you, as if from thin air."), "white", "on_red")
        input("")
        cprint(("As your brain seeks to flee, you feel a rush of adrenaline surge within you.\nGrabbing the Silver Sword you obtained from Zaor, you hold it high and\nprepare to run at the beast."), "white", "on_red")
        cprint(("The Nightbody stares as you with gigantic flame-red eyes and, with a\npiercing shriek, advances towards you...."), "white", "on_red")
        input("")
        os.system("cls")
        mainScreen(hostileLocations)
    else:
        cprint((names[enemyChoice] + " appeared!"), "white", "on_red")
        cprint(("Enemy has " + str(stats[names[enemyChoice]]) + " health!"), "white", "on_red")
        print("")
    enemyBaseHealth = int(stats[names[enemyChoice]])
    enemyName = names[enemyChoice]
    while enemyBaseHealth > 0:
        if int(health)<=0:
            death()
        cprint((enemyName + " Health: " + str(enemyBaseHealth)), "white", "on_red")
        cprint(("Health: " + str(health)), "white", "on_blue")
        cprint(("Mana: " + str(mana)), "white", "on_blue")
        print("")
        cprint("S - Use a Spell", "white", "on_blue")
        cprint("B - Basic Attack (deals " + str(strength) + " damage)", "white", "on_blue")
        cprint("I - Use an Item", "white", "on_blue")
        print("")
        battleChoice = input("Please enter one of the letters above.... ")
        battleChoice = battleChoice.lower()
        if battleChoice == "b":
            enemyBaseHealth = basicPunch(enemyBaseHealth, strength)
            timer = 0
        elif battleChoice == "i":
            os.system("cls")
            mainScreen(hostileLocations)
            health, timer, strength, mana = itemLib.useItems(playerItems, itemDesc, specialItems, health, strength, mana)
        elif battleChoice == "s":
            os.system("cls")
            mainScreen(hostileLocations)
            playerDamageToEnemy, sendEffect, mana = spellLib.useSpell(playerSpells, mana)
            enemyBaseHealth -= playerDamageToEnemy
            if sendEffect == "nothing":
                timer = 0
            else:
                givenEffect = effect(sendEffect)
                if sendEffect == "paralyze":
                    timer = 2
                elif sendEffect == "invisible":
                    timer = 4
                else:
                    timer = 0
        input("")
        os.system("cls")
        mainScreen(hostileLocations)
        # Enemy Turn #
        # If the timer ain't 0, say what's happening to the enemy. #
        if timer != 0:
            if givenEffect == "paralyze":
                cprint(("Enemy is paralyzed for " + str(timer) + " more turns!"), "white", "on_cyan")
                print("")
                enemyGo = False
            elif givenEffect == "invisible":
                cprint(("A mysterious cloak of shadow masks you for " + str(timer) + " more turns!"), "white", "on_cyan")
                print("")
                enemyGo = True
        if enemyGo == True:
            if enemyBaseHealth > 0:
                # NOTE(review): the miss chance is a roll of 1..enemyBaseHealth
                # (< 3 misses), so weaker enemies miss more often — presumably
                # intentional; confirm with the author.
                enemyHitChance = randint(1,int(enemyBaseHealth))
                if enemyHitChance < 3:
                    cprint("Enemy Misses!", "white", "on_red")
                else:
                    cprint("Enemy Attacks!", "white", "on_red")
                    cprint(enemyName + " dealt " + str(enemyDamage[enemyName]) + " damage!", "white", "on_red")
                    health -= enemyDamage[enemyName]
                    print("")
                    cprint("You now have " + str(health) + " health.", "white", "on_blue")
                input("")
                os.system("cls")
                mainScreen(hostileLocations)
            elif enemyBaseHealth <= 0:
                cprint("The " + enemyName + " dropped to the floor!", "white", "on_red")
                input("")
        else:
            if givenEffect == "paralyze":
                cprint(("The enemy is still paralyzed; rooted to the spot."), "white", "on_cyan")
                cprint(("The enemy will continue to be paralyzed for " + str(timer) + " more turns...."), "white", "on_cyan")
            elif givenEffect == "invisible":
                cprint(("The shadow continues to swirl around you."), "white", "on_cyan")
                cprint(("The shadow will mask you for another " + str(timer) + " turns...."), "white", "on_cyan")
        # If the timer ain't 0, take one off of it, as the round has ended. #
        if timer != 0:
            if (timer-1)==0:
                cprint(("Effect has worn off!"), "blue", "on_yellow")
                enemyGo = True
            timer -= 1
    # BATTLE END #
    os.system("cls")
    mainScreen(hostileLocations)
    endNumber = strength*health
    startNumber = 0
    cprint((enemyName + " defeated!"), "white", "on_magenta")
    cprint(("Strength: " + str(strength)), "white", "on_magenta")
    cprint(("Health: " + str(health)), "white", "on_magenta")
    cprint(("EXP: " + str(startNumber)), "white", "on_magenta")
    time.sleep(2)
    # Animated EXP counter: redraw once per point, 0.05 s per frame.
    while startNumber < endNumber:
        os.system("cls")
        mainScreen(hostileLocations)
        startNumber+=1
        cprint((enemyName + " defeated!"), "white", "on_magenta")
        cprint(("Strength: " + str(strength)), "white", "on_magenta")
        cprint(("Health: " + str(health)), "white", "on_magenta")
        cprint(("EXP: " + str(startNumber)), "white", "on_magenta")
        time.sleep(0.05)
    time.sleep(1)
    os.system("cls")
    mainScreen(hostileLocations)
    cprint(("EXP Gained: " + str(endNumber)), "white", "on_magenta")
    exp += endNumber
    input("")
    os.system("cls")
    mainScreen(hostileLocations)
    if "Kill the Nightbody" in ongoingQuests and position == 3:
        # NOTE(review): these names are parameters, so the rebinding below is
        # local — the caller never sees the updated quest/item state. Confirm
        # whether completeQuest mutates its arguments in place.
        ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements, playerItems, itemDesc, seenDialogues, specialItems, exp, health = questLib.completeQuest("Kill the Nightbody", ongoingQuests, ongoingQuestsDescription, ongoingQuestsRequirements, ongoingQuestsRewards, playerItems, itemDesc, seenDialogues, specialItems, exp, health)
        input("")
def monsterChance(strength, mana, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements, playerItems, itemDesc, seenDialogues, specialItems, position):
    """Roll for a random encounter in the current location.

    Only hostile locations can trigger a battle. A roll of 1-4 (20%)
    starts one; otherwise the pending Nightbody quest at position 3
    forces one. Delegates the actual fight to `battle(...)`.
    """
    roll = randint(1, 20)
    here = locations[int(position)]
    if any(here in s for s in hostileLocations):
        cprint("(Location is hostile.... watch your step!)", "blue", "on_yellow")
        if roll < 5 or ("Kill the Nightbody" in ongoingQuests and position == 3):
            battle(strength, mana, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements, playerItems, itemDesc, seenDialogues, specialItems, position)
def information():
    """Redraw the screen and print the player's current stats.

    Reads the module globals `health`, `position`, `exp`, `playerLevel`,
    `locations` and `descriptions`; takes no arguments and returns None.
    """
    os.system("cls")
    mainScreen(hostileLocations)
    print("")
    print("Health: " + str(health))
    # NOTE(review): "Locaton" typo is in a user-facing string; left as-is
    # here since a doc pass must not change runtime output.
    print("Locaton: " + locations[position])
    print("EXP: " + str(exp))
    int(exp)  # no-op: the converted value is discarded
    print("Description of Location: " + descriptions[locations[position]])
    print("Player Level: " + str(playerLevel))
    print("")
def showHelp():
    """Redraw the screen and print the command reference and the battle
    colour legend. Output is identical to the original line-by-line list."""
    os.system("cls")
    mainScreen(hostileLocations)
    cprint("Commands: ", "red", "on_white")
    print("")
    # Single-key commands shown in blue-on-white, in menu order.
    for helpLine in ("S: See current spells.",
                     "C: See information about your character.",
                     "I: View your items.",
                     "B: Move back one location.",
                     "F: Save the game.",
                     "Q: View your quests.",
                     "E: Exit the game."):
        cprint(helpLine, "blue", "on_white")
    cprint((" This does not save your game. Use 'F' to save the current game."), "red", "on_white")
    print("")
    print("During battle, colours are used to signal whose turn it is.")
    cprint("White on BLUE signals that it's YOUR turn.", "white", "on_blue")
    cprint("and white on RED signals that it's the ENEMIES turn.", "white", "on_red")
    print("")
    print("Type the letter when the following appears: ")
    print(" Type a command, type 'help', or press enter to move on.... ")
    print("")
    cprint("Press the Enter key to continue....", "blue", "on_white")
def basicPunch(enemyBaseHealth, strength):
    """Apply a basic attack.

    Prints the attack feedback and returns the enemy's remaining health
    after subtracting `strength` damage.

    Fix: removed the bare `int(strength)` statement — its result was
    discarded, so it did nothing.
    """
    cprint(("Attacked the enemy!"), "grey", "on_green")
    print("")
    cprint(("Dealt " + str(strength) + " damage!"), "grey", "on_green")
    enemyBaseHealth -= int(strength)
    return enemyBaseHealth
def parseCommand(command, position, playerItems):
    """Interpret one main-loop command and return the (possibly moved) position.

    Recognised commands: "help", "c" (character info), "f" (save), ""
    (move forward), "b" (move back), "q" (quests), "e" (exit), "i"
    (items), plus the debug class-switch commands "frajan"/"tezad"/"oslid"
    which mutate the `health`, `strength` and `rank` globals.

    Fix: `command.lower()` returns a new string; the original discarded
    the result, so upper-case commands were never recognised. The result
    is now assigned back.
    """
    global health
    global rank
    global strength
    command = command.lower()
    if command == "help":
        showHelp()
    if command == "c":
        information()
    if command == "f":
        saveLib.save(position, health, strength, exp, playerLevel, playerSpells, playerClass, seenDialogues, rank, playerItems, itemDesc, specialItems, mana, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements)
    if command == "":
        cprint(("You chose to move on...."), "grey", "on_cyan")
        position += 1
    if command == "b":
        # The starting town is position 0; there is nothing behind it.
        if locations[position] == "Home Town":
            cprint(("You can't go back!"), "grey", "on_cyan")
        else:
            cprint(("You chose to go back...."), "grey", "on_cyan")
            position -= 1
    if command == "q":
        if len(ongoingQuests) > 0:
            questLib.viewQuests(ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements)
        else:
            cprint(("No quests! You're free to explore!"), "white", "on_red")
    if command == "e":
        sys.exit()
    if command == "i":
        os.system("cls")
        mainScreen(hostileLocations)
        itemLib.viewItems(playerItems, itemDesc)
    ## CLASS STUFF ##
    if command == "frajan":
        health = 24
        spellLib.addSpell("Blink", playerSpells)
        strength = 4
        rank = "Frajan"
    if command == "tezad":
        health = 22
        spellLib.addSpell("Blink", playerSpells)
        strength = 3
        rank = "Tezad"
    if command == "oslid":
        health = 19
        spellLib.addSpell("Blink", playerSpells)
        spellLib.addSpell("Swipe", playerSpells)
        strength = 2
        rank = "Oslid"
    return position
def checkForEvents(playerItems, itemDesc, seenDialogues, specialItems, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements, exp, health):
    """Fire one-shot story events keyed on (position, seenDialogues).

    Reads the module global `position`. Each event bumps `seenDialogues`
    so it only plays once; the updated counter is returned.

    NOTE(review): the quest branches rebind the quest/item/exp/health
    parameters locally but only `seenDialogues` is returned, so those
    updates are lost unless completeQuest mutates its arguments in
    place — confirm against questLib.
    """
    # Event 1: monster-warning tutorial, first visit past Home Town.
    if position == 1 and seenDialogues == 1:
        os.system("cls")
        mainScreen(hostileLocations)
        print("You need to be careful! Dangerous monsters roam these lands....")
        print("")
        print("Some areas are under control - but others are ruled by evil beasts.")
        print("If an area is unsafe, this will appear at the top of your screen: ")
        print("")
        cprint("(Location is hostile.... watch your step!)", "blue", "on_yellow")
        print("")
        print("If a creature ever attacks you, your best option is to use brute force.")
        print("However, with the right amount of training, you can master spells too.")
        print("")
        print("Spells are more customisable, moving to the will of the user.")
        print("If you use brute force, it's you and your weapon, and nothing else.")
        print("")
        print("If you happen to defeat the entity, you will gain experience points.")
        print("Experience points can be used to hone abilities, and buy weapons and spells.")
        print("")
        print("Good luck!")
        print("")
        cprint("Press the Enter key to continue....", "blue", "on_white")
        input("")
        os.system("cls")
        mainScreen(hostileLocations)
        seenDialogues += 1
    # Event 2: pendant pickup at position 5.
    if position == 5 and seenDialogues == 2:
        os.system("cls")
        mainScreen(hostileLocations)
        print("What's this?")
        print("")
        print("There appears to be a small pendant on the ground.")
        print("It is covered in tiny jewels, all of which glint in the sun.")
        print("")
        playerItems, itemDesc, specialItems = itemLib.addItem("Pendant", "A piece of jewellery, covered in small gems.", playerItems, itemDesc, specialItems, None)
        seenDialogues += 1
    # Event 3: arrival at Emelle Village completes the intro quest.
    if position == 8 and seenDialogues == 3:
        os.system("cls")
        mainScreen(hostileLocations)
        ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements, playerItems, itemDesc, seenDialogues, specialItems, exp, health = questLib.completeQuest("Reach Emelle Village", ongoingQuests, ongoingQuestsDescription, ongoingQuestsRequirements, ongoingQuestsRewards, playerItems, itemDesc, seenDialogues, specialItems, exp, health)
        seenDialogues += 1
    # Event 4: returning to the village with the Filled Vial.
    if position == 8 and "Filled Vial" in playerItems and seenDialogues == 4:
        os.system("cls")
        mainScreen(hostileLocations)
        ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements, playerItems, itemDesc, seenDialogues, specialItems, exp, health = questLib.completeQuest("Return to Emelle Village", ongoingQuests, ongoingQuestsDescription, ongoingQuestsRequirements, ongoingQuestsRewards, playerItems, itemDesc, seenDialogues, specialItems, exp, health)
        seenDialogues += 1
    return seenDialogues
def titleScreen():
    """Animate the ASCII-art title screen, then drain the keyboard buffer.

    Windows-only (`msvcrt`). The art strings below are exact and must not
    be reformatted.
    """
    print("")
    print("")
    time.sleep(0.05)
    cprint("   \│/     ╔╦╗┬ ┬┌─┐  ╔═╗┌─┐┬  ┬┬┌┐┌┌─┐  ┌─┐┌─┐  ╔═╗┬ ┬┌─┐┬─┐┌─┐     \│/   ", "cyan", "on_red")
    time.sleep(0.05)
    cprint("  ─ ─  ║ ├─┤├┤   ╚═╗├─┤└┐┌┘│││││ ┬  │ │├┤   ║ ├─┤│ │├┬┘├─┤  ─ ─  ", "white", "on_red")
    time.sleep(0.05)
    cprint("   /│\     ╩ ┴ ┴└─┘  ╚═╝┴ ┴ └┘ ┴┘└┘└─┘  └─┘└    ╚═╝┴ ┴└─┘┴└─┴ ┴     /│\   ", "white", "on_red")
    time.sleep(0.05)
    print("")
    print("")
    time.sleep(0.05)
    cprint("   \│/     ╔╗ ╦ ╦     \│/   ", "cyan", "on_white")
    time.sleep(0.05)
    cprint("  ─ ─  ╠╩╗╚╦╝  ─ ─  ", "blue", "on_white")
    time.sleep(0.05)
    cprint("   /│\     ╚═╝ ╩     /│\   ", "blue", "on_white")
    print("")
    time.sleep(0.05)
    print("")
    time.sleep(0.05)
    cprint("   \│/     ╦╔═╗╦╔═╔═╗  ╔═╗╔╦╗╦═╗╦╔╗╔╔═╗╔═╗╦═╗     \│/   ", "cyan", "on_magenta")
    time.sleep(0.05)
    cprint("  ─ ─  ║╠═╣╠╩╗║╣   ╚═╗ ║ ╠╦╝║║║║║ ╦║╣ ╠╦╝  ─ ─  ", "white", "on_magenta")
    time.sleep(0.05)
    cprint("   /│\     ╚╝╩ ╩╩ ╩╚═╝  ╚═╝ ╩ ╩╚═╩╝╚╝╚═╝╚═╝╩╚═     /│\   ", "white", "on_magenta")
    time.sleep(0.05)
    print("")
    time.sleep(0.05)
    print("")
    print("")
    time.sleep(0.05)
    print("")
    time.sleep(0.05)
    cprint("   ╔═╗╦═╗╔═╗╔═╗╔═╗  ╔═╗╔╗╔╔╦╗╔═╗╦═╗   ", "cyan")
    time.sleep(0.05)
    cprint("   ╠═╝╠╦╝║╣ ╚═╗╚═╗  ║╣ ║║║ ║ ║╣ ╠╦╝   ", "white")
    time.sleep(0.05)
    cprint("   ╩  ╩╚═╚═╝╚═╝╚═╝  ╚═╝╝╚╝ ╩ ╚═╝╩╚═   ", "white")
    time.sleep(0.05)
    # Clear the keyboard buffer #
    while msvcrt.kbhit():
        msvcrt.getch()
def intro(seenDialogues):
    """Run the new-game introduction and class selection.

    Returns (health, strength, rank, seenDialogues). Reads the module
    globals `playerSpells`, `playerItems`, quest state and `exp`.

    Fixes applied:
    - `classChoice.lower()` returned a new string that was discarded, so
      "A"/"B"/"C" were rejected; the result is now kept.
    - An invalid class choice previously fell through and crashed on the
      unbound `rank`/`health` below; the prompt now loops until the
      player enters a valid letter.
    - `checkForEvents` takes 10 parameters but was called with 8
      (guaranteed TypeError); the missing `exp` and `health` arguments
      are now passed.
    """
    os.system("cls")
    mainScreen(hostileLocations)
    print("Adventurer!")
    print("How is it that you're already old enough to go off into the world?")
    print("")
    print("It does not seem two minutes since your father met me....")
    print("He was a very brave individual - and clearly, so are you.")
    print("")
    print("Welcome to the world of Chora!")
    print("The world you live in is very powerful - the earth itself breathes life,")
    print("the creatures themselves hold mystical forces within their strength,")
    print("and the very essence of the world is controlled by ranks of humans!")
    print("")
    cprint("Press the Enter key to continue....", "blue", "on_white")
    input("")
    os.system("cls")
    mainScreen(hostileLocations)
    print("Now.... what rank would you like to be?")
    print("")
    print("FRAJAN | TEZAD | OSLID")
    print("")
    print("Fearless warriors. Use magic for leisure. Hunters with great strength.")
    print("")
    classChoice = input("Please enter either A, B or C.... ")
    classChoice = classChoice.lower()
    while classChoice not in ("a", "b", "c"):
        print("Please enter something valid....")
        classChoice = input("Please enter either A, B or C.... ").lower()
    if classChoice == "a":
        health = 24
        spellLib.addSpell("Blink", playerSpells)
        strength = 4
        rank = "Frajan"
    elif classChoice == "b":
        health = 22
        spellLib.addSpell("Blink", playerSpells)
        strength = 3
        rank = "Tezad"
    elif classChoice == "c":
        health = 19
        spellLib.addSpell("Blink", playerSpells)
        spellLib.addSpell("Swipe", playerSpells)
        strength = 2
        rank = "Oslid"
    input("")
    os.system("cls")
    seenDialogues = checkForEvents(playerItems, itemDesc, seenDialogues, specialItems, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements, exp, health)
    print("Ah! So you've joined the rank of " + rank + "!")
    print("That means you'll be starting with " + str(health) + " health.")
    print("")
    print("I've also got a quest for you!")
    print("Make it to Emelle Village, and you'll meet some people.")
    print("Do it and you'll get 50 EXP!")
    questLib.addQuest("Reach Emelle Village", "Here we go!", ["Location:Emelle Village"], ["EXP:50"], None, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements)
    print("")
    print("Very well then! If you're stuck at any time, type 'help'.")
    print("I wish you luck! Come back and see us sometime....")
    print("")
    cprint("Press the Enter key to continue....", "blue", "on_white")
    input("")
    os.system("cls")
    mainScreen(hostileLocations)
    seenDialogues += 1
    return health, strength, rank, seenDialogues
def init():
    """Build a fresh game state for a new game and return it as one tuple.

    Matches the shape of the `saveLib.load()` result used on the load
    path, except that `health`, `strength` and `rank` come from `intro()`.
    """
    position = 0
    # health (given by the beginning bit)
    # strength (given by the beginning bit)
    exp = 0
    playerLevel = 1
    playerSpells = []
    playerClass = ""
    seenDialogues = 0
    mana = 0
    # rank (given by the beginning bit)
    locations, descriptions, hostileLocations = initLocations()
    # NOTE(review): `spellDict` and `specialSpells` appear twice among the
    # unpack targets — the later positions silently overwrite the earlier
    # ones. Confirm the intended order against spellLib.spells().
    spellDict, damage, specialSpells, specialSpellsKeys, spellDict, specialSpells = spellLib.spells()
    itemDesc, playerItems, specialItems = itemLib.itemInit()
    ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements = questLib.questInit()
    names, stats, enemyDamage = enemies(position, ongoingQuests)
    return position, exp, playerLevel, playerSpells, playerClass, seenDialogues, locations, descriptions, hostileLocations, names, stats, enemyDamage, damage, specialSpells, specialSpellsKeys, itemDesc, playerItems, specialItems, mana, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements
# def addLevel(amount):
# # Making things easier for the 'checkExpLevel' function. #
# global playerLevel
# playerLevel += amount
# cprint("Level Up!", "red", "on_yellow")
# cprint(("You are now level " + str(playerLevel)), "white", "on_red")
# int(playerLevel)
# input("")
# os.system("cls")
# mainScreen(hostileLocations)
# def checkExpLevel(playerLevel):
# # Checks the level of EXP. If it's high enough, the player goes up a level. #
# if exp in range(50, 100):
# addLevel(1)
# spellLib.addSpell("Swipe", playerSpells)
# elif exp in range(101, 200):
# addLevel(1)
# spellLib.addSpell("Barrage", playerSpells)
# elif exp in range(201, 500):
# pass
######################## END OF FUNCTIONS ###########################
# NOTE(review): `global` at module level is a no-op — these five
# statements have no effect; the names are already module globals.
global playerSpells
global playerItems
global timer
global seenDialogues
global specialItems
# Battle Related #
# Default battle-effect state; `battle()` manages its own local `timer`.
paralyzed = False
invisible = False
timer = 0
# Custom Libraries #
import spellLib
import itemLib
import dialogueLib
import saveLib
import questLib
# Other Libraries #
from random import randint
import os
import time
from time import sleep
from termcolor import *
import colorama
import sys
import msvcrt
import ast
# Initializing things #
colorama.init()
good, bad, alternate = dialogueLib.initPresets()
######################## END OF SETUP ###########################
# Main script: title screen, save-file menu, then the game loop.
os.system("title The Saving of Chora - A Python Text Adventure Game by Jake Stringer")
titleScreen()
input("")
os.system("cls")
cprint("Load a save file?", "white", "on_red")
cprint("Enter 'y' for yes, 'n' for no, or 'r' to make a new file: ", "white", "on_red")
saveChoice = input("?: ")
# NOTE(review): the result of .lower() is discarded, so upper-case
# "Y"/"N"/"R" fall through to the invalid-input branch below.
saveChoice.lower()
if saveChoice == "y":
    # Load path: restore the saved state, then print a full summary.
    position, health, strength, exp, playerLevel, playerSpells, playerClass, seenDialogues, rank, playerItems, itemDesc, specialItems, mana, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements = saveLib.load()
    locations, descriptions, hostileLocations = initLocations()
    names, stats, enemyDamage = enemies(position, ongoingQuests)
    spells, damage, specialSpells, specialSpellsKeys, spellDict, specialSpells = spellLib.spells()
    cprint("Save file loaded successfully.", "white", "on_blue")
    print("")
    cprint(("Health: " + str(health)), "white", "on_magenta")
    int(health)  # no-op: result discarded
    cprint(("Strength: " + str(strength)), "white", "on_magenta")
    int(strength)  # no-op: result discarded
    cprint(("Mana: " + str(mana)), "white", "on_magenta")
    int(mana)  # no-op: result discarded
    cprint(("EXP: " + str(exp)), "white", "on_magenta")
    int(exp)  # no-op: result discarded
    print("")
    cprint(("Location: " + locations[position]), "white", "on_magenta")
    cprint("Location description: ", "grey", "on_cyan")
    cprint(" " + descriptions[locations[position]], "grey", "on_cyan")
    if any(locations[int(position)] in s for s in hostileLocations):
        cprint("(Location is hostile.... watch your step!)", "white", "on_red")
    print("")
    cprint(("Items: "), "white", "on_magenta")
    for x in range(0,len(playerItems)):
        cprint((" " + str(x) + ": " + playerItems[x]), "blue", "on_white")
        int(x)  # no-op: result discarded
        cprint((" " + itemDesc[playerItems[x]]), "blue", "on_white")
    if len(playerItems) == 0:
        cprint((" No items to show!"), "blue", "on_white")
    input("")
    os.system("cls")
    cprint(("Spells: "), "white", "on_magenta")
    for x in range(0,len(playerSpells)):
        cprint(" " + str(x) + ": " + playerSpells[x], "white", "on_blue")
        int(x)  # no-op: result discarded
        cprint((" (" + spellDict[playerSpells[x]] + ")"), "white", "on_blue")
        cprint(" Deals " + str(damage[playerSpells[x]]) + " damage", "white", "on_blue")
    print("")
    cprint(("Quests: "), "white", "on_magenta")
    print("")
    questLib.viewQuests(ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements)
    print("")
    cprint("Press the enter key to return to your game....", "grey", "on_yellow")
elif saveChoice == "n":
    # New game without touching the save file.
    cprint("Save file not opened.", "white", "on_red")
    input("")
    position, exp, playerLevel, playerSpells, playerClass, seenDialogues, locations, descriptions, hostileLocations, names, stats, enemyDamage, damage, specialSpells, specialSpellsKeys, itemDesc, playerItems, specialItems, mana, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements = init()
    health, strength, rank, seenDialogues = intro(seenDialogues)
elif saveChoice == "r":
    # Wipe the save file, then start a new game.
    saveLib.newGame()
    position, exp, playerLevel, playerSpells, playerClass, seenDialogues, locations, descriptions, hostileLocations, names, stats, enemyDamage, damage, specialSpells, specialSpellsKeys, itemDesc, playerItems, specialItems, mana, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements = init()
    health, strength, rank, seenDialogues = intro(seenDialogues)
    cprint("Save file has been wiped.", "white", "on_red")
else:
    # Anything unrecognised behaves like "n".
    print("Nothing valid entered.")
    cprint("Save file not opened.", "white", "on_red")
    input("")
    position, exp, playerLevel, playerSpells, playerClass, seenDialogues, locations, descriptions, hostileLocations, names, stats, enemyDamage, damage, specialSpells, specialSpellsKeys, itemDesc, playerItems, specialItems, mana, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements = init()
    health, strength, rank, seenDialogues = intro(seenDialogues)
input("")
os.system("cls")
# Main game loop: refresh world state, fire events, roll for monsters,
# then read one command. `while 1<2` is an intentional infinite loop;
# exit happens via parseCommand ("e") or death().
while 1<2:
    names, stats, enemyDamage = enemies(position, ongoingQuests)
    locations, descriptions, hostileLocations = initLocations()
    mainScreen(hostileLocations)
    seenDialogues = checkForEvents(playerItems, itemDesc, seenDialogues, specialItems, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements, exp, health)
    # questLib.checkQuestCompletion(playerItems, ongoingQuestsRequirements, ongoingQuests, position, locations, ongoingQuestsRewards)
    monsterChance(strength, mana, ongoingQuests, ongoingQuestsDescription, ongoingQuestsRewards, ongoingQuestsRequirements, playerItems, itemDesc, seenDialogues, specialItems, position)
    cprint("Type a command, type 'help', or press enter to move on.... ", "white", "on_blue")
    command = input("?: ")
    position = parseCommand(command, position, playerItems)
    input("")
    os.system("cls")
| {"/questLib.py": ["/dialogueLib.py", "/itemLib.py"], "/game.py": ["/spellLib.py", "/itemLib.py", "/dialogueLib.py", "/saveLib.py", "/questLib.py"]} |
69,079 | mor0981/ParkFlask | refs/heads/master | /test2.py | import unittest
from App import app
import json
# import firebase_admin
# from firebase_admin import auth
# from firebase_admin import credentials
# from firebase_admin import firestore
# cred = credentials.Certificate('parkflask-firebase-adminsdk-wplsp-87a9bb6106.json')
# firebase_admin.initialize_app(cred)
# db = firestore.client()
class TestHello(unittest.TestCase):
    """End-to-end tests for the ParkFlask app via the Flask test client.

    These tests hit the live Firebase backend through the app routes and
    check for Hebrew UI strings in the rendered responses. The Hebrew
    literals are asserted byte-for-byte and must not be translated.
    """
    #User login with correct details
    def setUp(self):
        # Fresh test client per test.
        app.testing = True
        self.app = app.test_client()
    def test_homePage(self):
        """The landing page responds with 200."""
        rv = self.app.get('/')
        self.assertEqual(rv.status, '200 OK')
    def test_login_logout(self):
        """Valid login shows the welcome banner; logout confirms."""
        taster = app.test_client(self)
        rv = taster.post('/login' , data=dict(email="mor0981@gmail.com",password="123456"),follow_redirects=True)
        # NOTE(review): assertTrue(rv.status, '200 OK') treats '200 OK' as
        # the failure *message*, not an expected value — it passes for any
        # truthy status. Probably meant assertEqual.
        self.assertTrue(rv.status, '200 OK')
        self.assertTrue('ברוכים'.encode() in rv.data)
        rv= taster.get('/logout',follow_redirects=True)
        self.assertTrue('התנתקת בהצלחה'.encode() in rv.data)
    def test_login_session(self):
        """A logged-in session stays logged in on revisiting /login."""
        taster = app.test_client(self)
        rv = taster.post('/login' , data=dict(email="mor0981@gmail.com",password="123456"),follow_redirects=True)
        rv = taster.get('/login',follow_redirects=True)
        self.assertTrue('ברוכים'.encode() in rv.data)
        rv= taster.get('/logout',follow_redirects=True)
    def test_delete_user(self):
        """Register, unregister, then confirm login fails."""
        taster = app.test_client(self)
        rv = taster.post('/register' , data=dict(email="test@gmail.com",password="123456",name="test",last="test"),follow_redirects=True)
        rv = taster.post('/login' , data=dict(email="test@gmail.com",password="123456"),follow_redirects=True)
        self.assertTrue('ברוכים'.encode() in rv.data)
        rv = taster.post('/unregister' , data=dict(email="test@gmail.com",password="123456"),follow_redirects=True)
        rv = taster.post('/login' , data=dict(email="test@gmail.com",password="123456"),follow_redirects=True)
        self.assertTrue('שם משתמש או סיסמא לא נכונים'.encode() in rv.data)
    def test_comment(self):
        """A posted comment shows up on the park's comment page."""
        taster = app.test_client(self)
        rv = taster.post('/register' , data=dict(email="test2@gmail.com",password="123456",name="test",last="test"),follow_redirects=True)
        rv = taster.post('/login' , data=dict(email="test2@gmail.com",password="123456"),follow_redirects=True)
        rv = taster.post('/comments/פארק%20ליכטנשטיין',data=dict(comment="test"),follow_redirects=True)
        rv = taster.get('/comments/פארק%20ליכטנשטיין')
        self.assertTrue('test'.encode() in rv.data)
        rv = taster.post('/unregister',data=dict(email="test2@gmail.com",password="123456"),follow_redirects=True)
    def test_jasonPark_show(self):
        """Park names from playgrounds.json appear on the /parks page."""
        taster = app.test_client(self)
        with open('playgrounds.json', 'r',encoding="utf8") as myfile:
            arr=[]
            data=json.loads(myfile.read())
            # Only the first park's name is checked.
            arr.append(data[0]['Name'])
        rv = taster.post('/login' , data=dict(email="mor0981@gmail.com",password="123456"),follow_redirects=True)
        rv = taster.get('/parks')
        for p in arr:
            self.assertTrue(p.encode() in rv.data)
    def test_login_as_admin(self):
        """Admin account sees the user-management section."""
        taster = app.test_client(self)
        rv = taster.post('/login' , data=dict(email="mor0981@gmail.com",password="123456"),follow_redirects=True)
        rv = taster.get('/login',follow_redirects=True)
        self.assertTrue('משתמשים'.encode() in rv.data)
        rv= taster.get('/logout',follow_redirects=True)
    def test_login_as_visit(self):
        """Non-admin account does not see the user-management section."""
        taster = app.test_client(self)
        rv = taster.post('/login' , data=dict(email="dani@gmail.com",password="123456"),follow_redirects=True)
        rv = taster.get('/login',follow_redirects=True)
        self.assertFalse('משתמשים'.encode() in rv.data)
        rv= taster.get('/logout',follow_redirects=True)
    def test_add_admin(self):
        """Admin can register a new user who can then log in."""
        taster = app.test_client(self)
        rv = taster.post('/login' , data=dict(email="mor0981@gmail.com",password="123456"),follow_redirects=True)
        rv = taster.get('/login',follow_redirects=True)
        rv = taster.post('/registerByAdmin' , data=dict(name="טסט",last="טסט",email="test3@gmail.com",password="123456",Admin="true"),follow_redirects=True)
        rv= taster.get('/logout',follow_redirects=True)
        rv = taster.post('/login' , data=dict(email="test3@gmail.com",password="123456"),follow_redirects=True)
        self.assertTrue('ברוכים'.encode() in rv.data)
        rv = taster.post('/unregister',data=dict(email="test3@gmail.com",password="123456"),follow_redirects=True)
    # def delet_comment(self):
    #     taster = app.test_client(self)
    #     rv = taster.post('/register' , data=dict(email="test3@gmail.com",password="123456",name="test",last="test"),follow_redirects=True)
    #     rv = taster.post('/login' , data=dict(email="test3@gmail.com",password="123456"),follow_redirects=True)
    #     rv = taster.post('/comments/פארק%20ליכטנשטיין',data=dict(comment="test"),follow_redirects=True)
    #     rv = taster.get('/comments/פארק%20ליכטנשטיין')
    #     self.assertTrue('test'.encode() in rv.data)
    # def test_correct(self):
    #     try:
    #         auth.sign_in_with_email_and_password("mor0981@gmail.com","123456")
    #         self.assertTrue(True)
    #     except:
    #         self.assertTrue(False)
    # #User login with incorrect details
    # def test_incorrect(self):
    #     try:
    #         auth.sign_in_with_email_and_password("mor081@gmail.com","12661266")
    #         self.assertTrue(False)
    #     except:
    #         self.assertTrue(True)
if __name__ == '__main__':
unittest.main() | {"/test2.py": ["/App.py"], "/test3.py": ["/App.py"], "/test.py": ["/App.py"], "/App.py": ["/forms.py"]} |
69,080 | mor0981/ParkFlask | refs/heads/master | /forms.py | from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField , SubmitField , RadioField,TextAreaField
from wtforms.validators import DataRequired , Length , Email
class LoginForm(FlaskForm):
    """Login form. Labels are Hebrew UI strings — do not translate."""
    email = StringField('אימייל',
    validators=[DataRequired(),Email()])
    password= PasswordField('סיסמא',
    validators=[DataRequired()])
    submit = SubmitField('התחבר')
class SignOutForm(FlaskForm):
    """Single-button logout form (Hebrew label)."""
    submit = SubmitField('התנתק')
class NewParkForm(FlaskForm):
    """Admin form for creating a park: name, address and shade flag."""
    parkName = StringField("שם הפארק", validators=[DataRequired()])
    parkAddress = StringField("כתובת הפארק", validators=[DataRequired()])
    # Radio choices submit 'yes'/'no'; the displayed text is Hebrew.
    shadow = RadioField("?הצללה",choices=[('yes','כן'),('no','לא')], validators=[DataRequired()])
    submit = SubmitField('צור פארק')
class DeleteParkForm(FlaskForm):
    """Admin form for deleting a park by name and address."""
    parkName = StringField("שם הפארק", validators=[DataRequired()])
    parkAddress = StringField("כתובת הפארק", validators=[DataRequired()])
    submit = SubmitField('מחק פארק')
class signupForm(FlaskForm):
    """Registration form used by the admin panel (Hebrew labels).

    Fix: the `Admin` field was declared twice — once with lowercase
    choice values ('true'/'false') and again after `submit` with
    capitalised values ('True'/'False'). The second class attribute
    silently replaced the first, so the lowercase values the existing
    tests POST (Admin="true") no longer matched. The duplicate is
    removed and the lowercase variant kept.
    """
    email = StringField("דואר אלקטרוני")
    password= PasswordField("סיסמא")
    name = StringField("שם פרטי")
    last = StringField("שם משפחה")
    Admin = RadioField("?אדמין",choices=[('true','כן'),('false','לא')], validators=[DataRequired()])
    submit=SubmitField("הרשם")
class signout2Form(FlaskForm):
    """Account-cancellation form: e-mail, password and username."""
    email = StringField("דואר אלקטרוני")
    password= PasswordField("סיסמא")
    username = StringField("שם משתמש")
    submit=SubmitField("ביטול מנוי")
class addComment(FlaskForm):
    """Comment form for a park page: free text plus a 1-5 star rating."""
    submit = SubmitField('הוסף תגובה')
    comment=TextAreaField("רשום תגובה",validators=[DataRequired()])
    # Star rating: the submitted value is the string '1'..'5'.
    stars = RadioField(choices=[('1','1'),('2','2'),('3','3'),('4','4'),('5','5')], validators=[DataRequired()])
class updateComment(FlaskForm):
    """Edit form for an existing comment (no validators — may be empty)."""
    submit = SubmitField('הוסף תגובה')
    comment=TextAreaField("עדכן תגובה")
class facilitiesForm(FlaskForm):
    """Form for editing a park's facilities list.

    NOTE(review): carries both a display name (`parkName`) and a DB key
    (`parkNameDB`) — presumably the latter identifies the record; confirm
    against the route handler.
    """
    parkName = StringField("שם הפארק")
    parkNameDB = StringField("שם הפארק")
    facilities = StringField("מתקנים")
    submit = SubmitField('אישור')
class PostForm(FlaskForm):
    """User-details update form: e-mail, first/last name, password."""
    email = StringField('איימל', validators=[DataRequired()])
    name = StringField('שם משתמש', validators=[DataRequired()])
    last = StringField('שם משפחה', validators=[DataRequired()])
    password= PasswordField("סיסמא")
    submit = SubmitField('עדכן')
class infoForm(FlaskForm):
    """Contact-info form: name, role and e-mail."""
    name=StringField("שם")
    job=StringField("תפקיד")
    email = StringField("דואר אלקטרוני")
    submit=SubmitField("הכנס")
| {"/test2.py": ["/App.py"], "/test3.py": ["/App.py"], "/test.py": ["/App.py"], "/App.py": ["/forms.py"]} |
69,081 | mor0981/ParkFlask | refs/heads/master | /test3.py | import unittest
from App import app
import json
# import firebase_admin
# from firebase_admin import auth
# from firebase_admin import credentials
# from firebase_admin import firestore
# cred = credentials.Certificate('parkflask-firebase-adminsdk-wplsp-87a9bb6106.json')
# firebase_admin.initialize_app(cred)
# db = firestore.client()
class TestHello(unittest.TestCase):
    """Smoke tests for the ParkFlask app (home page + admin login).

    Fix: removed a leftover debug print ("hhhh...") from test_login —
    it only polluted the test output.
    """
    def setUp(self):
        # Fresh test client per test.
        app.testing = True
        self.app = app.test_client()
    def test_homePage(self):
        """The landing page responds with 200."""
        rv = self.app.get('/')
        self.assertEqual(rv.status, '200 OK')
    def test_login(self):
        """Admin login shows the user-management section, then logs out."""
        taster = app.test_client(self)
        rv = taster.post('/login' , data=dict(email="mor0981@gmail.com",password="123456"),follow_redirects=True)
        rv = taster.get('/login',follow_redirects=True)
        self.assertTrue('משתמשים'.encode() in rv.data)
        rv= taster.get('/logout',follow_redirects=True)
# Allow running this test module directly: python test3.py
if __name__ == '__main__':
    unittest.main()
69,082 | mor0981/ParkFlask | refs/heads/master | /test.py | import unittest
import pyrebase
import App
from App import delete_info_item
import firebase_admin
from firebase_admin import credentials
from firebase_admin import firestore
# Firebase web-app configuration.
# NOTE(review): API key and project identifiers are committed in clear text;
# they should be loaded from environment variables or an ignored config file.
config={
    "apiKey": "AIzaSyDab7tKKm11tgRuLsAPejXGGAYJ1d20cnQ",
    "authDomain": "parkflask.firebaseapp.com",
    "databaseURL": "https://parkflask.firebaseio.com",
    "projectId": "parkflask",
    "storageBucket": "parkflask.appspot.com",
    "messagingSenderId": "685599054335",
    "appId": "1:685599054335:web:db2d1d2890588a14772fca",
    "measurementId": "G-H8HGMEE4WB"
}
# NOTE(review): firestore.client() only works here because ``import App``
# above already ran firebase_admin.initialize_app() as an import side effect.
db = firestore.client()
firebase = pyrebase.initialize_app(config)
auth= firebase.auth()
class TestHello(unittest.TestCase):
    """Integration tests that operate directly on the live Firestore project.

    NOTE(review): most tests wrap their body in a bare ``try/except`` and call
    ``self.assertTrue(False)`` inside the ``try``.  The AssertionError that
    raises is swallowed by the bare ``except``, which then asserts True -- so
    many of these tests pass regardless of outcome.  ``with
    self.assertRaises(...)`` would express the intent correctly.  Also note
    that Firestore ``delete()`` on a missing document does NOT raise, so the
    "unexist" deletion tests do not exercise what their names claim.
    """
    #User login with correct details
    def test_correct(self):
        try:
            auth.sign_in_with_email_and_password("mor0981@gmail.com","12661266")
            self.assertTrue(True)
        except:
            self.assertTrue(False)
    #User login with uncorrect details
    def test_uncorrect(self):
        try:
            auth.sign_in_with_email_and_password("mor081@gmail.com","12661266")
            self.assertTrue(False)
        except:
            self.assertTrue(True)
    #Register User with correct details
    def test_register(self):
        try:
            email="newRr@gmail.com"
            password="123"
            username="new"
            #user=auth.create_user_with_email_and_password("newRr@gmail.com","123321")
            data={"username":username,"email":email,"password":password,"admin":False}
            #info=auth.get_account_info(user['idToken'])['users'][0]['localId']
            db.collection(u'Users').document().set(data)
            self.assertTrue(True)
        except:
            self.assertTrue(False)
    #Register User with uncorrect details
    def test_register_uncorrect(self):
        # NOTE(review): identical flow to test_register; set() succeeds, so the
        # assertTrue(False) fires and is swallowed by the bare except.
        try:
            email="r@gmail.com"
            password="12345"
            username="r"
            #user=auth.create_user_with_email_and_password("newRr@gmail.com","123321")
            data={"username":username,"email":email,"password":password,"admin":False}
            #info=auth.get_account_info(user['idToken'])['users'][0]['localId']
            db.collection(u'Users').document().set(data)
            self.assertTrue(False)
        except:
            self.assertTrue(True)
    # Delete exist user
    def test_delete_register(self):
        try:
            email="newRr@gmail.com"
            password="123"
            username="new"
            docs=db.collection(u'Users').stream()
            for doc in docs:
                d=doc.to_dict()
                if email==d['email'] and password==d['password']:
                    db.collection(u'Users').document(doc.id).delete()
            return self.assertTrue(True)
        except:
            self.assertTrue(False)
    # Delete unexist user
    def test_delete_unexist_register(self):
        try:
            email="newRr@gmail.com"
            password="123"
            username="new"
            docs=db.collection(u'Users').stream()
            for doc in docs:
                d=doc.to_dict()
                if email==d['email'] and password==d['password']:
                    db.collection(u'Users').document(doc.id).delete()
            return self.assertTrue(False)
        except:
            self.assertTrue(True)
    #Add new vaild park
    def test_add_park(self):
        data = {
            "name": "newTestPark",
            "other": "bialik",
            "shadowing": "Yes"
        }
        docs = db.collection(u'Parks').stream()
        canMakePark = True
        for doc in docs:
            dici = doc.to_dict()
            if data["name"] == dici['name'] and data["other"] == dici['other']:
                canMakePark = False
        if canMakePark:
            db.collection(u'Parks').document().set(data)
            self.assertTrue(True)
        else:
            self.assertTrue(False)
    #Delete exist park
    def test_delete_park(self):
        try:
            parkName = "newTestPark"
            parkAddress = "bialik"
            docs = db.collection(u'Parks').stream()
            for doc in docs:
                dici = doc.to_dict()
                if parkName == dici['name'] and parkAddress == dici['other']:
                    #print (f"park {dici['name']} in {dici['other']} has beem deleted")
                    db.collection(u'Parks').document(doc.id).delete()
            self.assertTrue(True)
        except:
            self.assertTrue(False)
    #Delete unexist park
    def test_delete_unexist_park(self):
        try:
            parkName = "notFound"
            parkAddress = "never"
            docs = db.collection(u'Parks').stream()
            for doc in docs:
                dici = doc.to_dict()
                if parkName == dici['name'] and parkAddress == dici['other']:
                    db.collection(u'Parks').document(doc.id).delete()
            self.assertTrue(False)
        except:
            self.assertTrue(True)
    #Delete comment by exist id
    def test_delete_comment(self):
        try:
            post_id='YhzN7rBXz95lA1CuhFCY'
            db.collection(u'Comments').document(post_id).delete()
            self.assertTrue(True)
        except:
            self.assertTrue(False)
    #Delete unExist Comment
    def test_delete_notExist_comment(self):
        try:
            post_id='aaaa'
            db.collection(u'Comments').document(post_id).delete()
            self.assertTrue(False)
        except:
            self.assertTrue(True)
    #Add new info item
    def test_add_info_item(self):
        dic=db.collection(u'Information').stream()
        # NOTE(review): this placeholder list is dead code -- ``docs`` is
        # reassigned from the Firestore query below before it is ever read.
        docs = [{
            'id': 1,
            'name': 'name 1',
            'email': 'email 1'
        }, {
            'id': 2,
            'name': 'name 2',
            'email': 'email 2'
        }]
        print("hello")
        data = {
            "name": 'מיקל גאקסון',
            "job": 'ליצן',
            "email": 'mike@gmail.com'
        }
        docs = db.collection(u'Information').stream()
        for doc in docs:
            dici = doc.to_dict()
            print(dici)
            if data["name"] == dici['name'] and data["job"] == dici['job'] and data["email"] == dici['email']:
                self.assertFalse(False)
                return
        db.collection(u'Information').document().set(data)
        self.assertTrue(True)
        arr=[]
        for doc in dic:
            d=doc.to_dict()
            d["id"] = doc.id
            #print(d)
            arr.append(d)
        self.assertTrue(True)
    #delete exist info item
    def test_delete_info_item(self):
        try:
            info_item_id='va6sHyqPGLyPdZFz9XIE'
            db.collection(u'Information').document(info_item_id).delete()
            self.assertTrue(True)
        except:
            self.assertTrue(False)
    #delete unexist info item
    def test_delete_unexist_info_item(self):
        try:
            info_item_id='qq' #there is no id like that
            db.collection(u'Information').document(info_item_id).delete()
            self.assertTrue(False)
        except:
            self.assertTrue(True)
    #add new guest by admin
    def test_AddGuestByAdmin(self):
        email="testttt@gmail.com"
        password="password"
        name='guest'
        last='test'
        Admin=False
        try:
            #user=auth.create_user_with_email_and_password("testttt@gmail.com","123456password")
            data={"name":name,"last":last,"email":email,"password":password,"admin":Admin}
            #print(auth.get_account_info(user['idToken'])['users'][0]['localId'])
            #info=auth.get_account_info(user['idToken'])['users'][0]['localId']
            db.collection(u'Users').document().set(data)
            self.assertTrue(True)
        except:
            self.assertFalse(True)
    #add new Admin by admin
    def test_AddAdminByAdmin(self):
        email="testttt@gmail.com"
        password="password"
        name='guest'
        last='test'
        Admin=True
        try:
            #user=auth.create_user_with_email_and_password("testttt@gmail.com","123456password")
            data={"name":name,"last":last,"email":email,"password":password,"admin":Admin}
            #print(auth.get_account_info(user['idToken'])['users'][0]['localId'])
            #info=auth.get_account_info(user['idToken'])['users'][0]['localId']
            db.collection(u'Users').document().set(data)
            self.assertTrue(True)
        except:
            self.assertFalse(True)
    #Update exist Guest
    def test_updateGuest(self):
        try:
            ref_comment=db.collection(u'Users')
            email="testttt@gmail.com"
            ref_my=ref_comment.where(u'email',u'==',email).get()
            field_updates={"name":'דני',"last":'sce',"email":email}
            for r in ref_my:
                rr=ref_comment.document(r.id)
                rr.update(field_updates)
            self.assertTrue(True)
        except:
            self.assertTrue(False)
    #Update unexist Guest
    def test_unexist_updateGuest(self):
        try:
            ref_comment=db.collection(u'Users')
            email="NotFoundUser@gmail.com"
            ref_my=ref_comment.where(u'email',u'==',email).get()
            field_updates={"name":'no',"last":'no',"email":email}
            for r in ref_my:
                rr=ref_comment.document(r.id)
                rr.update(field_updates)
            self.assertTrue(False)
        except:
            self.assertTrue(True)
    #delete exist guest
    def test_deleteGuest(self):
        try:
            ref_comment=db.collection(u'Users')
            ref_my=ref_comment.where(u'email',u'==',"r@gmail.com").get()
            for r in ref_my:
                rr=ref_comment.document(r.id)
                rr.delete()
            self.assertTrue(True)
        except:
            self.assertTrue(False)
    #delete exist guest
    def test_unexist_deleteGuest(self):
        try:
            ref_comment=db.collection(u'Users')
            ref_my=ref_comment.where(u'email',u'==',"UserNotFound@gmail.com").get()
            for r in ref_my:
                rr=ref_comment.document(r.id)
                rr.delete()
            self.assertTrue(False)
        except:
            self.assertTrue(True)
    #add new facilites
    def test_facilities(self):
        docs = db.collection(u'Parks').stream()
        parkData = {
            "name": "testPark",
            "parkFacility": "נקי ומסודר"
        }
        canAddPark = False
        for doc in docs:
            dici = doc.to_dict()
            try:
                if parkData['name'] == dici['name']:
                    canAddPark = True
                if canAddPark:
                    # Deleting and creating a new park witch will be updated with the new facilities
                    db.collection(u'Parks').document(doc.id).delete()
                    db.collection(u'Parks').document().set(parkData)
                    self.assertTrue(True)
                    break
            except :
                self.assertTrue(False)
                pass
    #add new vaild comment
    def test_addComment(self):
        try:
            data={'author':'dani@gmail.com','content':'הפארק מסוודר :)','post_id':8777765554444,'title':'פארק גדולי ישראל'}
            doc=db.collection(u'testComments').document()
            doc.set(data)
            self.assertTrue(True)
        except:
            self.assertTrue(False)
    #add new vaild comment
    def test_unvaild_addComment(self):
        try:
            data={'author':'dani@gmail.com','content':'','post_id':8777765554444,'title':'פארק גדולי ישראל'}
            doc=db.collection(u'testComments').document()
            doc.set(data)
            self.assertTrue(False)
        except:
            self.assertTrue(True)
    #update exist comment
    def test_updateComment(self):
        try:
            ref_comment=db.collection(u'Users')
            author="dani@gmail.com"
            ref_my=ref_comment.where(u'author',u'==',author).get()
            field_updates={"author":author,"content":'פארק יפה *-*'}
            for r in ref_my:
                rr=ref_comment.document(r.id)
                rr.update(field_updates)
            self.assertTrue(True)
        except:
            self.assertTrue(False)
    #update unExist comment
    def test_UnExist_updateComment(self):
        try:
            ref_comment=db.collection(u'Users')
            author="UserNotFound@gmail.com"
            ref_my=ref_comment.where(u'author',u'==',author).get()
            field_updates={"author":author,"content":'פארק יפה *-*'}
            for r in ref_my:
                rr=ref_comment.document(r.id)
                rr.update(field_updates)
            self.assertTrue(False)
        except:
            self.assertTrue(True)
    #delete exist comment
    def test_deleteComment(self):
        try:
            ref_comment=db.collection(u'testComments')
            ref_my=ref_comment.where(u'author',u'==',"dani@gmail.com").get()
            for r in ref_my:
                rr=ref_comment.document(r.id)
                rr.delete()
            self.assertTrue(True)
        except:
            self.assertTrue(False)
    #delete unexist comment
    def test_unExist_deleteComment(self):
        try:
            ref_comment=db.collection(u'testComments')
            ref_my=ref_comment.where(u'author',u'==',"UserNotFound@gmail.com").get()
            for r in ref_my:
                rr=ref_comment.document(r.id)
                rr.delete()
            self.assertTrue(False)
        except:
            self.assertTrue(True)
if __name__ == '__main__':
    # Allow running this module directly: ``python test.py``.
    unittest.main()
| {"/test2.py": ["/App.py"], "/test3.py": ["/App.py"], "/test.py": ["/App.py"], "/App.py": ["/forms.py"]} |
69,083 | mor0981/ParkFlask | refs/heads/master | /App.py | from flask import Flask,render_template,request,flash,session,redirect,url_for,abort
from forms import LoginForm,SignOutForm,NewParkForm,DeleteParkForm,signupForm,signout2Form,addComment,updateComment,facilitiesForm,PostForm,infoForm
from flask_jsglue import JSGlue
import pyrebase
import firebase_admin
from firebase_admin import auth
from firebase_admin import credentials
from firebase_admin import firestore
app = Flask(__name__)
jsglue = JSGlue(app)
app.config['SECRET_KEY']='mormormor'
import json
import os
import tempfile
from werkzeug.utils import secure_filename
print(firebase_admin)
# Firebase web config.
# NOTE(review): secrets are committed in clear text; move to env vars.
config={
    "apiKey": "AIzaSyDab7tKKm11tgRuLsAPejXGGAYJ1d20cnQ",
    "authDomain": "parkflask.firebaseapp.com",
    "databaseURL": "https://parkflask.firebaseio.com",
    "projectId": "parkflask",
    "storageBucket": "parkflask.appspot.com",
    "messagingSenderId": "685599054335",
    "appId": "1:685599054335:web:db2d1d2890588a14772fca",
    "measurementId": "G-H8HGMEE4WB"
}
# Static park data bundled with the app (used by /parks, /facilities, addData).
with open('playgrounds.json', 'r',encoding="utf8") as myfile:
    data=json.loads(myfile.read())
# Admin SDK gives Firestore access; Pyrebase gives client-side auth + storage.
cred = credentials.Certificate('parkflask-firebase-adminsdk-wplsp-87a9bb6106.json')
firebase_admin.initialize_app(cred)
db = firestore.client()
firebase = pyrebase.initialize_app(config)
# NOTE(review): this rebinding shadows the ``from firebase_admin import auth``
# import above -- from here on ``auth`` is the Pyrebase client, not admin auth.
auth= firebase.auth()
storage=firebase.storage()
# Destination for comment image uploads (served from /static).
UPLOAD_FOLDER = 'static/uploads'
ALLOWED_EXTENSIONS = {'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
@app.route('/',methods=['GET', 'POST'])
@app.route('/homePage',methods=['GET', 'POST'])
def homePage():
    """Landing page: bounce logged-in users to their role's dashboard."""
    if "user" not in session:
        return render_template('homePage.html')
    destination = "adminPage" if session["admin"] else "visitPage"
    return redirect(url_for(destination))
@app.route('/login',methods=['GET', 'POST'])
def login():
    """Authenticate against Firebase Auth and route the user by admin flag.

    On POST: sign in via Pyrebase, look up the user's profile document in the
    ``Users`` collection, store ``uid``/``user``/``admin`` in the session, and
    redirect to the matching dashboard.  On GET: show the login page, or skip
    it entirely if a session already exists.
    """
    form = LoginForm()
    if request.method == 'POST':
        try:
            user=auth.sign_in_with_email_and_password(form.email.data,form.password.data)
            uid=auth.get_account_info(user['idToken'])['users'][0]['localId']
            session["uid"]=uid
            doc = db.collection(u"Users").document(uid).get()
            if doc.exists:
                session["admin"] = bool(doc.to_dict()['admin'])
                session["user"] = form.email.data
                return redirect(url_for("adminPage" if session["admin"]
                                        else "visitPage"))
            # BUG FIX: previously this path fell through and the view returned
            # None (HTTP 500). A Firebase account with no profile document is
            # treated like a failed login instead.
            return render_template('index.html', form=form, us="Not Exist")
        except Exception:
            # Wrong credentials or a Firebase error -- narrowed from a bare
            # ``except:`` so KeyboardInterrupt/SystemExit are not swallowed.
            return render_template('index.html',form=form,us="Not Exist")
    # GET: an existing session skips the login form.
    if "user" in session:
        return redirect(url_for("adminPage" if session["admin"]
                                else "visitPage"))
    return render_template('index.html',form=form)
commentNum=0
'''
@app.route('/delete_comment',methods=['GET', 'POST'])
def delete_comment():
form=commentForm()
if form.validate_on_submit():
docs=db.collection(u'Comments').stream()
date=form.date.data
time=form.time.data
park=form.parkname.data
for doc in docs:
d=doc.to_dict()
if date==d['date'] and time==d['time'] and park==d['parkname']:
db.collection(u'Comments').document(doc.id).delete()
return redirect(url_for("homePage"))
return render_template('delete_comment.html',form=form)
'''
@app.route('/comment',methods=['GET', 'POST'])
def comment():
    # NOTE(review): this route appears to be dead/broken -- ``commentForm`` is
    # not among the forms imported at the top of this module and ``datetime``
    # is never imported, so hitting this endpoint raises NameError.
    # Commenting seems to be handled by /comments/<p> instead; confirm before
    # removing.
    global commentNum
    commentNum=commentNum+1
    form=commentForm()
    if form.validate_on_submit():
        print("hi")
        now = datetime.now()
        date=now.strftime("%d/%m/%Y")
        time=now.strftime("%H:%M:%S")
        print(date)
        print(time)
        email=form.email.data
        password=form.password.data
        parkName=form.parkname.data
        docs=db.collection(u'Users').stream()
        for doc in docs:
            d=doc.to_dict()
            if email==d['email'] and password==d['password']:
                data={'email':email,'password':password, 'comment':form.comment.data,'time':time,'date':date,'parkName':parkName}
                print(data)
                db.collection(u'Comments').document().set(data)
                print(form.comment.data)
                print(commentNum)
                print(date)
                print(time)
                return redirect(url_for("homePage"))
                break
        print(form.email.data)
    print("hiyou")
    return render_template('comment.html',form=form)
@app.route('/adminPage',methods=['GET', 'POST'])
def adminPage():
    """Admin dashboard (no server-side auth check -- relies on redirects)."""
    return render_template('adminPage.html')
@app.route('/visitPage',methods=['GET', 'POST'])
def visitPage():
    """Regular (non-admin) user dashboard."""
    return render_template('visitPage.html')
@app.route('/user',methods=['GET', 'POST'])
def user():
    """Account page: show the sign-out form; anonymous visitors go to login."""
    if "user" not in session:
        return redirect(url_for("login"))
    form = SignOutForm()
    if form.validate_on_submit():
        return redirect(url_for("logout"))
    return render_template('login.html', form=form)
@app.route('/logout')
def logout():
    """Drop the session user, flash a confirmation, and return home."""
    print("logout")
    # Only "user" is popped -- "uid" and "admin" stay in the session.
    session.pop("user",None)
    flash("התנתקת בהצלחה")
    return redirect(url_for("homePage"))
@app.route('/register',methods=['GET', 'POST'])
def register():
    """Create a Firebase Auth account plus a ``Users`` profile document.

    The Firestore document is keyed by the Firebase Auth localId (uid).
    NOTE(review): the plaintext password is stored in Firestore alongside the
    profile -- a significant security issue; Firebase Auth already holds the
    credential.
    """
    form=signupForm()
    if request.method == 'POST':
        email=form.email.data
        password=form.password.data
        name=form.name.data
        last=form.last.data
        user=auth.create_user_with_email_and_password(email,password)
        data={"name":name,"last":last,"email":email,"password":password,"admin":False}
        #db.child("Guest").push(data)
        #data2={"name":"1","other":email,"shadowing":"123"}
        #db.child("Parks").push(data2)
        print(auth.get_account_info(user['idToken'])['users'][0]['localId'])
        info=auth.get_account_info(user['idToken'])['users'][0]['localId']
        db.collection(u'Users').document(info).set(data)
        return redirect(url_for("login"))
    return render_template('basic.html',form=form)
#signup
@app.route('/signup',methods=['GET', 'POST'])
def signup():
    """Static signup landing page (actual registration is POSTed to /register)."""
    return render_template('signup.html')
#unregister
@app.route('/unregister',methods=['GET', 'POST'])
def unregister():
    """Delete a user's account: comments, Firebase Auth entry, profile doc.

    Matches the submitted email/password against the plaintext credentials in
    the ``Users`` collection.  NOTE(review): the inner loop reuses the names
    ``docs``/``doc`` from the outer loop; it works because of the early
    return, but renaming would be safer.
    """
    form=signout2Form()
    if request.method == 'POST':
        print("in if1")
        email=form.email.data
        password=form.password.data
        docs=db.collection(u'Users').stream()
        for doc in docs:
            d=doc.to_dict()
            if email==d['email'] and password==d['password']:
                # The Users document id doubles as the Firebase Auth uid.
                user_id=doc.id
                docs = db.collection(u'Comments').where(u'userId', u'==', user_id).stream()
                for doc in docs:
                    doc.reference.delete()
                firebase_admin.auth.delete_user(user_id)
                db.collection(u'Users').document(user_id).delete()
                session.pop("user",None)
                return redirect(url_for("homePage"))
        #user=auth.create_user_with_email_and_password(email,password)
        #data={"username":username,"email":email,"password":password}
        #db.child("Guest").push(data)
        #data2={"name":"1","other":email,"shadowing":"123"}
        #db.child("Parks").push(data2)
        #print(auth.get_account_info(user['idToken'])['users'][0]['localId'])
        #info=auth.get_account_info(user['idToken'])['users'][0]['localId']
        #db.collection(u'Guest').document(info).set(data)
    print("hello")
    return render_template('basic3.html',form=form)
@app.route('/newpark', methods =['GET','POST'])
def newpark():
    """Create a park document unless one with the same name+address exists."""
    form = NewParkForm()
    if form.validate_on_submit():
        data = {
            "name": form.parkName.data,
            "other": form.parkAddress.data,
            "shadowing": form.shadow.data
        }
        # Full collection scan for duplicates -- a composite ``where`` query
        # would avoid reading every park document.
        docs = db.collection(u'Parks').stream()
        canMakePark = True
        for doc in docs:
            dici = doc.to_dict()
            if data["name"] == dici['name'] and data["other"] == dici['other']:
                canMakePark = False
        if canMakePark:
            db.collection(u'Parks').document().set(data)
            flash(" יצרת פארק חדש ")
        else:
            flash("לא ניתן ליצור פארק")
        return redirect(url_for('newpark'))
    return render_template('createNewPark.html', form=form,admin=session["admin"])
@app.route('/deletepark', methods =['GET','POST'])
def deletepark():
    """Delete every park matching the submitted name and address."""
    form = DeleteParkForm()
    if form.validate_on_submit():
        # Reads raw request.form rather than the validated form fields.
        req = request.form
        parkName = req["parkName"]
        parkAddress = req["parkAddress"]
        docs = db.collection(u'Parks').stream()
        for doc in docs:
            dici = doc.to_dict()
            if parkName == dici['name'] and parkAddress == dici['other']:
                print (f"park {dici['name']} in {dici['other']} has beem deleted")
                db.collection(u'Parks').document(doc.id).delete()
        # Flashed even when nothing matched.
        flash("מחקת פארק")
        return redirect(url_for('deletepark'))
    return render_template('deletePark.html', form=form,admin=session["admin"])
@app.route('/parks',methods=['GET', 'POST'])
def parks():
    """Render the park list from the bundled playgrounds.json data."""
    return render_template('parks.html',data=data,admin=session["admin"])
@app.route('/comments/<p>',methods=['GET', 'POST'])
def comments(p):
    """Comments page for park ``p``: list comments, accept new ones (with an
    optional image upload), and show the park's facilities/rating.

    ``rat`` is False once the current user has already rated this park.
    NOTE(review): if no park named ``p`` exists, ``parkFacility`` and ``ret``
    are never bound and the final render raises NameError -- confirm callers
    always pass a valid park name.  Also each comment triggers two extra user
    lookups (N+1 query pattern).
    """
    form=addComment()
    rat=True
    doc = db.collection(u'Users').document(session["uid"]).get()
    c=doc.to_dict()
    try:
        if p in c['parks']:
            rat=False
    except:
        # User has no 'parks' field yet (never rated anything).
        print("Not")
    docs = db.collection(u'Comments').where(u'name', u'==', p).stream()
    arr=[]
    for doc in docs:
        d=doc.to_dict()
        d["first"]=db.collection(u'Users').document(d["userId"]).get().to_dict()["name"]
        d["last"]=db.collection(u'Users').document(d["userId"]).get().to_dict()["last"]
        d["post_id"]=doc.id
        arr.append(d)
    ########## park facility
    data = {'name': p, 'userId': session["uid"], 'data-rating': form.comment.data}
    docs = db.collection(u'Parks').stream()
    for doc in docs:
        dici = doc.to_dict()
        if p == dici['name']:
            parkFacility = dici['parkFacility']
    docs = db.collection(u'Parks').where(u'name', u'==', p).stream()
    for doc in docs:
        r=doc.to_dict()
        if(r['votes']==0):
            # Default rating shown before anyone has voted.
            ret=5
        else:
            ret=r['rating']/r['votes']
    ########## end - park facility
    if request.method == 'POST':
        data={'name':p,'userId':session["uid"],'text':form.comment.data}
        doc=db.collection(u'Comments').document()
        doc.set(data)
        f = request.files['file']
        if f.filename != '':
            filename = secure_filename(f.filename)
            print(filename)
            # Save locally, push to Firebase Storage, then link the URL.
            f.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
            storage.child("image/"+doc.id).put("static/uploads/"+filename)
            url=storage.child("image/"+doc.id).get_url(None)
            doc.update({
                'image':url
            })
        return redirect(request.referrer)
    return render_template('comments.html',admin=session["admin"],parkName=p,email=session["user"],comments=arr,form=form,now=session["uid"],parkFacility=parkFacility,ret=ret,rat=rat)
@app.route('/comments/<post_id>/delete',methods=['GET', 'POST'])
def delete_comments(post_id):
    """Delete one comment document by id (no ownership check) and go to /parks."""
    db.collection(u'Comments').document(post_id).delete()
    return redirect(url_for('parks'))
@app.route('/comments/<p>/<r>/rating',methods=['GET', 'POST'])
def rating(p,r):
    """Record a rating ``r`` for park ``p`` and remember the park as rated.

    Increments the park's vote count and rating sum, then appends ``p`` to
    the current user's ``parks`` list so the UI can block double-voting.
    """
    docs = db.collection(u'Parks').where(u'name', u'==', p).stream()
    for doc in docs:
        c=doc.to_dict()
        # NOTE(review): read-modify-write without a transaction; concurrent
        # votes can be lost. firestore.Increment would be atomic.
        db.collection(u'Parks').document(doc.id).update({
            'votes':c['votes']+1,
            'rating':c['rating']+int(r)
        })
    now=session["uid"]
    user_ref = db.collection(u'Users').document(now)
    c = user_ref.get().to_dict()
    # BUG FIX: the original wrote ``c['parks'].append(p)`` as the update
    # value; list.append returns None, so 'parks' was overwritten with None
    # and the user's rating history was destroyed on every vote.
    try:
        visited = c['parks'] + [p]
    except (KeyError, TypeError):
        # No 'parks' field yet (or a previously corrupted None value).
        visited = [p]
    user_ref.update({
        'parks': visited,
    })
    return redirect(request.referrer)
@app.route('/info_items',methods=['GET', 'POST'])
def info_items():
    """List staff/contact info items; on POST add a new one unless it already
    exists (matched on name+job+email).
    """
    form=infoForm()
    if request.method == 'POST':
        data = {
            "name": form.name.data,
            "job": form.job.data,
            "email": form.email.data
        }
        docs = db.collection(u'Information').stream()
        for doc in docs:
            dici = doc.to_dict()
            if data["name"] == dici['name'] and data["job"] == dici['job'] and data["email"] == dici['email']:
                flash("עובד קיים")
                # BUG FIX: was a bare ``return`` (None), which makes Flask
                # raise "view did not return a response" (HTTP 500).
                return redirect(url_for('info_items'))
        db.collection(u'Information').document().set(data)
        return redirect(url_for('info_items'))
    # GET: collect all info items, attaching each document's id for the
    # per-item delete links.  (A dead hard-coded placeholder list and the
    # debug prints from the original were removed.)
    arr=[]
    for doc in db.collection(u'Information').stream():
        d=doc.to_dict()
        d["id"] = doc.id
        arr.append(d)
    return render_template('info.html',admin=session["admin"],email=session["user"],info_items=arr,now=session["uid"],form=form)
@app.route('/info_items/<info_item_id>',methods=['GET'])
def delete_info_item(info_item_id):
    """Delete one Information document by id, then return to the listing.

    (Removed an ``infoForm()`` instantiation that was never used.)
    """
    db.collection(u'Information').document(info_item_id).delete()
    return redirect(url_for('info_items'))
@app.route('/comments/<post_id>/<text>/update',methods=['GET', 'POST'])
def update_comments(post_id,text):
    """Edit a comment's text.  ``text`` (the current text) is passed through
    the URL purely to prefill the edit template."""
    form=updateComment()
    if form.validate_on_submit():
        data={'text':form.comment.data}
        db.collection(u'Comments').document(post_id).update(data)
        return redirect(url_for('parks'))
    return render_template('updateComment.html',form=form,admin=session["admin"],text=text)
@app.route('/facilities', methods=['GET', 'POST'])
def facilities():
    """Attach the checked facility list to the park selected in the form.

    Scans all parks for the submitted name; on the first match, updates that
    document with the facilities and stops.  Parks missing a 'name' key are
    skipped via the try/except.
    """
    form = facilitiesForm()
    if form.validate_on_submit():
        docs = db.collection(u'Parks').stream()
        parkData = {
            "name": form.parkNameDB.data,
            # Checkbox values come straight from the raw request, not the form.
            "parkFacility": request.form.getlist('facility')
        }
        canAddPark = False
        for doc in docs:
            dici = doc.to_dict()
            try:
                if parkData['name'] == dici['name']:
                    canAddPark = True
                if canAddPark:
                    # Deleting and creating a new park witch will be updated with the new facilities
                    # db.collection(u'Parks').document(doc.id).delete()
                    db.collection(u'Parks').document(doc.id).update(parkData)
                    flash("עדכן מתקנים")
                    break
            except Exception as err:
                pass
        return redirect(url_for('facilities'))
    return render_template('facilities.html', data=data, admin=session["admin"], form=form)
def addData():
    """One-off loader: push every park name from the bundled JSON into Firestore."""
    parks = db.collection(u'Parks')
    for entry in data:
        parks.document().set({"name": entry['Name']})
    # db.collection(u'Parks').document().set({"name": i['Name'], "Other": i['other']})
@app.route('/Guests/<string:email>/update', methods=['GET', 'POST'])
def updateGuest(email):
    """Admin view: edit the profile of the guest identified by ``email``.

    404s... actually 403s (abort) when no user with that email exists.
    NOTE(review): the bare ``docs`` expression in the GET branch is a no-op,
    and ``form.password=wanted['password']`` replaces the PasswordField
    object with a plain string rather than setting its ``.data`` -- both look
    like bugs worth confirming.
    """
    print("into UpdateGuest")
    docs = db.collection(u'Users').stream()
    canMakePark = True
    for doc in docs:
        dici = doc.to_dict()
        if dici['email']==email :
            canMakePark = False
            rpost=dici['name']
            emailGuest=dici['email']
            wanted=dici
    if canMakePark:
        abort(403)
    else:
        rrpost=rpost
    ref_comment=db.collection(u'Users')
    ref_my=ref_comment.where(u'email',u'==',email).stream()
    for r in ref_my:
        rr=r.to_dict()['email']
        print(rr)
    form = PostForm()
    print(form.email.data)
    if form.validate_on_submit():
        print("after")
        guest_email = form.email.data
        guest_name = form.name.data
        guest_last = form.last.data
        guset_password=form.password.data
        print(guset_password)
        ref_comment=db.collection(u'Users')
        ref_my=ref_comment.where(u'email',u'==',email).get()
        field_updates={"name":guest_name,"last":guest_last,"email":guest_email}
        for r in ref_my:
            rr=ref_comment.document(r.id)
            rr.update(field_updates)
        flash('המשתמש התעדכן בהצלחה!', 'success')
        return redirect(url_for('AllGuest', email=emailGuest))
    elif request.method == 'GET':
        print("get")
        docs
        # Prefill the form with the current profile values.
        form.email.data = wanted['email']
        form.name.data = wanted['name']
        form.last.data=wanted['last']
        form.password=wanted['password']
    return render_template('CreateGuest.html', title='Update Guest',
                           form=form, legend='Update Guest')
@app.route('/Guests', methods=['GET', 'POST'])
def AllGuest():
    """Admin view: list all user documents."""
    guets = db.collection(u'Users').stream()
    return render_template('AllUsers.html', guests=guets)
@app.route('/registerByAdmin',methods=['GET', 'POST'])
def registerByAdmin():
    """Admin view: create a new user (guest or admin) on their behalf.

    NOTE(review): ``Admin`` is compared against the strings 'true'/'false';
    if the form field yields a bool neither branch runs and ``data`` is
    unbound (NameError) -- confirm the field type in signupForm.  Plaintext
    password is stored in Firestore, same issue as /register.
    """
    form=signupForm()
    if request.method == 'POST':
        email=form.email.data
        password=form.password.data
        name=form.name.data
        last=form.last.data
        Admin=form.Admin.data
        user=auth.create_user_with_email_and_password(email,password)
        if Admin=='true':
            data={"name":name,"last":last,"email":email,"password":password,"admin":True}
        if Admin=='false':
            data={"name":name,"last":last,"email":email,"password":password,"admin":False}
        print(auth.get_account_info(user['idToken'])['users'][0]['localId'])
        info=auth.get_account_info(user['idToken'])['users'][0]['localId']
        db.collection(u'Users').document(info).set(data)
        return redirect(url_for("login"))
    return render_template('signup.html',form=form,us="Not Exist")
@app.route("/Guests/<string:email>")
def Option_guest(email):
    """Admin view: show the action menu for the guest matching ``email``.

    NOTE(review): in the not-found branch ``wanted=dici`` reuses the loop
    variable, which is unbound when the Users collection is empty -- would
    raise NameError.
    """
    #post=db.collection(u'testComments').query.get_or_404(post_id)
    post=db.collection(u'Users').where(u'email',u'==',email).stream()
    # rpost=post.to_dict()['title']
    docs = db.collection(u'Users').stream()
    canMakePark = True
    #print(post_id)
    for doc in docs:
        dici = doc.to_dict()
        if dici['email']==email :
            canMakePark = False
            rpost=dici['name']
            wanted=dici
    if canMakePark==True:
        flash("error!")
        rpost='name'
        wanted=dici
    else:
        rpost=wanted['email']
    print(post)
    #post = Post.query.get_or_404(post_id)
    return render_template('updateGuestOption.html', title=rpost, guest=wanted)
@app.route('/Guests/<string:email>/delete',methods=[ 'POST'])
def deleteGuest(email):
    """Admin view: delete the user(s) matching ``email`` from Firestore and
    from Firebase Auth (the document id doubles as the auth uid)."""
    ref_comment=db.collection(u'Users')
    ref_my=ref_comment.where(u'email',u'==',email).get()
    for r in ref_my:
        rr=ref_comment.document(r.id)
        rr.delete()
        firebase_admin.auth.delete_user(rr.id)
    flash('המשתמש נמחק!', 'success')
    return redirect(url_for('AllGuest'))
if __name__ == '__main__':
    # Development server only -- debug=True must not be used in production.
    app.run(debug=True)
| {"/test2.py": ["/App.py"], "/test3.py": ["/App.py"], "/test.py": ["/App.py"], "/App.py": ["/forms.py"]} |
69,084 | kkaris/paths_graph | refs/heads/master | /paths_graph/tests/test_pre_cfpg.py | import os
import pickle
from collections import Counter
from os.path import dirname, join
import numpy as np
import networkx as nx
from nose.tools import raises
import paths_graph as pg
from paths_graph import pre_cfpg as pcf
# Shared fixture graphs for the tests below.
# g1: acyclic chain A -> B -> C -> D.
g1_uns = nx.DiGraph()
g1_uns.add_edges_from((('A', 'B'), ('B', 'C'), ('C', 'D')))
# g2: contains the 2-cycle A <-> B; only cyclic 3-paths from A to D.
g2_uns = nx.DiGraph()
g2_uns.add_edges_from((('A', 'B'), ('B', 'A'), ('B', 'D'), ('A', 'D')))
# g3: same 2-cycle, but with one acyclic A -> B -> C -> D path as well.
g3_uns = nx.DiGraph()
g3_uns.add_edges_from((('A', 'B'), ('B', 'A'), ('B', 'C'),
                       ('C', 'D'), ('A', 'D')))
def test_prune():
    """prune() removes the given nodes without mutating the input graph."""
    g = nx.DiGraph()
    g.add_edges_from((('S', 'A'), ('S', 'B'), ('A', 'S'), ('B', 'C'),
                      ('C', 'D'), ('D', 'T'), ('B', 'T')))
    length = 4
    (f_level, b_level) = pg.get_reachable_sets(g, 'S', 'T', max_depth=length)
    pg_raw = pg.PathsGraph.from_graph(g, 'S', 'T', length, f_level, b_level)
    pg_raw_edges = pg_raw.graph.edges()
    nodes_to_prune = [(2, 'S')]
    # Prune the graph
    pg_pruned = pcf.prune(pg_raw.graph, nodes_to_prune, (0, 'S'), (length, 'T'))
    # Make sure we didn't change the original graphs or node lists
    assert nodes_to_prune == [(2, 'S')]
    assert pg_raw.graph.edges() == pg_raw_edges
    # The correctly pruned structure
    assert set(pg_pruned.edges()) == \
        set([((0, 'S'), (1, 'B')), ((1, 'B'), (2, 'C')),
             ((2, 'C'), (3, 'D')), ((3, 'D'), (4, 'T'))])
def test_initialize():
    """_initialize_pre_cfpg() on graphs with and without source cycles."""
    source = 'A'
    target = 'D'
    length = 3
    # We first run the pg_0 calculation on a simple graph with no cycles
    # involving the source or target
    (f_level, b_level) = pg.get_reachable_sets(g1_uns, source, target,
                                               max_depth=length)
    pg_raw = pg.PathsGraph.from_graph(g1_uns, source, target, length, f_level,
                                      b_level)
    (pg_0, tags) = pcf._initialize_pre_cfpg(pg_raw)
    # Because no nodes are pruned, the initialized "cycle free" paths graph
    # will be the same as the path graph we started with
    assert pg_0 == pg_raw.graph
    assert tags == {(0, 'A'): [(0, 'A')], (1, 'B'): [(0, 'A')],
                    (2, 'C'): [(0, 'A')], (3, 'D'): [(0, 'A')]}
    # The next graph contains a cycle passing through the source node, A,
    # and no acyclic paths
    (f_level, b_level) = pg.get_reachable_sets(g2_uns, source, target,
                                               max_depth=length)
    pg_raw = pg.PathsGraph.from_graph(g2_uns, source, target, length, f_level,
                                      b_level)
    (pg_0, tags) = pcf._initialize_pre_cfpg(pg_raw)
    # Everything is pruned away: both graph and tags come back empty.
    assert not pg_0
    assert not tags
    # The next graph contains a cycle passing through the source node, A,
    # with one acyclic path
    (f_level, b_level) = pg.get_reachable_sets(g3_uns, source, target,
                                               max_depth=length)
    pg_raw = pg.PathsGraph.from_graph(g3_uns, source, target, length, f_level,
                                      b_level)
    (pg_0, tags) = pcf._initialize_pre_cfpg(pg_raw)
    # Only the acyclic A -> B -> C -> D path survives.
    assert set(pg_0.edges()) == set([((0, 'A'), (1, 'B')), ((1, 'B'), (2, 'C')),
                                     ((2, 'C'), (3, 'D'))])
    assert tags == {(0, 'A'): [(0, 'A')], (1, 'B'): [(0, 'A')],
                    (2, 'C'): [(0, 'A')], (3, 'D'): [(0, 'A')]}
    # This test stems from a randomly-generated network where no paths
    # were found--guarantees that the problem is NOT that pg_0 is empty
    g4_uns = nx.DiGraph()
    g4_uns.add_edges_from(((0, 1), (1, 0), (0, 2), (2, 0), (1, 2), (2, 1)))
    source, target, length = (0, 2, 2)
    (f_level, b_level) = pg.get_reachable_sets(g4_uns, source, target,
                                               max_depth=length)
    pg_raw = pg.PathsGraph.from_graph(g4_uns, source, target, length, f_level,
                                      b_level)
    (pg_0, tags) = pcf._initialize_pre_cfpg(pg_raw)
    assert pg_0
    assert tags
def test_from_graph_no_levels():
    """PreCFPG.from_graph computes reach sets itself when none are given."""
    g4_uns = nx.DiGraph()
    g4_uns.add_edges_from(((0, 1), (1, 0), (0, 2), (2, 0), (1, 2), (2, 1)))
    source, target, length = (0, 2, 2)
    pre_cfpg = pg.PreCFPG.from_graph(g4_uns, source, target, length)
    assert isinstance(pre_cfpg, pg.PreCFPG)
    assert pre_cfpg.graph
    # The only cycle-free 2-path is 0 -> 1 -> 2.
    assert set(pre_cfpg.graph.edges()) == \
        set([((0, 0), (1, 1)), ((1, 1), (2, 2))])
    assert pre_cfpg.tags == {(0, 0): [(0, 0)],
                             (1, 1): [(0, 0), (1, 1)],
                             (2, 2): [(0, 0), (1, 1), (2, 2)]}
def test_from_graph_with_levels():
    """Passing precomputed reach sets (deeper than needed) gives the same
    result as computing them internally."""
    g4_uns = nx.DiGraph()
    g4_uns.add_edges_from(((0, 1), (1, 0), (0, 2), (2, 0), (1, 2), (2, 1)))
    source, target, length = (0, 2, 2)
    max_depth = 5
    (f_reach, b_reach) = \
        pg.get_reachable_sets(g4_uns, source, target, max_depth=max_depth)
    pre_cfpg = pg.PreCFPG.from_graph(g4_uns, source, target, length,
                                     fwd_reachset=f_reach, back_reachset=b_reach)
    assert isinstance(pre_cfpg, pg.PreCFPG)
    assert pre_cfpg.graph
    assert set(pre_cfpg.graph.edges()) == \
        set([((0, 0), (1, 1)), ((1, 1), (2, 2))])
    assert pre_cfpg.tags == {(0, 0): [(0, 0)],
                             (1, 1): [(0, 0), (1, 1)],
                             (2, 2): [(0, 0), (1, 1), (2, 2)]}
def test_from_graph_with_levels_bad_depth():
    """Yield an empty graph if the requested path length is greater than the
    depth of the provided reach sets.

    (The original docstring claimed an exception is raised, but the assertion
    below checks for an empty graph -- the docstring is corrected here.)
    """
    g4_uns = nx.DiGraph()
    g4_uns.add_edges_from(((0, 1), (1, 0), (0, 2), (2, 0), (1, 2), (2, 1)))
    source, target, length = (0, 2, 2)
    max_depth = 1
    (f_reach, b_reach) = \
        pg.get_reachable_sets(g4_uns, source, target, max_depth=max_depth)
    pre_cfpg = pg.PreCFPG.from_graph(g4_uns, source, target, length,
                                     fwd_reachset=f_reach, back_reachset=b_reach)
    assert not pre_cfpg.graph
def test_from_pg():
    """PreCFPG.from_pg builds the same structure from an existing PathsGraph."""
    g4_uns = nx.DiGraph()
    g4_uns.add_edges_from(((0, 1), (1, 0), (0, 2), (2, 0), (1, 2), (2, 1)))
    source, target, length = (0, 2, 2)
    (f_level, b_level) = pg.get_reachable_sets(g4_uns, source, target,
                                               max_depth=length)
    pg_raw = pg.PathsGraph.from_graph(g4_uns, source, target, length, f_level,
                                      b_level)
    pre_cfpg = pg.PreCFPG.from_pg(pg_raw)
    assert isinstance(pre_cfpg, pg.PreCFPG)
    assert pre_cfpg.graph
    assert set(pre_cfpg.graph.edges()) == \
        set([((0, 0), (1, 1)), ((1, 1), (2, 2))])
    assert pre_cfpg.tags == {(0, 0): [(0, 0)],
                             (1, 1): [(0, 0), (1, 1)],
                             (2, 2): [(0, 0), (1, 1), (2, 2)]}
def test_sampling_graph1():
    """Test sampling of problematic graph.

    The issue with this graph is that the operation on (1, 3) would prune out
    (3, 3) the one causing the cycle, except that it is retained because there
    is still a non-cyclic path through (3, 3) via (1, 1). However, in
    subsequent steps, pruning of downstream nodes (i.e., (2, 4)) actually
    eliminate any acyclic paths through (1, 3). As a result, there is a
    circumstance, when sampling the resulting graph, that one can end up
    sampling into (1, 3) but there are no permissible successors from (1, 3)
    based on the tags.

    The solution was to repeat the sampling process iteratively until
    convergence.
    """
    g = nx.DiGraph()
    g.add_edges_from([(0, 1), (0, 3), (0, 4), (0, 5), (1, 4), (2, 4), (2, 5),
                      (3, 0), (3, 2), (3, 4), (3, 5), (4, 2), (4, 3), (4, 5)])
    source, target, length = (0, 5, 5)
    (f_level, b_level) = pg.get_reachable_sets(g, source, target,
                                               max_depth=length)
    pre_cfpg = pg.PreCFPG.from_graph(g, source, target, length, f_level,
                                     b_level)
    # Smoke test: sampling must terminate without raising; the returned
    # paths themselves are not inspected.
    paths = pre_cfpg.sample_paths(100)
def test_sampling_graph2():
    """Make sure that we can sample from the pre_cfpg without dead ends."""
    # This graph produces dead-end samples which must be handled
    pkl_file = join(dirname(__file__), 'dead_end_graph.pkl')
    with open(pkl_file, 'rb') as f:
        graph_dict = pickle.load(f)
    (g_nodes, g_edges), source, target = graph_dict[2]
    g = nx.DiGraph()
    g.add_nodes_from(g_nodes)
    g.add_edges_from(g_edges)
    pre_cfpg = pg.PreCFPG.from_graph(g, source, target, 6)
    cfpg = pg.CFPG.from_pre_cfpg(pre_cfpg)
    # 147 unique paths
    cf_paths = cfpg.enumerate_paths()
    # Now, sample from the pre_cfpg and make sure that the sampled paths
    # match the enumerated set from the cfpg
    num_samples = 10000
    sample_paths = pre_cfpg.sample_paths(num_samples)
    assert len(sample_paths) == num_samples
    assert set(sample_paths) == set(cf_paths)
def test_weighted_sampling():
    """Edge weights should bias pre-CFPG path sampling proportionally."""
    g = nx.DiGraph()
    g.add_edges_from([
        ('A', 'B', {'weight': 3}),
        ('A', 'C', {'weight': 1}),
        ('C', 'D'),
        ('B', 'D'),
        ('D', 'B'),
        ('D', 'C'),
        ('B', 'E'),
        ('C', 'E')])
    source, target, length = ('A', 'E', 4)
    pre_cfpg = pg.PreCFPG.from_graph(g, source, target, length)
    # TEST_FLAG makes successor ordering deterministic so the seeded RNG
    # yields reproducible counts
    os.environ['TEST_FLAG'] = 'TRUE'
    np.random.seed(1)
    samp_paths = pre_cfpg.sample_paths(1000)
    ctr = Counter(samp_paths)
    # ~3:1 split matching the A->B vs A->C weights
    assert ctr[('A', 'B', 'D', 'C', 'E')] == 767
    assert ctr[('A', 'C', 'D', 'B', 'E')] == 233
@raises(NotImplementedError)
def test_enumerate_not_implemented():
    """enumerate_paths is not defined for the pre-CFPG."""
    pre_cfpg = pg.PreCFPG.from_graph(g3_uns, 'A', 'D', 3)
    pre_cfpg.enumerate_paths()
@raises(NotImplementedError)
def test_count_not_implemented():
    """count_paths is not defined for the pre-CFPG."""
    pre_cfpg = pg.PreCFPG.from_graph(g3_uns, 'A', 'D', 3)
    pre_cfpg.count_paths()
| {"/paths_graph/tests/test_pre_cfpg.py": ["/paths_graph/__init__.py"], "/paths_graph/pre_cfpg.py": ["/paths_graph/pg.py"], "/paths_graph/tests/test_paths_graph.py": ["/paths_graph/__init__.py"], "/paths_graph/tests/test_cfpg.py": ["/paths_graph/__init__.py"], "/paths_graph/cfpg.py": ["/paths_graph/__init__.py", "/paths_graph/pre_cfpg.py"], "/paths_graph/api.py": ["/paths_graph/pg.py", "/paths_graph/cfpg.py"], "/paths_graph/__init__.py": ["/paths_graph/pg.py", "/paths_graph/pre_cfpg.py", "/paths_graph/cfpg.py", "/paths_graph/paths_tree.py", "/paths_graph/api.py"]} |
69,085 | kkaris/paths_graph | refs/heads/master | /paths_graph/pre_cfpg.py | import os
import logging
from copy import copy, deepcopy
import numpy as np
import networkx as nx
from paths_graph.pg import PathsGraph, PathSamplingException
logger = logging.getLogger('pre_cfpg')
class PreCFPG(PathsGraph):
    """Representation of a pre- cycle free paths graph with associated methods.

    The pre- cycle free paths graph consists of the paths graph remaining
    after cycles through the source or target nodes are removed. However,
    paths through the pre-CFPG node structure itself are not guaranteed to be
    cycle free; instead, cycle-free paths can be sampled by taking into
    account the tags associated with each node, representing the possible
    cycle-free histories of the node in terms of other upstream nodes.

    As with the "raw" paths graph (containing cycles), nodes in the pre-CFPG
    consist of tuples with two elements: (depth, name).

    Starting from the "raw" (i.e., containing cycles) paths graph, and given
    a target path length n, the algorithm iterates over each "level" in the
    graph 0 <= k <= n where level 0 consists only of the source node and
    level n consists only of the target.

    Each level k consists of a set of nodes, X; we examine each node x in X
    and identify the subset of nodes that are reachable in both the forward
    and backward directions from x. If any of the nodes in the forward reach
    subgraph contain x itself (but at a different depth), this represents a
    cyclic path back through x that is then pruned.

    Each node x therefore defines its own subgraph of cycle free paths, g_x.
    After iterating over all x in X, we combine these subgraphs into the
    (in-progress) cycle free paths graph H_k. H_k therefore consists of the
    superset of nodes of all the subgraphs g_x for level k. When merging
    these subgraphs we prevent the re-introduction of cyclic paths by
    annotating each node in the graph with a list of "tags". The tags for
    any given node consist of a list of nodes lying at prior (upstream)
    levels. Therefore during sampling, transitions from an upstream node to
    a downstream node are only permissible if all nodes in the path up to a
    certain level are contained in the tag set of the downstream node.

    Parameters
    ----------
    pg : PathsGraph
        "Raw" (contains cycles) paths graph as created by
        :py:func:`paths_graph.PathsGraph.from_graph`.
    graph : networkx.DiGraph
        The graph structure of the pre-CFPG.
    tags : dict
        A dictionary, keyed by node, with lists of other nodes representing
        the nodes lying upstream on cycle free paths. Note that each node
        also has itself as a tag.

    Attributes
    ----------
    source_node : tuple
        Node in the pre-CFPG graph representing the source: (0, source_name)
    target_node : tuple
        Node in the pre-CFPG graph representing the target:
        (path_length, target_name)
    """
    def __init__(self, pg, graph, tags):
        """Copy source/target/length metadata from pg; store graph and tags."""
        self.source_name = pg.source_name
        self.source_node = pg.source_node
        self.target_name = pg.target_name
        self.target_node = pg.target_node
        self.path_length = pg.path_length
        self.graph = graph
        self.tags = tags

    @classmethod
    def from_graph(klass, *args, **kwargs):
        """Compute a pre- cycle free paths graph from a graph.

        Parameters
        ----------
        g : networkx.DiGraph
            The underlying graph on which paths will be generated.
        source : str
            Name of the source node.
        target : str
            Name of the target node.
        length : int
            Length of paths to compute.
        fwd_reachset : Optional[dict]
            Dictionary of sets representing the forward reachset computed
            over the original graph g up to a maximum depth greater than the
            requested path length. If not provided, the forward reach set is
            calculated up to the requested path length by calling
            paths_graph.get_reachable_sets.
        back_reachset : Optional[dict]
            Dictionary of sets representing the backward reachset computed
            over the original graph g up to a maximum depth greater than the
            requested path length. If not provided, the backward reach set is
            calculated up to the requested path length by calling
            paths_graph.get_reachable_sets.
        signed : bool
            Specifies whether the underlying graph and the corresponding
            f_level and b_level reachable sets have signed edges. If True,
            sign information should be encoded in the 'sign' field of the
            edge data, with 0 indicating a positive edge and 1 indicating a
            negative edge.
        target_polarity : 0 or 1
            Specifies the polarity of the target node: 0 indicates
            positive/activation, 1 indicates negative/inhibition.

        Returns
        -------
        PreCFPG
            An instance of PreCFPG containing the pre- cycle free paths
            graph.
        """
        pg = PathsGraph.from_graph(*args, **kwargs)
        # Dispatch through klass rather than hardcoding PreCFPG so that
        # subclasses inheriting this constructor get instances of the
        # subclass (from_pg already returns klass(...)).
        return klass.from_pg(pg)

    @classmethod
    def from_pg(klass, pg):
        """Compute a pre- cycle free paths graph from a PathsGraph.

        Parameters
        ----------
        pg : PathsGraph
            "Raw" (contains cycles) paths graph as created by
            :py:func:`paths_graph.PathsGraph.from_graph`.

        Returns
        -------
        PreCFPG
            An instance of PreCFPG containing the pre- cycle free paths
            graph.
        """
        # Initialize the cycle-free paths graph and the tag dictionary.
        # dic_PG maps a level index k to the (graph, tags) pair obtained
        # after processing level k.
        source_node = pg.source_node
        target_node = pg.target_node
        dic_PG = {0: _initialize_pre_cfpg(pg)}
        round_counter = 1
        # Perform CFPG generation in successive rounds to ensure convergence
        logger.info("Creating pre-CFPG from PG")
        while True:
            logger.info("Starting round %d", round_counter)
            for k in range(1, pg.path_length+1):
                logger.info("Iterating over level %d", k)
                # Start by copying the information from the previous level
                H = dic_PG[k-1][0]
                tags = dic_PG[k-1][1]
                # Check if we have already detected there are no cycle free
                # paths, which would be indicated by an empty graph at the
                # previous level. If so just propagate this information.
                if not H:
                    dic_PG[k] = dic_PG[k-1]
                else:
                    # Identify the nodes at level k in G_(k-1)
                    logger.info("Finding nodes at level %d", k)
                    X = [v for v in H.nodes() if v[0] == k]
                    # We will track the (g_x, tags_x) pairs contributed by
                    # each x through dic_X
                    dic_X = {}
                    logger.info("Iterating over nodes in level %d", k)
                    for x in X:
                        tags_x = {}
                        g_x_f = _forward(x, H, pg.path_length)
                        g_x_b = _backward(x, H)
                        g_x = nx.DiGraph()
                        g_x.add_edges_from(g_x_b.edges(data=True))
                        g_x.add_edges_from(g_x_f.edges(data=True))
                        # Get the nodes in the forward reach set representing
                        # cycles back through node x (excluding x at level k)
                        nodes_to_prune = [v for v in g_x_f
                                          if v[1] == x[1] and v[0] != k]
                        # Prune out the cyclic nodes along with any nodes
                        # left dangling by their removal
                        g_x_prune = prune(g_x, nodes_to_prune, source_node,
                                          target_node)
                        nodes_to_tag = [v for v in g_x_prune.nodes()
                                        if v[0] >= k]
                        # Add the tag x to the nodes at or downstream of x's
                        # level; upstream nodes keep their existing tags.
                        # Note: tags[v] is extended in place.
                        for v in g_x_prune.nodes():
                            if v[0] >= k:
                                D = tags[v]
                                D.append(x)
                                tags_x[v] = D
                            else:
                                tags_x[v] = tags[v]
                        dic_X[x] = (g_x_prune, tags_x)
                    # We can now piece together the pairs in dic_X to obtain
                    # (G_k, tags_k)
                    H_k = nx.DiGraph()
                    tags_k = {}
                    for x in X:
                        h_x = dic_X[x][0]
                        H_k.add_edges_from(h_x.edges(data=True))
                    # For every node in the combined subgraphs
                    for v in H_k.nodes():
                        # Create a set of tags...
                        t = []
                        # ...by iterating over every node at this level
                        for x in X:
                            # ...and checking to see if the node v in the
                            # subgraph is in the history of a particular
                            # node x at this level
                            if v in dic_X[x][0]:
                                # ...if so, merge x's tags for v
                                tags_x = dic_X[x][1]
                                t.extend(tags_x[v])
                        t = list(set(t))
                        tags_k[v] = t
                    dic_PG[k] = (H_k, tags_k)
            # Converged when the final-level graph is empty or the final
            # level's edge set matches the starting edge set for this round;
            # len(dic_PG)-1 == pg.path_length here.
            if not dic_PG[len(dic_PG)-1][0] or \
                    set(dic_PG[0][0].edges()) == \
                    set(dic_PG[len(dic_PG)-1][0].edges()):
                break
            else:
                # Seed the next round with this round's final level
                # (k == pg.path_length after the for loop)
                dic_PG = {0: dic_PG[k]}
                round_counter += 1
        pre_cfpg, tags = dic_PG[pg.path_length]
        # Return the fully processed cfpg as an instance of the PreCFPG class
        return klass(pg, pre_cfpg, tags)

    def enumerate_paths(self):
        """Path enumeration is not defined for the pre-CFPG."""
        raise NotImplementedError()

    def count_paths(self):
        """Path counting is not defined for the pre-CFPG."""
        raise NotImplementedError()

    def set_uniform_path_distribution(self):
        """Uniform path weighting is not defined for the pre-CFPG.

        Bug fix: `self` was missing from the signature, so calling this
        method on an instance raised TypeError ("takes 0 positional
        arguments but 1 was given") instead of NotImplementedError.
        """
        raise NotImplementedError()

    def _successor(self, path, u):
        """Randomly choose a successor node of u given the current path.

        A successor edge is admissible only if every node already in the
        path appears in the successor node's tag (cycle-free history) list.
        Raises PathSamplingException if no admissible successor exists.
        """
        out_edges = []
        for edge in self.graph.out_edges(u, data=True):
            if set(path) <= set(self.tags[edge[1]]):
                out_edges.append(edge)
        # If there are no admissible successors, raise a PathSamplingException
        if not out_edges:
            raise PathSamplingException("No cycle-free successors")
        # For determinism in testing
        if 'TEST_FLAG' in os.environ:
            out_edges = sorted(list(out_edges))
        weights = [t[2]['weight'] for t in out_edges]
        # Normalize the weights to a proper probability distribution
        p = np.array(weights) / np.sum(weights)
        pred_idx = np.random.choice(len(out_edges), p=p)
        return out_edges[pred_idx][1]
def _initialize_pre_cfpg(pg):
    """Initialize pre- cycle free paths graph data structures.

    Prunes every node whose name matches the source or target name (other
    than the source and target nodes themselves), since any path through
    such a node necessarily cycles through the source or target; then gives
    each surviving node the source node as its initial tag.

    Parameters
    ----------
    pg : PathsGraph
        "Raw" (contains cycles) paths graph as created by
        :py:func:`paths_graph.paths_graph`.

    Returns
    -------
    tuple : (networkx.DiGraph, dict)
        The pruned level-0 graph and the initial tag dictionary mapping
        each remaining node to a list containing the source node.
    """
    # Identify the initial set of nodes to be pruned. In this initial phase,
    # they are simply nodes whose names match the source or target.
    nodes_to_prune = set([v for v in pg.graph.nodes()
                          if (v != pg.source_node) and (v != pg.target_node) and \
                          ((v[1] == pg.source_node[1]) or \
                           (v[1] == pg.target_node[1]))])
    # Get the paths graph after initial source_node/target_node cycle pruning
    pre_cfpg_0 = prune(pg.graph, nodes_to_prune, pg.source_node, pg.target_node)
    # Initialize an empty list of tags for each node
    tags = dict([(node, []) for node in pre_cfpg_0.nodes()])
    # Add source_node tag to all nodes
    _add_tag(tags, pg.source_node, [v for v in pre_cfpg_0.nodes()])
    return (pre_cfpg_0, tags)
def _add_tag(tag_dict, tag_node, nodes_to_tag):
for v in nodes_to_tag:
tag_dict[v].append(tag_node)
def prune(pg, nodes_to_prune, source, target):
    """Iteratively prunes nodes from (a copy of) a paths graph or CFPG.

    We prune the graph *pg* iteratively by the following procedure:

    1. Remove the nodes given by *nodes_to_prune* from the graph.
    2. Identify nodes (other than the source node) that now have no
       incoming edges.
    3. Identify nodes (other than the target node) that now have no
       outgoing edges.
    4. Set *nodes_to_prune* to the nodes identified in steps 2 and 3.
    5. Repeat from 1 until there are no more nodes to prune.

    Parameters
    ----------
    pg : networkx.DiGraph
        Paths graph to prune.
    nodes_to_prune : list
        Nodes to prune from paths graph.
    source : tuple
        Source node, of the form (0, source_name).
    target : tuple
        Target node, of the form (target_depth, source_name).

    Returns
    -------
    networkx.DiGraph()
        Pruned paths graph. The original graph is returned unmodified (and
        uncopied) when there is nothing to prune.
    """
    # Nothing to prune: avoid an unnecessary copy of the graph
    if not nodes_to_prune:
        return pg
    # Work on a copy so the caller's graph is untouched
    pruned = pg.copy()
    while nodes_to_prune:
        pruned.remove_nodes_from(nodes_to_prune)
        # Collect newly dangling nodes: no predecessors (except the source)
        # or no successors (except the target)
        dangling = set()
        for node, in_deg in pruned.in_degree():
            if in_deg == 0 and node != source:
                dangling.add(node)
        for node, out_deg in pruned.out_degree():
            if out_deg == 0 and node != target:
                dangling.add(node)
        nodes_to_prune = dangling
    return pruned
def _forward(v, H, length):
    """Compute the subgraph of H defined by the paths forward from node v.

    Parameters
    ----------
    v : tuple(int, str)
        The node to get the forward subgraph for.
    H : networkx.DiGraph()
        For a given path length n, H defines the graph G_i at the i-th
        stage for 1 <= i <= n.
    length : int
        Maximum depth (path length) to expand to.

    Returns
    -------
    networkx.DiGraph()
        Subgraph reachable by forward paths from v in H.
    """
    start_depth = v[0]
    # Nodes of the subgraph discovered so far, grouped by depth
    frontier = {start_depth: [v]}
    subgraph = nx.DiGraph()
    for depth in range(start_depth + 1, length + 1):
        # Expand every node at the previous depth by its outgoing edges
        for node in frontier[depth - 1]:
            subgraph.add_edges_from(H.out_edges(node, data=True))
        frontier[depth] = [w for w in subgraph if w[0] == depth]
    return subgraph
def _backward(v, H):
    """Compute the subgraph of H defined by the paths backward from node v.

    Parameters
    ----------
    v : tuple(int, str)
        The node to get the backward subgraph for.
    H : networkx.DiGraph()
        For a given path length n, H defines the graph G_i at the i-th
        stage for 1 <= i <= n.

    Returns
    -------
    networkx.DiGraph()
        Subgraph reachable by backward paths from v in H.
    """
    start_depth = v[0]
    # Nodes of the subgraph discovered so far, grouped by depth
    frontier = {start_depth: [v]}
    subgraph = nx.DiGraph()
    # Walk levels start_depth-1 down to 0
    for depth in range(start_depth - 1, -1, -1):
        # Expand every node at the following depth by its incoming edges
        for node in frontier[depth + 1]:
            subgraph.add_edges_from(H.in_edges(node, data=True))
        frontier[depth] = [w for w in subgraph if w[0] == depth]
    return subgraph
| {"/paths_graph/tests/test_pre_cfpg.py": ["/paths_graph/__init__.py"], "/paths_graph/pre_cfpg.py": ["/paths_graph/pg.py"], "/paths_graph/tests/test_paths_graph.py": ["/paths_graph/__init__.py"], "/paths_graph/tests/test_cfpg.py": ["/paths_graph/__init__.py"], "/paths_graph/cfpg.py": ["/paths_graph/__init__.py", "/paths_graph/pre_cfpg.py"], "/paths_graph/api.py": ["/paths_graph/pg.py", "/paths_graph/cfpg.py"], "/paths_graph/__init__.py": ["/paths_graph/pg.py", "/paths_graph/pre_cfpg.py", "/paths_graph/cfpg.py", "/paths_graph/paths_tree.py", "/paths_graph/api.py"]} |
69,086 | kkaris/paths_graph | refs/heads/master | /paths_graph/tests/test_paths_graph.py | import os
import pickle
from collections import Counter
import numpy as np
import networkx as nx
from paths_graph import *
#random_graph_pkl = join(dirname(__file__), 'random_graphs.pkl')
# Common source/target fixtures shared by the tests below
source = 'A'
target = 'D'
target_polarity = 0
# Signed diamond graph: A -> {B, C} -> D, all edges positive (sign 0)
graph1_s = nx.DiGraph()
graph1_s.add_nodes_from(['A', 'B', 'C', 'D'])
graph1_s.add_edges_from([('A', 'B', {'sign': 0}),
                         ('B', 'D', {'sign': 0}),
                         ('A', 'C', {'sign': 0}),
                         ('C', 'D', {'sign': 0})])
# Unsigned diamond graph with the same topology as graph1_s
graph1_uns = nx.DiGraph()
graph1_uns.add_nodes_from(['A', 'B', 'C', 'D'])
graph1_uns.add_edges_from([('A', 'B'), ('B', 'D'), ('A', 'C'), ('C', 'D')])
g_samp = nx.DiGraph() # Graph for testing sampling uniformly vs. non-uniformly
g_samp.add_edges_from([
    ('source', 'A1'), ('source', 'A2'),
    ('A1', 'B1'),
    ('A2', 'B2'), ('A2', 'B3'), ('A2', 'B4'), ('A2', 'B5'),
    ('B1', 'target'),
    ('B2', 'target'), ('B3', 'target'), ('B4', 'target'), ('B5', 'target')])
# NOTE(review): g_split_nodes is not referenced by any test visible in this
# file chunk -- confirm whether it is still needed.
g_split_nodes = nx.DiGraph()
g_split_nodes.add_edges_from(
    [(0, 1), (0, 2), (0, 3), (0, 5), (1, 0), (1, 2), (1, 3), (1, 4), (2, 0),
     (2, 3), (2, 4), (3, 0), (3, 1), (3, 2), (3, 4), (3, 5), (4, 1), (4, 2),
     (4, 3), (5, 1), (5, 3), (5, 4)])
def test_get_reachable_sets_unsigned():
    """Forward/backward level sets on the unsigned diamond graph."""
    f_level, b_level = get_reachable_sets(graph1_uns, source, target,
                                          signed=False)
    assert f_level == {0: {'A'}, 1: {'B', 'C'}, 2: {'D'}}
    assert b_level == {0: {'D'}, 1: {'B', 'C'}, 2: {'A'}}
def test_get_reachable_sets_signed():
    """Signed level sets carry (node, sign) pairs; all edges are positive."""
    f_level, b_level = get_reachable_sets(graph1_s, source, target, signed=True)
    assert f_level == {0: {('A', 0)}, 1: {('B', 0), ('C', 0)}, 2: {('D', 0)}}
    assert b_level == {0: {('D', 0)}, 1: {('B', 0), ('C', 0)}, 2: {('A', 0)}}
def test_unreachability_unsigned():
    """Empty reach sets when the target is unreachable from the source."""
    graph = nx.DiGraph()
    graph.add_nodes_from(['A', 'B', 'C', 'D'])
    # No directed path exists from A to D
    graph.add_edges_from([('A', 'B'), ('D', 'B'), ('C', 'A'), ('C', 'D')])
    (f_level, b_level) = get_reachable_sets(graph, source, target,
                                            max_depth=5, signed=False)
    assert f_level == {}
    assert b_level == {}
def test_unreachability_signed():
    """Empty reach sets for an unreachable target in a signed graph."""
    # First make the unreachability due to the direction of the edges
    graph = nx.DiGraph()
    graph.add_nodes_from(['A', 'B', 'C', 'D'])
    graph.add_edges_from([('A', 'B', {'sign': 0}),
                          ('D', 'B', {'sign': 0}),
                          ('C', 'A', {'sign': 0}),
                          ('C', 'D', {'sign': 0})])
    (f_level, b_level) = get_reachable_sets(graph, source, target,
                                            max_depth=5, signed=True)
    assert f_level == {}
    assert b_level == {}
    # This time, make the unreachability due to the sign
    # NOTE(review): this second graph is byte-identical to the first (same
    # edges, all signs 0), so it does NOT exercise sign-based
    # unreachability as the comment claims -- looks like a copy-paste
    # oversight; confirm the intended edge signs.
    graph = nx.DiGraph()
    graph.add_nodes_from(['A', 'B', 'C', 'D'])
    graph.add_edges_from([('A', 'B', {'sign': 0}),
                          ('D', 'B', {'sign': 0}),
                          ('C', 'A', {'sign': 0}),
                          ('C', 'D', {'sign': 0})])
    (f_level, b_level) = get_reachable_sets(graph, source, target, max_depth=5,
                                            signed=True)
    assert f_level == {}
    assert b_level == {}
def test_from_graph_unsigned():
    """PathsGraph construction at lengths 1, 2, 3 on the unsigned diamond."""
    # Path length 1
    # (reach sets computed on graph1_s, which has the same topology as
    # graph1_uns; signed=False ignores the sign annotations)
    f_level, b_level = get_reachable_sets(graph1_s, source, target, max_depth=3,
                                          signed=False)
    pg = PathsGraph.from_graph(graph1_uns, source, target, 1, f_level, b_level,
                               signed=False)
    assert len(pg.graph) == 0
    # Path length 2
    pg = PathsGraph.from_graph(graph1_uns, source, target, 2, f_level, b_level,
                               signed=False)
    paths = list(nx.shortest_simple_paths(pg.graph, (0, 'A'), (2, 'D')))
    assert len(paths) == 2
    assert [(0, 'A'), (1, 'C'), (2, 'D')] in paths
    assert [(0, 'A'), (1, 'B'), (2, 'D')] in paths
    # Path length 3
    pg = PathsGraph.from_graph(graph1_uns, source, target, 3, f_level, b_level,
                               signed=False)
    assert len(pg.graph) == 0
def test_from_graph_unsigned_no_levels():
    """Reach sets are computed internally when not supplied."""
    length = 2
    pg = PathsGraph.from_graph(graph1_uns, source, target, length)
    assert isinstance(pg, PathsGraph)
    paths = list(nx.shortest_simple_paths(pg.graph, (0, 'A'), (2, 'D')))
    assert len(paths) == 2
    assert [(0, 'A'), (1, 'C'), (2, 'D')] in paths
    assert [(0, 'A'), (1, 'B'), (2, 'D')] in paths
def test_from_graph_signed():
    """Signed PathsGraph construction; nodes carry (name, sign) pairs."""
    # Path length 1
    f_level, b_level = get_reachable_sets(graph1_s, source, target, signed=True,
                                          max_depth=3)
    pg = PathsGraph.from_graph(graph1_s, source, target, 1, f_level, b_level,
                               signed=True, target_polarity=0)
    assert not pg.graph
    # Path length 2
    pg = PathsGraph.from_graph(graph1_s, source, target, 2, f_level, b_level,
                               signed=True, target_polarity=0)
    paths = list(nx.shortest_simple_paths(pg.graph, (0, ('A', 0)),
                                          (2, ('D', 0))))
    assert len(paths) == 2
    assert [(0, ('A', 0)), (1, ('C', 0)), (2, ('D', 0))] in paths
    assert [(0, ('A', 0)), (1, ('B', 0)), (2, ('D', 0))] in paths
    # Path length 3
    pg = PathsGraph.from_graph(graph1_s, source, target, 3, f_level, b_level,
                               signed=True, target_polarity=0)
    assert not pg.graph
def test_pg_check_unreachable_unsigned():
    """PathsGraph is empty when the target is unreachable, or reachable but
    not at the requested length."""
    graph = nx.DiGraph()
    graph.add_nodes_from(['A', 'B', 'C', 'D'])
    graph.add_edges_from([('A', 'B'), ('D', 'B'), ('C', 'A'), ('C', 'D')])
    (f_level, b_level) = get_reachable_sets(graph, source, target, max_depth=5,
                                            signed=False)
    assert f_level == {}
    assert b_level == {}
    pg = PathsGraph.from_graph(graph, source, target, 2, f_level, b_level,
                               signed=False)
    assert not pg.graph
    # A graph where there is a path, but not of the given length (3)
    (f_level, b_level) = get_reachable_sets(graph1_s, source, target,
                                            max_depth=5, signed=False)
    pg = PathsGraph.from_graph(graph, source, target, 3, f_level, b_level,
                               signed=False)
    assert not pg.graph
def test_multidigraph_signed():
    """Parallel signed edges in a MultiDiGraph produce both sign parities."""
    graph = nx.MultiDiGraph()
    graph.add_edges_from([('A', 'B', {'sign': 0}), ('A', 'B', {'sign': 1})])
    f_level, b_level = get_reachable_sets(graph, 'A', 'B', max_depth=3,
                                          signed=True)
    assert f_level[0] == {('A', 0)}
    assert f_level[1] == {('B', 0), ('B', 1)}
    assert b_level[0] == {('B', 0)}
    assert b_level[1] == {('A', 0), ('A', 1)}
def test_sample_paths():
    """Raw PG sampling covers all length-4 paths, including cyclic ones."""
    g_uns = nx.DiGraph()
    g_uns.add_edges_from((('A', 'B'), ('A', 'C'), ('C', 'D'), ('B', 'D'),
                          ('D', 'B'), ('D', 'C'), ('B', 'E'), ('C', 'E')))
    source, target, length = ('A', 'E', 4)
    pg = PathsGraph.from_graph(g_uns, source, target, length)
    sample_paths = pg.sample_paths(100)
    # Note: includes the cyclic paths A-B-D-B-E and A-C-D-C-E
    assert set(sample_paths) == set(
        [('A', 'B', 'D', 'B', 'E'),
         ('A', 'B', 'D', 'C', 'E'),
         ('A', 'C', 'D', 'B', 'E'),
         ('A', 'C', 'D', 'C', 'E')])
def test_sample_paths_default_weights():
    """Unweighted edges sample uniformly between the two branches."""
    g = nx.DiGraph()
    g.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), ('C', 'D')])
    source, target, length = ('A', 'D', 2)
    pg = PathsGraph.from_graph(g, source, target, length)
    # For determinism in testing
    os.environ['TEST_FLAG'] = 'TRUE'
    # Seed the random number generator
    np.random.seed(1)
    sample_paths = pg.sample_paths(200)
    assert set(sample_paths) == set([('A', 'B', 'D'), ('A', 'C', 'D')])
    ctr = Counter(sample_paths)
    assert ctr[('A', 'B', 'D')] == 100
    assert ctr[('A', 'C', 'D')] == 100
def test_sample_paths_weighted():
    """A 3:1 edge-weight ratio biases sampling toward the heavier branch."""
    g = nx.DiGraph()
    g.add_edges_from([
        ('A', 'B', {'weight': 3}),
        ('A', 'C', {'weight': 1}),
        ('B', 'D', {'weight': 1}),
        ('C', 'D', {'weight': 1})])
    source, target, length = ('A', 'D', 2)
    pg = PathsGraph.from_graph(g, source, target, length)
    # For determinism in testing
    os.environ['TEST_FLAG'] = 'TRUE'
    # Seed the random number generator
    np.random.seed(1)
    sample_paths = pg.sample_paths(200)
    assert set(sample_paths) == set([('A', 'B', 'D'), ('A', 'C', 'D')])
    ctr = Counter(sample_paths)
    assert ctr[('A', 'B', 'D')] == 148
    assert ctr[('A', 'C', 'D')] == 52
def test_sample_paths_weighted_signed():
    """Weighted sampling works with signed edges; nodes carry cumulative
    sign parity."""
    g = nx.DiGraph()
    g.add_edges_from([
        ('A', 'B', {'weight': 3, 'sign': 1}),
        ('A', 'C', {'weight': 1, 'sign': 1}),
        ('B', 'D', {'weight': 1, 'sign': 1}),
        ('C', 'D', {'weight': 1, 'sign': 1})])
    source, target, length = ('A', 'D', 2)
    pg = PathsGraph.from_graph(g, source, target, length, signed=True,
                               target_polarity=0)
    # For determinism in testing
    os.environ['TEST_FLAG'] = 'TRUE'
    # Seed the random number generator
    np.random.seed(1)
    sample_paths = pg.sample_paths(200)
    ctr = Counter(sample_paths)
    assert len(ctr) == 2
    assert ctr[(('A', 0), ('B', 1), ('D', 0))] == 148
    assert ctr[(('A', 0), ('C', 1), ('D', 0))] == 52
def test_enumerate_paths():
    """enumerate_paths lists every length-4 path, cyclic ones included."""
    g_uns = nx.DiGraph()
    g_uns.add_edges_from((('A', 'B'), ('A', 'C'), ('C', 'D'), ('B', 'D'),
                          ('D', 'B'), ('D', 'C'), ('B', 'E'), ('C', 'E')))
    source, target, length = ('A', 'E', 4)
    pg = PathsGraph.from_graph(g_uns, source, target, length)
    enum_paths = pg.enumerate_paths()
    assert set(enum_paths) == set(
        [('A', 'B', 'D', 'B', 'E'),
         ('A', 'B', 'D', 'C', 'E'),
         ('A', 'C', 'D', 'B', 'E'),
         ('A', 'C', 'D', 'C', 'E')])
def test_count_paths():
    """count_paths matches the enumerated path count (4, cycles included)."""
    g_uns = nx.DiGraph()
    g_uns.add_edges_from((('A', 'B'), ('A', 'C'), ('C', 'D'), ('B', 'D'),
                          ('D', 'B'), ('D', 'C'), ('B', 'E'), ('C', 'E')))
    source, target, length = ('A', 'E', 4)
    pg = PathsGraph.from_graph(g_uns, source, target, length)
    num_paths = pg.count_paths()
    assert num_paths == 4
def test_non_uniform_sampling():
    """Default (local edge-weight) sampling over-represents the B1 branch."""
    pg = PathsGraph.from_graph(g_samp, 'source', 'target', 3)
    # There are five different paths, but sampling uniformly based on local
    # edge weights should result in ~50% of paths going through B1
    os.environ['TEST_FLAG'] = 'TRUE'
    np.random.seed(1) # Seed the random number generator
    num_samples = 1000
    paths = pg.sample_paths(num_samples)
    num_b1_paths = len([p for p in paths if 'B1' in p])
    num_other_paths = len([p for p in paths if 'B1' not in p])
    assert num_b1_paths == 510
    assert num_other_paths == 490
def test_uniform_sampling():
    """set_uniform_path_distribution equalizes per-path probabilities."""
    pg = PathsGraph.from_graph(g_samp, 'source', 'target', 3)
    # There are five different paths; sampling uniformly across the whole
    # path distribution should result in 20% of paths going through each of
    # paths going through B1-B5.
    pg.set_uniform_path_distribution()
    os.environ['TEST_FLAG'] = 'TRUE'
    np.random.seed(1) # Seed the random number generator
    num_samples = 5000
    path_count = pg.count_paths()
    assert path_count == 5
    paths = pg.sample_paths(num_samples)
    b_ctr = Counter([p[2] for p in paths])
    print(b_ctr)
    assert b_ctr == {'B1': 1021, 'B2': 991, 'B3': 964, 'B4': 1022, 'B5': 1002}
def test_combine_paths_graphs():
    """Smoke test: combine PGs of lengths 1..4 and sample from the union.

    NOTE(review): there are no assertions here -- the test only verifies
    that construction and sampling do not raise.
    """
    g = nx.DiGraph()
    g.add_edges_from([('S', 'A'), ('S', 'T'), ('A', 'T'), ('A', 'S')])
    max_depth = 4
    pg_list = []
    for length in range(1, max_depth+1):
        paths_graph = PathsGraph.from_graph(g, 'S', 'T', length)
        pg_list.append(paths_graph)
    cpg = CombinedPathsGraph(pg_list)
    paths = cpg.sample_paths(1000)
    path_ctr = Counter(paths)
def test_paths_tree():
    """PathsTree structure: each node is a path prefix, edges extend it."""
    g = nx.DiGraph()
    g.add_edges_from((('A', 'B'), ('A', 'C'), ('A', 'E'),
                      ('B', 'D'), ('B', 'E'),
                      ('C', 'D'), ('C', 'E'),
                      ('D', 'B'), ('D', 'C'), ('D', 'E'),
                      ))
    source, target, length = ('A', 'E', 4)
    paths = list(nx.all_simple_paths(g, source, target))
    pt = PathsTree(paths)
    # Expected prefix tree: root is the empty tuple, children extend the
    # prefix by one node
    pt_ref_edges = set([
        (tuple(), ('A',)),
        (('A',), ('A', 'E')),
        (('A',), ('A', 'B')),
        (('A',), ('A', 'C')),
        (('A', 'B'), ('A', 'B', 'E')),
        (('A', 'B'), ('A', 'B', 'D')),
        (('A', 'C'), ('A', 'C', 'E')),
        (('A', 'C'), ('A', 'C', 'D')),
        (('A', 'B', 'D'), ('A', 'B', 'D', 'E')),
        (('A', 'C', 'D'), ('A', 'C', 'D', 'E')),
        (('A', 'B', 'D'), ('A', 'B', 'D', 'C')),
        (('A', 'C', 'D'), ('A', 'C', 'D', 'B')),
        (('A', 'B', 'D', 'C'), ('A', 'B', 'D', 'C', 'E')),
        (('A', 'C', 'D', 'B'), ('A', 'C', 'D', 'B', 'E')),
        ])
    assert set(pt.graph.edges()) == pt_ref_edges
    # Sample from the path tree
    num_samples = 1000
    samp_paths = pt.sample(num_samples=num_samples)
    assert len(samp_paths) == num_samples
    assert set(samp_paths) == set([tuple(p) for p in paths])
def test_paths_tree_weighted_sampling():
    """PathsTree sampling respects edge weights from the source graph."""
    g = nx.DiGraph()
    g.add_edges_from([
        ('A', 'B', {'weight': 3}),
        ('A', 'C', {'weight': 1}),
        ('B', 'D', {'weight': 1}),
        ('C', 'D', {'weight': 1})])
    source, target, length = ('A', 'D', 2)
    paths = list(nx.all_simple_paths(g, source, target))
    pt = PathsTree(paths, source_graph=g)
    num_samples = 1000
    # For determinism in testing
    os.environ['TEST_FLAG'] = 'TRUE'
    # Seed the random number generator
    np.random.seed(1)
    samp_paths = pt.sample(num_samples=num_samples)
    assert len(samp_paths) == num_samples
    assert set(samp_paths) == set([tuple(p) for p in paths])
    ctr = Counter(samp_paths)
    assert ctr[('A', 'B', 'D')] == 744
    assert ctr[('A', 'C', 'D')] == 256
def test_paths_tree_path_probabilities():
    """Path probabilities follow per-branch splits and sum to 1."""
    source, target = ('source', 'target')
    all_paths = list(nx.all_simple_paths(g_samp, source, target))
    pt = PathsTree(all_paths)
    pw = pt.path_probabilities()
    assert isinstance(pw, dict)
    # A1 branch: 1/2; A2 branch: 1/2 split over four B nodes = 1/8 each
    assert pw[('source', 'A1', 'B1', 'target')] == 0.5
    assert pw[('source', 'A2', 'B2', 'target')] == 0.125
    assert pw[('source', 'A2', 'B3', 'target')] == 0.125
    assert pw[('source', 'A2', 'B4', 'target')] == 0.125
    assert pw[('source', 'A2', 'B5', 'target')] == 0.125
    total = sum([v for v in pw.values()])
    assert total == 1.
def test_paths_tree_path_probabilities_empty():
    """An empty PathsTree yields an empty probability dict."""
    pt = PathsTree([])
    pw = pt.path_probabilities()
    assert pw == {}
"""
def test_sampling_on_random_graphs():
# We use 25 randomly generated graphs for testing the algorithm
with open(random_graph_pkl, 'rb') as f:
rg_dict = pickle.load(f)
for i in range(1):
G_i, source, target = rg_dict[i]
print("graph# %d, %d nodes, %d edges" % (i, len(G_i), len(G_i.edges())))
(f_reach, b_reach) = pg.get_reachable_sets(G_i, source, target,
max_depth=max_depth, signed=False)
"""
def test_cf_sampling_backtracking1():
    """Cycle-free sampling excludes the cyclic A-B-D-B / A-C-D-C paths."""
    g_uns = nx.DiGraph()
    g_uns.add_edges_from((('A', 'B'), ('A', 'C'), ('C', 'D'), ('B', 'D'),
                          ('D', 'B'), ('D', 'C'), ('B', 'E'), ('C', 'E')))
    source = 'A'
    target = 'E'
    length = 4
    pg = PathsGraph.from_graph(g_uns, source, target, length)
    sample_paths = pg.sample_cf_paths(100)
    assert set(sample_paths) == set(
        [('A', 'B', 'D', 'C', 'E'),
         ('A', 'C', 'D', 'B', 'E'),
         ])
def test_cf_sampling_backtracking2():
    """Tests if the backtracking list is pruned as it retreats."""
    g_uns = nx.DiGraph()
    g_uns.add_edges_from([('A', 'B'), ('A', 'C'),
                          ('B', 'D'),
                          ('C', 'D'), ('C', 'E'), ('C', 'F'),
                          ('D', 'C'),
                          ('E', 'C')])
    source = 'A'
    target = 'F'
    length = 4
    pg = PathsGraph.from_graph(g_uns, source, target, length)
    sample_paths = pg.sample_cf_paths(10)
    # Only one cycle-free path of length 4 exists
    assert set(sample_paths) == set([('A', 'B', 'D', 'C', 'F')])
def test_cf_sampling_backtracking_no_paths():
    """Cycle-free sampling returns an empty tuple when no CF path exists."""
    g_edges = [('A', 'B'), ('A', 'C'),
               ('B', 'A'),]
    g = nx.DiGraph()
    g.add_edges_from(g_edges)
    f_level, b_level = get_reachable_sets(g, 'A', 'C', 3)
    pg = PathsGraph.from_graph(g, 'A', 'C', 3, f_level, b_level)
    p = pg.sample_cf_paths(10)
    assert p == tuple()
def test_cf_sampling_signed():
    """Cycle-free sampling in a signed PG with a negative target polarity."""
    g_uns = nx.DiGraph()
    g_uns.add_edges_from([
        ('A', 'B', {'sign': 1}),
        ('A', 'C', {'sign': 0}),
        ('C', 'D', {'sign': 0}),
        ('B', 'D', {'sign': 0}),
        ('D', 'B', {'sign': 0}),
        ('D', 'C', {'sign': 0}),
        ('B', 'E', {'sign': 0}),
        ('C', 'E', {'sign': 0})])
    source = 'A'
    target = 'E'
    length = 4
    target_polarity = 1
    pg = PathsGraph.from_graph(g_uns, source, target, length, signed=True,
                               target_polarity=target_polarity)
    # Only the path through the single negative edge A-B reaches E with
    # cumulative polarity 1
    sample_paths = pg.sample_cf_paths(10)
    assert set(sample_paths) == {(('A', 0), ('B', 1), ('D', 1),
                                  ('C', 1), ('E', 1))}
def test_cf_path_count():
    """Tests if the cycle-free path count is updated from the blacklist."""
    g_uns = nx.DiGraph()
    g_uns.add_edges_from([('A', 'B'), ('A', 'C'),
                          ('B', 'D'),
                          ('C', 'D'), ('C', 'E'), ('C', 'F'),
                          ('D', 'C'),
                          ('E', 'C'), ('E', 'G'),
                          ('G', 'F')])
    source = 'A'
    target = 'F'
    length = 4
    pg = PathsGraph.from_graph(g_uns, source, target, length)
    sample_paths = pg.sample_cf_paths(30)
    assert set(sample_paths) == set([('A', 'B', 'D', 'C', 'F'),
                                     ('A', 'C', 'E', 'G', 'F')])
    cf_count = pg.count_cf_paths()
    assert cf_count == 2
def test_cf_path_count_random():
    """CF count and sampled CF paths match nx.all_simple_paths on a fixed
    pseudo-random graph."""
    num_nodes = 10
    length = 6
    # Fixed edge list originally generated with erdos_renyi_graph (kept
    # hard-coded for reproducibility)
    #g = nx.erdos_renyi_graph(num_nodes, 0.5, directed=True)
    g = nx.DiGraph()
    g.add_edges_from([(0, 1), (0, 2), (0, 3), (0, 7), (1, 0), (1, 3), (1, 8),
        (1, 9), (3, 0), (3, 1), (3, 2), (3, 4), (3, 5), (3, 6), (3, 7), (3, 9),
        (4, 0), (4, 1), (4, 2), (4, 6), (4, 7), (5, 2), (5, 4), (5, 6), (5, 7),
        (6, 0), (6, 1), (6, 2), (6, 3), (6, 4), (6, 5), (6, 7), (6, 9), (7, 4),
        (7, 5), (7, 8), (8, 2), (8, 6), (9, 0), (9, 5), (9, 6), (9, 8)])
    source = 0
    target = num_nodes - 1
    pg = PathsGraph.from_graph(g, source, target, length)
    # Reference: simple (cycle-free) paths of exactly the requested length
    simple_paths = [tuple(p) for p in nx.all_simple_paths(g, source, target)
                    if len(p) == length+1]
    simple_path_count = len(simple_paths)
    cf_paths = pg.sample_cf_paths(1000)
    cf_count = pg.count_cf_paths()
    assert cf_count == simple_path_count
    assert set(simple_paths) == set(cf_paths)
| {"/paths_graph/tests/test_pre_cfpg.py": ["/paths_graph/__init__.py"], "/paths_graph/pre_cfpg.py": ["/paths_graph/pg.py"], "/paths_graph/tests/test_paths_graph.py": ["/paths_graph/__init__.py"], "/paths_graph/tests/test_cfpg.py": ["/paths_graph/__init__.py"], "/paths_graph/cfpg.py": ["/paths_graph/__init__.py", "/paths_graph/pre_cfpg.py"], "/paths_graph/api.py": ["/paths_graph/pg.py", "/paths_graph/cfpg.py"], "/paths_graph/__init__.py": ["/paths_graph/pg.py", "/paths_graph/pre_cfpg.py", "/paths_graph/cfpg.py", "/paths_graph/paths_tree.py", "/paths_graph/api.py"]} |
69,087 | kkaris/paths_graph | refs/heads/master | /paths_graph/tests/test_cfpg.py | import os
import pickle
from collections import Counter
from os.path import dirname, join
import numpy as np
import networkx as nx
import paths_graph as pg
# Pickled edge lists of pre-generated random graphs, used by
# test_on_random_graphs below.
random_graph_pkl = join(dirname(__file__), 'random_graphs_edges.pkl')
# Shared example graph: exactly two cycle-free A->E paths of length 4,
# ('A','B','D','C','E') and ('A','C','D','B','E'), plus the B<->D and C<->D
# cycles that the CFPG construction must eliminate.
g_uns = nx.DiGraph()
g_uns.add_edges_from((('A', 'B'), ('A', 'C'), ('C', 'D'), ('B', 'D'),
                      ('D', 'B'), ('D', 'C'), ('B', 'E'), ('C', 'E')))
source = 'A'
target = 'E'
length = 4
def test_from_graph_no_levels():
    """Building a CFPG straight from the graph yields the two known paths."""
    cfpg = pg.CFPG.from_graph(g_uns, source, target, length)
    assert isinstance(cfpg, pg.CFPG)
    enumerated = cfpg.enumerate_paths()
    assert len(enumerated) == 2
    for expected in (('A', 'B', 'D', 'C', 'E'), ('A', 'C', 'D', 'B', 'E')):
        assert expected in enumerated
    assert len(cfpg.graph) == 8
    # The D node should be split into two nodes
    split_copies = [node for node in cfpg.graph.nodes() if node[1] == 'D']
    assert len(split_copies) == 2
def test_from_graph_with_levels_bad_depth():
    """Raise an exception if the requested path length is greater than the
    depth of the provided reach sets."""
    # Reach sets only go to depth 2, but we request paths of length 4; the
    # resulting CFPG must be empty.
    f_reach, b_reach = pg.get_reachable_sets(g_uns, source, target,
                                             max_depth=2)
    cfpg = pg.CFPG.from_graph(g_uns, source, target, length,
                              fwd_reachset=f_reach, back_reachset=b_reach)
    assert not cfpg.graph
def test_from_pg():
    """A CFPG built from a raw PathsGraph matches the direct construction."""
    f_reach, b_reach = pg.get_reachable_sets(g_uns, source, target,
                                             max_depth=length)
    raw_pg = pg.PathsGraph.from_graph(g_uns, source, target, length,
                                      f_reach, b_reach)
    cfpg = pg.CFPG.from_pg(raw_pg)
    enumerated = cfpg.enumerate_paths()
    assert len(enumerated) == 2
    for expected in (('A', 'B', 'D', 'C', 'E'), ('A', 'C', 'D', 'B', 'E')):
        assert expected in enumerated
    assert len(cfpg.graph) == 8
    # The D node should be split into two nodes
    split_copies = [node for node in cfpg.graph.nodes() if node[1] == 'D']
    assert len(split_copies) == 2
def test_sample_paths():
    """Sampling many paths yields exactly the graph's two cycle-free paths."""
    cfpg = pg.CFPG.from_graph(g_uns, source, target, length)
    drawn = cfpg.sample_paths(100)
    expected = {('A', 'B', 'D', 'C', 'E'),
                ('A', 'C', 'D', 'B', 'E')}
    assert set(drawn) == expected
def test_enumerate_paths():
    """Enumeration returns exactly the graph's two cycle-free paths."""
    cfpg = pg.CFPG.from_graph(g_uns, source, target, length)
    expected = {('A', 'B', 'D', 'C', 'E'),
                ('A', 'C', 'D', 'B', 'E')}
    assert set(cfpg.enumerate_paths()) == expected
def test_count_paths():
    """The shared example graph has exactly two cycle-free paths of length 4."""
    cfpg = pg.CFPG.from_graph(g_uns, source, target, length)
    assert cfpg.count_paths() == 2
def test_on_random_graphs():
    """Check CFPG counts/paths against networkx on pre-generated random graphs.

    For each random graph checked, verify that the number of cycle free paths
    for a given depth and source/target pair matches the results from
    networkx all_simple_paths, and that the CF1/CF2/CF3 properties hold.

    NOTE(review): although 25 graphs are stored in the pickle, only graph 0
    is checked (``range(1)`` below) -- presumably for test runtime; confirm.
    """
    # We use 25 randomly generated graphs for testing the algorithm
    with open(random_graph_pkl, 'rb') as f:
        rg_dict = pickle.load(f)
    min_depth = 5
    max_depth = 10
    for i in range(1):
        edges, source, target = rg_dict[i]
        G_i = nx.DiGraph()
        G_i.add_edges_from(edges)
        print("graph# %d, %d nodes, %d edges" % (i, len(G_i.nodes()),
                                                 len(G_i.edges())))
        # Reach sets are computed once per graph at the maximum depth and
        # reused for every path length below
        (f_reach, b_reach) = pg.get_reachable_sets(G_i, source, target,
                                        max_depth=max_depth, signed=False)
        # Try different path lengths
        for length in range(min_depth, max_depth+1):
            print("Checking paths of length %d" % length)
            # For validation, we compute explicitly the set of paths in the
            # original graph of a fixed length
            P = list(nx.all_simple_paths(G_i, source, target, length+1))
            # Filter to paths of this length
            P_correct = [tuple(p) for p in P if len(p) == length+1]
            # Generate the raw paths graph
            G_cf = pg.CFPG.from_graph(G_i, source, target, length, f_reach,
                                      b_reach)
            # Check the path count
            path_count = G_cf.count_paths()
            assert len(P_correct) == path_count
            # Enumerate paths using node tuples
            P_cf_pruned = G_cf.enumerate_paths(names_only=False)
            # Next we extract the paths by projecting down to second
            # component (node names)
            P_cf_pruned_names = G_cf.enumerate_paths(names_only=True)
            print("# of paths: %d" % len(P_cf_pruned_names))
            # We verify the three required properties.
            # Recall:
            # CF1: Every source-to-target path in G_cf is cycle free.
            # CF2: Every cycle free path in the original graph appears as a
            # source-to-target path in G_cf.
            # CF3: There is a 1-1 correspondence between the paths in G_cf and
            # the paths in the original graph. This means there is no
            # redundancy in the representation. For every path in the original
            # graph there is a unique path in G_cf that corresponds to it.
            # We first verify CF1.
            for p in P_cf_pruned_names:
                if len(p) != len(list(set(p))):
                    print("cycle!")
                    print(p)
                    assert False
            # Next we verify CF2. We will in fact check if the set of paths in
            # P_cf_pruned_names is exactly the set of paths in the original
            # graph.
            if set(P_correct) != set(P_cf_pruned_names):
                print("Paths do not match reference set from networkx")
                print("graph, length", (i, length))
                assert False
            # Finally we verify CF3
            if len(P_cf_pruned) != len(list(set(P_cf_pruned_names))):
                print("redundant representation!")
                print("graph, length", (i, length))
                assert False
def test_example_graph1():
    """Sampling, enumeration and counting agree on the korkut_im graph.

    Verifies, for both the cyclic and cycle-free representations, that
    enumeration, exhaustive sampling (10000 draws) and counting all agree,
    and that the cycle-free paths are exactly the acyclic subset of all
    paths.
    """
    sif_file = join(dirname(__file__), 'korkut_im.sif')
    g = pg.load_signed_sif(sif_file)
    source = 'BLK_phosphoY389_phosphorylation_PTK2_Y397'
    target = 'EIF4EBP1_T37_p_obs'
    # NOTE(review): unused local -- the calls below pass the literal
    # target_polarity=0 instead.
    target_polarity = 0
    enum_paths = pg.enumerate_paths(g, source, target, signed=True,
                        target_polarity=0, max_depth=8, cycle_free=False)
    assert len(enum_paths) == len(set(enum_paths))
    enum_paths = set(enum_paths)
    sampled_paths = set(pg.sample_paths(g, source, target, signed=True,
                                        target_polarity=0, max_depth=8,
                                        num_samples=10000, cycle_free=False))
    count_paths = pg.count_paths(g, source, target, signed=True,
                        target_polarity=0, max_depth=8, cycle_free=False)
    assert len(sampled_paths) == len(enum_paths)
    assert len(sampled_paths) == count_paths
    assert sampled_paths == enum_paths
    enum_cf_paths = pg.enumerate_paths(g, source, target, signed=True,
                        target_polarity=0, max_depth=8, cycle_free=True)
    assert len(enum_cf_paths) == len(set(enum_cf_paths))
    enum_cf_paths = set(enum_cf_paths)
    sampled_cf_paths = set(pg.sample_paths(g, source, target, signed=True,
                                           target_polarity=0, max_depth=8,
                                           num_samples=10000, cycle_free=True))
    count_cf_paths = pg.count_paths(g, source, target, signed=True,
                        target_polarity=0, max_depth=8, cycle_free=True)
    assert len(sampled_cf_paths) == len(enum_cf_paths)
    assert len(sampled_cf_paths) == count_cf_paths
    assert sampled_cf_paths == enum_cf_paths
    # 101 cycle free paths
    assert count_cf_paths == 101
    # All of the cycle-free paths should be contained in the set of paths
    # with cycles
    assert sampled_paths.intersection(sampled_cf_paths) == sampled_cf_paths
    # Check that all paths in the set difference contain cycles
    for path in sampled_paths.difference(sampled_cf_paths):
        assert len(set(path)) < len(path)
def test_uniform_sampling_example_graph1():
    """After re-weighting, every CF path should be sampled ~uniformly.

    With (number of paths * 1000) samples, each path is expected to be drawn
    close to 1000 times (tolerance +/-10%).
    """
    sif_file = join(dirname(__file__), 'korkut_im.sif')
    g = pg.load_signed_sif(sif_file)
    source = 'BLK_phosphoY389_phosphorylation_PTK2_Y397'
    target = 'EIF4EBP1_T37_p_obs'
    # NOTE(review): unused local -- the call below passes target_polarity=0
    target_polarity = 0
    length = 8
    cfpg = pg.CFPG.from_graph(g, source, target, length, signed=True,
                              target_polarity=0)
    # TEST_FLAG canonicalizes node ordering during sampling; combined with
    # the fixed seed this makes the draw reproducible
    os.environ['TEST_FLAG'] = 'TRUE'
    np.random.seed(1)
    # Count paths
    # Now, re-weight for uniformity and re-sample
    num_samples = cfpg.count_paths() * 1000
    cfpg.set_uniform_path_distribution()
    sampled_paths_uni = cfpg.sample_paths(num_samples=num_samples)
    ctr_uni = Counter(sampled_paths_uni)
    for path, count in ctr_uni.items():
        assert count > 900 and count < 1100
def test_enumerate_example_graph2():
    """Enumeration and counting agree on the korkut_stmts graph, with and
    without cycles."""
    sif_file = join(dirname(__file__), 'korkut_stmts.sif')
    g = pg.load_signed_sif(sif_file)
    source = 'BLK'
    target = 'EIF4EBP1'
    enum_paths = pg.enumerate_paths(g, source, target, signed=True,
                        target_polarity=0, max_depth=8, cycle_free=False)
    assert len(enum_paths) == len(set(enum_paths))
    enum_paths = set(enum_paths)
    count_paths = pg.count_paths(g, source, target, signed=True,
                        target_polarity=0, max_depth=8, cycle_free=False)
    enum_cf_paths = pg.enumerate_paths(g, source, target, signed=True,
                        target_polarity=0, max_depth=8, cycle_free=True)
    assert len(enum_cf_paths) == len(set(enum_cf_paths))
    enum_cf_paths = set(enum_cf_paths)
    count_cf_paths = pg.count_paths(g, source, target, signed=True,
                        target_polarity=0, max_depth=8, cycle_free=True)
    # Check that the counts match the enumeration
    assert len(enum_paths) == count_paths
    assert len(enum_cf_paths) == count_cf_paths
    # All of the cycle-free paths should be contained in the set of paths
    # with cycles
    assert enum_paths.intersection(enum_cf_paths) == enum_cf_paths
    # Check that all paths in the set difference contain cycles
    for path in enum_paths.difference(enum_cf_paths):
        assert len(set(path)) < len(path)
def test_sampling_example_graph2():
    """CF sampling on the korkut_stmts graph is consistent with enumeration.

    Fix: the original computed ``count_cf_paths`` but never asserted anything
    about it; assert that the count agrees with the enumeration, matching the
    check performed in test_enumerate_example_graph2.
    """
    sif_file = join(dirname(__file__), 'korkut_stmts.sif')
    g = pg.load_signed_sif(sif_file)
    source = 'BLK'
    target = 'EIF4EBP1'
    enum_cf_paths = set(pg.enumerate_paths(g, source, target, signed=True,
                            target_polarity=0, max_depth=8, cycle_free=True))
    sampled_cf_paths = set(pg.sample_paths(g, source, target, signed=True,
                                           target_polarity=0, max_depth=8,
                                           num_samples=10000, cycle_free=True))
    count_cf_paths = pg.count_paths(g, source, target, signed=True,
                            target_polarity=0, max_depth=8, cycle_free=True)
    # The count should match the exhaustive enumeration
    assert count_cf_paths == len(enum_cf_paths)
    # The sampled paths should be a subset of the enumerated paths
    assert sampled_cf_paths.intersection(enum_cf_paths) == sampled_cf_paths
def test_weighted_sampling():
    """Edge weights bias path sampling proportionally.

    A->B has weight 3 and A->C weight 1, so roughly 3/4 of the 1000 samples
    should go through B. The exact counts asserted below depend on the fixed
    numpy seed and on the canonical node ordering enabled by TEST_FLAG.
    """
    g = nx.DiGraph()
    g.add_edges_from([
        ('A', 'B', {'weight': 3}),
        ('A', 'C', {'weight': 1}),
        ('C', 'D'),
        ('B', 'D'),
        ('D', 'B'),
        ('D', 'C'),
        ('B', 'E'),
        ('C', 'E')])
    source, target, length = ('A', 'E', 4)
    cfpg = pg.CFPG.from_graph(g, source, target, length)
    os.environ['TEST_FLAG'] = 'TRUE'
    np.random.seed(1)
    samp_paths = cfpg.sample_paths(1000)
    ctr = Counter(samp_paths)
    assert ctr[('A', 'B', 'D', 'C', 'E')] == 767
    assert ctr[('A', 'C', 'D', 'B', 'E')] == 233
def test_combine_cfpgs():
    """Combining CFPGs of several lengths yields valid source-target paths.

    Fix: the original built ``path_ctr`` but asserted nothing; add structural
    assertions that the requested number of paths is returned and that every
    sampled path runs from the source 'S' to the target 'T'.
    """
    g = nx.DiGraph()
    g.add_edges_from([('S', 'A'), ('S', 'T'), ('A', 'T'), ('A', 'S')])
    max_depth = 4
    pg_list = []
    for length in range(1, max_depth+1):
        cfpg = pg.CFPG.from_graph(g, 'S', 'T', length)
        pg_list.append(cfpg)
    cpg = pg.CombinedCFPG(pg_list)
    paths = cpg.sample_paths(1000)
    assert len(paths) == 1000
    path_ctr = Counter(paths)
    # Every sampled path must start at the source and end at the target
    for sampled_path in path_ctr:
        assert sampled_path[0] == 'S'
        assert sampled_path[-1] == 'T'
def test_problem_graph():
    """Regression test on a graph that previously tripped up the CFPG.

    Fix: the original compared a tuple of networkx path *lists* against a
    tuple of CFPG path *tuples* with ``==``, which can never be equal and is
    additionally order-dependent. Compare as sets of tuples instead.
    """
    g = nx.DiGraph()
    g.add_edges_from([
        (0, 1), (0, 4), (0, 5), (1, 0), (1, 4), (1, 5), (2, 1), (2, 4), (2, 5),
        (3, 1), (3, 5), (4, 0), (4, 3), (4, 5), (5, 0), (5, 1), (5, 2), (5, 3),
        (5, 4)])
    cfpg = pg.CFPG.from_graph(g, 0, 5, 5)
    # Simple paths of exactly 6 nodes (length 5)
    nx_paths_5 = set(tuple(p) for p in nx.all_simple_paths(g, 0, 5)
                     if len(p) == 6)
    cfpg_paths = cfpg.enumerate_paths()
    assert set(cfpg_paths) == nx_paths_5
def test_pruning_src_tgt():
    """Test for graceful handling of networks where src or tgt nodes are pruned.

    Fix: the original built a set directly from networkx path *lists*, which
    are unhashable and raise TypeError; convert each path to a tuple first,
    matching the tuples returned by CFPG.sample_paths.
    """
    g = nx.DiGraph()
    g.add_edges_from([(0, 1), (0, 2), (0, 3), (0, 4), (1, 0), (2, 0), (2, 1),
        (2, 3), (3, 0), (3, 2), (3, 4), (3, 5), (4, 1), (4, 3), (4, 5), (5, 0),
        (5, 2)])
    src = 0
    tgt = 5
    length = 5
    cfpg = pg.CFPG.from_graph(g, src, tgt, length)
    cfpg_paths = cfpg.sample_paths(100)
    nx_paths = set(tuple(p) for p in nx.all_simple_paths(g, src, tgt)
                   if len(p) - 1 == length)
    assert set(cfpg_paths) == nx_paths
| {"/paths_graph/tests/test_pre_cfpg.py": ["/paths_graph/__init__.py"], "/paths_graph/pre_cfpg.py": ["/paths_graph/pg.py"], "/paths_graph/tests/test_paths_graph.py": ["/paths_graph/__init__.py"], "/paths_graph/tests/test_cfpg.py": ["/paths_graph/__init__.py"], "/paths_graph/cfpg.py": ["/paths_graph/__init__.py", "/paths_graph/pre_cfpg.py"], "/paths_graph/api.py": ["/paths_graph/pg.py", "/paths_graph/cfpg.py"], "/paths_graph/__init__.py": ["/paths_graph/pg.py", "/paths_graph/pre_cfpg.py", "/paths_graph/cfpg.py", "/paths_graph/paths_tree.py", "/paths_graph/api.py"]} |
69,088 | kkaris/paths_graph | refs/heads/master | /paths_graph/cfpg.py | import os
import logging
import itertools
from collections import Counter
import numpy as np
import networkx as nx
from paths_graph import PathsGraph
from paths_graph.pre_cfpg import PreCFPG
import pickle
# Module-level logger for the cfpg module.
logger = logging.getLogger('cfpg')
class CFPG(PathsGraph):
    """Representation of cycle-free paths in a graph of a given length.

    We construct a representation of cycle_free paths of a fixed length. This
    fixed length will often not be mentioned in what follows. We call our
    representation "the cycle_free paths graph". Below it is the graph G_cf
    (actually it is G_cf_pruned but for now it will be convenient to ignore
    this distinction).

    G_cf is required to have three properties.

    * CF1: Every source-to-target path in G_cf is cycle free.
    * CF2: Every cycle free path in the original graph appears as a
      source-to-target path in G_cf.
    * CF3: There is a 1-1 correspondence between the paths in G_cf and the
      paths in the original graph. This means there is no redundancy in the
      representation. For every path in the original graph there is a unique
      path in G_cf that corresponds to it.

    These 3 conditions will ensure that we can sample paths in the original
    graph faithfully by sampling paths in G_cf. We can also perform graph
    theoretic operations on G_cf to simulate useful operations on the set of
    paths of interest in the original graph.

    The starting point is the paths graph (pg_raw below) that represents "all"
    paths (cycle free or not) of the given fixed length from source to target.
    Then using an initial iterative procedure we prune away junk nodes (that
    cannot appear on any cycle free path from source to target) and more
    importantly tag each node with its cycle free history. More precisely if u
    is in tags[v] then we are guaranteed that every path from u to v that
    involves only nodes appearing in tags[v] will be v-cycle_free. In other
    words the name of v (i.e. v[1]) will not appear in the path. Further, it
    will also be u-cycle free. Note however tags[u] may contain a node that
    has the same name as that of v. Indeed this is the crux of the problem.
    Moving on, this tagged path graph is named G_0 and the associated tags map
    is named T_0 below.

    G_cf is computed by refining G_0. But first let us consider why G_0 is not
    an ideal representation of the set of cycle free paths of a fixed length.
    First, G_0 does not have the property (CF1) (though it does have the
    properties (CF2) and (CF3)). As a result one can't just walk through the
    graph from source to node and generate a cycle free path. Instead one must
    use a sampling method with memory to generate cycle free paths. In
    particular if one has reached the node u via the path p and v is a
    successors of u then one can extend p by moving to v only if p is
    contained in T_0[v]. Thus whether the move along the edge (u,v) is
    conditioned by the memory of how u was reached. Further, one can get stuck
    while using this sampling procedure. Hence it is not clear whether one is
    sampling the set of paths of interest in a faithful fashion. More
    importantly it is not clear how one can perform graph theoretic operations
    on G_0 to simulate operations on the set of cycle free paths of interest.
    We will however keep in mind that G_0 together with its path sampling
    procedure is a useful tool to have around.

    Constructing G_cf by refining G_0 may be viewed as synthesizing a
    memoryless strategy for generating cycle free paths. In other words, if
    (u,v) is an edge in G_cf then no matter how we have reached u we must be
    able to transition to v. A necessary condition that will enable this is to
    ensure that the set of tags of u (T_cf[u]) is included in the set of tags
    of v (T_cf[v]) in G_cf. The challenge is to achieve this while ensuring
    that the properties (CF1), (CF2) and (CF3) are met.
    """
    def __init__(self, source_name, source_node, target_name, target_node,
                 path_length, graph):
        # Nodes in `graph` are 3-tuples (level, name, tag-set copy number);
        # source_node/target_node are the 3-tuple versions.
        self.source_name = source_name
        self.source_node = source_node
        self.target_name = target_name
        self.target_node = target_node
        self.path_length = path_length
        self.graph = graph

    @classmethod
    def from_graph(klass, *args, **kwargs):
        """Get an instance of a CFPG from a graph.

        Parameters
        ----------
        g : networkx.DiGraph
            The underlying graph on which paths will be generated.
        source : str
            Name of the source node.
        target : str
            Name of the target node.
        length : int
            Length of paths to compute.
        fwd_reachset : Optional[dict]
            Dictionary of sets representing the forward reachset computed over
            the original graph g up to a maximum depth greater than the
            requested path length. If not provided, the forward reach set is
            calculated up to the requested path length by calling
            paths_graph.get_reachable_sets.
        back_reachset : Optional[dict]
            Dictionary of sets representing the backward reachset computed
            over the original graph g up to a maximum depth greater than the
            requested path length. If not provided, the backward reach set is
            calculated up to the requested path length by calling
            paths_graph.get_reachable_sets.
        signed : bool
            Specifies whether the underlying graph and the corresponding
            f_level and b_level reachable sets have signed edges. If True,
            sign information should be encoded in the 'sign' field of the edge
            data, with 0 indicating a positive edge and 1 indicating a
            negative edge.
        target_polarity : 0 or 1
            Specifies the polarity of the target node: 0 indicates
            positive/activation, 1 indicates negative/inhibition.

        Returns
        -------
        CFPG
            Instance of CFPG class representing cycle-free paths from source
            to target with a given length and overall polarity.
        """
        pg = PathsGraph.from_graph(*args, **kwargs)
        return klass.from_pg(pg)

    @classmethod
    def from_pg(klass, pg):
        """Get an instance of a CFPG from a PathsGraph.

        Parameters
        ----------
        pg : PathsGraph
            "Raw" (contains cycles) paths graph as created by
            :py:func:`indra.explanation.paths_graph.PathsGraph.from_graph`.

        Returns
        -------
        CFPG
            Instance of CFPG class representing cycle-free paths from source
            to target with a given length and overall polarity.
        """
        path_length = pg.path_length
        src_2node = pg.source_node # 2-tuple version of source
        src_3node = pg.source_node + (0,) # 3-tuple version of source
        tgt_2node = pg.target_node # 2-tuple version of target
        tgt_3node = pg.target_node + (0,) # 3-tuple version of target
        # An empty paths graph yields an empty CFPG
        if not pg.graph:
            return CFPG(pg.source_name, src_3node, pg.target_name, tgt_3node,
                        path_length, nx.DiGraph())
        pg_raw = pg.graph
        # Interior nodes sharing a name with the source or target can never
        # lie on a cycle-free source-to-target path; mark them for pruning.
        ntp_0 = [v for v in pg_raw.nodes()
                 if (v != src_2node and v[1] == src_2node[1]) or
                    (v != tgt_2node and v[1] == tgt_2node[1])]
        # For the negative polarity case ntp_0 should be defined as:
        #   ntp_0 = [v for v in pg_raw.nodes()
        #            if (v != src_2node and v[1][0] == src_2node[1][0]) or
        #               (v != tgt_2node and v[1] == tgt_2node[1][0])]
        if not ntp_0:
            pg_0 = pg_raw
        else:
            pg_0 = prune(pg_raw, ntp_0, src_2node, tgt_2node)
        if not pg_0:
            return CFPG(pg.source_name, src_3node, pg.target_name, tgt_3node,
                        path_length, nx.DiGraph())
        # past[v]: all nodes on any source-to-v path in pg_0 (incl. v)
        past = get_past(src_2node, tgt_2node, pg_0)
        # Hardwire the level of the target node: dic_CF[path_length]
        next_tgt = {tgt_3node: []}
        pred_tgt = {tgt_3node: list(pg_0.predecessors(tgt_2node))}
        past_tgt = past[tgt_2node]
        t_cf_tgt = {tgt_3node: past_tgt}
        dic_CF = {path_length: ([tgt_3node], next_tgt, pred_tgt, t_cf_tgt)}
        # Work backwards from the target level toward the source
        for i in reversed(range(1, path_length)):
            V_ip1, next_ip1, pred_ip1, t_cf_ip1 = dic_CF[i+1]
            V_current = []
            for v in V_ip1:
                V_current.extend(pred_ip1[v])
            V_current = list(set(V_current))
            V_i = []
            next_i = {}
            pred_i = {}
            t_cf_i = {}
            # Now comes the heart of the construction. We take a node x in
            # V_current and split it into -in general- multiple copies to
            # ensure that if (u,v) is an edge in G_cf then the set of tags of
            # u is included in the set of tags of v
            for x in V_current:
                past_x = past[x]
                pg_x = pg_0.subgraph(past_x)
                # Nodes in x's past that share x's name would make any path
                # through them x-cyclic
                ntp_x = [v for v in past_x if v != x and v[1] == x[1]]
                # For the negative polarity case the above should be:
                #   ntp_x = [v for v in past_x
                #            if v != x and v[1][0] == x[1][0]]
                if not ntp_x:
                    tags_x = pg_x.nodes()
                else:
                    pg_x_pruned = prune(pg_x, ntp_x, src_2node, x)
                    # Skip x if it was pruned away or is no longer reachable
                    # from the source
                    if x not in pg_x_pruned or src_2node not in \
                                            nx.ancestors(pg_x_pruned, x):
                        continue
                    tags_x = pg_x_pruned.nodes()
                X_ip1 = [w for w in V_ip1 if x in pred_ip1[w]]
                X_im1 = list(pg_0.predecessors(x))
                assert X_ip1 != []
                V_x, next_x, pred_x, t_cf_x = \
                        _split_graph(src_2node, tgt_2node, x, X_ip1, X_im1,
                                     t_cf_ip1, tags_x, pg_0)
                V_i.extend(V_x)
                next_i.update(next_x)
                pred_i.update(pred_x)
                t_cf_i.update(t_cf_x)
            dic_CF[i] = (V_i, next_i, pred_i, t_cf_i)
        # Finally, hardwire the source level dic_CF[0]
        V_1 = dic_CF[1][0]
        V_0 = [src_3node]
        next_src = {src_3node: V_1}
        pred_src = {src_3node: []}
        t_cf_src = {src_3node: [src_2node]}
        dic_CF[0] = (V_0, next_src, pred_src, t_cf_src)
        G_cf = _dic_to_graph(dic_CF, pg)
        # Prune out possible unreachable nodes in G_cf.
        # NOTE: use degree counts rather than the truthiness of
        # G.successors()/G.predecessors() -- in networkx 2.x (the API
        # generation used by prune() above) those return iterators, which
        # are always truthy, so the original test never flagged any node.
        nodes_prune = [v for v in G_cf
                       if (v != tgt_3node and G_cf.out_degree(v) == 0) or
                          (v != src_3node and G_cf.in_degree(v) == 0)]
        G_cf_pruned = prune(G_cf, nodes_prune, src_3node, tgt_3node)
        # If the source or target nodes have been pruned, the CFPG should be
        # empty
        if src_3node not in G_cf_pruned or tgt_3node not in G_cf_pruned:
            G_cf_pruned = nx.DiGraph()
        return klass(pg.source_name, src_3node, pg.target_name, tgt_3node,
                     path_length, G_cf_pruned)

    @classmethod
    def from_pre_cfpg(klass, pre_cfpg):
        """Generate a cycle free paths graph (CFPG).

        Implements the major step (the outer loop) for constructing G_cf. We
        do so by computing dic_CF, a dictionary based version of G_cf.
        dic_CF[i] will be a quadruple of the form (V_i, next_i, pred_i, t_i).

        V_i will be the set of nodes at level i.

        A node--after dic_CF[i] has been computed--will be of the form
        (i, n, c) where i is the level, n is the name and c is the copy number
        of the node (i,n) in G_0. In other words, each node in G_0 will be
        split into one or more copies to implement our memoryless sampling
        strategy.

        next_i is the successor relation for the CFPG.

        pred_i[v] is the set of predecessors of v in V_i. The construction
        proceeds from the target to source. At stage i of the construction we
        convert nodes of the form (i, n) into nodes of the form (i,n,c). For
        any such new node pred_i[v] will be nodes of the form (i-1,n) at level
        i-1.

        t_i[v] will be the new tags of the node v. They will be pairs of the
        form (j,n). In other words their type will be the same as of T_0.
        (Note: In T_0, I assign nodes of G_0 as tags rather than their names.
        This turns out to be convenient for the construction of G_cf)

        Once the construction of PG_cf is complete we will no longer
        require pred_i and t_i.

        Parameters
        ----------
        pre_cfpg : instance of PreCFPG
            The pre-cycle free paths graph to use to compute the CFPG.

        Returns
        -------
        CFPG
            Instance of CFPG class representing cycle-free paths from source
            to target with a given length and overall polarity.
        """
        # Define old (2-tuple) and new (3-tuple) versions of src/tgt nodes
        path_length = pre_cfpg.path_length
        src_2node = pre_cfpg.source_node # 2-tuple version of source
        src_3node = pre_cfpg.source_node + (0,) # 3-tuple version of source
        tgt_2node = pre_cfpg.target_node # 2-tuple version of target
        tgt_3node = pre_cfpg.target_node + (0,) # 3-tuple version of target
        # If we were given an empty pre-CFPG, then the CFPG should also be
        # empty
        if not pre_cfpg.graph:
            return CFPG(pre_cfpg.source_name, src_3node,
                        pre_cfpg.target_name, tgt_3node,
                        path_length, nx.DiGraph())
        # We first hardwire the contents of the dictionary for the level of
        # the target node: dic_CF[path_length]
        next_tgt = {tgt_3node: []}
        pred_tgt = {tgt_3node: list(pre_cfpg.graph.predecessors(tgt_2node))}
        t_cf_tgt = {tgt_3node: pre_cfpg.tags[tgt_2node]}
        dic_CF = {path_length: ([tgt_3node], next_tgt, pred_tgt, t_cf_tgt)}
        logger.info("Creating CFPG from pre-CFPG")
        # Iterate from level n-1 (one "above" the target) back to the source
        for i in reversed(range(1, path_length)):
            # Get the information for level i+1 (one level closer to the
            # target)
            V_ip1, next_ip1, pred_ip1, t_cf_ip1 = dic_CF[i+1]
            # Because we are working off of a non-empty pre-CFPG, we should
            # never end with a level in the graph with no nodes
            assert V_ip1 != []
            # TODO: Can V_current be replaced simply by the nodes in pre-CFPG
            # at level i?
            # TODO: Rename V_current -> V_i_old, V_i -> V_i_new?
            V_current = []
            for v in V_ip1:
                V_current.extend(pred_ip1[v])
            V_current = list(set(V_current))
            # V_current should never be empty by construction of the pre-CFPG
            assert V_current != []
            # Thus V_current is the set of nodes (which will be 2-tuples) at
            # level i to be processed. The converted nodes (which will be
            # 3-tuples, including the copy number) will be binned into V_i.
            V_i, next_i, pred_i, t_cf_i = ([], {}, {}, {})
            # Now comes the heart of the construction. We take a node x in
            # V_current and split it into--in general--multiple copies to
            # ensure that if (u,v) is an edge in G_cf then the set of tags of
            # u is included in the set of tags of v
            for x in V_current:
                # X_ip1 is the set of nodes at the level i+1 to which x is
                # connected via the pred_ip1 function. These nodes, already
                # processed, will be 3-tuples. X_im1 are the set of
                # predecessor nodes of x at the level i-1. They are
                # unprocessed 2-tuples.
                X_ip1 = [w for w in V_ip1 if x in pred_ip1[w]]
                X_im1 = list(pre_cfpg.graph.predecessors(x))
                assert X_ip1 != []
                # The actual splitting of node x and connecting the resulting
                # copies of x to its neighbors above and below is carried out
                # by the _split_graph function, below.
                V_x, next_x, pred_x, t_cf_x = \
                        _split_graph(src_2node, tgt_2node, x, X_ip1, X_im1,
                                     t_cf_ip1, pre_cfpg.tags[x],
                                     pre_cfpg.graph)
                # We now extend V_i, next_i, pred_i and t_i in the obvious
                # way.
                V_i.extend(V_x) # V_x contains the new, split versions of x
                next_i.update(next_x)
                pred_i.update(pred_x)
                t_cf_i.update(t_cf_x)
            dic_CF[i] = (V_i, next_i, pred_i, t_cf_i)
        # Finally we hardwire dic_CF[0]
        V_1 = dic_CF[1][0]
        V_0 = [src_3node]
        next_src = {src_3node: V_1}
        pred_src = {src_3node: []}
        t_cf_src = {src_3node: pre_cfpg.tags[src_2node]}
        dic_CF[0] = (V_0, next_src, pred_src, t_cf_src)
        G_cf = _dic_to_graph(dic_CF, pre_cfpg)
        # Prune out possible unreachable nodes in G_cf. As in from_pg, use
        # degree counts instead of the always-truthy iterators returned by
        # successors()/predecessors() in networkx 2.x.
        nodes_prune = [v for v in G_cf
                       if (v != tgt_3node and G_cf.out_degree(v) == 0) or
                          (v != src_3node and G_cf.in_degree(v) == 0)]
        G_cf_pruned = prune(G_cf, nodes_prune, src_3node, tgt_3node)
        return klass(pre_cfpg.source_name, src_3node,
                     pre_cfpg.target_name, tgt_3node,
                     path_length, G_cf_pruned)
class CombinedCFPG(object):
    """Combine a set of CFPGs for different lengths into a single super-CFPG.

    The graphs of all given CFPGs are merged into a single DiGraph; the
    source/target metadata is taken from the last CFPG in the list (all
    entries are assumed to share the same source and target).

    Parameters
    ----------
    cfpg_list : list of CFPG instances
        CFPGs to combine, typically one per path length. Must be non-empty.
    """
    def __init__(self, cfpg_list):
        # Guard explicitly: the original code raised an obscure NameError on
        # an empty list when reading source/target info from the last CFPG.
        if not cfpg_list:
            raise ValueError("cfpg_list must contain at least one CFPG")
        self.graph = nx.DiGraph()
        for cfpg in cfpg_list:
            self.graph.add_edges_from(cfpg.graph.edges(data=True))
        # Add info from the last CFPG
        self.source_name = cfpg.source_name
        self.source_node = cfpg.source_node
        self.target_name = cfpg.target_name
        self.target_node = cfpg.target_node

    def sample_paths(self, num_samples):
        """Sample paths of variable length between source and target.

        Sampling makes use of edge weights where available; if they are not
        set, equal local edge weights of 1 are assumed.

        Parameters
        ----------
        num_samples : int
            The number of paths to sample.

        Returns
        -------
        tuple of tuples
            Each item is a tuple of strings representing a path. Note that
            the paths may not be unique.
        """
        if not self.graph:
            return tuple([])
        paths = []
        while len(paths) < num_samples:
            # Get a path, starting from the source node
            current_nodes = [self.source_node]
            current_name = self.source_name
            path = [current_name]
            while current_name != self.target_name:
                current_name, current_nodes = self._successors(current_nodes)
                path.append(current_name)
            # Add the current path
            paths.append(tuple(path))
        return tuple(paths)

    def _successors(self, current_nodes):
        """Choose the next node name by weighted sampling over out-edges.

        Returns a tuple (next_name, next_nodes) where next_nodes is the list
        of graph nodes sharing the chosen name.
        """
        out_edges = [e for node in current_nodes
                       for e in self.graph.out_edges(node, data=True)]
        weight_dict = {}
        nodes_by_name = {}
        for u, v, data in out_edges:
            v_name = v[1]
            # Default to a local weight of 1 for unweighted edges, as
            # documented in sample_paths (the original indexed
            # data['weight'] directly, raising KeyError on unweighted
            # graphs).
            weight_dict[v_name] = data.get('weight', 1)
            if v_name in nodes_by_name:
                nodes_by_name[v_name].append(v)
            else:
                nodes_by_name[v_name] = [v]
        # Get list of possible downstream nodes with associated weights
        node_names = []
        weights = np.empty(len(nodes_by_name))
        nodes_by_name_keys = nodes_by_name.keys()
        # If we're testing, canonicalize the order of the nodes we're
        # choosing from so that seeded runs are reproducible
        if 'TEST_FLAG' in os.environ:
            nodes_by_name_keys = sorted(list(nodes_by_name_keys))
        # Get node names and weights in a corresponding order
        for ix, name in enumerate(nodes_by_name_keys):
            node_names.append(name)
            weights[ix] = weight_dict[name]
        # Normalize the weights to a proper probability distribution
        p = weights / np.sum(weights)
        pred_idx = np.random.choice(len(node_names), p=p)
        next_name = node_names[pred_idx]
        next_nodes = nodes_by_name[next_name]
        return (next_name, next_nodes)
def prune(g, nodes_to_prune, source, target):
    """Iteratively prunes nodes from a copy of the paths graph.

    We prune the graph *g* iteratively by the following procedure:

    1. Remove the nodes given by *nodes_to_prune* from the graph.
    2. Identify nodes (other than the source node) that now have no
       incoming edges.
    3. Identify nodes (other than the target node) that now have no outgoing
       edges.
    4. Set *nodes_to_prune* to the nodes identified in steps 2 and 3.
    5. Repeat from 1 until there are no more nodes to prune.

    Parameters
    ----------
    g : networkx.DiGraph
        Paths graph to prune.
    nodes_to_prune : list
        Nodes to prune from paths graph.
    source : tuple
        Source node, of the form (0, source_name).
    target : tuple
        Target node, of the form (target_depth, target_name).

    Returns
    -------
    networkx.DiGraph()
        Pruned paths graph. Note that when *nodes_to_prune* is empty the
        original graph object (not a copy) is returned.
    """
    # First check if we are pruning any nodes to prevent unnecessary copying
    # of the paths graph
    if not nodes_to_prune:
        return g
    # Make a copy of the graph
    g_pruned = g.copy()
    # Perform iterative pruning
    while nodes_to_prune:
        # Remove the nodes in our pruning list
        g_pruned.remove_nodes_from(nodes_to_prune)
        # Make a list of nodes whose in or out degree is now 0 (making
        # sure to exclude the source and target, whose depths are at 0 and
        # path_length, respectively)
        no_in_edges = [node for node, in_deg in g_pruned.in_degree()
                       if in_deg == 0 and node != source]
        no_out_edges = [node for node, out_deg in g_pruned.out_degree()
                        if out_deg == 0 and node != target]
        nodes_to_prune = set(no_in_edges + no_out_edges)
    return g_pruned
def get_past(src, tgt, pg_0):
    """Map each node of pg_0 to the nodes on paths from src down to it.

    Returns a dict where past[w] is a list containing w itself plus every
    node that appears on some path from the source to w. Computed level by
    level, from depth 1 up to the target's depth tgt[0], by unioning the
    already-computed pasts of each node's predecessors.
    """
    past = {src: [src]}
    for level in range(1, tgt[0] + 1):
        level_nodes = [n for n in pg_0.nodes() if n[0] == level]
        for node in level_nodes:
            accumulated = {node}
            for parent in pg_0.predecessors(node):
                accumulated.update(past[parent])
            past[node] = list(accumulated)
    return past
def _split_graph(src, tgt, x, X_ip1, X_im1, t_cf, tags_x, g):
    """Splits a node x from G_0 into multiple copies for the CFPG.

    The nodes in X_ip1 represent the possible successor nodes to x in the
    CFPG. For each successor w of x in X_ip1, we first obtain the set of
    possible antecedent nodes lying on paths from the source up to the edge
    x->w. We obtain this by finding the intersection between the tags of x
    and the tags of w. This is the set X_wx below for each w in X_ip1.

    However X_wx is the set of nodes (in G_0) from which we can reach x->w
    without encountering x[1] AND without encountering w[1]. As a result some
    nodes in X_wx may be isolated. Hence we prune them away.
    """
    V_x = []
    next_x = {}
    pred_x = {}
    t_x = {}
    S_ip1 = {}
    for w in X_ip1:
        X_wx = set(t_cf[w]) & set(tags_x)
        N_wx = list(X_wx)
        # TODO: Reimplement pruning so as to avoid inducing a subgraph?
        g_wx = g.subgraph(N_wx)
        # Find isolated nodes via degree counts. NOTE: the original tested
        # `not g_wx.successors(v)` / `not g_wx.predecessors(v)`, but under
        # networkx 2.x those return iterators, which are always truthy, so
        # no node was ever flagged for pruning.
        nodes_prune = [v for v in g_wx
                       if (v != x and g_wx.out_degree(v) == 0) or
                          (v != src and g_wx.in_degree(v) == 0)]
        g_wx_pruned = prune(g_wx, nodes_prune, src, x)
        # If the pruned graph still contains both src and x itself, there is
        # at least one path from the source to x->w. The nodes in this
        # subgraph constitute the new set of tags of the copy of x that lies
        # on a path between src and w.
        if x in g_wx_pruned and src in g_wx_pruned:
            s = frozenset(g_wx_pruned.nodes())
            S_ip1[w] = s
    S = set(S_ip1.values())
    # Each element of the set S will be a unique, (frozen) set of tags. We
    # will create one copy x_r of x for each unique tag set r in S, and we
    # assign r to be the set of tags of the new, split node x_r. The
    # successors of x_r are assembled using S_ip1; pred is defined in the
    # expected way using X_im1.
    for c, r in enumerate(S):
        x_c = (x[0], x[1], r)
        V_x.append(x_c)
        next_x[x_c] = [w for w in S_ip1.keys() if r == S_ip1[w]]
        pred_x[x_c] = [u for u in X_im1 if u in r]
        t_x[x_c] = r
    return (V_x, next_x, pred_x, t_x)
"""
def _dic_to_graph_pg(dic):
G = nx.DiGraph()
E = []
for k in dic.keys():
V_k = dic[k][0]
next_k = dic[k][1]
for v in V_k:
E_v = list(itertools.product([v], next_k[v]))
E.extend(E_v)
G.add_edges_from(E)
return G
"""
def _dic_to_graph(dic, pg):
    """Create a weighted graph from the per-level node/successor dicts.

    Parameters
    ----------
    dic : dict
        Maps each key to a pair ``(V_k, next_k)`` where ``V_k`` is a list of
        nodes and ``next_k`` maps each node in ``V_k`` to its successors.
    pg : PathsGraph
        Paths graph whose ``graph`` attribute supplies edge weights; the
        weight of an edge (u, v) is looked up using the first two elements
        of each node tuple.

    Returns
    -------
    networkx.DiGraph
        Graph over the nodes in ``dic`` with a 'weight' entry on every edge.
    """
    G = nx.DiGraph()
    edges = []
    for V_k, next_k in dic.values():
        for u in V_k:
            # Connect u to each successor, carrying over the weight of the
            # corresponding edge in the underlying paths graph. (The original
            # used itertools.product([v], next_k[v]), which shadowed the loop
            # variable; a plain nested loop is equivalent and clearer.)
            for v in next_k[u]:
                weight = pg.graph[u[0:2]][v[0:2]]['weight']
                edges.append((u, v, {'weight': weight}))
    G.add_edges_from(edges)
    return G
| {"/paths_graph/tests/test_pre_cfpg.py": ["/paths_graph/__init__.py"], "/paths_graph/pre_cfpg.py": ["/paths_graph/pg.py"], "/paths_graph/tests/test_paths_graph.py": ["/paths_graph/__init__.py"], "/paths_graph/tests/test_cfpg.py": ["/paths_graph/__init__.py"], "/paths_graph/cfpg.py": ["/paths_graph/__init__.py", "/paths_graph/pre_cfpg.py"], "/paths_graph/api.py": ["/paths_graph/pg.py", "/paths_graph/cfpg.py"], "/paths_graph/__init__.py": ["/paths_graph/pg.py", "/paths_graph/pre_cfpg.py", "/paths_graph/cfpg.py", "/paths_graph/paths_tree.py", "/paths_graph/api.py"]} |
69,089 | kkaris/paths_graph | refs/heads/master | /paths_graph/paths_tree.py | import os
from collections import deque
import numpy as np
import networkx as nx
class PathsTree(object):
    """Build a tree representing a set of paths.

    Nodes in the tree are tuples representing the common prefix of all
    downstream paths. The head of the tree is an empty tuple, `()`. Each leaf
    of the tree represents a complete path.

    Parameters
    ----------
    paths : iterable of tuples
        Each element of the iterable is a tuple representing a sequence of
        nodes that constitutes a path.
    source_graph : networkx.DiGraph (optional)
        Source graph used to generate the set of paths and containing edge
        weights keyed by 'weight' in the edge metadata. If provided, allows
        weighted sampling over paths in the PathsTree. If not provided, all
        edges are considered to have locally equal weights of 1.

    Attributes
    ----------
    graph : networkx.DiGraph
        A directed graph representing the set of paths as a tree.
    """
    def __init__(self, paths, source_graph=None):
        self.graph = nx.DiGraph()
        if paths:
            edge_set = set()
            for path in paths:
                # Split path at all branch points: every prefix of the path
                # becomes a tree node linked to the next-longer prefix, so
                # shared prefixes across paths collapse into shared nodes.
                for i in range(0, len(path)):
                    head = tuple(path[0:i])
                    tail = tuple(path[0:i+1])
                    edge_set.add((head, tail))
            # Get edge weights (have to do this as a separate step because the
            # weight dictionary is not hashable in the set
            edges_with_weights = []
            for head, tail in edge_set:
                if source_graph and len(head) > 0:
                    # The tree edge's weight is the weight of the single hop
                    # it appends, looked up in the source graph.
                    u = head[-1]
                    v = tail[-1]
                    weight = source_graph[u][v].get('weight', 1)
                else:
                    # No source graph (or the root edge): locally uniform.
                    weight = 1
                edges_with_weights.append((head, tail, {'weight': weight}))
            self.graph.add_edges_from(edges_with_weights)

    def sample(self, num_samples=1000):
        """Sample a set of paths from the path tree.

        At each sampling step, the next node is chosen at random from the set
        of successors of the current node according to the edge weight in the
        'weight' entry of the edge data.

        Parameters
        ----------
        num_samples : int
            Number of paths to sample.

        Returns
        -------
        list of tuples
            Sampled paths; each is the tuple stored at the leaf reached.
        """
        # Make sure we have a graph to sample from
        if not self.graph:
            return []
        # If so, do the sampling
        sampled_paths = []
        while len(sampled_paths) < num_samples:
            # The root of the tree should be the empty tuple
            node = tuple()
            while True:
                out_edges = list(self.graph.out_edges(node, data=True))
                # If there are no successors to the current node, then we've
                # hit a leaf of the tree and have found a path
                if not out_edges:
                    break
                # For determinism in testing, sort the out edges
                if 'TEST_FLAG' in os.environ:
                    out_edges = sorted(list(out_edges))
                # The float is necessary here for Python 2 compatibility
                weights = [float(t[2]['weight']) for t in out_edges]
                # Normalize the weights to a proper probability distribution
                p = np.array(weights) / np.sum(weights)
                # Choose a successor at random based on the weights
                pred_idx = np.random.choice(range(len(out_edges)), p=p)
                node = out_edges[pred_idx][1]
            # Add the path (contained by the leaf node) to the list of sampled
            # paths
            sampled_paths.append(node)
        return sampled_paths

    def path_probabilities(self):
        """Get probability of each path given edge probabilities.

        Returns
        -------
        dict
            Dictionary mapping paths (as tuples of nodes) to probabilities.
        """
        if not self.graph:
            return {}
        # Breadth-first traversal from the root, propagating the cumulative
        # probability of reaching each node down the tree.
        # NOTE(review): the root probability is wrapped in a list ([1]) so
        # that multiplication by a numpy array below broadcasts; leaf values
        # are therefore numpy scalars (or [1] at the root) -- confirm callers
        # expect that rather than plain floats.
        root = (tuple(), [1])
        queue = deque([root])
        paths = {}
        while queue:
            current_node, current_prob = queue.popleft()
            # Get successors of this node with edge weights
            succ_nodes = []
            succ_weights = []
            for succ_node, succ_data in self.graph[current_node].items():
                succ_nodes.append(succ_node)
                succ_weights.append(float(succ_data['weight']))
            # If no successors it's a leaf node (path)
            if not succ_nodes:
                paths[current_node] = current_prob
            # Normalize the weights to a proper probability distribution
            # (for a leaf this is an empty array and extend() is a no-op).
            p = (np.array(succ_weights) / np.sum(succ_weights)) * current_prob
            queue.extend(zip(succ_nodes, p))
        return paths
| {"/paths_graph/tests/test_pre_cfpg.py": ["/paths_graph/__init__.py"], "/paths_graph/pre_cfpg.py": ["/paths_graph/pg.py"], "/paths_graph/tests/test_paths_graph.py": ["/paths_graph/__init__.py"], "/paths_graph/tests/test_cfpg.py": ["/paths_graph/__init__.py"], "/paths_graph/cfpg.py": ["/paths_graph/__init__.py", "/paths_graph/pre_cfpg.py"], "/paths_graph/api.py": ["/paths_graph/pg.py", "/paths_graph/cfpg.py"], "/paths_graph/__init__.py": ["/paths_graph/pg.py", "/paths_graph/pre_cfpg.py", "/paths_graph/cfpg.py", "/paths_graph/paths_tree.py", "/paths_graph/api.py"]} |
69,090 | kkaris/paths_graph | refs/heads/master | /paths_graph/api.py | import logging
import itertools
import numpy as np
import networkx as nx
from .pg import get_reachable_sets, PathsGraph
from .cfpg import CFPG
logger = logging.getLogger('paths_graph')
__all__ = ['load_signed_sif', 'sample_paths', 'enumerate_paths', 'count_paths']
def load_signed_sif(sif_file):
    """Load edges from a SIF file with lines of the form 'u polarity v'.

    Entries within each line can be separated by spaces and/or tabs. Polarity
    should be specified by 0 (for a positive/activating edge) or 1 (for a
    negative/inhibitory edge).

    Parameters
    ----------
    sif_file : str
        Path to the SIF file.

    Returns
    -------
    nx.DiGraph
        Graph with the sign information encoded in the 'sign' attribute of
        each edge.
    """
    edges = []
    with open(sif_file, 'rt') as f:
        for line in f:
            # Skip blank lines instead of failing on the unpack below.
            if not line.strip():
                continue
            # split() with no argument splits on any run of whitespace, so
            # tabs and multiple spaces work as the docstring promises
            # (split(' ') did not handle tabs).
            u, polarity, v = line.split()
            # Eliminate self-loops
            if u == v:
                continue
            edges.append((u, v, {'sign': int(polarity)}))
    g = nx.DiGraph()
    g.add_edges_from(edges)
    return g
def sample_paths(g, source, target, max_depth=None, num_samples=1000,
                 cycle_free=True, signed=False, target_polarity=0):
    """Sample source-to-target paths across a range of path lengths.

    Convenience wrapper around the per-length machinery: the caller does not
    need to build PathsGraph or CFPG instances for each depth explicitly.

    Note that an equal number of paths is drawn at every depth; to sample
    with a distribution that reflects the prevalence of paths of different
    lengths, use an instance of `CombinedCFPG` instead.

    Parameters
    ----------
    g : networkx.DiGraph
        The underlying graph on which paths will be generated.
    source : str
        Name of the source node.
    target : str
        Name of the target node.
    max_depth : Optional[int]
        The maximum path length to consider; defaults to the number of nodes
        in the graph when not given.
    num_samples : int
        Number of path samples at each depth.
    cycle_free : bool
        If True (the default), only cycle-free paths are sampled via CFPGs.
    signed : bool
        Whether the underlying graph (and the derived f_level/b_level
        reachable sets) carries signed edges. Sign information lives in the
        'sign' field of the edge data: 0 positive, 1 negative.
    target_polarity : 0 or 1
        For a signed graph, the polarity of the target node: 0 for
        positive/activation, 1 for negative/inhibition.

    Returns
    -------
    list of paths
        Each path is a sequence of node names from source to target.
    """
    depth_opts = (max_depth, cycle_free, signed, target_polarity)
    return _run_by_depth('sample_paths', [num_samples], g, source, target,
                         *depth_opts)
def enumerate_paths(g, source, target, max_depth=None,
                    cycle_free=True, signed=False, target_polarity=0):
    """Enumerate all source-to-target paths across a range of path lengths.

    Parameters
    ----------
    g : networkx.DiGraph
        The underlying graph on which paths will be generated.
    source : str
        Name of the source node.
    target : str
        Name of the target node.
    max_depth : Optional[int]
        The maximum path length to consider; defaults to the number of nodes
        in the graph when not given.
    cycle_free : bool
        If True (the default), only cycle-free paths are enumerated via CFPGs.
    signed : bool
        Whether the underlying graph (and the derived f_level/b_level
        reachable sets) carries signed edges. Sign information lives in the
        'sign' field of the edge data: 0 positive, 1 negative.
    target_polarity : 0 or 1
        For a signed graph, the polarity of the target node: 0 for
        positive/activation, 1 for negative/inhibition.

    Returns
    -------
    list of paths
        Each path is a sequence of node names from source to target.
    """
    depth_opts = (max_depth, cycle_free, signed, target_polarity)
    return _run_by_depth('enumerate_paths', [], g, source, target,
                         *depth_opts)
def count_paths(g, source, target, max_depth=None,
                cycle_free=True, signed=False, target_polarity=0):
    """Count unique source-to-target paths without enumerating them.

    Parameters
    ----------
    g : networkx.DiGraph
        The underlying graph on which paths will be generated.
    source : str
        Name of the source node.
    target : str
        Name of the target node.
    max_depth : Optional[int]
        The maximum path length to consider; defaults to the number of nodes
        in the graph when not given.
    cycle_free : bool
        If True (the default), only cycle-free paths are counted via CFPGs.
    signed : bool
        Whether the underlying graph (and the derived f_level/b_level
        reachable sets) carries signed edges. Sign information lives in the
        'sign' field of the edge data: 0 positive, 1 negative.
    target_polarity : 0 or 1
        For a signed graph, the polarity of the target node: 0 for
        positive/activation, 1 for negative/inhibition.

    Returns
    -------
    int
        Total number of paths up to the specified maximum depth.
    """
    depth_opts = (max_depth, cycle_free, signed, target_polarity)
    return _run_by_depth('count_paths', [], g, source, target,
                         *depth_opts)
def _run_by_depth(func_name, func_args, g, source, target, max_depth=None,
                  cycle_free=True, signed=False, target_polarity=0):
    """Run a function over paths graphs computed for different lengths.

    Parameters
    ----------
    func_name : str
        Name of the method to call on each per-length paths graph
        ('sample_paths', 'enumerate_paths' or 'count_paths').
    func_args : list
        Extra positional arguments forwarded to that method.
    g, source, target, max_depth, cycle_free, signed, target_polarity
        See the public wrapper functions for their meaning.

    Returns
    -------
    int or list
        A running total for 'count_paths'; otherwise the concatenated list
        of results over all path lengths.
    """
    if max_depth is None:
        max_depth = len(g)
    f_level, b_level = get_reachable_sets(g, source, target, max_depth,
                                          signed=signed)
    # 'count_paths' accumulates an integer total; the other functions
    # accumulate lists, so += works uniformly on both.
    if func_name == 'count_paths':
        results = 0
    else:
        results = []
    # Compute a paths graph for each length and apply the requested function.
    for path_length in range(1, max_depth+1):
        # Lazy %-style logging args: the message is only formatted if the
        # INFO level is actually enabled.
        logger.info("Length %d: computing paths graph", path_length)
        args = [g, source, target, path_length, f_level, b_level]
        kwargs = {'signed': signed, 'target_polarity': target_polarity}
        if cycle_free:
            pg = CFPG.from_graph(*args, **kwargs)
        else:
            pg = PathsGraph.from_graph(*args, **kwargs)
        # An empty (falsy) paths graph means no paths of this length exist.
        # (The original also stored each pg in a dict that was never read;
        # that dead local has been removed.)
        if pg:
            func = getattr(pg, func_name)
            results += func(*func_args)
    return results
| {"/paths_graph/tests/test_pre_cfpg.py": ["/paths_graph/__init__.py"], "/paths_graph/pre_cfpg.py": ["/paths_graph/pg.py"], "/paths_graph/tests/test_paths_graph.py": ["/paths_graph/__init__.py"], "/paths_graph/tests/test_cfpg.py": ["/paths_graph/__init__.py"], "/paths_graph/cfpg.py": ["/paths_graph/__init__.py", "/paths_graph/pre_cfpg.py"], "/paths_graph/api.py": ["/paths_graph/pg.py", "/paths_graph/cfpg.py"], "/paths_graph/__init__.py": ["/paths_graph/pg.py", "/paths_graph/pre_cfpg.py", "/paths_graph/cfpg.py", "/paths_graph/paths_tree.py", "/paths_graph/api.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.