index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
37,249,343
|
0xDECAFC0FFEE/edge_meta_learning
|
refs/heads/master
|
/src/models/vgg.py
|
import torch
import torch.nn as nn
import torch.nn.init as init
import numpy as np
class VGG_cifar10(nn.Module):
    """VGG-style CNN for CIFAR-10 built from a declarative layer list.

    layers_desc is a sequence of (name, params) pairs where name is one of
    "conv", "relu", "pool", "flatten", "linear".  The running activation
    shape is tracked so each layer can be sized automatically from
    data_input_shape (channels, height, width).
    """

    def __init__(self, layers_desc, data_input_shape=(3, 32, 32)):
        super(VGG_cifar10, self).__init__()
        in_shape = data_input_shape
        self.layers = []
        for layer_desc in layers_desc:
            name, params = layer_desc
            if name == "conv":
                out_channels = params["out"]
                batchnorm = params.get("bn", False)
                # NOTE(review): the "stride" config key is actually used as the
                # kernel size — the Conv2d stride below is fixed at 1
                stride_h, stride_w = params.get("stride", (3, 3))
                in_channels, h, w = in_shape
                conv = nn.Conv2d(in_channels, out_channels, kernel_size=(stride_h, stride_w), stride=1, padding=1)
                init.xavier_normal_(conv.weight.data)
                init.normal_(conv.bias.data)
                self.layers.append(conv)
                if batchnorm:
                    self.layers.append(nn.BatchNorm2d(out_channels))
                # output size per dim: in + 2*padding - kernel + 1
                # bug fix: the width previously used stride_h (the kernel
                # height), miscomputing shapes for non-square kernels
                in_shape = (out_channels, h+2-stride_h+1, w+2-stride_w+1)
            elif name == "relu":
                self.layers.append(nn.ReLU())
            elif name == "pool":
                self.layers.append(nn.MaxPool2d(kernel_size=2))
                in_channels, h, w = in_shape
                in_shape = (in_channels, int(h/2), int(w/2))
            elif name == "flatten":
                self.layers.append(nn.Flatten(start_dim=1))
                in_shape = (np.prod(in_shape), )
            elif name == "linear":
                out_channels = params["out"]
                [in_channels] = in_shape
                matmul = nn.Linear(in_channels, out_channels)
                init.xavier_normal_(matmul.weight.data)
                init.normal_(matmul.bias.data)
                self.layers.append(matmul)
                in_shape = [out_channels]
            else:
                raise Exception(f"layer {layer_desc} not implemented")
        # register each layer so parameters() / state_dict() see them
        for i, layer in enumerate(self.layers):
            self.add_module(str(i), layer)

    def forward(self, x):
        """Apply the configured layers in order."""
        for layer in self.layers:
            x = layer(x)
        return x
|
{"/src/models/lth_maml.py": ["/src/models/mask_ops.py", "/src/models/lth.py", "/src/models/meta.py"], "/src/models/lth.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/vgg.py", "/src/models/lenet.py"], "/src/models/meta.py": ["/src/models/mask_ops.py"], "/src/models/rigl_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"], "/src/load_data.py": ["/src/utils.py"], "/src/models/sparse_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"]}
|
37,249,344
|
0xDECAFC0FFEE/edge_meta_learning
|
refs/heads/master
|
/src/models/lth_maml.py
|
from tqdm import tqdm
import numpy as np
np.set_printoptions(linewidth=np.inf)
import torch
torch.set_printoptions(linewidth=np.inf)
try:
from torch.utils.tensorboard import SummaryWriter
except:
from unittest.mock import Mock as SummaryWriter
import copy
from .mask_ops import build_mask, apply_mask, update_mask
from .lth import detect_early_bird
from src.models.meta import Meta
from pathlib import Path
import pickle
from datetime import datetime, timedelta
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
def run(dataset, lottery_ticket_params, logger):
    """
    executes a lottery ticket hypothesis run (repeatedly trains n epochs, prunes, reinitializes)
    """
    # unpacking inputs
    prune_strategy = lottery_ticket_params["prune_strategy"]
    training_params = lottery_ticket_params["model_training_params"]
    dataset_params = lottery_ticket_params["dataset_params"]
    prune_rate = prune_strategy["rate"]
    train_data, val_data, test_data = dataset
    # building model
    # torch.set_default_tensor_type(get_dtype(training_params))
    # peek at one batch to size the model from the support images
    images_spt, labels_spt, images_qry, labels_qry = next(iter(train_data))
    input_shape = images_spt.shape[2:]
    model = build_model(training_params, dataset_params, input_shape)
    # CPU copy of the untrained weights; every prune iteration rewinds to these
    initial_weights = {n: w.cpu().detach() for n, w in model.state_dict().items()}
    mask = build_mask(model.net, prune_strategy)
    # setting up logging
    masks, model_state_dicts = [], []
    train_accs_per_prune_iter = []
    val_accs_per_prune_iter = []
    test_accs_per_prune_iter = []
    epoch_runtimes_per_prune_iter = []
    writer = SummaryWriter(log_dir=f'tensorboard/{lottery_ticket_params["expr_id"]}')
    # initial snapshot (prune_iterations "0") so an interrupted run still leaves a record
    logger.snapshot(
        expr_id=lottery_ticket_params["expr_id"],
        initial_weights=initial_weights,
        masks=masks,
        model_state_dicts=model_state_dicts,
        expr_params_JSON=lottery_ticket_params,
        train_accs_JSON=train_accs_per_prune_iter,
        test_accs_JSON=test_accs_per_prune_iter,
        val_accs_JSON=val_accs_per_prune_iter,
        epoch_runtimes_TXT=str(epoch_runtimes_per_prune_iter),
        prune_iterations_TXT="0"
    )
    print(f"{len(train_data)} training steps, {training_params['meta_training_epochs']} training epochs, {prune_strategy['iterations']} pruning iterations")
    for prune_iter in range(prune_strategy["iterations"]):
        print(f"========================\n{lottery_ticket_params['uid']} starting prune iteration {prune_iter}\n========================")
        # reinitializing weights
        model.load_state_dict(initial_weights)
        # getting current pruned rate and training network to completion
        # compounding schedule: after k prunings, (1-rate)^k of the weights remain
        pruned_rate = 1-(1-prune_rate)**(prune_iter)
        expr_params = {
            "prune_iter": prune_iter,
            "pruned_rate": pruned_rate,
            **training_params
        }
        train_accs, val_accs, best_mask_model, epoch_runtimes = train(model, mask, train_data, val_data, expr_params, writer, prune_iter)
        train_accs_per_prune_iter.append({"prune_iter": prune_iter, "prune_rate": pruned_rate, "train_accs": train_accs})
        epoch_runtimes_per_prune_iter.append(epoch_runtimes)
        # score and prune from the best-validation weights, not the final ones
        model.load_state_dict(best_mask_model)
        masks.append(copy.deepcopy(mask))
        model_state_dicts.append(best_mask_model)
        val_accs_per_prune_iter.append({"prune_iter": prune_iter, "prune_rate": pruned_rate, "val_accs": list(val_accs)})
        # scoring masked model
        test_accs = test(model, mask, test_data, training_params)
        print(f"prune iter {prune_iter} test accs: {test_accs}")
        # find the best test accuracy and the step it occurred at
        max_test_acc, max_test_acc_epoch = 0, 0
        for i, test_acc in enumerate(test_accs):
            if test_acc > max_test_acc:
                max_test_acc_epoch = i
                max_test_acc = test_acc
            writer.add_scalars("test acc", {f"prune iteration {prune_iter}": test_acc}, i)
        writer.add_scalars("max test epoch per prune iter", {lottery_ticket_params["uid"]: max_test_acc_epoch}, prune_iter)
        writer.add_scalars("max test acc per prune iter", {lottery_ticket_params["uid"]: max_test_acc}, prune_iter)
        # "early stop" point: first step where the accuracy decreases
        early_stop_epoch = 0
        for i in range(len(test_accs)-1):
            if test_accs[i] > test_accs[i+1]:
                early_stop_epoch = i
                early_stop_acc = test_accs[i]
                break
        else:
            # never decreased: take the final step
            early_stop_acc = test_accs[-1]
            early_stop_epoch = len(test_accs)-1
        writer.add_scalars("early stop epoch", {lottery_ticket_params["uid"]: early_stop_epoch}, prune_iter)
        writer.add_scalars("early stop acc", {lottery_ticket_params["uid"]: early_stop_acc}, prune_iter)
        writer.flush()
        test_accs_per_prune_iter.append({"prune_iter": prune_iter, "prune_rate": pruned_rate, "test_accs": list(test_accs)})
        logger.snapshot(
            expr_id=lottery_ticket_params["expr_id"],
            initial_weights=initial_weights,
            masks=masks,
            model_state_dicts=model_state_dicts,
            expr_params_JSON=lottery_ticket_params,
            train_accs_JSON=train_accs_per_prune_iter,
            test_accs_JSON=test_accs_per_prune_iter,
            val_accs_JSON=val_accs_per_prune_iter,
            epoch_runtimes_TXT=str(epoch_runtimes_per_prune_iter),
            prune_iterations_TXT=str(prune_iter+1)
        )
        # pruning weights
        next_pass_prune_rate = 1-(1-prune_rate)**(1+prune_iter)
        update_mask(model.net, mask, next_pass_prune_rate, prune_strategy)
        print(f"{prune_iter}. perc_left: {1-pruned_rate}, test_acc {test_accs}")
    writer.close()
    return mask
def get_dtype(training_params):
    """Map the configured dtype name to the matching torch tensor type.

    Returns torch.FloatTensor for "float32" (also the default when the key
    is absent), torch.DoubleTensor for "float64", torch.HalfTensor for
    "float16", and None for anything unrecognized.

    Bug fix: an `assert training_params["dtype"] == "float32"` used to
    precede the lookup, which made the float64/float16 branches unreachable
    and crashed (KeyError) when the key was missing despite the .get default.
    """
    dtype = training_params.get("dtype", "float32")
    if dtype == "float64":
        return torch.DoubleTensor
    elif dtype == "float32":
        return torch.FloatTensor
    elif dtype == "float16":
        return torch.HalfTensor
    return None
def build_model(training_params, dataset_params, data_input_shape):
    """Construct the meta-learning model for the configured model/dataset pair.

    Raises:
        Exception: when the (model_name, dataset_name) combination is not supported.
    """
    model_name = training_params["model_name"]
    dataset_name = dataset_params["dataset_name"]
    layer_definitions = training_params.get("layer_definitions", None)
    # both imagenet variants used identical construction; the duplicated
    # branches were merged
    if model_name == "MAML" and dataset_name in ("mini_imagenet", "mini_mini_imagenet"):
        return Meta(training_params, dataset_params, layer_definitions).to(device)
    raise Exception(f"model {model_name} and dataset {dataset_name} not implemented yet")
def train(model, mask, train_data, val_data, expr_params, writer, prune_iter):
    """Meta-train `model` under `mask` for the configured number of epochs.

    Logs averaged training accuracy roughly 100 times per epoch and runs
    validation roughly 5 times per epoch, tracking the weights with the best
    validation accuracy.

    Returns:
        (train_accs, val_accs, best_model_state, epoch_runtimes) where
        best_model_state is a CPU copy of the best-validation weights.
    """
    train_accs, val_accs = [], []
    epoch_runtimes = []
    best_val_acc, best_model_state = 0, {}
    prev_acc = 0
    n_steps = len(train_data)
    # bug fix: n_steps//100 (and n_steps//5) are 0 for short loaders, which
    # made the modulo expressions below raise ZeroDivisionError; clamp to >= 1
    log_every = max(1, n_steps // 100)
    eval_every = max(1, n_steps // 5)
    for epoch in list(range(expr_params["meta_training_epochs"])):
        # fetch meta_batchsz num of episode each time
        print(f"train epoch {epoch}")
        start_time = datetime.now()
        epoch_val_accs = []
        epoch_train_accs = []
        for step, (x_spt, y_spt, x_qry, y_qry) in enumerate(train_data):
            x_spt, y_spt = x_spt.to(device), y_spt.to(device)
            x_qry, y_qry = x_qry.to(device), y_qry.to(device)
            accs = model(mask, x_spt, y_spt, x_qry, y_qry)
            epoch_train_accs.append(accs)
            if step % log_every == 0:
                averaged_accs = np.mean(np.array(epoch_train_accs), axis=0)
                epoch_train_accs = []
                train_accs.append({"epoch": epoch, "step": step, "accs": list(averaged_accs)})
                # extrapolate the full-epoch runtime from progress so far
                perc = (step+1)/(n_steps+1)
                epoch_runtime = (datetime.now()-start_time)/perc
                epoch_runtime = epoch_runtime - (epoch_runtime%timedelta(seconds=1))
                max_acc, max_i = int(max(averaged_accs)*1000)/1000, np.argmax(averaged_accs)
                print(f"e{epoch} s{step} {perc*100:.0f}%, \tmax {epoch_runtime}\t{max_acc}\t{max_i}\ttraining acc: {averaged_accs}")
            if step % eval_every == eval_every - 1:  # evaluation
                print("validating model...")
                accs = test(model, mask, val_data, expr_params)
                print('val acc:', accs)
                writer.add_scalars(f"prune {prune_iter} val passes", {f"epoch {epoch:02}": max(accs)}, step)
                epoch_val_accs.append(max(accs))
                if max(accs) > best_val_acc:
                    best_val_acc = max(accs)
                    best_model_state = {n: w.cpu().detach() for n, w in model.state_dict().items()}
        val_accs.extend(epoch_val_accs)
        runtime = datetime.now()-start_time
        epoch_runtimes.append(runtime.total_seconds())
        print(f"epoch time length: {runtime}")
        # bug fix: guard against an empty epoch_val_accs (possible when the
        # loader is exhausted before the first evaluation point) — max() and
        # the len() division below would otherwise raise
        if expr_params.get("meta_training_early_stopping", False) and epoch_val_accs and prev_acc > max(epoch_val_accs):
            print("early stopping triggered; stopping")
            break
        elif epoch_val_accs:
            # rolling reference: mean of the last (up to) 6 validation scores
            prev_acc = sum(epoch_val_accs[-6:])/len(epoch_val_accs[-6:])
    return train_accs, val_accs, best_model_state, epoch_runtimes
def test(model, mask, test_data, expr_params):
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
accs_all_test = []
for x_spt, y_spt, x_qry, y_qry in test_data:
x_spt, y_spt = x_spt.squeeze(0).to(device), y_spt.squeeze(0).to(device)
x_qry, y_qry = x_qry.squeeze(0).to(device), y_qry.squeeze(0).to(device)
accs = model.finetunning(mask, x_spt, y_spt, x_qry, y_qry)
accs_all_test.append(accs)
# [b, update_step+1]
accs = np.array(accs_all_test).mean(axis=0)
return accs
def test_finetuning(test_data, lottery_ticket_params, log_dir):
    """Re-evaluate every saved (state_dict, mask) checkpoint pair on test_data.

    Loads the pickled per-prune-iteration checkpoints from log_dir and returns
    a list of accuracy arrays, one per prune iteration.
    """
    training_params = lottery_ticket_params["model_training_params"]
    dataset_params = lottery_ticket_params["dataset_params"]
    # building model: size it from one batch's support images
    images_spt, labels_spt, images_qry, labels_qry = next(iter(test_data))
    input_shape = images_spt.shape[2:]
    model = build_model(training_params, dataset_params, input_shape)
    with open(log_dir/"model_state_dicts.pkl", "rb") as weight_handle:
        state_dicts = pickle.load(weight_handle)
    with open(log_dir/"masks.pkl", "rb") as mask_handle:
        masks = pickle.load(mask_handle)
    accs = []
    for state_dict, mask in tqdm(zip(state_dicts, masks)):
        model.load_state_dict(state_dict)
        acc = test(model, mask, test_data, training_params)
        print(acc)
        # bug fix: previously appended the accumulator to itself
        # (accs.append(accs)), producing a self-referential list
        accs.append(acc)
    return accs
|
{"/src/models/lth_maml.py": ["/src/models/mask_ops.py", "/src/models/lth.py", "/src/models/meta.py"], "/src/models/lth.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/vgg.py", "/src/models/lenet.py"], "/src/models/meta.py": ["/src/models/mask_ops.py"], "/src/models/rigl_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"], "/src/load_data.py": ["/src/utils.py"], "/src/models/sparse_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"]}
|
37,249,345
|
0xDECAFC0FFEE/edge_meta_learning
|
refs/heads/master
|
/src/models/lth.py
|
from itertools import chain
import torch.nn.functional as F
import torch.nn as nn
import tqdm
import numpy as np
import torch
try:
from torch.utils.tensorboard import SummaryWriter
except:
from unittest.mock import Mock as SummaryWriter
import copy
from ..utils import Logger
from .mask_ops import build_mask, apply_mask, update_mask
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
from pathlib import Path
def run(dataset, lottery_ticket_params):
    """
    executes a lottery ticket hypothesis run (repeatedly trains n epochs, prunes, reinitializes)
    """
    # unpacking inputs
    prune_strategy = lottery_ticket_params["prune_strategy"]
    training_params = lottery_ticket_params["model_training_params"]
    prune_rate = prune_strategy["rate"]
    train_data, val_data, test_data = dataset
    # building model
    # peek at one batch to derive the per-sample input shape
    images, labels = next(iter(train_data))
    input_shape = images.shape[1:]
    model = build_model(training_params, input_shape)
    # saving initial model weights
    initial_weights = {n: w.cpu().detach() for n, w in model.state_dict().items()}
    mask = build_mask(model, prune_strategy)
    # setting up logging
    masks = []
    project_dir = Path(lottery_ticket_params["project_dir"])
    Logger(project_dir, project_dir/"logs").save_snapshot(
        expr_id=lottery_ticket_params["expr_id"],
        expr_params=lottery_ticket_params,
        initial_weights=initial_weights,
    )
    writer = SummaryWriter(log_dir=f'tensorboard/{lottery_ticket_params["expr_id"]}')
    for prune_iter in range(prune_strategy["iterations"]):
        print(f"starting prune iteration {prune_iter}")
        # reinitializing weights
        model.load_state_dict(initial_weights)
        # getting current pruned rate and training network to completion
        # compounding schedule: after k prunings, (1-rate)^k of the weights remain
        pruned_rate = 1-(1-prune_rate)**(prune_iter)
        expr_params = {
            "prune_iter": prune_iter,
            "pruned_rate": pruned_rate,
            **training_params
        }
        val_accs, best_mask_model = train(model, mask, train_data, val_data, expr_params, writer)
        # score and prune from the best-validation weights, not the final ones
        model.load_state_dict(best_mask_model)
        masks.append(copy.deepcopy(mask))
        # scoring masked model
        test_acc = test(model, mask, test_data, expr_params)
        writer.add_scalar("test acc", test_acc, prune_iter)
        writer.flush()
        # pruning weights
        next_pass_prune_rate = 1-(1-prune_rate)**(1+prune_iter)
        update_mask(model, mask, next_pass_prune_rate, prune_strategy)
        print(f"{prune_iter}. perc_left: {1-pruned_rate}, test_acc {test_acc}")
        if prune_strategy["name"] == "early_bird":
            if detect_early_bird(masks):
                print("found early bird ticket")
                break
    writer.close()
    project_dir = Path(lottery_ticket_params["project_dir"])
    # NOTE(review): save_snapshot raises when the expr_id log folder already
    # exists, and it was created by the identical call above — this second call
    # looks like it should be snapshot()/update_snapshot(); confirm intent
    Logger(project_dir, project_dir/"logs").save_snapshot(
        expr_id=lottery_ticket_params["expr_id"],
        expr_params=lottery_ticket_params,
        initial_weights=initial_weights,
    )
    return mask
def detect_early_bird(masks):
    """Return True when the pruning mask has stabilized (an "early bird" ticket).

    The newest mask is compared against up to the four preceding ones; if any
    comparison differs on more than 10% of entries the ticket is not stable.
    At least 5 recorded masks are required before reporting success.
    """
    if len(masks) < 2:
        return False
    newest = masks[-1]
    for older in masks[-2:-6:-1]:
        total_diffs, total_entries = 0, 0
        assert({name for name in newest} == {name for name in older})
        for name, newest_mask in newest.items():
            print(name)
            print(newest_mask)
            print(older[name])
            total_entries += np.prod(newest_mask.shape)
            total_diffs += torch.sum(newest_mask!=older[name])
        print(f"diffs {total_diffs}")
        if float(total_diffs) / total_entries > .1:
            return False
    if len(masks) < 5:
        return False
    return True
def build_model(training_params, data_input_shape):
    """Instantiate the model named in training_params for its dataset and move it to `device`.

    Raises:
        Exception: when the (model_name, dataset_name) pair is unsupported.
    """
    requested_model = training_params["model_name"]
    requested_dataset = training_params["dataset_name"]
    layer_defs = training_params.get("layer_definitions", None)
    if requested_model == "VGG" and requested_dataset == "cifar10":
        from src.models.vgg import VGG_cifar10
        return VGG_cifar10(layer_defs, data_input_shape).to(device)
    if requested_model == "lenet" and requested_dataset == "mnist":
        from src.models.lenet import LeNet_mnist
        return LeNet_mnist().to(device)
    raise Exception(f"model {requested_model} and dataset {requested_dataset} not implemented yet")
def initialize_optimizer(expr_params, model):
    """Build the optimizer described by expr_params["optimizer"] = (name, kwargs)."""
    opt_name, opt_kwargs = expr_params["optimizer"]
    if opt_name != "adam":
        raise Exception(f"optimizer {opt_name} not implemented")
    return torch.optim.Adam(model.parameters(), **opt_kwargs)
def train(model, mask, train_data, val_data, expr_params, writer):
    """Train the masked model, validating once per epoch.

    Tracks the best validation accuracy and optionally early-stops when the
    latest validation score is not better than any of the previous four.

    Returns:
        (val_accs, best_model_state): per-epoch validation accuracies and a
        CPU copy of the best-validation weights.
    """
    assert(expr_params["dataset_name"] in ["mnist", "cifar10"])
    apply_mask(model, mask)
    val_accs = []
    best_val_acc, best_model_state = 0, {}
    optimizer = initialize_optimizer(expr_params, model)
    pass_name = f"{expr_params['prune_iter']}, {(1-expr_params['pruned_rate'])*100:.3f}% left"
    if expr_params["loss_func"] == "cross_entropy":
        loss_func = nn.CrossEntropyLoss()
    else:
        raise Exception(f"loss func {expr_params['loss_func']} not implemeted")
    # bug fix: this file does `import tqdm` (the module), so a bare
    # tqdm(range(...)) raised TypeError: 'module' object is not callable
    for epoch in tqdm.tqdm(range(expr_params["training_iterations"])):
        loss, train_acc = train_one_epoch(model, mask, train_data, optimizer, expr_params, loss_func)
        val_acc = test(model, mask, val_data, expr_params)
        writer.add_scalars("train passes", {pass_name: train_acc}, epoch)
        writer.add_scalars("vals passes", {pass_name: val_acc}, epoch)
        writer.flush()
        # record best score
        val_accs.append(val_acc)
        if best_val_acc < val_acc:
            best_val_acc = val_acc
            best_model_state = {n: w.cpu().detach() for n, w in model.state_dict().items()}
        # bug fix: the condition was `if ["early_stopping"] and ...` — a
        # truthy list literal, so early stopping was unconditionally enabled
        # instead of reading the config flag
        if expr_params.get("early_stopping", False) and len(val_accs) >= 5:
            # stop only if none of the previous 4 scores beat the latest one
            for i in range(-2, -6, -1):
                if val_accs[i] < val_accs[-1]:
                    break
            else:
                print(f"early stopping triggered at epoch {epoch}")
                break
    return val_accs, best_model_state
def train_one_epoch(model, mask, train_data, optimizer, expr_params, loss_func):
    """Run one optimization pass over train_data.

    Returns:
        (last_batch_loss, epoch_accuracy): the loss of the final batch and the
        accuracy over the whole epoch's samples.
    """
    run_device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model.train()  # training mode (enables dropout / batchnorm updates)
    n_correct = 0
    for inputs, targets in train_data:
        optimizer.zero_grad()
        inputs, targets = inputs.to(run_device), targets.to(run_device)
        apply_mask(model, mask)  # re-zero pruned weights before the forward pass
        logits = model(inputs)
        batch_loss = loss_func(logits, targets)
        batch_loss.backward()
        optimizer.step()
        # index of the max log-probability = predicted class
        predictions = logits.data.max(1, keepdim=True)[1]
        n_correct += predictions.eq(targets.data.view_as(predictions)).sum().item()
    return batch_loss.item(), n_correct / len(train_data.dataset)
def test(model, mask, test_data, expr_params):
    """Evaluate the masked model on test_data; returns plain accuracy in [0, 1]."""
    eval_device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model.eval()  # eval mode: freeze dropout / batchnorm statistics
    n_correct = 0
    with torch.no_grad():
        for inputs, targets in test_data:
            inputs, targets = inputs.to(eval_device), targets.to(eval_device)
            apply_mask(model, mask)  # keep pruned weights at zero
            # index of the max log-probability = predicted class
            predictions = model(inputs).data.max(1, keepdim=True)[1]
            n_correct += predictions.eq(targets.data.view_as(predictions)).sum().item()
    return n_correct / len(test_data.dataset)
|
{"/src/models/lth_maml.py": ["/src/models/mask_ops.py", "/src/models/lth.py", "/src/models/meta.py"], "/src/models/lth.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/vgg.py", "/src/models/lenet.py"], "/src/models/meta.py": ["/src/models/mask_ops.py"], "/src/models/rigl_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"], "/src/load_data.py": ["/src/utils.py"], "/src/models/sparse_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"]}
|
37,249,346
|
0xDECAFC0FFEE/edge_meta_learning
|
refs/heads/master
|
/src/utils.py
|
import shutil
import os
import json
import numpy as np
import torch
import torch.nn as nn
import torch.nn.init as init
import random
import datetime
from pathlib import Path
import subprocess
import pickle
import json
import torch
import numpy as np
import pickle
from numpy.lib.format import open_memmap
from pathlib import Path
from tqdm import tqdm
import csv
class TopModelSaver():
    """Persists the best-scoring model weights (plus config and source snapshot) on disk."""

    def __init__(self, location, config):
        # best score seen so far; anything beats -inf on the first call
        self.prev_best = -np.inf
        self.root_folder = location
        if self.root_folder.exists():
            shutil.rmtree(self.root_folder)
        self.model_weights_path = self.root_folder/"model_weights.h5py"
        self.config_path = self.root_folder/"config.json"
        self.source_code_path = self.root_folder/Path(config["file_loc"]).name
        self.saved_config = config

    def reset(self):
        """Forget the previous best so the next save_best always saves."""
        self.prev_best = -np.inf

    def save_best(self, model, score):
        """
        saves best model according to score
        """
        if not score > self.prev_best:
            print(f"cur score {score}. best score remains {self.prev_best}; not saving weights")
            return
        print(f"new best score: {score}; saving weights @ {self.root_folder}")
        if not self.root_folder.exists():
            os.makedirs(self.root_folder)
        with open(self.config_path, "w+") as fp_handle:
            json.dump(self.saved_config, fp_handle)
        shutil.copyfile(self.saved_config["file_loc"], self.source_code_path)
        model.save_weights(str(self.model_weights_path), save_format="h5")
        self.prev_best = score
def flatten(iterable, max_depth=np.inf):
    """recursively flattens all iterable objects in iterable.
    Args:
        iterable (iterable or numpy array): iterable to flatten
        max_depth (int >= 0, optional): maximum number of objects to iterate into. Defaults to infinity.
    >>> flatten(["01", [2, 3], [[4]], 5, {6:6}.keys(), np.array([7, 8])])
    ['0', '1', 2, 3, 4, 5, 6, 7, 8]
    >>> utils.flatten(["asdf"], max_depth=0)
    ['asdf']
    >>> utils.flatten(["asdf"], max_depth=1)
    ['a', 's', 'd', 'f']
    """
    def walk(node, depth_left):
        if depth_left == -1:
            # depth budget exhausted: emit the node untouched
            yield node
        elif type(node) == str:
            # strings iterate into characters but never deeper
            yield from node
        elif type(node) == np.ndarray:
            for cell in node.flatten():
                yield from walk(cell, depth_left - 1)
        else:
            try:
                for child in iter(node):
                    yield from walk(child, depth_left - 1)
            except (AttributeError, TypeError):
                # not iterable: it is a leaf
                yield node
    assert(max_depth >= 0)
    return walk(iterable, max_depth)
def new_expr_id(*args):
    """
    Returns a new experiment id for the process: "expr.[date].[time].[nonce].[arg]..."
    (spaces in args are replaced with underscores), plus the 5-char random nonce.
    """
    alphabet = "abcdefghijklmnopqrstuvwxyz" + "1234567890"
    nonce = "".join(random.choices(alphabet, k=5))
    stamp_date = datetime.datetime.now().strftime("%Y-%m-%d")
    stamp_time = datetime.datetime.now().strftime("%H:%M:%S")
    sanitized = [arg.replace(" ", "_") for arg in args]
    parts = [stamp_date, stamp_time, nonce, *sanitized]
    expr_id = "expr." + ".".join(f"[{part}]" for part in parts)
    return expr_id, nonce
def weight_init(m):
    '''
    Initialize a single module's parameters in place.
    Usage:
        model = Model()
        model.apply(weight_init)
    1d conv weights get plain normal init, 2d/3d conv and linear weights get
    Xavier-normal, batchnorm weights N(1, 0.02) with zero bias, and recurrent
    weights are orthogonal (normal for 1-d parameters).  Unknown module types
    are left untouched.
    '''
    if isinstance(m, (nn.Conv1d, nn.ConvTranspose1d)):
        init.normal_(m.weight.data)
        if m.bias is not None:
            init.normal_(m.bias.data)
    elif isinstance(m, (nn.Conv2d, nn.Conv3d, nn.ConvTranspose2d, nn.ConvTranspose3d)):
        init.xavier_normal_(m.weight.data)
        if m.bias is not None:
            init.normal_(m.bias.data)
    elif isinstance(m, (nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d)):
        init.normal_(m.weight.data, mean=1, std=0.02)
        init.constant_(m.bias.data, 0)
    elif isinstance(m, nn.Linear):
        init.xavier_normal_(m.weight.data)
        # NOTE(review): assumes the Linear was built with bias=True
        init.normal_(m.bias.data)
    elif isinstance(m, (nn.LSTM, nn.LSTMCell, nn.GRU, nn.GRUCell)):
        for param in m.parameters():
            if len(param.shape) >= 2:
                init.orthogonal_(param.data)
            else:
                init.normal_(param.data)
class Logger:
    """Snapshots the git-tracked project directory plus arbitrary experiment
    artifacts into log_folder/<expr_id>, with the serialization format chosen
    by each keyword's name suffix (see update_snapshot)."""
    def __init__(self, project_folder=".", log_folder="./logs"):
        self.project_folder = Path(project_folder).resolve()
        self.log_folder = Path(log_folder).resolve()
        if not self.log_folder.exists():
            os.makedirs(self.log_folder)
        assert(self.project_folder.exists() and self.project_folder.is_dir())
    def zip_logs(self):
        # archive every expanded snapshot directory (once), then remove the directory
        for file in os.listdir(self.log_folder):
            if (self.log_folder/file).is_dir() and not (self.log_folder/(file+".zip")).exists():
                shutil.make_archive(self.log_folder/file, "zip", self.log_folder/file)
                shutil.rmtree(self.log_folder/file)
    def save_snapshot(self, expr_id, **kwargs):
        """
        saves a copy of the directory project_folder and any kwargs in log_folder/expr_id
        - raises exception if directory already exists
        - auto skips saving files not tracked by git
        - auto skips saving anything in log_folder (don't want to recursively copy everything)
        - see update_snapshot for kwargs naming scheme
        logger = Logger("/project_dir", "/project_dir/logs")
        logger.save_snapshot(
            "experiment 5",
            training_params_JSON=training_params,
            testing_accs_TXT=testing_accs,
            model_parameters=model_parameters
        )
        # do stuff
        logger.update_snapshot(
            "experiment 5",
            training_params_JSON=training_params,
            testing_accs_TXT=testing_accs,
            model_parameters=model_parameters
        )
        """
        log_folder = self.log_folder/expr_id
        if log_folder.exists():
            raise Exception(f"log folder {log_folder} already exists")
        os.makedirs(log_folder)
        # NOTE(review): `git ls-files` runs in the process's current working
        # directory, not self.project_folder — confirm callers run from the
        # project root
        files_to_log = subprocess.check_output(["git", "ls-files"]).decode().split("\n")
        for path in files_to_log:
            if len(path.strip()) == 0: # stripping newlines
                continue
            src = self.project_folder/path
            dest = log_folder/path
            # never copy the log folder into itself
            if str(src.resolve()).startswith(str(self.log_folder)+os.sep):
                continue
            if not dest.parent.exists():
                os.makedirs(dest.parent)
            shutil.copy(src, dest, follow_symlinks=False)
        self.update_snapshot(expr_id, **kwargs)
    def update_snapshot(self, expr_id, files_to_update=None,**kwargs):
        """
        updates a log snapshot that exists with values in the kwargs.
        - kwarg naming scheme:
            - will save argument values in files based off of the parameter names
            - arguments that end in "_JSON" are saved as json files
            - arguments that end in "_TXT" are saved as text files
            - arguments that end in "_CSV" are saved as csv files
            - models that end in "_PYTORCH" are saved with torch.save
            - by default, arguments are saved as pickle files
        file paths in files_to_update will be updated. files in the project directory not in
        files_to_update will not be updated if they already exist in the log directory
        logger = Logger("/project_dir", "/project_dir/logs")
        logger.save_snapshot("experiment 5")
        logger.update_snapshot(
            "experiment 5",
            training_params_JSON=training_params,
            testing_accs_TXT=testing_accs,
            model_parameters=model_parameters
        )
        """
        log_folder = self.log_folder/expr_id
        if not log_folder.exists():
            raise Exception(f"log folder {log_folder} doesn't exist - call save_snapshot first")
        for name, value in kwargs.items():
            # suffix of the kwarg name selects the serializer; the suffix is
            # stripped from the resulting file name
            json_flag = "_JSON"
            txt_flag = "_TXT"
            csv_flag = "_CSV"
            pytorch_flag = "_PYTORCH"
            try:
                if name[-len(json_flag):] == json_flag:
                    name = name[:-len(json_flag)]
                    with open(log_folder/f"{name}.json", "w+") as handle:
                        json.dump(value, handle)
                elif name[-len(txt_flag):] == txt_flag:
                    name = name[:-len(txt_flag)]
                    with open(log_folder/f"{name}.txt", "w+") as handle:
                        handle.write(value)
                elif name[-len(csv_flag):] == csv_flag:
                    name = name[:-len(csv_flag)]
                    with open(log_folder/f"{name}.csv", "w+") as handle:
                        # value is expected to be a list of dicts sharing keys
                        writer = csv.DictWriter(handle, fieldnames=value[0].keys())
                        writer.writeheader()
                        writer.writerows(value)
                elif name[-len(pytorch_flag):] == pytorch_flag:
                    name = name[:-len(pytorch_flag)]
                    torch.save(value, log_folder/f"{name}.pth")
                else:
                    with open(log_folder/f"{name}.pkl", "wb+") as handle:
                        pickle.dump(value, handle)
            except TypeError as e:
                # surface which kwarg failed to serialize before re-raising
                print(f"triggered on key {name}")
                raise TypeError(e)
        if files_to_update != None:
            for path in files_to_update:
                src = self.project_folder/path
                dest = log_folder/path
                # never copy the log folder into itself
                if str(src.resolve()).startswith(str(self.log_folder)+os.sep):
                    continue
                if not dest.parent.exists():
                    os.makedirs(dest.parent)
                shutil.copy(src, dest, follow_symlinks=False)
    def snapshot(self, expr_id, files_to_update=None, **kwargs):
        """
        snapshots a directory by saving project_dir directory and saving any kwargs
        first call with a specific expr_id will copy the project_dir directory in that log directory
        subsequent calls will update the log directory with the kwarg key values
        see update_snapshot for kwargs naming scheme
        note that subsequent calls to snapshot won't update files in the log directory unless
        it is in the files_to_update list
        logger = Logger("/project_dir", "/project_dir/logs")
        logger.snapshot(
            "experiment 5",
            files_to_update=["program_outputs.txt", "current_model.pkl"],
            training_params_JSON=training_params,
            testing_accs_TXT=testing_accs,
            model_parameters=model_parameters
        )
        """
        log_folder = self.log_folder/expr_id
        if not log_folder.exists():
            self.save_snapshot(expr_id, **kwargs)
        else:
            self.update_snapshot(expr_id, files_to_update=files_to_update, **kwargs)
def set_seeds(seed, cudnn_enabled=True):
    """Seed the python, numpy and torch RNGs; optionally configure cuDNN for
    determinism.  A seed of None is a no-op."""
    if seed is not None:
        random.seed(seed)
        np.random.seed(seed)
        torch.manual_seed(seed)
        # NOTE(review): explicit CUDA seeding is commented out upstream
        if cudnn_enabled:
            # deterministic kernels, no autotuning benchmark
            torch.backends.cudnn.benchmark = False
            torch.backends.cudnn.deterministic = True
        else:
            torch.backends.cudnn.enabled = False
class fs_greedy_load:
    """
    greedily loads everything in lst_array and stores it as a chunked memory mapped numpy file
    on second run, loads numpy file instead to save ram

    Chunks are capped at ~2GB each so the files stay usable on 32-bit ARM
    targets.  Indexing assumes every chunk except possibly the last has the
    same length (np.array_split produces near-equal chunks).
    """
    def __init__(self, path, lst_array=None):
        # generalization: accept plain strings as well as Path objects
        # (str.iterdir() raised AttributeError instead of falling through to
        # the cache-building branch)
        path = Path(path)
        try:
            file_chunks = sorted(list(path.iterdir()))
            self.array_chunks = [np.load(file_chunk, mmap_mode="r+") for file_chunk in file_chunks]
            self.chunk_size = len(self.array_chunks[0])
        except FileNotFoundError:
            print("rebuilding transformed dataset cache")
            # generalization: accept lists of arrays as the docstring implies
            # (ndarrays pass through unchanged)
            lst_array = np.asarray(lst_array)
            shape, dtype = lst_array.shape, str(lst_array.dtype)
            arm32_max_filesize = 2*10**9
            # bug fix: np.product was removed in NumPy 2.0; np.prod is the
            # supported equivalent
            total_bytes = lst_array.dtype.itemsize * np.prod(shape)
            num_chunks = int(total_bytes/arm32_max_filesize)+1
            self.chunk_size = int(len(lst_array)/num_chunks)
            os.makedirs(path)
            self.array_chunks = []
            for chunk_index, lst_chunk in enumerate(np.array_split(lst_array, num_chunks)):
                chunk_array = open_memmap(path/f"chunk_{chunk_index}.npy", mode='w+', dtype=dtype, shape=lst_chunk.shape)
                self.array_chunks.append(chunk_array)
                for i, val in enumerate(lst_chunk):
                    chunk_array[i] = val
    def __getitem__(self, index):
        # locate the chunk, then the row inside it
        chunk_index = index // self.chunk_size
        array_index = index % self.chunk_size
        return self.array_chunks[chunk_index][array_index]
    def __len__(self):
        return sum([len(chunk) for chunk in self.array_chunks])
def DummySummaryWriter(*args, **kwargs):
    """Return a no-op stand-in for tensorboard's SummaryWriter.

    The Mock accepts (and ignores) any method call, so callers can log
    unconditionally even when tensorboard is unavailable.
    """
    from unittest.mock import Mock
    writer_stub = Mock()
    return writer_stub
def sparsity(model, threshold=0.001):
    """Fraction of parameters whose magnitude is below `threshold`.

    `model` is used as a state_dict-like mapping of name -> weight tensor.
    Returns a scalar tensor in [0, 1].
    """
    state_dict = model
    total_params = 0
    near_zero = 0
    for _, weights in state_dict.items():
        total_params += np.prod(weights.shape)
        near_zero += torch.sum(torch.abs(weights) < threshold).cpu()
    return near_zero / total_params
class tee:
    def __init__(self, filename):
        """redirects output to file filename and terminal at the same time
        tee("output.log")
        instantiating will automatically print output to terminal and output.log
        plays well with tqdm & Logger
        make sure to add the redirected filename to files_to_update in logger.snapshot()
        Args:
            filename (str): filename to redirect output to
        """
        import sys
        from pathlib import Path
        filename = Path(filename)
        filename.parent.mkdir(parents=True, exist_ok=True)
        self.log = open(filename, "w+")
        self.terminal = sys.stdout
        sys.stdout = self
        # bug fix: the message printed the literal text "(unknown)" instead of
        # interpolating the redirect target
        print(f"T piping output to stdout and {filename}")

    def write(self, message):
        # mirror every message to both sinks, flushing eagerly so the file can
        # be tailed while the program runs
        self.terminal.write(message)
        self.terminal.flush()
        self.log.write(message)
        self.log.flush()

    def flush(self):
        # no-op: write() already flushes both streams
        pass
if __name__ == "__main__":
    # smoke test for fs_greedy_load: first call builds the on-disk chunk cache,
    # the second call (after `del a`) presumably reopens it from disk -- confirm
    # that fs_greedy_load's single-argument form hits the cached path.
    a = fs_greedy_load("fs_greedy_load_test", [np.arange(1000, dtype=np.float32).reshape(5, 2, 5, 20) for _ in range(1000000)])
    print(len(a))
    print(a[0])
    del a
    a = fs_greedy_load("fs_greedy_load_test")
    print(len(a))
    print(a[0])
|
{"/src/models/lth_maml.py": ["/src/models/mask_ops.py", "/src/models/lth.py", "/src/models/meta.py"], "/src/models/lth.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/vgg.py", "/src/models/lenet.py"], "/src/models/meta.py": ["/src/models/mask_ops.py"], "/src/models/rigl_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"], "/src/load_data.py": ["/src/utils.py"], "/src/models/sparse_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"]}
|
37,249,347
|
0xDECAFC0FFEE/edge_meta_learning
|
refs/heads/master
|
/src/models/meta.py
|
import torch
from torch import nn
from torch import optim
from torch.nn import functional as F
from torch.utils.data import TensorDataset, DataLoader
from torch import optim
import numpy as np
from src.models.maml_learner import Learner
from copy import deepcopy
from .mask_ops import apply_mask
from collections import OrderedDict
from tqdm import tqdm
def update_weights(named_parameters, loss, lr, first_order):
    """One inner-loop SGD step on a copy of the weights.

    Args:
        named_parameters: iterable of (name, tensor) pairs to differentiate.
        loss: scalar loss to take gradients of.
        lr: inner-loop step size.
        first_order: when True, no higher-order graph is built through the update
            (first-order MAML); otherwise create_graph/retain_graph are enabled.

    Returns:
        OrderedDict mapping each parameter name to its updated tensor.
    """
    names, params = zip(*named_parameters)
    if first_order:
        grads = torch.autograd.grad(loss, params)
    else:
        grads = torch.autograd.grad(loss, params, create_graph=True, retain_graph=True)
    stepped = (p - lr * g for p, g in zip(params, grads))
    return OrderedDict(zip(names, stepped))
class Meta(nn.Module):
    """
    Meta Learner: MAML-style meta-training wrapper around a masked Learner net.
    """
    def __init__(self, training_params, dataset_params, config):
        super(Meta, self).__init__()
        self.update_lr = training_params["update_lr"]    # inner-loop (task adaptation) lr
        self.meta_lr = training_params["meta_lr"]        # outer-loop (meta optimizer) lr
        self.n_way = dataset_params["n_way"]
        self.k_spt = dataset_params["k_spt"]
        self.k_qry = dataset_params["k_qry"]
        self.task_num = dataset_params["task_num"]
        self.update_step = training_params["update_step"]        # inner steps at meta-train time
        self.finetune_step = training_params["finetune_step"]    # inner steps at test time
        self.first_order = training_params["first_order"]        # first-order MAML toggle
        self.net = Learner(config, dataset_params["imgsz"])
        self.meta_optim = optim.Adam(self.net.parameters(), lr=self.meta_lr)
    def multihot_cross_entropy(self, pred_prob, label_prob, reduction='average'):
        '''
        Cross entropy against a (possibly soft / multi-hot) label distribution.

        :param pred_prob : k-shot(batch) x classes
        :param label_prob: k-shot(batch)
        :param reduction: sum or average
        '''
        log_likelihood = -1 *torch.nn.functional.log_softmax(pred_prob, dim=1)
        if reduction == "average":
            loss = torch.sum(torch.mul(log_likelihood,label_prob))/label_prob.size(0)
        else:
            loss = torch.sum(torch.mul(log_likelihood,label_prob))
        return loss
    def forward(self, mask, x_spt, y_spt, x_qry, y_qry):
        """
        One meta-training step over a batch of tasks: adapt on each support set,
        accumulate query losses, then take one meta-optimizer step.

        :param x_spt:   [task_num, setsz, c_, h, w]
        :param y_spt:   [task_num, setsz]
        :param x_qry:   [task_num, querysz, c_, h, w]
        :param y_qry:   [task_num, querysz]
        :return: per-inner-step query accuracies, averaged over the task batch
        """
        # print("x_spt.shape", x_spt.shape)
        # print("y_spt.shape", y_spt.shape)
        # print("x_qry.shape", x_qry.shape)
        # print("y_qry.shape", y_qry.shape)
        task_num, setsz, c_, h, w = x_spt.size()
        querysz = x_qry.size(1)
        losses_q = [0 for _ in range(self.update_step + 1)]  # losses_q[i] is the loss on step i
        corrects = [0 for _ in range(self.update_step + 1)]
        for i in range(task_num):
            # this is the loss and accuracy before first update
            fast_weights = OrderedDict(self.net.named_parameters())
            with torch.no_grad():
                # [setsz, nway]
                logits_q = self.net(mask, x_qry[i], fast_weights, training=True)
                loss_q = self.multihot_cross_entropy(logits_q, y_qry[i])
                losses_q[0] += loss_q
                pred_q = F.softmax(logits_q, dim=1).argmax(dim=1)
                # labels are distributions; argmax recovers the hard class index
                correct = torch.eq(pred_q, y_qry[i].argmax(dim=1)).sum().item()
                corrects[0] += correct
            for k in range(self.update_step):
                # 1. run the i-th task and compute loss for k=1~K-1
                logits = self.net(mask, x_spt[i], fast_weights, training=True)
                loss = self.multihot_cross_entropy(logits, y_spt[i])
                fast_weights = update_weights(fast_weights.items(), loss, self.update_lr, self.first_order)
                logits_q = self.net(mask, x_qry[i], fast_weights, training=True)
                loss_q = self.multihot_cross_entropy(logits_q, y_qry[i])
                losses_q[k + 1] += loss_q
                with torch.no_grad():
                    pred_q = F.softmax(logits_q, dim=1).argmax(dim=1)
                    correct = torch.eq(pred_q, y_qry[i].argmax(dim=1)).sum().item()
                    corrects[k + 1] += correct
        # end of all tasks
        # sum over all losses on query set across all tasks
        loss_q = losses_q[-1] / task_num
        # optimize theta parameters
        self.meta_optim.zero_grad()
        # loss_q.backward()
        # NOTE(review): passing a gradient argument scales all gradients by the loss
        # value itself (nonstandard vs the plain backward() above) -- confirm intent.
        loss_q.backward(loss_q.clone().detach())
        self.meta_optim.step()
        accs = np.array(corrects) / (querysz * task_num)
        return accs
    def finetunning(self, mask, x_spt, y_spt, x_qry, y_qry):
        """
        Adapt a copy of the meta-trained net on one task's support set and report
        query accuracy after each finetuning step (index 0 = before any update).

        :param x_spt:   [setsz, c_, h, w]
        :param y_spt:   [setsz]
        :param x_qry:   [querysz, c_, h, w]
        :param y_qry:   [querysz]
        :return: list of query accuracies, length finetune_step + 1
        """
        assert len(x_spt.shape) == 4
        corrects = []
        # in order to not ruin the state of running_mean/variance and bn_weight/bias
        # we finetunning on the copied model instead of self.net
        # apply_mask(model, mask)
        net = deepcopy(self.net)
        # this is the loss and accuracy before first update
        fast_weights = OrderedDict(net.named_parameters())
        with torch.no_grad():
            logits_q = net(mask, x_qry, fast_weights, training=True)
            pred_q = F.softmax(logits_q, dim=1).argmax(dim=1)
            corrects.append(torch.eq(pred_q, y_qry.argmax(dim=1)).sum().item()/x_qry.size(0))
        for k in range(self.finetune_step):
            logits = net(mask, x_spt, fast_weights, training=True)
            loss = self.multihot_cross_entropy(logits, y_spt)
            # always first-order here: no meta-gradient is needed at test time
            fast_weights = update_weights(fast_weights.items(), loss, self.update_lr, False)
            with torch.no_grad():
                logits_q = net(mask, x_qry, fast_weights, training=True)
                pred_q = F.softmax(logits_q, dim=1).argmax(dim=1)
                corrects.append(torch.eq(pred_q, y_qry.argmax(dim=1)).sum().item()/x_qry.size(0))
        del net
        return corrects
def main():
    """Script entry point (intentionally empty)."""
    pass


if __name__ == '__main__':
    main()
|
{"/src/models/lth_maml.py": ["/src/models/mask_ops.py", "/src/models/lth.py", "/src/models/meta.py"], "/src/models/lth.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/vgg.py", "/src/models/lenet.py"], "/src/models/meta.py": ["/src/models/mask_ops.py"], "/src/models/rigl_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"], "/src/load_data.py": ["/src/utils.py"], "/src/models/sparse_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"]}
|
37,249,348
|
0xDECAFC0FFEE/edge_meta_learning
|
refs/heads/master
|
/src/models/mask_ops.py
|
import torch
import numpy as np
from collections import OrderedDict
def build_mask(model, prune_strategy):
    """Create an all-ones (keep-everything) boolean mask for every weight tensor.

    Only parameters whose name contains "weight" are masked; biases are skipped.
    The three supported strategies ("local", "global", "early_bird") all start
    from the same dense mask, so the previously duplicated branches are merged.
    An unused `device` local was also removed.

    Args:
        model: any nn.Module-like object exposing named_parameters().
        prune_strategy: dict with at least a "name" key.

    Returns:
        dict mapping parameter name -> CPU bool tensor of ones.

    Raises:
        Exception: if the strategy name is not one of the supported ones.
    """
    if prune_strategy["name"] not in ("local", "global", "early_bird"):
        raise Exception(f"prune strategy {prune_strategy['name']} not implemented yet")
    mask = {}
    for name, param in model.named_parameters():
        if "weight" not in name:
            continue
        mask[name] = torch.ones_like(param, requires_grad=False, dtype=torch.bool, device="cpu")
    return mask
def apply_mask(model, mask):
    """Zero out the model's masked-off weights in place via load_state_dict."""
    masked = apply_mask_state_dict(model.named_parameters(), mask)
    # strict is false as we don't want to add buffers
    model.load_state_dict(masked, strict=False)
def apply_mask_state_dict(named_parameters, mask):
    """Return a copy of *named_parameters* with each masked tensor zeroed where its mask is False.

    The mask tensors are moved to the device of the first masked parameter.
    """
    params = OrderedDict(named_parameters)
    device = None
    for name, keep in mask.items():
        if device is None:
            device = params[name].device
        params[name] = params[name] * keep.to(device)
    return params
def _noisy_kth_magnitude(weight_arrays, prune_rate, noise_scale):
    """Magnitude threshold: the k-th smallest |weight| (k = prune_rate fraction),
    with tiny gaussian noise ADDED before abs() to break exact ties."""
    flat = np.concatenate(weight_arrays)
    flat = np.abs(flat + np.random.normal(scale=noise_scale, size=flat.shape))
    k = int(len(flat) * prune_rate)
    return np.partition(flat, k)[k]

def _check_threshold_ties(num_equal, kth_weight):
    """Warn (or abort) when many weights sit exactly at the threshold magnitude."""
    if num_equal > 100:
        raise Exception(f"{num_equal} parameters have the same magnitude {kth_weight} - use iter prune strategy")
    elif num_equal > 1:
        print(f"warning: {num_equal} parameters have the same magnitude {kth_weight}")

def update_mask(model, mask, prune_rate, prune_strategy):
    """
    prunes model*mask weights at rate prune_rate and updates the mask.

    Strategies:
      - "local":      threshold each weight tensor independently.
      - "global":     one threshold over all weight tensors.
      - "early_bird": threshold only BatchNorm2d scale factors.
    """
    if prune_strategy["name"] == "local":
        apply_mask(model, mask)
        for name, param in model.named_parameters():
            if "weight" not in name:
                continue
            # get magnitudes of weight matrices. ignores bias.
            flat = param.flatten().cpu().detach().numpy().astype(np.float64)
            # BUGFIX: the noise was previously ASSIGNED over the magnitudes
            # (weight_magnitudes = np.random.normal(...)), so the threshold was
            # computed from pure noise; it must be added, as in the global branch.
            kth_weight = _noisy_kth_magnitude([flat], prune_rate, 1e-45)
            # updating mask
            mask[name] = (param.abs() > kth_weight).cpu()
            _check_threshold_ties((param.abs() == kth_weight).sum(), kth_weight)
    elif prune_strategy["name"] == "global":
        # get magnitudes of weight matrices. ignores bias.
        apply_mask(model, mask)
        arrays = [param.flatten().cpu().detach().numpy().astype(np.float64)
                  for name, param in model.named_parameters() if "weight" in name]
        kth_weight = _noisy_kth_magnitude(arrays, prune_rate, 1e-39)
        # updating mask
        num_equal = 0
        for name, parameter in model.named_parameters():
            if "weight" in name:
                mask[name] = (parameter.abs() > kth_weight).cpu()
                num_equal += (parameter.abs() == kth_weight).sum()
        _check_threshold_ties(num_equal, kth_weight)
    elif prune_strategy["name"] == "early_bird":
        # early-bird prunes only the BatchNorm scale (gamma) weights
        apply_mask(model, mask)
        bn_layers = [(f"{name}.weight", module.weight)
                     for name, module in model.named_children()
                     if isinstance(module, torch.nn.BatchNorm2d)]
        arrays = [w.flatten().cpu().detach().numpy().astype(np.float64) for _, w in bn_layers]
        kth_weight = _noisy_kth_magnitude(arrays, prune_rate, 1e-39)
        # updating mask
        num_equal = 0
        for mask_name, w in bn_layers:
            mask[mask_name] = (w.abs() > kth_weight).cpu()
            num_equal += (w.abs() == kth_weight).sum()
        _check_threshold_ties(num_equal, kth_weight)
    else:
        raise Exception(f"prune strategy {prune_strategy} not found")
|
{"/src/models/lth_maml.py": ["/src/models/mask_ops.py", "/src/models/lth.py", "/src/models/meta.py"], "/src/models/lth.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/vgg.py", "/src/models/lenet.py"], "/src/models/meta.py": ["/src/models/mask_ops.py"], "/src/models/rigl_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"], "/src/load_data.py": ["/src/utils.py"], "/src/models/sparse_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"]}
|
37,249,349
|
0xDECAFC0FFEE/edge_meta_learning
|
refs/heads/master
|
/compile_graphs.py
|
import matplotlib.pyplot as plt
import numpy as np
from pathlib import Path
from tqdm import tqdm
import json
from itertools import chain
import random
def load_log(path):
    """Load per-prune-iteration test accuracies from an experiment directory.

    Supports two on-disk formats:
      - legacy: test_accs.txt (a json list) + expr_params.json carrying the prune
        rate, from which per-iteration sparsities are derived;
      - current: test_accs.json, a list of {"prune_rate", "test_accs"} records.

    Returns a list of dicts with at least "sparsity" and "test_accs" keys.
    """
    legacy_file = path/"test_accs.txt"
    if legacy_file.exists():
        with open(legacy_file, "r") as handle:
            accs = json.load(handle)
        with open(path/"expr_params.json", "r") as handle:
            expr_params = json.load(handle)
        rate = expr_params["prune_strategy"]["rate"]
        records = []
        for i, acc in enumerate(accs):
            records.append({"sparsity": 1-(1-rate)**i, "test_accs": acc, "prune_iter": i})
        return records
    with open(path/"test_accs.json", "r") as handle:
        scores = json.load(handle)
    return [{"sparsity": s["prune_rate"], "test_accs": s["test_accs"]} for s in scores]
def sparsity_to_iter(s, pr):
    """Number of prune iterations (at per-iteration rate *pr*) that yields sparsity *s*."""
    ratio = np.log(1 - s) / np.log(1 - pr)
    return abs(int(round(ratio)))
def iter_to_sparsity(i, pr):
    """Sparsity after *i* prune iterations, each removing fraction *pr* of remaining weights."""
    remaining = (1 - pr) ** i
    return 1 - remaining
def max_accs(log):
    """For each prune iteration, report the best test accuracy and the epoch it first occurred.

    Accuracies that never exceed 0 report epoch 0 and accuracy 0.
    """
    summary = []
    for entry in log:
        best_epoch, best_acc = 0, 0
        for epoch, acc in enumerate(entry["test_accs"]):
            if acc > best_acc:
                best_epoch, best_acc = epoch, acc
        summary.append({"sparsity": entry["sparsity"], "epoch": best_epoch, "max_acc": best_acc})
    return summary
def early_stopping_stats(log):
    """Like max_accs, but stops scanning each curve at the first accuracy drop
    (simulating early stopping); reports the accuracy/epoch reached before the drop."""
    summary = []
    for entry in log:
        best_epoch, best_acc = 0, 0
        for epoch, acc in enumerate(entry["test_accs"]):
            if acc > best_acc:
                best_epoch, best_acc = epoch, acc
            elif acc < best_acc:
                break
        summary.append({"sparsity": entry["sparsity"], "epoch": best_epoch, "acc": best_acc})
    return summary
def plot_finetuning_accs(lth_accs):
    """Plot one finetuning-accuracy curve per prune iteration and save the figure under graph_dir."""
    title = "lth maml prune iteration test finetuning accuracies"
    print(f'drawing graph for "{title}"')
    plt.figure(num=None, figsize=(15, 8), dpi=80, facecolor='w', edgecolor='k')
    for i, score in enumerate(lth_accs):
        sparsity = score["sparsity"]
        acc = score["test_accs"]
        # later prune iterations get progressively lighter green
        plt.plot(acc, "o-", color=(0, (i+5)/(len(lth_accs)+5), 0), label=f"prune iter {i+1}, {round(sparsity*100)}% sparsity")
    plt.title(title)
    plt.legend(bbox_to_anchor=(1, 0), loc='lower right')
    # NOTE(review): `acc` is the last curve plotted -- assumes all curves share one
    # length, and raises NameError when lth_accs is empty; confirm before reuse.
    plt.xticks(ticks=range(len(acc)))
    plt.xlabel("epoch")
    plt.ylabel("finetuning test accuracy")
    plt.gcf().subplots_adjust(bottom=0.10)
    plt.savefig(graph_dir/f"{title.replace(' ', '_')}.png", dpi=200)
    plt.clf()
# preset plot colors (RGB scaled to [0,1]); get_random_color pops from this list
# first and only falls back to random colors once it is exhausted
corder = [
    np.array([112, 143, 255])/255,
    np.array([205, 130, 255])/255,
    np.array([255, 166, 166])/255,
]
def get_random_color(opacity=1):
    """Pop the next preset color from the global ``corder`` palette, falling back
    to a random RGB once the palette is exhausted. Returns [r, g, b, opacity]."""
    if corder:
        r, g, b = corder.pop()
    else:
        r, g, b = (random.randrange(0, 100) / 100 for _ in range(3))
    return [r, g, b, opacity]
def plot_max_accs(plot_accs, scatter_accs, sparsity_range=(0.0, .9)):
    """Plot max test accuracy vs sparsity: lines for *plot_accs* series, points
    for *scatter_accs* series; saves the figure under graph_dir.

    NOTE(review): sparsity_range is currently unused -- confirm whether axis
    limits were intended here.
    """
    title = "max test acc generally increases after pruning"
    print(f'drawing graph for "{title}"')
    plt.figure(num=None, figsize=(8, 6), dpi=80, facecolor='w', edgecolor='k')
    for name, max_accs in plot_accs:
        plt.plot([score["sparsity"] for score in max_accs], [score["max_acc"] for score in max_accs], label=name, c=get_random_color())
    for name, max_accs in scatter_accs:
        plt.scatter([score["sparsity"] for score in max_accs], [score["max_acc"] for score in max_accs], label=name, c=[get_random_color()])
    plt.title(title)
    plt.xlabel("sparsity")
    plt.ylabel("max test acc")
    plt.gcf().subplots_adjust(bottom=0.15)
    plt.legend(bbox_to_anchor=(0, 0), loc='lower left')
    plt.savefig(graph_dir/f"{title.replace(' ', '_')}.png", dpi=200)
    plt.clf()
def plot_finetuning_time(plot_accs, scatter_accs, iterations=14, pr=.1, time_per_iter=6.4):
    """Plot early-stopping epoch (a proxy for on-device finetuning time) against
    prune iteration / sparsity; saves the figure under graph_dir.

    NOTE(review): the *iterations* parameter is currently unused; *time_per_iter*
    is presumably minutes per epoch on the raspberry pi -- confirm.
    """
    title = "finetuning time on raspberry pi generally decreases after pruning"
    print(f'drawing graph for "{title}"')
    plt.figure(num=None, figsize=(8, 6), dpi=80, facecolor='w', edgecolor='k')
    for name, scores in plot_accs:
        plt.plot([sparsity_to_iter(score["sparsity"], pr) for score in scores], [score["epoch"] for score in scores], label=name, c=get_random_color())
    for name, scores in scatter_accs:
        plt.scatter([sparsity_to_iter(score["sparsity"], pr) for score in scores], [score["epoch"] for score in scores], label=name, c=[get_random_color()])
    # find axis extents over every series
    max_iter, max_epoch = 0, 0
    for name, scores in plot_accs + scatter_accs:
        for score in scores:
            iter = sparsity_to_iter(score["sparsity"], pr)
            epoch = score["epoch"]
            max_iter = max(max_iter, abs(int(iter)))
            max_epoch = max(max_epoch, abs(int(epoch)))
    plt.title(title)
    plt.xticks(ticks=range(1, max_iter+1), labels=[f"{round(iter_to_sparsity(i, pr)*100)}% / {i}" for i in range(1, max_iter+1)], rotation=-40)
    plt.xlabel("sparsity / prune iteration (lth maml only)")
    plt.ylabel("epoch / rpi2 finetuning time (lth maml only)")
    plt.yticks(ticks=range(1, max_epoch+1), labels=[f"{i} / {int(i*time_per_iter)} min" for i in range(1, max_epoch+1)])
    # plt.gcf().subplots_adjust(left=.19, bottom=0.2)
    plt.gcf().subplots_adjust(left=.15, bottom=0.15, right=.74)
    plt.legend(bbox_to_anchor=(1.05, 1), loc='upper left')
    # plt.legend(bbox_to_anchor=(-.2, -.3), loc='lower left')
    plt.savefig(graph_dir/f"{title.replace(' ', '_')}.png", dpi=200)
    plt.clf()
graph_dir = Path("graphs")
# lth-maml experiment logs to compare against the hard-coded baselines below
log_files = [
    Path("logs")/"expr|2020-12-06|17:31:18|7CpKn|lth_maml",
    Path("logs")/"expr|2020-12-14|19:51:11|rx3xi|lth_maml|cudnn_disabled|try_to_repeat_fB9nb|1ajhv_prune_rate_too_low"
]
maml_lth_accs = [load_log(log_file) for log_file in log_files]
maml_max_accs = [max_accs(log) for log in maml_lth_accs]
maml_early_stopping = [early_stopping_stats(log) for log in maml_lth_accs]
# sparse-maml baseline accuracies (hard-coded from a previous run)
smaml_accs = [
    {"sparsity": .27, "test_accs": [0.1957, 0.4026, 0.4214, 0.4224, 0.4253, 0.4263, 0.4287, 0.4285, 0.4292, 0.429 , 0.43]}
]
smaml_max_acc = max_accs(smaml_accs)
smaml_early_stopping = early_stopping_stats(smaml_accs)
# RigL baseline accuracies at increasing sparsity (hard-coded from previous runs)
rigl_accs = [
    {"sparsity": .10, "test_accs": [0.2003, 0.4373, 0.438, 0.4404, 0.4424, 0.4421, 0.4426, 0.4426, 0.4438, 0.4446, 0.4443]},
    {"sparsity": .19, "test_accs": [0.2039, 0.4272, 0.437, 0.4402, 0.4421, 0.4434, 0.445, 0.4446, 0.445, 0.4443, 0.4446]},
    {"sparsity": .27, "test_accs": [0.19, 0.4175, 0.4275, 0.4294, 0.4324, 0.4324, 0.4324, 0.4329, 0.432, 0.433, 0.433]},
    {"sparsity": .34, "test_accs": [0.2019, 0.4343, 0.4404, 0.4417, 0.4417, 0.443, 0.4438, 0.4429, 0.4429, 0.4429, 0.4424]},
    {"sparsity": .41, "test_accs": [0.2013, 0.4355, 0.4463, 0.4482, 0.4485, 0.4495, 0.4492, 0.45, 0.45, 0.4502, 0.4512]},
    {"sparsity": .47, "test_accs": [0.19, 0.4377, 0.4426, 0.4443, 0.446, 0.4453, 0.4456, 0.4463, 0.446, 0.447, 0.447]},
    {"sparsity": .52, "test_accs": [0.1951, 0.4155, 0.4246, 0.427, 0.426, 0.4287, 0.4292, 0.4294, 0.4297, 0.43, 0.431]},
    {"sparsity": .57, "test_accs": [0.1974, 0.4348, 0.4397, 0.4453, 0.4458, 0.4465, 0.448, 0.4487, 0.4487, 0.4492, 0.4492]},
    {"sparsity": .61, "test_accs": [0.2036, 0.4482, 0.4568, 0.4578, 0.4583, 0.458, 0.4578, 0.4587, 0.4587, 0.4587, 0.4585]},
    {"sparsity": .65, "test_accs": [0.2078, 0.436, 0.4463, 0.4482, 0.4485, 0.45, 0.4487, 0.4485, 0.4487, 0.4502, 0.4497]},
    {"sparsity": .69, "test_accs": [0.1918, 0.4219, 0.4316, 0.4358, 0.4382, 0.4395, 0.4397, 0.441, 0.4412, 0.441, 0.4407]},
    {"sparsity": .72, "test_accs": [0.1993, 0.4397, 0.4502, 0.4521, 0.4531, 0.4534, 0.4534, 0.4526, 0.4517, 0.452, 0.452]},
    {"sparsity": .75, "test_accs": [0.187, 0.428, 0.44, 0.4424, 0.443, 0.4443, 0.4434, 0.4429, 0.443, 0.4426, 0.4417]},
    {"sparsity": .77, "test_accs": [0.2048, 0.4346, 0.4443, 0.4463, 0.4463, 0.4475, 0.449, 0.4492, 0.45, 0.4497, 0.4507]},
]
rigl_max_acc = max_accs(rigl_accs)
rigl_early_stopping = early_stopping_stats(rigl_accs)
print("loaded data; building graphs")
# draw the per-prune-iteration curves for the first lth-maml run, then the comparison plot
plot_finetuning_accs(maml_lth_accs[0])
plot_accs = [(f"lth maml run {i}", score) for i, score in enumerate(maml_max_accs)]
scatter_accs = [("rigl", rigl_max_acc), ("smaml", smaml_max_acc)]
plot_max_accs(plot_accs, scatter_accs)
plot_accs = [(f"lth maml run {i}", score) for i, score in enumerate(maml_max_accs)]
scatter_accs = [("rigl", rigl_max_acc), ("smaml", smaml_max_acc)]
# plot_finetuning_time(plot_accs, scatter_accs, pr=.1)
|
{"/src/models/lth_maml.py": ["/src/models/mask_ops.py", "/src/models/lth.py", "/src/models/meta.py"], "/src/models/lth.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/vgg.py", "/src/models/lenet.py"], "/src/models/meta.py": ["/src/models/mask_ops.py"], "/src/models/rigl_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"], "/src/load_data.py": ["/src/utils.py"], "/src/models/sparse_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"]}
|
37,249,350
|
0xDECAFC0FFEE/edge_meta_learning
|
refs/heads/master
|
/src/models/rigl_maml.py
|
from itertools import chain
import torch.nn.functional as F
import torch.nn as nn
from torch import optim
from tqdm import tqdm
import numpy as np
import torch
import copy
from ..utils import Logger
from .mask_ops import build_mask, apply_mask, update_mask
from .lth import detect_early_bird
from src.models.rigl_meta import RigLMeta
from rigl_torch.RigL import RigLScheduler
import datetime
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")  # module-wide compute device: GPU when available
def run(dataset, rigl_params):
    """Entry point for RigL-MAML: build the model/optimizer and train on *dataset*.

    *dataset* is a (train, val, test) triple; the test split is not used here.
    Returns whatever train() returns: (val_accs, best_model_state, best_model).
    """
    prune_strategy = rigl_params['prune_strategy']
    training_params = rigl_params['model_training_params']
    train_data, val_data, _test_data = dataset
    model, optimizer = build_model(training_params)
    return train(model, optimizer, train_data, val_data, prune_strategy, training_params)
def train(model, optimizer, train_data, val_data, prune_strategy, training_params):
    """Meta-train *model* under the RigL dynamic-sparsity scheduler.

    Validates every 300 steps and tracks the best validation accuracy.
    Returns (val_accs, best_model_state, best_model).
    """
    # Define RigL pruner
    pruner = RigLScheduler(model,
                           optimizer,
                           dense_allocation=prune_strategy['dense_allocation'],
                           sparsity_distribution=prune_strategy['sparsity_distribution'],
                           T_end=int(0.75 * training_params['training_iterations']),
                           delta=prune_strategy['delta'],
                           alpha=prune_strategy['alpha'],
                           grad_accumulation_n=prune_strategy['grad_accumulation_n'],
                           static_topo=prune_strategy['static_topo'],
                           ignore_linear_layers=prune_strategy['ignore_linear_layers'],
                           state_dict=prune_strategy['state_dict'])
    val_accs = []
    best_val_acc, best_model_state, best_model = 0, None, None
    # NOTE(review): //10000 presumably converts total training iterations to
    # epochs (passes over train_data) -- confirm against the loader's length.
    for epoch in range(training_params['training_iterations']//10000):
        print(f"train epoch {epoch}")
        print(f"Time {datetime.datetime.now()}")
        for step, (x_spt, y_spt, x_qry, y_qry) in enumerate(train_data):
            x_spt, y_spt, x_qry, y_qry = x_spt.to(device), y_spt.to(device), x_qry.to(device), y_qry.to(device)
            optimizer.zero_grad()
            accs = model(x_spt, y_spt, x_qry, y_qry)
            if step % 30 == 0:
                print(f"    step: {step} \ttraining acc: {accs}")
            if step % 300 == 0:  # evaluation
                print("validating model...")
                accs = test(model, val_data)
                print('val acc:', accs)
                val_accs.append(max(accs))
                if max(accs) > best_val_acc:
                    best_val_acc = max(accs)
                    # detach a CPU copy so later training can't mutate the best snapshot
                    best_model_state = {n: w.cpu().detach() for n, w in model.state_dict().items()}
                    best_model = model
            # NOTE(review): pruner() presumably returns False on topology-update
            # steps, gating the optimizer step -- confirm with rigl_torch docs.
            if pruner():
                optimizer.step()
    return val_accs, best_model_state, best_model
def test(model, test_data):
    """Average the per-step finetuning accuracy curve over every test task.

    Each element of *test_data* is a (x_spt, y_spt, x_qry, y_qry) batch of size 1;
    the leading batch dim is squeezed off before finetuning.
    Returns a float16 array of shape [update_step+1].
    """
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    per_task_accs = []
    for batch in test_data:
        x_spt, y_spt, x_qry, y_qry = (t.squeeze(0).to(device) for t in batch)
        per_task_accs.append(model.finetunning(x_spt, y_spt, x_qry, y_qry))
    # [num_tasks, update_step+1] -> mean curve over tasks
    return np.array(per_task_accs).mean(axis=0).astype(np.float16)
def build_model(training_params):
    """Construct a RigLMeta model on the module-level device, paired with an Adam optimizer."""
    layer_defs = training_params.get("layer_definitions", None)
    net = RigLMeta(training_params, layer_defs).to(device)
    return net, optim.Adam(net.parameters(), lr=0.001)
|
{"/src/models/lth_maml.py": ["/src/models/mask_ops.py", "/src/models/lth.py", "/src/models/meta.py"], "/src/models/lth.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/vgg.py", "/src/models/lenet.py"], "/src/models/meta.py": ["/src/models/mask_ops.py"], "/src/models/rigl_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"], "/src/load_data.py": ["/src/utils.py"], "/src/models/sparse_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"]}
|
37,249,351
|
0xDECAFC0FFEE/edge_meta_learning
|
refs/heads/master
|
/src/models/lenet.py
|
from . import lth
import torch
import torch.nn as nn
import torch.nn.init as init
class LeNet_mnist(nn.Module):
    """LeNet-300-100 fully connected MNIST classifier (784 -> 300 -> 100 -> 10)."""

    def __init__(self):
        super(LeNet_mnist, self).__init__()
        # layers are created in the same order as before so RNG-based
        # initialization draws remain identical
        self.layer1 = self._dense(784, 300)
        self.relu1 = nn.ReLU(inplace=True)
        self.layer2 = self._dense(300, 100)
        self.relu2 = nn.ReLU(inplace=True)
        self.layer3 = self._dense(100, 10)

    @staticmethod
    def _dense(n_in, n_out):
        """Linear layer with xavier-normal weights and zero bias."""
        layer = nn.Linear(n_in, n_out)
        init.xavier_normal_(layer.weight.data)
        init.zeros_(layer.bias.data)
        return layer

    def forward(self, x):
        """Flatten the input image and run the three dense layers; returns raw logits."""
        hidden = self.relu2(self.layer2(self.relu1(self.layer1(torch.flatten(x, 1)))))
        return self.layer3(hidden)
# class LeNet_cifar10(nn.Module):
# def __init__(self):
# super(LeNet_cifar10, self).__init__()
# self.layer1 = nn.Linear(3072, 300)
# init.xavier_normal_(self.layer1.weight.data)
# init.normal_(self.layer1.bias.data)
# self.relu1 = nn.ReLU(inplace=True)
# self.layer2 = nn.Linear(300, 100)
# init.xavier_normal_(self.layer2.weight.data)
# init.normal_(self.layer2.bias.data)
# self.relu2 = nn.ReLU(inplace=True)
# self.layer3 = nn.Linear(100, 10)
# init.xavier_normal_(self.layer3.weight.data)
# init.normal_(self.layer3.bias.data)
# self.mask = lth.build_mask(self)
# def forward(self, x):
# x = torch.flatten(x, 1)
# l1_out = self.relu1(self.layer1(x))
# l2_out = self.relu2(self.layer2(l1_out))
# logit = self.layer3(l2_out)
# return logit
|
{"/src/models/lth_maml.py": ["/src/models/mask_ops.py", "/src/models/lth.py", "/src/models/meta.py"], "/src/models/lth.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/vgg.py", "/src/models/lenet.py"], "/src/models/meta.py": ["/src/models/mask_ops.py"], "/src/models/rigl_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"], "/src/load_data.py": ["/src/utils.py"], "/src/models/sparse_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"]}
|
37,249,352
|
0xDECAFC0FFEE/edge_meta_learning
|
refs/heads/master
|
/src/load_data.py
|
import torchvision
import torch
from pathlib import Path
from .MiniImagenet import MiniImagenet
import gdown
import os
import shutil
import pickle
from tqdm import tqdm
import numpy as np
from .utils import fs_greedy_load, new_expr_id
def cifar10(args):
    """CIFAR10 train/val/test DataLoaders with an 80/20 train/val split.

    Normalization uses the standard ImageNet channel statistics; batch size
    comes from args["batch_size"], and only the test loader drops the last batch.
    """
    root = Path(args.get("dataset_location", 'data/CFAR10'))
    transform = torchvision.transforms.Compose([
        torchvision.transforms.ToTensor(),
        torchvision.transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
    ])
    full_train = torchvision.datasets.CIFAR10(root, train=True, download=True, transform=transform)
    test_set = torchvision.datasets.CIFAR10(root, train=False, download=True, transform=transform)
    n_train = int(len(full_train) * .8)
    train_set, val_set = torch.utils.data.random_split(full_train, [n_train, len(full_train) - n_train])
    loader = torch.utils.data.DataLoader
    bs = args["batch_size"]
    train = loader(train_set, batch_size=bs, shuffle=True, num_workers=0, drop_last=False, pin_memory=True)
    val = loader(val_set, batch_size=bs, shuffle=True, num_workers=0, drop_last=False, pin_memory=True)
    test = loader(test_set, batch_size=bs, shuffle=False, num_workers=0, drop_last=True, pin_memory=True)
    return train, val, test
def mnist(args):
    """MNIST train/val/test DataLoaders with an 80/20 train/val split.

    Uses the canonical MNIST normalization constants; batch size comes from
    args["batch_size"], and only the test loader drops the last batch.
    """
    root = Path(args.get("dataset_location", 'data/MNIST'))
    transform = torchvision.transforms.Compose([
        torchvision.transforms.ToTensor(),
        torchvision.transforms.Normalize((0.1307,), (0.3081,)),
    ])
    full_train = torchvision.datasets.MNIST(root, train=True, download=True, transform=transform)
    test_set = torchvision.datasets.MNIST(root, train=False, download=True, transform=transform)
    n_train = int(len(full_train) * .8)
    train_set, val_set = torch.utils.data.random_split(full_train, [n_train, len(full_train) - n_train])
    loader = torch.utils.data.DataLoader
    bs = args["batch_size"]
    train = loader(train_set, batch_size=bs, shuffle=True, num_workers=0, drop_last=False, pin_memory=True)
    val = loader(val_set, batch_size=bs, shuffle=True, num_workers=0, drop_last=False, pin_memory=True)
    test = loader(test_set, batch_size=bs, shuffle=False, num_workers=0, drop_last=True, pin_memory=True)
    return train, val, test
def download_mini_imagenet(dataset_path, saved_image_zip_file=None):
    """
    downloads raw maml jpeg images and csv train-val-test split files

    Args:
        dataset_path (Path): directory the dataset is downloaded/extracted into.
        saved_image_zip_file (str, optional): path to a pre-downloaded image zip;
            when None the zip is fetched from google drive via gdown.

    Raises:
        Exception: if raw.zip is missing after the download/copy step.
    """
    print("downloading mini imagenet dataset (2.9 Gb)... ")
    os.makedirs(dataset_path, exist_ok=True)
    if saved_image_zip_file == None:
        url = "https://drive.google.com/u/0/uc?id=1HkgrkAwukzEZA0TpO7010PkAOREb2Nuk"
        import gdown
        with open(dataset_path/"raw.zip", "wb+") as handle:
            gdown.download(url, handle, quiet=False)
    else:
        os.system(f"cp {saved_image_zip_file} {dataset_path/'raw.zip'}")
    if not (dataset_path/'raw.zip').exists():
        raise Exception(f"{dataset_path/'raw.zip'} doesn't exist")
    os.system(f"unzip {dataset_path/'raw.zip'} -d {dataset_path}")
    for filename in ["train.csv", "val.csv", "test.csv"]:
        # BUGFIX: the split-file URL contained a literal placeholder instead of the
        # csv filename; interpolate the loop variable so each split is actually fetched
        # (the -O target already used {filename}).
        os.system(f"wget https://raw.githubusercontent.com/twitter/meta-learning-lstm/master/data/miniImagenet/{filename} -O {dataset_path/filename}")
    os.system(f"rm -rf {dataset_path/'meta-learning-lstm'}")
    os.system(f"rm {dataset_path/'raw.zip'}")
def build_meta_learning_tasks(dataset_path, args, disable_training=False):
    """Wrap the MiniImagenet splits in DataLoaders.

    Train uses batch size args["task_num"]; val/test use batch size 1. When
    *disable_training* is set, only the test loader is built and (None, None)
    is returned for train/val.
    """
    num_workers = 5
    train = val = None
    if not disable_training:
        train = torch.utils.data.DataLoader(
            MiniImagenet(dataset_path, mode='train', args=args),
            args["task_num"], shuffle=args["shuffle"], num_workers=num_workers, pin_memory=True)
        val = torch.utils.data.DataLoader(
            MiniImagenet(dataset_path, mode='val', args=args),
            1, shuffle=False, num_workers=num_workers, pin_memory=True)
    test = torch.utils.data.DataLoader(
        MiniImagenet(dataset_path, mode='test', args=args),
        1, shuffle=False, num_workers=num_workers, pin_memory=True)
    return train, val, test
def mini_imagenet(args, redownload=False, disable_training=False):
    """Return mini-imagenet meta-learning task loaders, downloading the raw data if absent.

    Args:
        args: dataset params dict; "dataset_location" overrides the default path.
        redownload: when True, delete any existing local copy and fetch it again.
        disable_training: forwarded to build_meta_learning_tasks (skips train/val loaders).

    Returns:
        (train, val, test) DataLoaders from build_meta_learning_tasks.
    """
    dataset_path = Path(args.get("dataset_location", "data/miniimagenet/"))
    # (removed an unused `cache` local that was computed but never read)
    if redownload:
        print(f"deleting dataset from {dataset_path}")
        shutil.rmtree(dataset_path)
    if not dataset_path.exists():
        download_mini_imagenet(dataset_path)
    return build_meta_learning_tasks(dataset_path, args, disable_training=disable_training)
def dataset(args, **kwargs):
    """Dispatch to the loader named by args["dataset_name"]; kwargs pass through.

    Raises NotImplementedError for unknown dataset names.
    """
    loaders = {
        "cifar10": cifar10,
        "mnist": mnist,
        "mini_imagenet": mini_imagenet,
    }
    dataset_name = args["dataset_name"]
    if dataset_name not in loaders:
        raise NotImplementedError(f"dataset {dataset_name} not implemented")
    return loaders[dataset_name](args, **kwargs)
if __name__ == "__main__":
    # visualize image augmentations in tensorboard - first call should have same images and labels as second but augmentations should be different
    dataset_params = {
        "n_way": 5,                 # number of classes to choose between for each task
        "k_spt": 1,                 # k shot for support set (number of examples per class per task)
        "k_qry": 15,                # k shot for query set (number of examples per class per task)
        "imgsz": 84,                # image size
        "task_num": 4,              # meta model batch size
        "train_bs": 10000,          # training batch size
        "test_bs": 100,             # val/test batch size
        "shuffle": False,
        "dataset_name": "mini_imagenet",
        "dataset_location": "data/miniimagenet",
    }
    ds = dataset(dataset_params, redownload=False)
    train_data, val_data, test_data = ds
    images_spt, labels_spt, images_qry, labels_qry = next(iter(train_data))
    from torch.utils.tensorboard import SummaryWriter
    _, id = new_expr_id()
    print(id)
    # iterating through a task's support/query sets and saving their augmented images to tensorboard
    writer = SummaryWriter(log_dir=f'tensorboard/{id}_1st_run')
    i = 0
    for images_s, labels_s, images_q, labels_q in zip(images_spt, labels_spt, images_qry, labels_qry):
        for image, label in zip(images_s, labels_s):
            writer.add_image(f"class {label}", image, i)
            i += 1
        for image, label in zip(images_q, labels_q):
            writer.add_image(f"class {label}", image, i)
            i += 1
        break  # only the first task is visualized
    writer.flush()
    writer.close()
    # if shuffle=false, iterating through the smae task's support/query sets and saving their augmented images to tensorboard. these images should have differnet crops/hues etc from the first images
    writer = SummaryWriter(log_dir=f'tensorboard/{id}_2nd_run')
    images_spt, labels_spt, images_qry, labels_qry = next(iter(train_data))
    for images_s, labels_s, images_q, labels_q in zip(images_spt, labels_spt, images_qry, labels_qry):
        for image, label in zip(images_s, labels_s):
            writer.add_image(f"class {label}", image, i)
            i += 1
        for image, label in zip(images_q, labels_q):
            writer.add_image(f"class {label}", image, i)
            i += 1
        break  # again, only the first task
    writer.flush()
    writer.close()
|
{"/src/models/lth_maml.py": ["/src/models/mask_ops.py", "/src/models/lth.py", "/src/models/meta.py"], "/src/models/lth.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/vgg.py", "/src/models/lenet.py"], "/src/models/meta.py": ["/src/models/mask_ops.py"], "/src/models/rigl_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"], "/src/load_data.py": ["/src/utils.py"], "/src/models/sparse_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"]}
|
37,249,353
|
0xDECAFC0FFEE/edge_meta_learning
|
refs/heads/master
|
/src/models/sparse_maml.py
|
from itertools import chain
import torch.nn.functional as F
import torch.nn as nn
from tqdm import tqdm
import numpy as np
import torch
import matplotlib.pyplot as plt
from torch.utils.tensorboard import SummaryWriter
import copy
from ..utils import Logger
from .mask_ops import build_mask, apply_mask, update_mask
from .lth import detect_early_bird
from src.models.sparse_meta import SparseMeta
import datetime
# Module-wide compute device: prefer the GPU when one is available.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
def run(dataset, expr_params):
    """Experiment entry point: build a sparse-MAML model and train it.

    Args:
        dataset: (train, val, test) loader triple; the test split is unused here.
        expr_params: full experiment config; only 'model_training_params' is read.

    Returns:
        The (val_accs, best_model_state) pair produced by train().
    """
    params = expr_params['model_training_params']
    train_loader, val_loader, _ = dataset
    return train(build_model(params), train_loader, val_loader, params)
def train(model, train_data, val_data, training_params):
    """Meta-train `model`, validating every 300 steps and tracking the best weights.

    Args:
        model: meta-learner; calling it with a batch performs one training step
               and returns per-update-step accuracies.
        train_data / val_data: task loaders.
        training_params: reads 'training_iterations' (10000 steps per epoch).

    Returns:
        (val_accs, best_model_state): best accuracy from each validation pass and
        a CPU copy of the state dict that achieved the highest validation accuracy.
    """
    val_accs = []
    best_val_acc = 0
    best_model_state = None
    n_epochs = training_params['training_iterations'] // 10000
    for epoch in range(n_epochs):
        print(f"train epoch {epoch}")
        print(f"Time {datetime.datetime.now()}")
        for step, batch in enumerate(train_data):
            x_spt, y_spt, x_qry, y_qry = (t.to(device) for t in batch)
            accs = model(x_spt, y_spt, x_qry, y_qry)
            if step % 30 == 0:
                print(f" step: {step} \ttraining acc: {accs}")
            if step % 300 == 0:  # periodic evaluation (also fires at step 0)
                print("validating model...")
                accs = test(model, val_data)
                print('val acc:', accs)
                step_best = max(accs)
                val_accs.append(step_best)
                if step_best > best_val_acc:
                    best_val_acc = step_best
                    # Detach to CPU so the snapshot survives later training.
                    best_model_state = {
                        n: w.cpu().detach() for n, w in model.state_dict().items()
                    }
    return val_accs, best_model_state
def test(model, test_data):
    """Fine-tune `model` on every test task and average the accuracies.

    Args:
        model: exposes finetunning(x_spt, y_spt, x_qry, y_qry) -> accuracy list.
        test_data: yields batched tasks with a leading batch dim of 1.

    Returns:
        np.float16 array of shape [update_step + 1]: mean accuracy per update step.
    """
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    per_task_accs = []
    for batch in test_data:
        # Drop the loader's singleton batch dimension before fine-tuning.
        x_spt, y_spt, x_qry, y_qry = (t.squeeze(0).to(device) for t in batch)
        per_task_accs.append(model.finetunning(x_spt, y_spt, x_qry, y_qry))
    # Average over tasks -> [update_step + 1].
    return np.array(per_task_accs).mean(axis=0).astype(np.float16)
def build_model(training_params):
    """Construct a SparseMeta model on the active device.

    Reads the optional "layer_definitions" entry from the config; SparseMeta
    receives None when it is absent.
    """
    layers = training_params.get("layer_definitions", None)
    return SparseMeta(training_params, layers).to(device)
|
{"/src/models/lth_maml.py": ["/src/models/mask_ops.py", "/src/models/lth.py", "/src/models/meta.py"], "/src/models/lth.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/vgg.py", "/src/models/lenet.py"], "/src/models/meta.py": ["/src/models/mask_ops.py"], "/src/models/rigl_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"], "/src/load_data.py": ["/src/utils.py"], "/src/models/sparse_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"]}
|
37,249,354
|
0xDECAFC0FFEE/edge_meta_learning
|
refs/heads/master
|
/main.py
|
import torch
import numpy as np
from src import utils, load_data
from src.models import lth_maml as lth
from pathlib import Path
import json
# Tag this run with a fresh experiment id/uid for logging.
expr_id, uid = utils.new_expr_id("first order 7xacq with dataset shuffle and removed mp")
# Training is GPU-only: fail fast at import time rather than mid-run.
if not torch.cuda.is_available():
    raise Exception("CUDA ISN'T AVAILABLE WHAT WENT WRONG")
print(f"found gpu: {torch.cuda.get_device_name(0)}")
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Full configuration for a training run (dataset, pruning and MAML hyperparameters).
train_params = {
    "project_dir": str(Path(__file__).parent.resolve()),
    "expr_id": expr_id,
    "uid": uid,
    "random_seed": 222,
    "cudnn_enabled": True,
    "dataset_params": {
        "n_way": 5, # number of classes to choose between for each task
        "k_spt": 1, # k shot for support set (number of examples per class per task)
        "k_qry": 15, # k shot for query set (number of examples per class per task)
        "imgsz": 84, # image size
        "task_num": 1, # meta model batch size
        "train_bs": 10000, # training batch size
        "test_bs": 100, # val/test batch size
        "train_image_aug": False, # turn on image augmentations False
        "mixup": True, # turn on mixup augmentations True
        "shuffle": True, # shuffle dataset False
        "dataset_name": "mini_imagenet",
        "dataset_location": "data/miniimagenet",
    },
    # Lottery-ticket pruning schedule: prune 5% globally per iteration, 15 times.
    "prune_strategy": {
        "name": "global",
        "rate": .05,
        "iterations": 15,
    },
    "model_training_params": {
        "model_name": "MAML",
        "meta_training_epochs": 16, # meta model training epochs 16
        "meta_training_early_stopping": True, # meta model early stopping True
        "meta_lr": 0.0005, # meta model learning rate 0.0005
        "update_lr": 0.01, # task specific model learning 0.01
        "update_step": 10, # task specific model training 5
        "finetune_step": 10, # task specific model finetuning testing 7
        "first_order": True, # use first order approximation False
        "dtype": "float64",
        # Backbone layer spec consumed by the Meta/SparseMeta learner.
        # conv2d args: [out_ch, in_ch, kernel_h, kernel_w, stride, padding].
        "layer_definitions": [
            # ('bn', [3]),
            ('conv2d', [32, 3, 3, 3, 1, 1]),
            ('relu', [True]),
            ('bn', [32]),
            # ('dropout', [.5]),
            ('max_pool2d', [2, 2, 0]),
            ('conv2d', [32, 32, 3, 3, 1, 1]),
            ('relu', [True]),
            ('bn', [32]),
            # ('dropout', [.5]),
            ('max_pool2d', [2, 2, 0]),
            ('conv2d', [64, 32, 3, 3, 1, 1]),
            ('relu', [True]),
            ('bn', [64]),
            # ('dropout', [.5]),
            # ('max_pool2d', [2, 2, 0]),
            ('conv2d', [64, 64, 3, 3, 1, 1]),
            ('relu', [True]),
            ('bn', [64]),
            # ('dropout', [.5]),
            # ('max_pool2d', [2, 2, 0]),
            ('conv2d', [128, 64, 3, 3, 1, 1]),
            ('relu', [True]),
            ('bn', [128]),
            # ('dropout', [.5]),
            # ('max_pool2d', [2, 2, 0]),
            ('conv2d', [128, 128, 3, 3, 1, 1]),
            ('relu', [True]),
            ('bn', [128]),
            # ('dropout', [.5]),
            ('max_pool2d', [2, 1, 0]),
            ('flatten', []),
            # ('linear', [5, 11552]) # 32 * 5 * 5
            # ("linear", [5, 1600])
            # ('linear', [32, 1600]),
            # ('relu', [True]),
            # ('bn', [32]),
            # ('dropout', [.5]),
            ('bn', [51200]),
            ('linear', [512, 51200]),
            ('relu', [True]),
            ('bn', [512]),
            ('linear', [5, 512])
        ]
    }
}
# Alternative config: re-test a previously logged experiment instead of training.
test_params = {
    "project_dir": str(Path(__file__).parent/"logs"/"expr.[2020-12-22].[18:02:57].[k9gic].[lth_maml].[test_first_order_approx_acc_and_speed]"),
    "train_test": "test",
}
# Run each configured experiment in sequence (currently only train_params).
for i, expr_params in enumerate([train_params]):
    print(f">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> STARTING EXPR {i} <<<<<<<<<<<<<<<<<<<<<<<<<<<")
    project_dir = Path(expr_params["project_dir"])
    # Snapshot code + config so the run can be reproduced later.
    logger = utils.Logger(project_dir, project_dir/"logs")
    logger.snapshot(
        expr_id=expr_params["expr_id"],
        expr_params_JSON=expr_params,
    )
    # Mirror stdout into the experiment's log directory.
    utils.tee(project_dir/"logs"/expr_id/"program_outputs.txt")
    if expr_params.get("train_test", "train") == "train":
        print(expr_params["expr_id"], "\n")
        print(expr_params)
        utils.set_seeds(expr_params["random_seed"], expr_params.get("cudnn_enabled", True))
        args = expr_params["model_training_params"]
        print(f"starting lth run {expr_params['expr_id']}")
        dataset = load_data.dataset(expr_params["dataset_params"], redownload=False)
        mask = lth.run(dataset, expr_params, logger)
    else:
        # Test mode: reload the logged config, overlay the local overrides,
        # and evaluate fine-tuning on the test split only.
        log_dir = expr_params["project_dir"]
        with open(Path(log_dir)/"expr_params.json") as expr_params_handle:
            logged_expr_params = json.load(expr_params_handle)
        logged_expr_params.update(expr_params)
        print(logged_expr_params)
        print(logged_expr_params["expr_id"], "\n")
        dataset = load_data.dataset(logged_expr_params["dataset_params"], redownload=False)
        train, val, test = dataset
        log_dir = Path(logged_expr_params["project_dir"])
        print(f"log_dir: {log_dir}")
        print(lth.test_finetuning(test, logged_expr_params, log_dir))
|
{"/src/models/lth_maml.py": ["/src/models/mask_ops.py", "/src/models/lth.py", "/src/models/meta.py"], "/src/models/lth.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/vgg.py", "/src/models/lenet.py"], "/src/models/meta.py": ["/src/models/mask_ops.py"], "/src/models/rigl_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"], "/src/load_data.py": ["/src/utils.py"], "/src/models/sparse_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"]}
|
37,256,427
|
JannichHP/Cardealerfinal
|
refs/heads/master
|
/cardealer/api.py
|
import urllib.request, urllib.parse, urllib.error
import json
# Look up a vehicle by registration plate against the nrpla.de API and print
# its brand, model and version.
# NOTE(review): the API token is hard-coded; move it to an environment
# variable or config file before sharing this code.
API_TOKEN = "iZ3T9knspElwMyglvXbFKaKXK8co5k0LKjLzke6Uirnaw5FuqtE20SXKI4ttrZIM"
ApiUrl = "http://api.nrpla.de/"

print("Enter registration:")
# Read the plate immediately instead of aliasing the builtin `input` and
# calling it later, which obscured where the prompt was answered.
registration = input()

# URL-escape the user-supplied plate so spaces/special characters can't
# break or alter the request path.
url = ApiUrl + urllib.parse.quote(registration) + "?api_token=" + API_TOKEN + "&advanced=1"
print('Retrieving', url)
uh = urllib.request.urlopen(url)
data = uh.read().decode()

# Select only the three displayed fields from the JSON payload.
js = json.loads(data)
brand = js["data"]["brand"]
model = js["data"]["model"]
engine = js["data"]['version']
print("Brand:", brand)
print("model:", model)
print("version:", engine)
|
{"/cardealer/forms.py": ["/cardealer/models.py"], "/cardealer/routes.py": ["/cardealer/__init__.py", "/cardealer/forms.py", "/cardealer/models.py"], "/cardealer/models.py": ["/cardealer/__init__.py"]}
|
37,256,428
|
JannichHP/Cardealerfinal
|
refs/heads/master
|
/cardealer/routes.py
|
import os
import secrets
import requests
import json
from PIL import Image
from flask import render_template, url_for, flash, redirect, request
from cardealer import app, db, bcrypt
from cardealer.forms import RegistrationForm, LoginForm, UpdateAccountForm, CarForm
from cardealer.models import User, CarData
from flask_login import login_user, current_user, logout_user, login_required
@app.route("/")
def home():
    """Render the public landing page."""
    return render_template("home.html")
@app.route("/register", methods=['GET', 'POST'])
def register_page():
    """Account sign-up: validate the form, store the new user, redirect to login."""
    if current_user.is_authenticated:
        return redirect(url_for('land_page'))  # already logged in — skip registration
    form = RegistrationForm()
    if form.validate_on_submit():
        # Never store the raw password — only its bcrypt hash.
        hashed_password = bcrypt.generate_password_hash(form.password.data).decode('utf-8')
        user = User(username=form.Name.data, company=form.Company.data, phone=form.Phone.data, CVR=form.CVR.data, email=form.email.data, password=hashed_password)
        db.session.add(user)
        db.session.commit()
        flash('Your account has been created! You are now able to log in', 'success')
        return redirect(url_for('login_page'))
    return render_template("register.html", title='Register', form=form)
@app.route("/login", methods=['GET', 'POST'])
def login_page():
    """Authenticate by CVR + password; on success go to ?next= or the land page."""
    if current_user.is_authenticated:
        return redirect(url_for('land_page'))  # already logged in
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(CVR=form.CVR.data).first()
        if user and bcrypt.check_password_hash(user.password, form.password.data):
            login_user(user, remember=form.remember.data)  # "remember me" checkbox
            next_page = request.args.get('next')
            flash('Login Successful!', 'success')
            # NOTE(review): redirecting to a caller-supplied ?next= without
            # validation is an open-redirect risk — confirm next_page is local.
            return redirect(next_page) if next_page else redirect(url_for('land_page'))
        else:
            flash('Login Unsuccessful. Please Check username and password', 'danger')
    # Fall through on GET or failed login so the user can try again.
    return render_template("login.html", title='Login', form=form)
@app.route("/logout")
def logout():
    """End the session and return to the public home page."""
    logout_user()
    return redirect(url_for('home'))
@app.route("/land_page")
@login_required
def land_page():
    """Render the logged-in user's landing page."""
    return render_template("land_page.html")
def save_picture(form_picture):
    """Persist an uploaded profile picture as a 125x125 thumbnail.

    Returns the generated filename (random hex + the upload's extension).
    """
    _, extension = os.path.splitext(form_picture.filename)
    # A random name prevents collisions between different users' uploads.
    filename = secrets.token_hex(8) + extension
    destination = os.path.join(app.root_path, 'static/profile_pics', filename)
    # Downscale before saving to keep stored images small.
    thumb = Image.open(form_picture)
    thumb.thumbnail((125, 125))
    thumb.save(destination)
    return filename
@app.route("/profile", methods=['GET', 'POST'])
@login_required
def profile():
    """View and update the current user's CVR, email and profile picture."""
    form = UpdateAccountForm()
    if form.validate_on_submit():
        if form.picture.data:
            picture_file = save_picture(form.picture.data)
            current_user.image_file = picture_file  # swap in the newly uploaded picture
        current_user.CVR=form.CVR.data
        current_user.email=form.email.data
        db.session.commit()  # persist whichever fields the user changed
        flash('Account updated', 'success')
        return redirect(url_for('profile'))
    elif request.method == 'GET':
        # Pre-fill the form with the current values.
        form.CVR.data = current_user.CVR
        form.email.data = current_user.email
        # NOTE(review): on a GET the picture field is normally empty, so this
        # branch looks dead; it also sets image_file without committing — verify.
        if form.picture.data:
            picture_file = save_picture(form.picture.data)
            current_user.image_file = picture_file
    image_file = url_for('static', filename='profile_pics/' + current_user.image_file)
    return render_template("profile.html", title='profile', image_file=image_file, form=form)
@app.route("/list")
@login_required
def list():
    """Show every car currently stored in the database."""
    # NOTE(review): this view shadows the builtin `list` at module level;
    # renaming would change the Flask endpoint name, so it is only flagged here.
    posts = CarData.query.all()
    return render_template("list.html", posts=posts)
@app.route("/account")
def account():
    """Render the static account page."""
    return render_template('account.html', title='account')
@app.route("/selling", methods=['GET', 'POST'])
@login_required
def sell_vehicle():
    """Show the car-listing form; on a valid POST, store the car and go to /buying."""
    form = CarForm()
    if request.method == 'POST' and form.validate_on_submit():
        # Persist the listing with the logged-in user as its owner.
        new_car = CarData(Brand=form.Brand.data, Model=form.Model.data,
                          Engine=form.Engine.data, Colour=form.Colour.data,
                          Comment=form.Comment.data, user=current_user)
        db.session.add(new_car)
        db.session.commit()
        return redirect(url_for('.buy_vehicle'))
    return render_template('selling.html', form=form)
@app.route("/buying")
@login_required
def buy_vehicle():
    """List every car stored in the CarData table for browsing/purchase."""
    car_data = CarData.query.all()
    return render_template('buying.html', car_data=car_data)
@app.route("/contact")
def contact():
    """Render the static contact page."""
    return render_template('contact.html')
|
{"/cardealer/forms.py": ["/cardealer/models.py"], "/cardealer/routes.py": ["/cardealer/__init__.py", "/cardealer/forms.py", "/cardealer/models.py"], "/cardealer/models.py": ["/cardealer/__init__.py"]}
|
37,256,429
|
JannichHP/Cardealerfinal
|
refs/heads/master
|
/cardealer/forms.py
|
from flask_wtf import FlaskForm
from flask_wtf.file import FileField, FileAllowed
from flask_login import current_user
from wtforms import StringField, PasswordField, SubmitField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email, EqualTo, ValidationError
from cardealer.models import User, CarData
class RegistrationForm(FlaskForm):
    """Account sign-up form; each validate_* hook rejects values already in the DB."""
    Name = StringField('Name', validators=[DataRequired(), Length(min=2, max=50)])
    Company = StringField('Company', validators=[DataRequired(), Length(min=2, max=50)])
    Phone = StringField('Phone', validators=[DataRequired(), Length(min=8, max=8)])
    CVR = StringField('CVR', validators=[DataRequired(), Length(min=7, max=8)])
    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    confirm_password = PasswordField('Confirm Password', validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Sign Up')
    # WTForms calls each validate_<field> automatically during form validation;
    # raising ValidationError attaches the message to that field.
    def validate_Name(self, name):
        """Reject a username that is already registered."""
        user = User.query.filter_by(username=name.data).first()
        if user:
            raise ValidationError('Name is already taken, choose a different')
    def validate_Company(self, company):
        """Reject a company name that is already registered."""
        user = User.query.filter_by(company=company.data).first()
        if user:
            raise ValidationError('Company is already registered, choose a different')
    def validate_Phone(self, phone):
        """Reject a phone number that is already registered."""
        user = User.query.filter_by(phone=phone.data).first()
        if user:
            raise ValidationError('Phone number is already taken, choose a different')
    def validate_CVR(self, CVR):
        """Reject a CVR number that is already registered."""
        user = User.query.filter_by(CVR=CVR.data).first()
        if user:
            raise ValidationError('CVR number is already taken, choose a different')
    def validate_email(self, email):
        """Reject an email address that is already registered."""
        user = User.query.filter_by(email=email.data).first()
        if user:
            raise ValidationError('Email address is already taken, choose a different')
class LoginForm(FlaskForm):
    """Login form; the view authenticates by CVR + password."""
    CVR = StringField('CVR', validators=[DataRequired(), Length(min=7, max=8)])
    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember = BooleanField('Remember Me')  # keeps the session alive across visits
    submit = SubmitField('Login')
# Used for updating CVR, email and profile picture from the /profile view.
class UpdateAccountForm(FlaskForm):
    """Profile-edit form; validators only fire when a value actually changed."""
    CVR = StringField('CVR', validators=[Length(min=7, max=8)])
    email = StringField('Email', validators=[Email()])
    picture = FileField('Update Profile Picture', validators=[FileAllowed(['jpg', 'png'])])
    submit = SubmitField('Update')
    # Only check uniqueness when the submitted value differs from the
    # current user's own value, so resubmitting unchanged data still passes.
    def validate_CVR(self, CVR):
        """Reject a changed CVR that belongs to another account."""
        if CVR.data != current_user.CVR:
            user = User.query.filter_by(CVR=CVR.data).first()
            if user:
                raise ValidationError('CVR number is already taken, choose a different')
    def validate_email(self, email):
        """Reject a changed email that belongs to another account."""
        if email.data != current_user.email:
            user = User.query.filter_by(email=email.data).first()
            if user:
                raise ValidationError('Email address is already taken, choose a different')
# Car-listing form: the submitted data becomes a CarData row (see sell_vehicle).
class CarForm(FlaskForm):
    """Form for posting a car for sale; all fields are required."""
    Brand = StringField('Brand', validators=[DataRequired(), Length(min=2, max=50)])
    Model = StringField('Model', validators=[DataRequired(), Length(min=2, max=50)])
    Engine = StringField('Engine', validators=[DataRequired(), Length(min=2, max=50)])
    Colour = StringField('Colour', validators=[DataRequired(), Length(min=2, max=50)])
    Comment = StringField('Comment', validators=[DataRequired(), Length(min=2, max=50)])
    #image_file2 = FileField('Attach picture', validators=[FileAllowed(['jpg', 'png'])])
    submit = SubmitField('Post')
|
{"/cardealer/forms.py": ["/cardealer/models.py"], "/cardealer/routes.py": ["/cardealer/__init__.py", "/cardealer/forms.py", "/cardealer/models.py"], "/cardealer/models.py": ["/cardealer/__init__.py"]}
|
37,256,430
|
JannichHP/Cardealerfinal
|
refs/heads/master
|
/cardealer/models.py
|
from flask import current_app
from cardealer import db, login_manager
from flask_login import UserMixin
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: resolve a session's stored id (a string) to a User row."""
    return User.query.get(int(user_id))
class User(db.Model, UserMixin):
    """Account record; one user owns many CarData listings."""
    id = db.Column(db.Integer, primary_key=True)  # auto-increment primary key
    username = db.Column(db.String(20), unique=True, nullable=False)  # unique, max 20 chars
    company = db.Column(db.String(100), unique=True, nullable=False)
    phone = db.Column(db.String(100), unique=True, nullable=False)  # nullable=False: field is mandatory
    CVR = db.Column(db.String(100), unique=True, nullable=False)
    email = db.Column(db.String(100), unique=True, nullable=False)
    password = db.Column(db.String(60), nullable=False)  # bcrypt hash, never plaintext
    image_file = db.Column(db.String(20), nullable=False, default='avatar.png')  # default profile picture
    cardatas = db.relationship('CarData', backref='user', lazy=False)  # one-to-many link to CarData
    def __repr__(self):
        """Debug representation showing the identifying fields."""
        return f"User('{self.username}', '{self.email}', '{self.image_file}')"
class CarData(db.Model):
    """A car listing posted by a user (see the one-to-many link on User.cardatas)."""
    id = db.Column(db.Integer, primary_key=True)
    Brand = db.Column(db.String(100), nullable=False)
    Model = db.Column(db.String(500), nullable=False)
    Engine = db.Column(db.String(100), nullable=False)
    Colour = db.Column(db.String(100), nullable=False)
    Comment = db.Column(db.String(100), nullable=False)
    #image_file2 = db.Column(db.String(20), nullable=False)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))  # owning User row
    def __repr__(self):
        """Debug representation showing brand and model."""
        return f"Cardata('{self.Brand}', '{self.Model}')"
|
{"/cardealer/forms.py": ["/cardealer/models.py"], "/cardealer/routes.py": ["/cardealer/__init__.py", "/cardealer/forms.py", "/cardealer/models.py"], "/cardealer/models.py": ["/cardealer/__init__.py"]}
|
37,256,431
|
JannichHP/Cardealerfinal
|
refs/heads/master
|
/cardealer/__init__.py
|
from flask import Flask
from flask_sqlalchemy import Model, SQLAlchemy  # NOTE(review): `Model` is imported but unused
from flask_bcrypt import Bcrypt
from flask_login import LoginManager
# Application factory-less setup: one module-level app plus its extensions.
app = Flask(__name__)
app.config['SECRET_KEY'] = '2b3b0653a9d3c42948bd5edce8fd84cb'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///site.db'  # local SQLite database
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False  # disable change-tracking overhead
db = SQLAlchemy(app)  # ORM handle used by models.py
bcrypt = Bcrypt(app)  # password hashing
login_manager = LoginManager(app)  # session/login management
login_manager.login_view = 'login_page'  # where @login_required redirects anonymous users
login_manager.login_message_category = 'info'
# Imported last so routes can use the objects defined above (circular-import dance).
from cardealer import routes
|
{"/cardealer/forms.py": ["/cardealer/models.py"], "/cardealer/routes.py": ["/cardealer/__init__.py", "/cardealer/forms.py", "/cardealer/models.py"], "/cardealer/models.py": ["/cardealer/__init__.py"]}
|
37,264,277
|
Intomies/battle-game
|
refs/heads/main
|
/knight.py
|
import pygame as pg
from settings import *
from fighter import Fighter
class Knight(Fighter):
    """The player's fighter; all behavior comes from Fighter, configured via args."""
    def __init__(self, char_type, max_hp, strength, potions, count):
        # Pure pass-through: Knight currently adds nothing beyond the base setup.
        super().__init__(char_type, max_hp, strength, potions, count)
|
{"/game.py": ["/settings.py", "/world.py", "/fighter.py", "/knight.py", "/battle_interface.py", "/healthbar.py"], "/world.py": ["/settings.py"], "/fighter.py": ["/settings.py"], "/knight.py": ["/settings.py", "/fighter.py"], "/main.py": ["/settings.py", "/game.py"], "/battle_interface.py": ["/settings.py"], "/healthbar.py": ["/settings.py"]}
|
37,264,278
|
Intomies/battle-game
|
refs/heads/main
|
/world.py
|
import pygame as pg
from settings import *
class World:
    """Static game backdrop: knows the screen size and draws the background image."""

    def __init__(self, width, height):
        self.width = width
        self.height = height
        self.background_pos = (0, 0)  # backdrop anchored at the top-left corner
        self.images = self.load_images()

    def update(self):
        """No per-frame state yet; kept for interface symmetry with other entities."""
        pass

    def draw(self, screen):
        """Blit the background onto the given surface."""
        screen.blit(self.images["background_img"], self.background_pos)

    def load_images(self):
        """Load the backdrop sprite once at construction time.

        The bottom-panel image is loaded by BattleInterface, so the redundant
        (and previously unused) panel load was removed here.
        """
        background_img = pg.image.load('img/Background/background.png').convert_alpha()
        return {"background_img": background_img}
|
{"/game.py": ["/settings.py", "/world.py", "/fighter.py", "/knight.py", "/battle_interface.py", "/healthbar.py"], "/world.py": ["/settings.py"], "/fighter.py": ["/settings.py"], "/knight.py": ["/settings.py", "/fighter.py"], "/main.py": ["/settings.py", "/game.py"], "/battle_interface.py": ["/settings.py"], "/healthbar.py": ["/settings.py"]}
|
37,264,279
|
Intomies/battle-game
|
refs/heads/main
|
/game.py
|
from os import SEEK_CUR  # NOTE(review): unused import
from bandit import Bandit
from knight import Knight
import pygame as pg
import sys
from settings import *
from world import World
from battle_interface import BattleInterface
from knight import Knight  # NOTE(review): duplicate of the import above
from bandit import Bandit  # NOTE(review): duplicate of the import above
from healthbar import Healthbar
class Game:
    """Top-level battle scene: owns the world, the knight, the bandits and the UI."""
    def __init__(self, screen, clock):
        self.screen = screen
        self.clock = clock
        self.width, self.height = self.screen.get_size()
        self.world = World(self.width, self.height)
        self.battle_interface = BattleInterface()
        self.knight = Knight('Knight', 30, 10, 3, 0)
        # The knight's healthbar is both kept locally and registered on the UI.
        self.knight_healthbar = Healthbar(self.knight.health_bar_pos_x, self.knight.health_bar_pos_y, self.knight.hp, self.knight.max_hp)
        self.battle_interface.healthbars.append(self.knight_healthbar)
        self.bandits = self.create_bandits(2)
        # (translated from Finnish design notes): give Healthbar its own class
        # (perhaps inheriting from the interface?); keep bandits and their
        # healthbars in a dictionary so both creation and the game loop can
        # iterate bandit_dict[i]["bandit"] / ["healthbar"] uniformly.
    def run(self):
        """Main loop: tick the clock, then process events, draw, and update."""
        self.playing = True
        print(self.bandits[0]["bandit"].name)  # debug: show the first bandit's generated name
        while self.playing:
            self.clock.tick(FPS)
            self.events()
            self.draw()
            self.update()
    def events(self):
        """Handle window-close and ESC, both of which exit the process."""
        for event in pg.event.get():
            if event.type == pg.QUIT:
                pg.quit()
                sys.exit()
            if event.type == pg.KEYDOWN:
                if event.key == pg.K_ESCAPE:
                    pg.quit()
                    sys.exit()
    def update(self):
        """Advance animation state for the knight and every bandit."""
        self.knight.update()
        for i in self.bandits.values():
            i["bandit"].update()
    def draw(self):
        """Render one frame back-to-front: world, panel, fighters, healthbars."""
        self.world.draw(self.screen)
        self.battle_interface.draw(self.screen)
        self.knight_healthbar.draw(self.knight.hp, self.screen)
        self.knight.draw(self.screen)
        for i in self.bandits.values():
            i["bandit"].draw(self.screen)
            i["healthbar"].draw(i["bandit"].hp, self.screen)
        pg.display.flip()
    def create_bandits(self, amount):
        """Build `amount` bandits (clamped to the configured min/max), each paired
        with its healthbar, keyed by slot index."""
        arr = []  # NOTE(review): unused leftover from an earlier list-based version
        temp_dict = {}
        if amount > MAX_AMOUNT_OF_ENEMIES:
            amount = MAX_AMOUNT_OF_ENEMIES
        if amount < MIN_AMOUNT_OF_ENEMIES:
            amount = MIN_AMOUNT_OF_ENEMIES
        for i in range(amount):
            temp_bandit = Bandit("Bandit", 20, 6, 1, i)
            temp_healthbar = Healthbar(temp_bandit.health_bar_pos_x, temp_bandit.health_bar_pos_y, temp_bandit.hp, temp_bandit.max_hp)
            temp_dict[i] = {
                "bandit": temp_bandit,
                "healthbar": temp_healthbar
            }
        return temp_dict
|
{"/game.py": ["/settings.py", "/world.py", "/fighter.py", "/knight.py", "/battle_interface.py", "/healthbar.py"], "/world.py": ["/settings.py"], "/fighter.py": ["/settings.py"], "/knight.py": ["/settings.py", "/fighter.py"], "/main.py": ["/settings.py", "/game.py"], "/battle_interface.py": ["/settings.py"], "/healthbar.py": ["/settings.py"]}
|
37,264,280
|
Intomies/battle-game
|
refs/heads/main
|
/settings.py
|
# Main settings
BOTTOM_PANEL = 150  # height (px) of the UI panel along the bottom edge
SCREEN_WIDTH = 800
SCREEN_HEIGHT = 400 + BOTTOM_PANEL
SCREEN_SIZE = (SCREEN_WIDTH, SCREEN_HEIGHT)
FPS = 60
# Fighter settings
# X positions per character type, indexed by a fighter's slot number (`count`).
FIGHTER_START_POSITIONS = {
    "Knight": [
        300,
        200,
        100
    ],
    "Bandit": [
        500,
        600,
        700
    ]
}
# Vertical (Y) placement per character type.
FIGHTER_HEIGHT = {
    "Knight": 260,
    "Bandit": 270
}
FIGHTER_IMAGE_SCALE = 3  # sprite upscale factor applied at load time
# Animation action names; each maps to a sprite directory img/<type>/<action>.
FIGHTER_ACTIONS = [
    "attack",
    "death",
    "hurt",
    "idle"
]
MAX_AMOUNT_OF_ENEMIES = 3
MIN_AMOUNT_OF_ENEMIES = 1
# Interface settings
HEALTH_BAR_POSITIONS_X = {
    "Knight": 100,
    "Bandit": 500
}
# Y positions stacked inside the bottom panel, indexed by fighter `count`.
HEALTH_BAR_POSITIONS_Y = [
    SCREEN_HEIGHT * 0.8,
    SCREEN_HEIGHT * 0.85,
    SCREEN_HEIGHT * 0.9
]
HEALTH_BAR_HEIGHT = 20
HEALTH_BAR_WIDTH = 150
# Colors settings
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
|
{"/game.py": ["/settings.py", "/world.py", "/fighter.py", "/knight.py", "/battle_interface.py", "/healthbar.py"], "/world.py": ["/settings.py"], "/fighter.py": ["/settings.py"], "/knight.py": ["/settings.py", "/fighter.py"], "/main.py": ["/settings.py", "/game.py"], "/battle_interface.py": ["/settings.py"], "/healthbar.py": ["/settings.py"]}
|
37,264,281
|
Intomies/battle-game
|
refs/heads/main
|
/fighter.py
|
import pygame as pg
import os
import random as rand
from settings import *
class Fighter:
    """Base class for all combatants: stats, sprite animation and screen placement.

    `count` is the fighter's slot number; it selects the start position and the
    healthbar row from the settings tables.
    """
    def __init__(self, char_type, max_hp, strength, potions, count):
        self.char_type = char_type
        self.max_hp = max_hp
        self.strength = strength
        self.count = count
        self.name = self.get_name()  # randomly generated fantasy name
        self.hp = max_hp
        self.potions = potions
        self.start_potions = potions
        self.alive = True
        self.actions = FIGHTER_ACTIONS # [0: attack, 1: death, 2: hurt, 3: idle]
        self.current_action = "idle"
        self.current_frame_index = 0
        self.update_time = pg.time.get_ticks()
        self.animation_cooldown = 100  # ms between animation frames
        self.starting_positions = FIGHTER_START_POSITIONS
        self.fighter_height = FIGHTER_HEIGHT
        self.image_scale = FIGHTER_IMAGE_SCALE
        self.images_list = self.load_images()
        self.image = self.images_list[self.current_action][self.current_frame_index]
        self.rect = self.get_rect()
        self.rect.center = (self.get_position())
        self.health_bar_pos_x = HEALTH_BAR_POSITIONS_X[self.char_type]
        self.health_bar_pos_y = HEALTH_BAR_POSITIONS_Y[self.count]
    def update(self):
        """Advance the current action's animation, wrapping back to frame 0."""
        self.image = self.images_list[self.current_action][self.current_frame_index]
        if pg.time.get_ticks() - self.update_time > self.animation_cooldown:
            self.update_time = pg.time.get_ticks()
            self.current_frame_index += 1
            if self.current_frame_index >= len(self.images_list[self.current_action]):
                self.current_frame_index = 0
    def draw(self, screen):
        """Blit the current animation frame at the fighter's rect."""
        screen.blit(self.image, self.rect)
    def load_images(self):
        """Load and scale every animation frame, keyed by action name.

        Assumes frames are named 0.png..N-1.png inside img/<type>/<action>/
        — TODO confirm against the asset layout.
        """
        all_images = {}
        for i in range(len(self.actions)):
            temp_list = []
            DIR = f'img/{self.char_type}/{self.actions[int(i)]}'
            # Frame count = number of files in the action directory.
            for j in range(len([name for name in os.listdir(DIR)])):
                img = pg.image.load(f'{DIR}/{j}.png')
                img = pg.transform.scale(img, (img.get_width() * self.image_scale, img.get_height() * self.image_scale))
                temp_list.append(img)
            all_images[self.actions[int(i)]] = temp_list
        return all_images
    def get_rect(self):
        """Rect of the current frame (origin at (0,0) until centered)."""
        return self.image.get_rect()
    def get_position(self):
        """Start center: X from the type/slot table, Y from the type table."""
        return self.starting_positions[self.char_type][self.count], self.fighter_height[self.char_type]
    def get_current_health(self):
        """Return remaining health as a 0..1 fraction."""
        return self.hp / self.max_hp
    def get_name(self):
        """Build a random two-syllable fantasy name from a fixed syllable pool."""
        cylls = "ara,gorn,gim,li,grish,nak,sna,ga,ug,luk,boro,mir,fara,lego,las,fro,do,bag,gins,sam,gam,gi,meri,a,doc,ran,di,buk,pe,reg,rin,tuk,gan,dalf,el,rond,dene,thor,eo,wyn,ar,wen,ga,lad,ri,el,gil,gol,lum,tho,rin,bom,ba,dil,tom,lut,hi,en,fi,ki,li,bur,theo,den,bil,bo"
        cylls_arr = cylls.split(",")
        return cylls_arr[rand.randint(0, len(cylls_arr) - 1)].capitalize() + cylls_arr[rand.randint(0, len(cylls_arr)- 1)]
|
{"/game.py": ["/settings.py", "/world.py", "/fighter.py", "/knight.py", "/battle_interface.py", "/healthbar.py"], "/world.py": ["/settings.py"], "/fighter.py": ["/settings.py"], "/knight.py": ["/settings.py", "/fighter.py"], "/main.py": ["/settings.py", "/game.py"], "/battle_interface.py": ["/settings.py"], "/healthbar.py": ["/settings.py"]}
|
37,264,282
|
Intomies/battle-game
|
refs/heads/main
|
/main.py
|
import pygame as pg
import os
from settings import *
from game import Game
def main():
    """Entry point: initialise pygame and the mixer, create the Game, loop."""
    pg.init()
    pg.mixer.init()
    screen = pg.display.set_mode(SCREEN_SIZE)
    clock = pg.time.Clock()
    running = True
    playing = True
    game = Game(screen, clock)
    while running:
        # Start menu
        while playing:
            # Game loop
            # NOTE(review): neither `playing` nor `running` is ever set to
            # False here, so these loops only end if game.run() raises or
            # exits the process - confirm Game.run handles quitting.
            game.run()
if __name__ == "__main__":
    main()
|
{"/game.py": ["/settings.py", "/world.py", "/fighter.py", "/knight.py", "/battle_interface.py", "/healthbar.py"], "/world.py": ["/settings.py"], "/fighter.py": ["/settings.py"], "/knight.py": ["/settings.py", "/fighter.py"], "/main.py": ["/settings.py", "/game.py"], "/battle_interface.py": ["/settings.py"], "/healthbar.py": ["/settings.py"]}
|
37,264,283
|
Intomies/battle-game
|
refs/heads/main
|
/battle_interface.py
|
import pygame as pg
from settings import *
class BattleInterface:
    """Bottom-of-screen UI panel drawn under the battle scene."""
    def __init__(self):
        # Panel occupies the bottom BOTTOM_PANEL pixels of the screen.
        self.position = (0, SCREEN_HEIGHT - BOTTOM_PANEL)
        self.images = self.load_images()
        # Placeholder for per-fighter health bars (feature commented out below).
        self.healthbars = []
    def update(self):
        # Nothing on the panel animates yet.
        pass
    def draw(self, screen):
        """Blit the panel background image at its fixed position."""
        screen.blit(self.images["panel_img"], self.position)
        # for healthbar in self.healthbars:
        #     healthbar.draw()
    def load_images(self):
        """Load the panel background (requires the display to be initialised,
        since convert_alpha needs a display surface)."""
        panel_img = pg.image.load('img/Icons/panel.png').convert_alpha()
        temp_images = {
            "panel_img": panel_img
        }
        return temp_images
    # def create_healthbar(self, x, y, current_hp, max_hp, screen):
    #     self.healthbars.append(pg.draw.rect(screen, RED, (x, y, current_hp, max_hp)))
|
{"/game.py": ["/settings.py", "/world.py", "/fighter.py", "/knight.py", "/battle_interface.py", "/healthbar.py"], "/world.py": ["/settings.py"], "/fighter.py": ["/settings.py"], "/knight.py": ["/settings.py", "/fighter.py"], "/main.py": ["/settings.py", "/game.py"], "/battle_interface.py": ["/settings.py"], "/healthbar.py": ["/settings.py"]}
|
37,264,284
|
Intomies/battle-game
|
refs/heads/main
|
/healthbar.py
|
import pygame as pg
from settings import *
class Healthbar():
    """Two-layer health bar: a red background (full width) overlaid with a
    green foreground whose width tracks the fighter's remaining hp."""

    def __init__(self, x, y, hp, max_hp):
        self.x_pos = x
        self.y_pos = y
        self.height = HEALTH_BAR_HEIGHT
        self.width = HEALTH_BAR_WIDTH
        self.health_color = GREEN
        self.dmg_color = RED
        self.hp = hp
        self.max_hp = max_hp
        self.ratio = self.get_ratio()
        self.damage_amount = self.get_damage()

    def update(self):
        # No per-frame logic; the bar is recomputed in draw().
        pass

    def draw(self, hp, screen):
        """Redraw the bar reflecting the given current hp."""
        self.hp = hp
        # BUG FIX: recompute the ratio and green-bar width from the new hp.
        # Previously these were only computed in __init__, so the green bar
        # never shrank no matter how much damage was taken.
        self.ratio = self.get_ratio()
        self.damage_amount = self.get_damage()
        pg.draw.rect(screen, self.dmg_color, (self.x_pos, self.y_pos, self.width, self.height))
        pg.draw.rect(screen, self.health_color, (self.x_pos, self.y_pos, self.damage_amount, self.height))

    def get_ratio(self):
        """Fraction of health remaining (hp / max_hp)."""
        return self.hp / self.max_hp

    def get_damage(self):
        """Pixel width of the green (remaining-health) portion of the bar."""
        return self.width * self.ratio
|
{"/game.py": ["/settings.py", "/world.py", "/fighter.py", "/knight.py", "/battle_interface.py", "/healthbar.py"], "/world.py": ["/settings.py"], "/fighter.py": ["/settings.py"], "/knight.py": ["/settings.py", "/fighter.py"], "/main.py": ["/settings.py", "/game.py"], "/battle_interface.py": ["/settings.py"], "/healthbar.py": ["/settings.py"]}
|
37,331,690
|
larryscheib/CardGame
|
refs/heads/master
|
/main.py
|
# Import required modules
import itertools
import random
# Define a class to create cards
class Cards:
    """Base card definitions.

    Numeric suits [1, 2, 3, 4] correspond to
    ['Spades', 'Diamonds', 'Hearts', 'Clubs']; values run 1 (Ace) to 13 (King).
    """
    # Suits = [1, 2, 3, 4] = ['Spades', 'Diamonds','Hearts','Clubs']
    SUITS = [1, 2, 3, 4]
    VALUES = range(1, 14)

    def __init__(self):
        pass


class Deck(Cards):
    """A standard 52-card deck built as the suit/value cartesian product."""

    def __init__(self):
        Cards.__init__(self)
        self.tempcardset = []
        # Full deck: every (suit, value) combination.
        self.cardset = list(itertools.product(self.SUITS, self.VALUES))

    def sort(self):
        """Sort the deck by suit, then value (two stable passes)."""
        self.tempcardset = sorted(self.cardset, key=lambda card: card[1])
        self.cardset = sorted(self.tempcardset, key=lambda card: card[0])
        return self.cardset

    def shuffle(self):
        """Shuffle in place; refuses to shuffle an incomplete deck."""
        if len(self.cardset) != 52:
            print("cannot shuffle the cards")
        else:
            random.shuffle(self.cardset)
        return self.cardset

    # # Method to remove a card from the deck
    # def popcard(self):
    #     if len(self.mycardset) == 0:
    #         return "NO CARDS TO DEAL FURTHER"
    #     cardpopped = self.mycardset.pop()
    #     return cardpopped
# class for Hand object
class Hand:
    """A player's hand of (suit, value) cards plus its running point total."""

    def __init__(self):
        # Cards held: (suit, value) tuples, or [suit_name, value] lists
        # after conversion() has run.
        self.playercards = []
        # Cached total; recomputed by totalhand().
        self.handTotal = 0

    def add_card(self, card):
        """
        Args: card:
        """
        self.playercards.append(card)

    def conversion(self):
        """Replace numeric suits and face values with their display names.

        Suits [1, 2, 3, 4] map to ['Spades', 'Diamonds', 'Hearts', 'Clubs'];
        values 11/12/13/1 become Jack/Queen/King/ACE. Call only after
        totalhand(), since converted values are no longer numeric.
        """
        face_names = {11: 'Jack', 12: 'Queen', 13: 'King', 1: 'ACE'}
        suit_names = {1: 'Spades', 2: 'Diamonds', 3: 'Hearts'}
        converted = []
        for suit, value in list(self.playercards):
            display_value = face_names.get(value, value)
            # Any suit not in the table (i.e. 4) is Clubs, as before.
            converted.append([suit_names.get(suit, 'Clubs'), display_value])
        self.playercards = converted

    def totalhand(self):
        """Recompute the hand total as the sum of suit * value products."""
        self.handTotal = sum(suit * value for suit, value in self.playercards)

    def printtotal(self):
        print("total is ", self.handTotal)

    def gettotal(self):
        return self.handTotal
class Winner:
    """Compare two hand totals and record the result message."""

    def __init__(self, hand1total, hand2total):
        """
        Args: hand1total, hand2total:
        """
        self.hand1total = hand1total
        self.hand2total = hand2total
        # Higher total wins; equal totals tie.
        if hand1total > hand2total:
            self.winner = 'player1 wins'
        elif hand1total < hand2total:
            self.winner = 'player2 wins'
        else:
            self.winner = 'we have a tie'

    def getwinner(self):
        return self.winner
# Driver Code for Creating objects

# create cards
objCards = Cards()
# generate a deck based on cards
objDeck = Deck()
deckOfCards = objDeck.cardset
# sort deck (by suit, then value)
sortedDeck = objDeck.sort()
# shuffle deck (in place; deckOfCards and sortedDeck alias the same list)
shuffledCards = objDeck.shuffle()
# create player 1 hand
player1Hand = Hand()
# create player 2 hand
player2Hand = Hand()
# deal 3 card hands to both players, alternating off the top of the deck
for x in range(0, 3):
    playerCard1 = shuffledCards.pop(0)
    player1Hand.add_card(playerCard1)
    playerCard2 = shuffledCards.pop(0)
    player2Hand.add_card(playerCard2)
# generate total for hand one
player1Hand.totalhand()
# generate total for hand two
player2Hand.totalhand()
# determine winner (higher suit*value sum wins)
winner = Winner(player1Hand.gettotal(), player2Hand.gettotal())
print('\n', winner.getwinner(), '\n')
# convert suits from numeric to alpha and values
# NOTE: conversion() must run after totalhand() - it replaces the numeric
# suits/values with display strings.
player1Hand.conversion()
print('player1Hand \n',player1Hand.playercards,'\n')
player2Hand.conversion()
print('player2Hand \n',player2Hand.playercards,'\n')
|
{"/test_CardGame_pytest.py": ["/main.py"]}
|
37,331,691
|
larryscheib/CardGame
|
refs/heads/master
|
/test_CardGame_pytest.py
|
import functools, time
from main import Deck, Hand, Winner
from ptest.decorator import TestClass
@TestClass(run_mode="parallel") # the test cases in this class will be executed by multiple threads
class test_CardGame_pytest:
    """End-to-end checks for Deck, Hand and Winner from main.py."""
    def test_Deck(self, list2=None, list1=None):
        """Exercise deck construction, sorting, shuffling, dealing, scoring
        and suit/value conversion in one pass."""
        objDeck = Deck()
        # test that new deck is 52 cards
        deckOfCards = objDeck.cardset
        assert len(deckOfCards) == 52
        # test that values are in correct sorted location
        sortedDeck = objDeck.sort()
        print("sortedCards ", sortedDeck)
        assert sortedDeck[0:1][0:1] == [(1, 1)]
        assert sortedDeck[0:10][9:10] == [(1, 10)]
        assert sortedDeck[0:13][12:13] == [(1, 13)]
        assert sortedDeck[0:26][25:26] == [(2, 13)]
        assert sortedDeck[0:39][38:39] == [(3, 13)]
        assert sortedDeck[0:52][51:52] == [(4, 13)]
        # shuffle deck
        shuffledCards = objDeck.shuffle()
        print("shuffledCards ",shuffledCards)
        list1 = list(shuffledCards)
        # test second shuffle
        secondShuffle = objDeck.shuffle()
        print('secondShuffle ',secondShuffle)
        list2 = list(secondShuffle)
        # shuffles should be unique (is it even possible to have back to back shuffles the same, I don't think so)
        # NOTE(review): this check is probabilistic - two identical
        # consecutive shuffles are astronomically unlikely but not impossible.
        isSame = functools.reduce(lambda x, y :x and y, map(lambda p, q : p==q, list1,list2),True)
        assert isSame == False
        # create hand
        playerHand = Hand()
        # initial hand should always score to zero
        assert playerHand.gettotal() == 0
        # add card from shuffled deck
        playerCard = shuffledCards.pop(0)
        playerHand.add_card(playerCard)
        # total hand
        playerHand.totalhand()
        # added first card implies hand value will be greater than 0
        assert playerHand.gettotal() > 0
        # size of shuffledCards should now be 51
        assert len(shuffledCards) == 51
        # test method add_card to verify hand is in fact 35 (sum of suit*value)
        newPlayer1Hand = Hand()
        newPlayer1Hand.add_card((4,2))
        newPlayer1Hand.add_card((1,11))
        newPlayer1Hand.add_card((4,4))
        newPlayer1Hand.totalhand()
        assert newPlayer1Hand.gettotal() == 35
        # manually create another hand that totals 37
        newPlayer2Hand = Hand()
        newPlayer2Hand.add_card((4,2))
        newPlayer2Hand.add_card((3,7))
        newPlayer2Hand.add_card((1,8))
        newPlayer2Hand.totalhand()
        assert newPlayer2Hand.gettotal() == 37
        # test for Winner class; winner should be hand two
        winner = Winner(newPlayer1Hand.gettotal(), newPlayer2Hand.gettotal())
        assert winner.getwinner() == 'player2 wins'
        # test conversion method where suits = [1, 2, 3, 4] = ['Spades', 'Diamonds','Hearts','Clubs']
        newPlayer1Hand.conversion()
        assert newPlayer1Hand.playercards[1][1] == 'Jack'
        assert newPlayer1Hand.playercards[0][0] == 'Clubs'
        assert newPlayer1Hand.playercards[1][0] == 'Spades'
        assert newPlayer1Hand.playercards[2][0] == 'Clubs'
# Run the test directly when this file is executed outside the ptest runner.
test_CardGame_pytest().test_Deck()
|
{"/test_CardGame_pytest.py": ["/main.py"]}
|
37,357,863
|
grupo2utn/frro-soporte-2019-02
|
refs/heads/mac3333-patch-2
|
/practico_01/ejercicio-01.py
|
# 1. Implementar una función max() que tome como argumento dos números y devuelva el mayor de ellos.
def maximo(a, b):
    """Return the larger of *a* and *b* (returns *a* on a tie).

    Implements the exercise spec above: a max() over two numbers.
    The previous body was an unimplemented `pass` stub.
    """
    return a if a >= b else b
|
{"/practico_03/ejercicio_08.py": ["/practico_03/ejercicio_02.py", "/practico_03/ejercicio_06.py", "/practico_03/ejercicio_07.py"], "/practico_02/ejercicio_04.py": ["/practico_02/ejercicio_03.py"], "/practico_03/ejercicio_05.py": ["/practico_03/ejercicio_01.py", "/practico_03/ejercicio_02.py"], "/practico_03/ejercicio_06.py": ["/practico_03/ejercicio_01.py"], "/practico_02/ejercicio_05.py": ["/practico_02/ejercicio_04.py"], "/practico_03/ejercicio_07.py": ["/practico_03/ejercicio_02.py", "/practico_03/ejercicio_06.py"]}
|
37,518,613
|
w0en/minluckBot
|
refs/heads/master
|
/pull_minluck_updates_from_gsheets_to_base_dictionary.py
|
import gspread
from mouse import Mouse
import pickle
# Google Sheet and service-account configuration for the minluck source data.
GOOGLE_SHEET_KEY = '1oNtGeOf9UqwTkQbvZ6MV974AfwDvIuuWMdLcLPR5swE'
CREDENTIALS_FILENAME = 'credentials.json'

# Pull every data row (skipping the header row) from the first worksheet.
worksheet = gspread.service_account(filename=CREDENTIALS_FILENAME).open_by_key(GOOGLE_SHEET_KEY).sheet1.get_all_values()[1:]
print("Worksheet loaded")

# Build the lookup: lower-cased mouse name -> Mouse record.
mouse_minlucks = {}
for breed in worksheet:
    # Treat the infinity symbol and blank cells as "no minluck" (None);
    # a comprehension replaces the old list(map(lambda ...)) construct.
    values = [None if cell in ('∞', '') else cell for cell in breed]
    mouse_minlucks[values[0].lower()] = Mouse(values[0], values[1], values[3], values[4:], values[2])
print("Dictionary populated")

# Persist the dictionary. `with` guarantees the handle is closed even if
# pickling raises (the original open/close pair leaked it on error).
with open('base_dict', 'wb') as filehandler:
    pickle.dump(mouse_minlucks, filehandler)
print("Dictionary Pickled")
|
{"/generate_minluck_dict.py": ["/mouse.py"], "/pull_minluck_updates_from_gsheets_to_base_dictionary.py": ["/mouse.py"]}
|
37,518,614
|
w0en/minluckBot
|
refs/heads/master
|
/test_bot.py
|
import discord
from discord_slash import SlashCommand
from discord_slash.utils import manage_commands
import pickle
# Discord-Related
# Bot token and the guild (server) ids the slash commands register against.
with open('bot_token.txt', 'r') as file:
    TOKEN = file.read().replace('\n', '')
with open('server_ids.txt', 'r') as file:
    GUILD_IDS = [int(x.strip()) for x in file.readlines()]
print(GUILD_IDS)
minluckBot = discord.Client()
slash = SlashCommand(minluckBot, sync_commands=True)
# Constants
POWERTYPES = ['Arcane', 'Draconic', 'Forgotten', 'Hydro', 'Parental', 'Physical', 'Shadow', 'Tactical', 'Law', 'Rift']
# Dictionary Loading
# `with` closes the pickle handles even if unpickling raises (the original
# open/close pairs leaked the handle on error). NOTE: only unpickle files
# produced by the companion generate/pull scripts - pickle is unsafe on
# untrusted data.
with open('minluck_dict', 'rb') as filehandler:
    mouse_dict = pickle.load(file=filehandler)
print("Mouse minluck dictionary successfully unpickled.")
with open('alias_dict', 'rb') as filehandler:
    alias_dict = pickle.load(file=filehandler)
print("Alias dictionary successfully unpickled")
@minluckBot.event
async def on_ready():
    """Log a line once the Discord gateway connection is established."""
    print('Bot is ready')
@slash.slash(
    name="minluck",
    guild_ids=GUILD_IDS,
    description="Finds the minluck for a mouse")
async def _minluck(ctx, breed):
    """Slash command: reply with a mouse's minluck and power types.

    Looks the breed up directly in mouse_dict, then falls back to the
    alias table; replies with an error message for unknown breeds.
    """
    try:
        mouse = mouse_dict[breed.lower()]
    except KeyError:
        try:
            mouse = mouse_dict[alias_dict[breed.lower()].lower()]
        except KeyError:
            await ctx.send("This mouse does not exist. Check that it's spelt properly.")
            # BUG FIX: bail out here. Previously execution fell through to
            # the lines below and crashed on the unset `mouse` variable.
            return
    powertypes = ", ".join(mouse.minluckPowerTypes)
    await ctx.respond()
    await ctx.send(f"Minluck for __{mouse.breed}__: {mouse.minluck}\nPower Type(s): {powertypes}")
'''
@slash.slash(
    name="gminluck",
    guild_ids=GUILD_IDS,
    description="Finds the minluck for a group")
async def _gminluck(ctx, group):
    for mouse in groups_dict[group]:
        try:
            mouse = mouse_dict[breed.lower()]
        except KeyError:
            try:
                mouse = mouse_dict[alias_dict[breed.lower()].lower()]
            except KeyError:
                await ctx.send("This mouse does not exist. Check that it's spelt properly.")
        powertypes = ", ".join(mouse.minluckPowerTypes)
        await ctx.respond()
        await ctx.send(f"Minluck for __{mouse.breed}__: {mouse.minluck}\nPower Type(s): {powertypes}")
'''
# Blocks until the bot shuts down; uses the token read at import time.
minluckBot.run(TOKEN)
|
{"/generate_minluck_dict.py": ["/mouse.py"], "/pull_minluck_updates_from_gsheets_to_base_dictionary.py": ["/mouse.py"]}
|
37,538,699
|
sunjianbo945/webcrawler
|
refs/heads/main
|
/Strategy/__init__.py
|
import logging
import datetime
# Daily log-file name, e.g. "strategy.20240131" (fixed at import time).
file_name = 'strategy.{day}'.format(day=datetime.datetime.now().strftime('%Y%m%d'))
logger = logging.getLogger(file_name)
# NOTE(review): hard-coded per-user Windows path - breaks on other machines.
hdlr = logging.FileHandler('C:\\Users\\Jianbo\\{file}.log'.format(file=file_name))
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.WARNING)
|
{"/DataLoading/yahoo_page.py": ["/Model/model.py", "/DataLoading/crud.py"], "/Analysis/analysis.py": ["/Analysis/sql.py", "/Analysis/analysis_date.py", "/DataLoading/yahoo_daily_data.py", "/DataLoading/yahoo_page.py"], "/DataLoading/crude_oil.py": ["/DataLoading/crud.py"], "/DataLoading/yahoo_daily_data.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/Strategy/daily_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/DataLoading/yahoo_minute_price_loading.py": ["/DataLoading/crud.py", "/Model/model.py"], "/Strategy/release_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/Analysis/sql.py": ["/Analysis/analysis_date.py"], "/DataLoading/release_earnings.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/DataLoading/sp500_wki.py": ["/DataLoading/crud.py", "/Model/model.py"], "/test/test_crud.py": ["/Model/model.py"]}
|
37,538,700
|
sunjianbo945/webcrawler
|
refs/heads/main
|
/DataLoading/yahoo_page.py
|
import datetime
import logging
import time

import requests  # BUG FIX: used by get_ticker_stats but was never imported
from selenium import webdriver

from DataLoading.crud import merge_many_into_stock_fundamental_data as merge_many_fundamental_data
from DataLoading.crud import merge_many_into_stock_min_data as merge_many_data
from DataLoading.crud import select_all_loading_minute_tickers as select_minute_ticker, merge_into_stock_basic_info, \
    select_all_fundamental_tickers
from DataLoading.crud import select_all_tickers
from Model.model import ValuationMeasures, StockPriceHistory, ShareStatistics, StockProfitability, \
    StockIncomeStatment, StockBalanceSheet, StockCashFlowStatement, StockDividendsAndSplits, StockFundamentalStats, \
    StockBasicInfo
'''
Yahoo Finance modules table summary :
assetProfile
financialData
defaultKeyStatistics
calendarEvents
incomeStatementHistory
cashflowStatementHistory
balanceSheetHistory
'''
# Module-level logger writing WARNING+ records to a local file.
logger = logging.getLogger('yahoo_page')
# NOTE(review): hard-coded per-user Windows path - breaks on other machines.
hdlr = logging.FileHandler('C:\\Users\\js799\\yahoo_page.log')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.WARNING)
def get_ticker_stats(ticker):
    """Fetch and assemble a ticker's fundamental statistics from Yahoo Finance.

    Hits two quoteSummary endpoints (key statistics / financials / calendar
    events, and summary detail) and maps the JSON fields into the project's
    model objects.

    Args:
        ticker: stock symbol, e.g. 'AAPL'.

    Returns:
        A StockFundamentalStats aggregate, or None when Yahoo reports an
        error for the ticker.
    """
    # NOTE(review): the `crumb` query token below is hard-coded and may
    # expire; `requests` must be imported at module level for this to run.
    url = "https://query2.finance.yahoo.com/v10/finance/quoteSummary/{ticker}?formatted=true&crumb=8ldhetOu7RJ&" \
          "lang=en-US&region=US&modules=defaultKeyStatistics%2CfinancialData%2CcalendarEvents&corsDomain=finance.yahoo.com"\
        .format(ticker=ticker)
    summary_detial_url = "https://query2.finance.yahoo.com/v10/finance/quoteSummary/" \
                         "{ticker}?formatted=true&crumb=8ldhetOu7RJ&lang=en-US&region=US&" \
                         "modules=summaryDetail&corsDomain=finance.yahoo.com".format(ticker=ticker)
    headers = {'User-Agent': 'Mozilla/5.0'}
    htmlfile = requests.get(url, headers=headers)
    data_statistics = htmlfile.json()
    htmlfile = requests.get(summary_detial_url, headers=headers)
    data_summary = htmlfile.json()
    # Yahoo signals unknown tickers via a non-null `error` field.
    if data_statistics['quoteSummary']['error'] is not None:
        return None
    default_key_statistics = data_statistics['quoteSummary']['result'][0]['defaultKeyStatistics']
    financial_data = data_statistics['quoteSummary']['result'][0]['financialData']
    summary_detail = data_summary['quoteSummary']['result'][0]['summaryDetail']
    # Each model object below picks its fields out of the three JSON maps;
    # .get() tolerates fields Yahoo omits for some tickers.
    valuation_measures = ValuationMeasures(ticker=ticker,
                                           market_cap=summary_detail.get('marketCap'),
                                           enterprise_value=default_key_statistics.get('enterpriseValue'),
                                           trailing_pe=summary_detail.get('trailingPE'),
                                           forward_pe=summary_detail.get('forwardPE'),
                                           peg_ratio_5y=default_key_statistics.get('pegRatio'),
                                           price_to_sale=summary_detail.get('priceToSalesTrailing12Months'),
                                           price_to_book=default_key_statistics.get('priceToBook'),
                                           enterprise_revenue=default_key_statistics.get('enterpriseToRevenue'),
                                           enterprise_ebitda=default_key_statistics.get('enterpriseToEbitda'))
    stock_price_history = StockPriceHistory(ticker=ticker,
                                            beta=default_key_statistics.get('beta'),
                                            high_52_week=summary_detail.get('fiftyTwoWeekHigh'),
                                            low_52_week=summary_detail.get('fiftyTwoWeekLow')
                                            )
    share_stats = ShareStatistics(ticker=ticker,
                                  avg_volume_3m=summary_detail.get('averageVolume'),
                                  avg_volume_10d=summary_detail.get('averageVolume10days'),
                                  share_outstanding=default_key_statistics.get('sharesOutstanding'),
                                  hold_insiders=default_key_statistics.get('heldPercentInsiders'),
                                  hold_inst=default_key_statistics.get('heldPercentInstitutions'),
                                  shares_short=default_key_statistics.get('sharesShort'),
                                  short_ratio=default_key_statistics.get('shortRatio'),
                                  shares_short_prev_m=default_key_statistics.get('sharesShortPriorMonth'))
    stock_profitability = StockProfitability(ticker=ticker,
                                             profit_margin=financial_data.get('profitMargins'),
                                             operating_margin=financial_data.get('operatingMargins'),
                                             ret_asset=financial_data.get('returnOnAssets'),
                                             ret_equity=financial_data.get('returnOnEquity'))
    stock_income_statement = StockIncomeStatment(ticker=ticker, revenue=financial_data.get('totalRevenue'),
                                                 revenue_per_share=financial_data.get('revenuePerShare'),
                                                 quarterly_revenue_growth=financial_data.get('revenueGrowth'),
                                                 gross_profit=financial_data.get('grossProfits'),
                                                 ebitda=financial_data.get('ebitda'),
                                                 net_income_avi_to_common=default_key_statistics.get('netIncomeToCommon'),
                                                 trailing_eps=default_key_statistics.get('trailingEps'),
                                                 forward_eps=default_key_statistics.get('forwardEps'),
                                                 quarterly_earnings_growth=default_key_statistics.get('earningsQuarterlyGrowth'))
    stock_balance_sheet = StockBalanceSheet(ticker=ticker,
                                            total_cash=financial_data.get('totalCash'),
                                            total_cash_per_share=financial_data.get('totalCashPerShare'),
                                            total_debt=financial_data.get('totalDebt'),
                                            total_debt_per_equity=financial_data.get('debtToEquity'),
                                            current_ratio=financial_data.get('currentRatio'),
                                            book_value_per_share=default_key_statistics.get('bookValue'))
    cash_flow_statement = StockCashFlowStatement(ticker=ticker,
                                                 operating_cash_flow=financial_data.get('operatingCashflow'),
                                                 levered_free_cash_flow=financial_data.get('freeCashflow'))
    stock_dividend_split = StockDividendsAndSplits(ticker=ticker,
                                                   forward_dividend_rate=summary_detail.get('dividendRate'),
                                                   forward_dividend_yield=summary_detail.get('dividendYield'),
                                                   trailing_dividend_rate=summary_detail.get('trailingAnnualDividendRate'),
                                                   trailing_dividend_yield=summary_detail.get('trailingAnnualDividendYield'),
                                                   avg_dividend_yield_5y=summary_detail.get('fiveYearAvgDividendYield'),
                                                   payout_ratio=summary_detail.get('payoutRatio'),
                                                   dividend_date=data_statistics['quoteSummary']['result'][0]['calendarEvents'].get('dividendDate'),
                                                   ex_dividend_date=summary_detail.get('exDividendDate'))
    return StockFundamentalStats(valuation_measures=valuation_measures,
                                 stock_price_history=stock_price_history,
                                 share_stats=share_stats,
                                 stock_profitability=stock_profitability,
                                 stock_income_statement=stock_income_statement,
                                 stock_balance_sheet=stock_balance_sheet,
                                 cash_flow_statement=cash_flow_statement,
                                 stock_dividend_split=stock_dividend_split)
def get_ticker_previous_min_data(position, cur_data, data_list):
    """Return cur_data when present, otherwise backfill from history.

    Scans data_list backwards from position-1 for the nearest non-None
    entry; returns 0 when no prior entry exists. Re-raises IndexError
    (after printing diagnostics) if `position` is out of range.
    """
    if cur_data is not None:
        return cur_data
    try:
        idx = position - 1
        while idx >= 0:
            candidate = data_list[idx]
            if candidate is not None:
                return candidate
            idx -= 1
    except IndexError as err:
        print(err)
        print("position = {0}".format(position))
        raise
    return 0
def load_daily_minute_ticker_data():
    """Load minute-level price data for every configured ticker and merge it
    into the database, printing per-ticker and overall timing.

    Failures for individual tickers are logged and skipped; the batch
    continues with the next ticker.
    """
    logger = logging.getLogger('yahoo_page_stock_min_data')
    hdlr = logging.FileHandler('C:\\Users\\Jianbo\\yahoo_page_stock_min_data.log')
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    hdlr.setFormatter(formatter)
    logger.addHandler(hdlr)
    logger.setLevel(logging.WARNING)
    tickers = select_minute_ticker()
    program_start_time = time.time()
    for ticker in tickers:
        try:
            # BCR is known-bad and skipped explicitly.
            if ticker == 'BCR':
                continue
            print('Start loading ticker = {0}'.format(ticker))
            start_time = time.time()
            # NOTE(review): get_ticker_min_data is not defined in this file -
            # confirm it is defined or imported elsewhere before running.
            data = get_ticker_min_data(ticker)
            merge_many_data(data)
            print('finish loading ticker = {0},'
                  ' and {1} data has been loaded'
                  ' and it totally takes {2}'.format(ticker, len(data), time.strftime("%H:%M:%S", time.gmtime(
                      time.time() - start_time))))
        except Exception:
            # BUG FIX: narrowed from a bare `except:` so KeyboardInterrupt /
            # SystemExit can still abort the batch.
            logging.info('ticker = {ticker} is bad'.format(ticker=ticker))
    print('The whole load_daily_minute_ticker_data program takes {0}'.format(time.strftime("%H:%M:%S", time.gmtime(
        time.time() - program_start_time))))
    # logger.log('The whole load_daily_minute_ticker_data program takes {0}'.format(time.strftime("%H:%M:%S", time.gmtime(
    #     time.time() - program_start_time))))
def load_ticker_fundamental_data(date=None):
    """Load fundamental stats for every ticker pending on `date` and merge
    them into the database, printing per-ticker and overall timing.

    Args:
        date: 'YYYY-MM-DD' string; defaults to today's date at call time.
    """
    # BUG FIX: the old default `date=datetime.datetime.today().strftime(...)`
    # was evaluated once at import time, so a long-running process always
    # reused the stale import-day date. Resolve "today" at call time instead.
    if date is None:
        date = datetime.datetime.today().strftime('%Y-%m-%d')
    logger = logging.getLogger('yahoo_page_fundamental')
    hdlr = logging.FileHandler('C:\\Users\\js799\\yahoo_page_fundamental.log')
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    hdlr.setFormatter(formatter)
    logger.addHandler(hdlr)
    logger.setLevel(logging.WARNING)
    tickers = select_all_fundamental_tickers(date)
    program_start_time = time.time()
    for ticker in tickers:
        print('Start loading ticker = {0}'.format(ticker))
        start_time = time.time()
        try:
            data = get_ticker_stats(ticker)
            merge_many_fundamental_data(data)
            print('finish loading ticker = {0},'
                  ' and it totally takes {1}'.format(ticker, time.strftime("%H:%M:%S", time.gmtime(time.time() - start_time))))
        except Exception:
            # BUG FIX: narrowed from a bare `except:` so Ctrl-C still works.
            print('ticker = {ticker} load failed'.format(ticker=ticker))
            #logger.log('ticker = {ticker} load failed'.format(ticker=ticker))
    print('The whole load_ticker_fundamental_data program takes {0}'.format(time.strftime("%H:%M:%S", time.gmtime(
        time.time() - program_start_time))))
    # logger.log('The whole load_ticker_fundamental_data program takes {0}'.format(time.strftime("%H:%M:%S", time.gmtime(
    #     time.time() - program_start_time))))
def load_ticker_sector_info(ticker=None):
    """Scrape a ticker's company name and sector/sub-sector from its Yahoo
    profile page and persist them via merge_into_stock_basic_info.

    Returns:
        True on success; None when `ticker` is missing or scraping fails.
    """
    if ticker is None:
        return None
    browser = None
    success = True
    try:
        url = "https://finance.yahoo.com/quote/{ticker}/profile?p={ticker}".format(ticker=ticker)
        if browser is None:
            browser = webdriver.Chrome()
        browser.get(url)
        company_name = browser.find_element_by_xpath("//div[@id='Col1-0-Profile-Proxy']/section/div/div").text
        company_name = company_name.split('\n')[0]
        table = browser.find_element_by_xpath("//div[@id='Col1-0-Profile-Proxy']/section/div/div/div")
        parts = table.find_elements_by_xpath('.//p')
        # Second <p> carries "Sector: X" / "Industry: Y" on separate lines.
        sector_info = parts[1].text.split('\n')
        sector = sector_info[0].split(':')[1]
        sub_sector = sector_info[1].split(':')[1]
        stocker = StockBasicInfo(ticker, company_name, sector, sub_sector)
        merge_into_stock_basic_info(stocker)
    except Exception:
        # BUG FIX: narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit still propagate; scrape/parse failures are skipped.
        print(ticker + " does not have good data")
        return None
    finally:
        # Always release the browser, even when scraping fails.
        if browser is not None:
            browser.quit()
    return success
def load_all_tickers_sector_info():
    """Scrape and persist sector info for every known ticker, logging any
    ticker whose profile page could not be processed."""
    for ticker in select_all_tickers():
        print(ticker + ' starts')
        result = load_ticker_sector_info(ticker)
        if result is None:
            logger.info('{ticker} does not work'.format(ticker=ticker))
        print(ticker + ' ends')
def main():
    """Entry point: currently runs the sector-info scrape for all tickers.

    The commented-out argparse block selected between minute-data and
    fundamental loading; it is kept for reference.
    """
    # parser = argparse.ArgumentParser()
    #
    # parser.add_argument('-g', action='store', dest='goal',
    #                     help='the goal of this run')
    #
    # results = parser.parse_args()
    #
    # if results.goal == 'return':
    #     load_daily_minute_ticker_data()
    # else:
    #     load_ticker_fundamental_data()
    #load_ticker_fundamental_data()
    load_all_tickers_sector_info()
if __name__ == '__main__':
    main()
|
{"/DataLoading/yahoo_page.py": ["/Model/model.py", "/DataLoading/crud.py"], "/Analysis/analysis.py": ["/Analysis/sql.py", "/Analysis/analysis_date.py", "/DataLoading/yahoo_daily_data.py", "/DataLoading/yahoo_page.py"], "/DataLoading/crude_oil.py": ["/DataLoading/crud.py"], "/DataLoading/yahoo_daily_data.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/Strategy/daily_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/DataLoading/yahoo_minute_price_loading.py": ["/DataLoading/crud.py", "/Model/model.py"], "/Strategy/release_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/Analysis/sql.py": ["/Analysis/analysis_date.py"], "/DataLoading/release_earnings.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/DataLoading/sp500_wki.py": ["/DataLoading/crud.py", "/Model/model.py"], "/test/test_crud.py": ["/Model/model.py"]}
|
37,538,701
|
sunjianbo945/webcrawler
|
refs/heads/main
|
/Analysis/analysis.py
|
import Analysis.sql as sql
import json
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import Analysis.analysis_date as ana_date
import DataLoading.yahoo_daily_data as daily_data
import DataLoading.yahoo_page as page
def get_market_weight():
    """Split tickers into the bottom and top quartiles of cumulative
    market-cap weight.

    Returns:
        (df_fq, df_lq): tickers making up the first 25% and the last 25%
        of total market capitalisation, as DataFrames indexed by ticker.
    """
    df = sql.find_all_tickers_cap()
    df['weight'] = df['market_cap'] * 1.0 / df['market_cap'].sum()
    # Sort ascending by weight so the cumulative sum walks smallest-first.
    df = df.set_index('ticker').sort_values(by='weight')
    cumulative = df.weight.cumsum()
    df_fq = df[cumulative.apply(lambda w: w < 0.25)]
    df_lq = df[cumulative.apply(lambda w: w > 0.75)]
    return df_fq, df_lq
def get_tickers_return(tickers, start_date, end_date):
    """Annualised percentage return for each ticker over the date range.

    Raises:
        Exception: when the underlying time series query returns None.
    """
    df = sql.get_portfolio_daily_time_series(tickers, start_date, end_date)
    if df is None:
        raise Exception('The DataFrame from get_tickers_return is None')
    # Calendar span of the series, used to annualise the total return.
    num_days = (df.index.values[-1] - df.index.values[0]).days
    year_faction = num_days / 365.0
    annualised = df.pct_change(len(df.index) - 1) * (1 / year_faction)
    return annualised.iloc[-1]
def get_tickers_lowest_opp():
    """Rank tickers by how close the close price sits to the historical low.

    Adds absolute ('diff') and relative ('per_diff') distance columns and
    returns the frame sorted with the closest-to-low tickers first.
    """
    df = sql.find_ticker_low_position()
    if df is None:
        raise Exception('The DataFrame from get_tickers_return is None')
    df['diff'] = df['close'] - df['lowest']
    df['per_diff'] = df['diff'] / df['lowest']
    return df.sort_values('per_diff')
def run_portfolio():
    """Read portfolio.ini (JSON: portfolio dict plus start/end dates) and
    print and return the annualised returns for its tickers."""
    with open('portfolio.ini') as json_data_file:
        data = json.load(json_data_file)
    portfolio = data.get('portfolio')
    start_date = data.get('start_date')
    end_date = data.get('end_date')
    portfolio_time_series = get_tickers_return(portfolio.keys(), start_date, end_date)
    print(portfolio_time_series)
    print(portfolio_time_series.get('AAPL'))
    return portfolio_time_series
def get_ticker_daily_returns_data(ticker, dates=""):
    """Concatenate the daily-returns rows for `ticker` across `dates`.

    Args:
        ticker: symbol passed through to the SQL lookup.
        dates: iterable of date strings; the default empty string iterates
            zero times and yields an empty DataFrame (kept for
            backward compatibility).
    """
    # pd.concat replaces DataFrame.append, which is deprecated and removed
    # in pandas >= 2.0, and avoids quadratic re-copying per appended frame.
    frames = [sql.find_ticker_daily_returns(ticker, day) for day in dates]
    if not frames:
        return pd.DataFrame()
    return pd.concat(frames, ignore_index=True)
def analysis_ticker_return_data(ticker):
    """Fetch the ticker's daily-returns data over a fixed late-2017 /
    early-2018 trading-day window.

    The original fetched the data twice and computed an unused 60-row
    rolling mean; this fetches once and returns it directly.
    """
    dates = ['2017-12-27', '2017-12-28', '2017-12-29', '2018-01-02', '2018-01-03', '2018-01-04', '2018-01-05',
             '2018-01-08', '2018-01-09', '2018-01-10', '2018-01-11', '2018-01-12']
    return get_ticker_daily_returns_data(ticker, dates)
def analysis_ticker_release_info(ticker):
    """Summarise the price reaction around each earnings release of `ticker`.

    For pre-market releases, compares the release day's open/high/low to the
    prior day's close; for post-market releases, compares the next day's
    open/high/low to the release day's close. Prints a per-release summary.

    Returns:
        A list of (ticker, diff_open, diff_low, diff_high, release_date)
        tuples, or None when earnings/liquidity data is missing or any
        processing error occurs.
    """
    earnings = sql.find_stock_earings(ticker)
    if earnings is None:
        return None
    ticker_info = []
    #daily_data.get_data([ticker], ana_date.next_business_dates(n=-1000), ana_date.next_business_dates())
    # Skip illiquid tickers entirely.
    liquid = sql.find_liquid_ticker(ticker)
    if liquid is None:
        return None
    try:
        for e in earnings:
            # e layout (per usage below): e[1]=release date, e[2]=time-of-day
            # text, e[3]/e[4]=EPS before/after, e[5]=surprise.
            release_date = e[1]
            time = e[2]
            if 'before' in time.lower():
                # Pre-market release: baseline is the previous business
                # day's close (index 5 of the daily row).
                pre_date = ana_date.next_business_dates(datestr=ana_date.convert_date_to_str(release_date),n=-1)
                release_day = ana_date.next_business_dates(datestr=ana_date.convert_date_to_str(release_date))
                pre_stock_daily_data = sql.find_ticker_daily_data(ticker, ana_date.convert_date_to_str(pre_date))
                release_data_stock_daily_data = sql.find_ticker_daily_data(ticker, ana_date.convert_date_to_str(release_day))
                if release_data_stock_daily_data is None:
                    continue
                release_open = release_data_stock_daily_data[2]
                release_high = release_data_stock_daily_data[3]
                release_low = release_data_stock_daily_data[4]
                diff_open = (release_open - pre_stock_daily_data[5]) / (pre_stock_daily_data[5])
                diff_high = (release_high - pre_stock_daily_data[5]) / (pre_stock_daily_data[5])
                diff_low = (release_low - pre_stock_daily_data[5]) / (pre_stock_daily_data[5])
                print('ticker = {ticker}, release date is {date}, {time} , EPS change from {eps1} to {eps2} by {sup},'
                      ' jump from {price1} to {price2} by {percentage}, '
                      'release price range is from {p1} to {p2} and change from {per1} to {per2}'
                      .format(ticker=ticker, date=sql.convert_date_to_str(release_date), time=time,price1=pre_stock_daily_data[5],
                              eps1=e[3], eps2=e[4], sup=e[5],
                              price2=release_data_stock_daily_data[2],
                              percentage=str(diff_open), p1=str(release_low), p2=str(release_high),
                              per1=str(diff_low), per2=str(diff_high)))
                ticker_info.append((ticker, diff_open, diff_low, diff_high,release_date))
            elif 'after' in time.lower():
                # Post-market release: baseline is the release day's close;
                # the reaction shows up in the NEXT business day's prices.
                # NOTE(review): this branch uses sql.convert_date_to_str
                # where the 'before' branch uses ana_date.convert_date_to_str
                # - confirm both helpers behave identically.
                next_date = ana_date.next_business_dates(datestr=sql.convert_date_to_str(release_date), n=1)
                release_day = ana_date.next_business_dates(datestr=sql.convert_date_to_str(release_date))
                nex_stock_daily_data = sql.find_ticker_daily_data(ticker, ana_date.convert_date_to_str(next_date))
                release_data_stock_daily_data = sql.find_ticker_daily_data(ticker,
                                                                           ana_date.convert_date_to_str(release_day))
                if nex_stock_daily_data is None:
                    continue
                next_open = nex_stock_daily_data[2]
                next_high = nex_stock_daily_data[3]
                next_low = nex_stock_daily_data[4]
                diff_open = (next_open - release_data_stock_daily_data[5]) / (release_data_stock_daily_data[5])
                diff_high = (next_high - release_data_stock_daily_data[5]) / (release_data_stock_daily_data[5])
                diff_low = (next_low - release_data_stock_daily_data[5]) / (release_data_stock_daily_data[5])
                print('ticker = {ticker}, release date is {date}, {time} , EPS change from {eps1} to {eps2} by {sup},'
                      'jump from {price1} to {price2} by {percentage}, '
                      'release price range is from {p1} to {p2} and change from {per1} to {per2}'
                      .format(ticker=ticker, date=sql.convert_date_to_str(release_date), time=time,
                              eps1=e[3], eps2=e[4],sup=e[5],
                              price1=release_data_stock_daily_data[5],
                              price2=next_open,
                              percentage=str(diff_open), p1=str(next_low), p2=str(next_high),
                              per1=str(diff_low), per2=str(diff_high)))
                ticker_info.append((ticker, diff_open, diff_low, diff_high,release_date))
            else:
                continue
        return ticker_info
    except:
        # NOTE(review): bare except hides the real error (and swallows
        # Ctrl-C); consider narrowing to Exception and logging a traceback.
        print('ticker is bad')
        return None
def find_interesting_big_jump_ticker(ticker,jump):
    """Return `ticker` when every historical release opened up more than
    `jump` (fraction) and at least 70% of releases qualify; otherwise None.

    Returns None immediately on the first release whose open move does not
    exceed `jump`.
    """
    ticker_info = analysis_ticker_release_info(ticker)
    open_interest = 0
    if ticker_info is not None and len(ticker_info) != 0:
        for info in ticker_info:
            #price jump more then 1%
            # info[1] is diff_open (open move vs. the pre-release close).
            if info[1] > jump:
                open_interest += 1
            else:
                return None
        if open_interest / len(ticker_info) > 0.7:
            return ticker
        else:
            return None
def find_interesting_large_gap_ticker(ticker,jump):
    """Return `ticker` when every historical release's high-low gap exceeds
    `jump` (fraction) and at least 70% of releases qualify; otherwise None.

    Returns None immediately on the first release whose gap is too small.
    """
    ticker_info = analysis_ticker_release_info(ticker)
    open_interest = 0
    if ticker_info is not None and len(ticker_info) != 0:
        for info in ticker_info:
            #price jump more then 1%
            # info[3]-info[2] is diff_high - diff_low: the release-day range.
            if info[3]-info[2] > jump:
                open_interest += 1
            else:
                return None
        if open_interest / len(ticker_info) > 0.7:
            return ticker
        else:
            return None
def analysis_after_release_large_gap_ticker(date):
    """Tickers releasing earnings on `date` whose past releases have
    consistently shown a high-low gap above 8%."""
    candidates = sql.find_tickers_release_earnings_by_date(date)
    return [t for t in candidates
            if find_interesting_large_gap_ticker(t, 0.08) is not None]
def find_interesting_prerelease_ticker(date):
    """Summarise which of the day's jump candidates also follow the
    two-day pre-release trading pattern.

    Returns None when no candidate releases on *date*, otherwise a
    human-readable summary string.
    """
    candidates = analysis_release_ticker_earnings(date)
    if len(candidates) == 0:
        return None
    followers = [candidate for candidate in candidates
                 if analysis_ticker_before_release_behavior(candidate) is not None]
    hit_rate = 1.0 * len(followers) / len(candidates)
    return 'good tickers {tickers}, only {subset} follow two days pre trade, you are betting {per} is right'\
        .format(tickers=candidates, subset=followers, per=hit_rate)
def analysis_release_ticker_earnings(date):
    """List tickers releasing earnings on *date* whose open price has
    historically jumped more than 1% on release."""
    candidates = sql.find_tickers_release_earnings_by_date(date)
    return [ticker for ticker in candidates
            if find_interesting_big_jump_ticker(ticker, 0.01) is not None]
def analysis_release_ticker_before_release_behavior(ticker, start, end, jump):
    """Compare *ticker*'s pre-release return against the benchmark index.

    Rows whose symbol contains '^' are treated as the benchmark; the other
    row is the ticker itself.

    Returns:
        -1 when the ticker underperforms the benchmark even after adding
           the release *jump*;
         1 when it outperforms (with or without the jump);
        None when the result set is missing either row, or on a tie.
    """
    info = sql.get_ticker_bench_return(ticker, start, end)
    if info is None or len(info) == 0:
        return None
    bench_return = None
    ticker_return = None
    for i in info:
        # benchmark symbols (e.g. '^GSPC') carry a leading '^'
        if '^' in i[0]:
            bench_return = i[1]
        else:
            ticker_return = i[1]
    if bench_return is None or ticker_return is None:
        # fix: an unmatched result set previously raised NameError below
        return None
    print('ticker = {ticker} from {d1} to {d2}, return is {r}, and sp500 has {br}, release jump is {jump}'
          .format(ticker=ticker,d1=start,d2=end,r=ticker_return,br=bench_return,jump=jump))
    if ticker_return < bench_return and ticker_return+jump < bench_return:
        return -1
    elif ticker_return > bench_return or ticker_return+jump > bench_return:
        return 1
    return None
def analysis_ticker_before_release_behavior(ticker):
    """Score *ticker*'s trading in the two business days before each past
    earnings release; return the ticker when the net score ratio exceeds 0.7.

    Returns None when there is no earnings or release history to score.
    """
    earnings = sql.find_stock_earings(ticker)
    if earnings is None or len(earnings)==0:
        return None
    count = 0
    total = 0
    ticker_info = analysis_ticker_release_info(ticker)
    if ticker_info is None or len(ticker_info)==0:
        return None
    # index the release info by release date (tuple element 4) for lookup below
    map = {}
    for ticker_i in ticker_info:
        map[ticker_i[4]] = ticker_i
    for e in earnings:
        release_date = e[1]
        time = e[2]
        # e[5] is the surprise figure; skip releases without one
        if e[5] is None:
            continue
        # choose the two-business-day window preceding the market reaction:
        # before-market releases react the same day, after-market the next day
        if 'before' in time.lower():
            end = ana_date.next_business_dates(datestr=ana_date.convert_date_to_str(release_date),n=-1)
            start = ana_date.next_business_dates(datestr=ana_date.convert_date_to_str(release_date), n=-2)
        elif 'after' in time.lower():
            end = ana_date.next_business_dates(datestr=ana_date.convert_date_to_str(release_date))
            start = ana_date.next_business_dates(datestr=ana_date.convert_date_to_str(release_date), n=-1)
        else:
            continue
        # NOTE(review): raises KeyError if this release date is absent from the
        # release-info map — confirm both data sources always align
        jump = map[release_date][1]
        good = analysis_release_ticker_before_release_behavior(ticker, start, end, jump)
        # losing signals are penalised twice as hard as winners are rewarded
        if good == -1:
            count -= 2
        elif good is not None:
            count += 1
        total += 1
    if total !=0 and 1.0*count/total >0.7:
        return ticker
    else:
        return None
def analysis_oil_inventories_uwt_relationship(ticker):
    """Print how often *ticker* moved in the direction implied by the weekly
    crude-oil inventory surprise (actual vs. expected).

    Column semantics follow the SQL result: i[0]/i[1] are actual/expected
    inventories, i[2], i[4], i[5] are price points — TODO confirm exact
    columns against get_ticker_oil_inventories_relationships.
    """
    info = sql.get_ticker_oil_inventories_relationships(ticker)
    # fix: guard the empty result set (previously ZeroDivisionError below)
    if not info:
        print('No oil inventory data for ticker = {t}'.format(t=ticker))
        return
    size = len(info)
    count = 0
    for i in info:
        # inventory build with a price drop, or draw with a price rise,
        # counts as a correct bet
        if i[0]>=i[1] and i[2]<=i[4]:
            count+=1
        elif i[0]<=i[1] and i[2]>=i[5]:
            count+=1
    print('{per} % of the time, we are betting right'.format(per=1.0*count/size))
def analysis_tickers_daily_return_ranks(date):
    """Thin pass-through: daily returns for all tickers on *date*, as ranked by the SQL layer."""
    return sql.find_all_tickers_daily_returns(date)
def analysis_all_sectors_performance(start_date,end_date):
    """Build a per-sector portfolio of top tickers and print its return series.

    NOTE(review): the hard-coded '2018-03-06' ranking date and the 'AAPL'
    probe below look like leftover debugging values — confirm before use.
    """
    sectors = sql.find_all_sectors()
    if sectors is None:
        print('No sector in database')
        return None
    for sector in sectors:
        # skip pseudo-sectors that represent indices rather than equities
        if 'index' in sector.lower():
            continue
        tickers = sql.find_top_tickers_by_sector(sector,'2018-03-06', start_date,end_date)
        portfolio_time_series = get_tickers_return(tickers, start_date, end_date)
        print(portfolio_time_series)
        print(portfolio_time_series.get('AAPL'))
def main():
    """Ad-hoc driver: currently probes the release history of one ticker."""
    analysis_ticker_release_info('WUBA')


if __name__ == '__main__':
    main()
|
{"/DataLoading/yahoo_page.py": ["/Model/model.py", "/DataLoading/crud.py"], "/Analysis/analysis.py": ["/Analysis/sql.py", "/Analysis/analysis_date.py", "/DataLoading/yahoo_daily_data.py", "/DataLoading/yahoo_page.py"], "/DataLoading/crude_oil.py": ["/DataLoading/crud.py"], "/DataLoading/yahoo_daily_data.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/Strategy/daily_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/DataLoading/yahoo_minute_price_loading.py": ["/DataLoading/crud.py", "/Model/model.py"], "/Strategy/release_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/Analysis/sql.py": ["/Analysis/analysis_date.py"], "/DataLoading/release_earnings.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/DataLoading/sp500_wki.py": ["/DataLoading/crud.py", "/Model/model.py"], "/test/test_crud.py": ["/Model/model.py"]}
|
37,538,702
|
sunjianbo945/webcrawler
|
refs/heads/main
|
/test/test_config.py
|
import unittest
import config
import os
import psycopg2
class TestConfig(unittest.TestCase):
    """Exercise the database-config loader and a live PostgreSQL connection."""

    # INI file with the connection parameters, resolved relative to the CWD
    path = os.getcwd() + os.sep + "test_database.ini"

    def test_config(self):
        """The parsed INI file exposes the expected connection parameters."""
        result = config.config(filename=TestConfig.path)
        self.assertEqual(result['user'], 'postgres')
        # fix: this assertion was duplicated in the original
        self.assertEqual(result['database'], 'stockDB')
        self.assertEqual(result['password'], 'admin')
        self.assertEqual(result['host'], 'localhost')

    def test_connect(self):
        """ This is an example that how to connect to the PostgreSQL database server """
        conn = None
        try:
            # read connection parameters
            params = config.config(filename=TestConfig.path)
            # connect to the PostgreSQL server
            print('Connecting to the PostgreSQL database...')
            conn = psycopg2.connect(**params)
            # create a cursor
            cur = conn.cursor()
            # execute a statement
            print('PostgreSQL database version:')
            cur.execute('SELECT version()')
            # display the PostgreSQL database server version
            db_version = cur.fetchone()
            print(db_version)
            # close the communication with the PostgreSQL
            cur.close()
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)
        finally:
            if conn is not None:
                conn.close()
                print('Database connection closed.')
# Allow running this test module directly: python test_config.py
if __name__ == '__main__':
    unittest.main()
|
{"/DataLoading/yahoo_page.py": ["/Model/model.py", "/DataLoading/crud.py"], "/Analysis/analysis.py": ["/Analysis/sql.py", "/Analysis/analysis_date.py", "/DataLoading/yahoo_daily_data.py", "/DataLoading/yahoo_page.py"], "/DataLoading/crude_oil.py": ["/DataLoading/crud.py"], "/DataLoading/yahoo_daily_data.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/Strategy/daily_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/DataLoading/yahoo_minute_price_loading.py": ["/DataLoading/crud.py", "/Model/model.py"], "/Strategy/release_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/Analysis/sql.py": ["/Analysis/analysis_date.py"], "/DataLoading/release_earnings.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/DataLoading/sp500_wki.py": ["/DataLoading/crud.py", "/Model/model.py"], "/test/test_crud.py": ["/Model/model.py"]}
|
37,538,703
|
sunjianbo945/webcrawler
|
refs/heads/main
|
/DataLoading/crude_oil.py
|
from selenium import webdriver
import time
import datetime
from DataLoading.crud import merge_into_crude_oil_inventories
def convert(value):
    """Parse an investing.com quantity string into an integer.

    Accepts a plain number plus an optional 'K' (thousand) or 'M' (million)
    suffix, e.g. '1.5K' -> 1500.  Falsy input ('' or None) maps to 0.
    """
    if not value:
        return 0
    multiplier = 1
    if value.endswith('K'):
        multiplier = 1000
        value = value[:-1]  # strip the suffix character
    elif value.endswith('M'):
        multiplier = 1000000
        value = value[:-1]
    # parse as float first so fractional quantities like '1.5K' work
    return int(float(value) * multiplier)
def load_data():
    """Scrape the EIA crude-oil inventory history from investing.com and
    merge each (date, actual, expected) row into the database.

    Requires a local Chrome + chromedriver; uses the legacy selenium
    find_element_by_* API (selenium < 4).
    """
    url = 'https://www.investing.com/economic-calendar/eia-crude-oil-inventories-75'
    browser = webdriver.Chrome()
    browser.get(url)
    button = browser.find_element_by_xpath("//div[@id='eventTabDiv_history_0']/div[@id='showMoreHistory75']")
    button.click()
    # click "show more" 50 additional times to page in the full history
    for i in range(0, 50):
        button.click()
        time.sleep(1)
    table = browser.find_element_by_xpath("//div[@id='eventTabDiv_history_0']/table/tbody")
    rows = table.find_elements_by_xpath('.//tr')
    # first row is the table header
    for row in rows[1:]:
        cols = row.find_elements_by_tag_name('td')
        date = datetime.datetime.strptime(cols[0].text, '%b %d, %Y').strftime('%Y-%m-%d')
        actual = convert(cols[2].text)
        exp = convert(cols[3].text)
        merge_into_crude_oil_inventories(date, actual, exp)
    if browser is not None:
        browser.quit()
def main():
    # Entry point: scrape and persist the full inventory history.
    load_data()
if __name__ == '__main__':
    main()
|
{"/DataLoading/yahoo_page.py": ["/Model/model.py", "/DataLoading/crud.py"], "/Analysis/analysis.py": ["/Analysis/sql.py", "/Analysis/analysis_date.py", "/DataLoading/yahoo_daily_data.py", "/DataLoading/yahoo_page.py"], "/DataLoading/crude_oil.py": ["/DataLoading/crud.py"], "/DataLoading/yahoo_daily_data.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/Strategy/daily_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/DataLoading/yahoo_minute_price_loading.py": ["/DataLoading/crud.py", "/Model/model.py"], "/Strategy/release_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/Analysis/sql.py": ["/Analysis/analysis_date.py"], "/DataLoading/release_earnings.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/DataLoading/sp500_wki.py": ["/DataLoading/crud.py", "/Model/model.py"], "/test/test_crud.py": ["/Model/model.py"]}
|
37,538,704
|
sunjianbo945/webcrawler
|
refs/heads/main
|
/DataLoading/yahoo_daily_data.py
|
from pandas_datareader import data
from pandas_datareader._utils import RemoteDataError
import requests
from Model.model import StockDailyData
from DataLoading.crud import merge_many_into_stock_daily_data as merge_many, select_all_tickers
from DataLoading.crud import merge_many_into_fx_daily_data as merge_fx_many
from DataLoading.crud import select_all_loading_tickers, select_all_fx_currency
import time
import Analysis.analysis_date as analysis_date
def load_daily_instrument_price(tickers, start_date, end_date, data_source='yahoo'):
    """Download daily OHLCV bars for *tickers* and merge them into the DB.

    Reference : https://www.learndatasci.com/tutorials/python-finance-part-yahoo-finance-api-pandas-matplotlib/

    Tickers that fail with a remote/connection error (Yahoo throttling) are
    retried for up to ~50 rounds; other failures are logged and dropped.
    Always returns True.
    """
    remain = ['a']   # non-empty sentinel so the retry loop runs at least once
    attempts = -1
    while remain:
        if attempts > 50:
            break
        remain.clear()
        for ticker in tickers:
            # yahoo does not support '.' in tickers; it uses '-' instead
            if data_source == 'yahoo':
                query_ticker = ticker.replace('.', '-')
            else:
                # fix: query_ticker was previously unbound for other sources
                query_ticker = ticker
            start_time = time.time()
            print('--------------------------------- Start load {0} -----------------------------'.format(ticker))
            try:
                daily_data = data.DataReader([query_ticker], data_source, start_date, end_date)
                # drop rows without a volume (non-trading days / bad rows)
                filtered_df = daily_data[daily_data['Volume'].notnull()]
                temp = [StockDailyData(ticker=ticker, date=index.date(), open=row["Open"][0], high=row["High"][0],
                                       low=row["Low"][0], close=row["Close"][0], adj_close=row["Adj Close"][0],
                                       volume=row["Volume"][0])
                        for index, row in filtered_df.iterrows()]
                merge_many(temp)
                print('finish load ticker = {0}. Total spend {1}'.format(ticker, time.strftime("%H:%M:%S", time.gmtime(
                    time.time()-start_time))))
            except (RemoteDataError, ConnectionError) as er:
                # transient failure: remember the ticker and retry next round
                print(er)
                print(ticker + ' does not have any thing -------------------------------- ')
                remain.append(ticker)
            except Exception as er:
                # permanent failure: log and drop the ticker
                print(er)
                print(ticker + ' does not have any thing -------------------------------- ')
        tickers = remain.copy()
        attempts += 1
    return True
def load_daily_currency_data(fx_id, fx_currency, start_time_stamp, end_time_stamp):
    """Fetch daily FX close rates from the Yahoo v8 chart API.

    Returns a list of (fx_id, 'YYYY-MM-DD', close) tuples, or None when the
    response carries no timestamps.  Missing closes are back-filled with the
    nearest earlier value (0 when none exists).
    """
    url = "https://query1.finance.yahoo.com/v8/finance/chart/{currency}=X?symbol=EURUSD%3DX&period1={p1}&" \
          "period2={p2}&interval=1d&includePrePost=true&events=div%7Csplit%7Cearn&corsDomain=finance.yahoo.com"\
        .format(currency=fx_currency, p1=start_time_stamp, p2=end_time_stamp)

    def get_ticker_previous_min_data(position, cur_data, data_list):
        # Back-fill helper: cur_data if present, else the nearest earlier
        # non-None entry, else 0.
        if cur_data is not None:
            return cur_data
        try:
            for i in range(position - 1, -1, -1):
                ret_data = data_list[i]
                if ret_data is not None:
                    return ret_data
        except IndexError as e:
            print(e)
            print("position = {0}".format(position))
            raise
        return 0

    headers = {'User-Agent': 'Mozilla/5.0'}
    htmlfile = requests.get(url, headers=headers)
    fx_currency_data = htmlfile.json()
    time_stamps = fx_currency_data['chart']['result'][0].get('timestamp')
    # fix: the original tested `time is None` (the module object), so the
    # missing-timestamp case was never detected
    if time_stamps is None:
        return None
    close_rate = fx_currency_data['chart']['result'][0]['indicators']['adjclose'][0]['adjclose']
    return [(fx_id, time.strftime("%Y-%m-%d", time.localtime(time_stamps[i])),
             get_ticker_previous_min_data(i, close_rate[i], close_rate)) for i in range(len(time_stamps))]
def main(tickers=None):
    """Load daily bars from 2007-01-01 through today.

    tickers: optional explicit list; when omitted or empty, every ticker
    still missing data in the window is loaded.
    """
    # fix: default was a mutable `[]` (mutable-default-argument pitfall);
    # None is behaviourally identical for all callers
    start_date = analysis_date.convert_str_to_date('2007-01-01')
    end_date = analysis_date.convert_str_to_date()
    if not tickers:
        tickers = select_all_loading_tickers(start_date, end_date)
    load_daily_instrument_price(tickers, start_date, end_date)
def load_fx_currenty(start_date, end_date):
    """Load daily FX rates for every configured currency pair into the DB.

    NOTE(review): the name keeps the original 'currenty' spelling because
    external callers may reference it.
    """
    # load fx_currency part
    fx_currencys = select_all_fx_currency(start_date, end_date)
    # the Yahoo chart API takes unix timestamps (local midnight of each date)
    start_date_ux_timestampe = int((time.mktime(time.strptime("{0} 00:00:00".format(start_date), "%Y-%m-%d %H:%M:%S"))))
    end_date_ux_timestampe = int(time.mktime(time.strptime("{0} 00:00:00".format(end_date), "%Y-%m-%d %H:%M:%S")))
    for fx in fx_currencys:
        print('--------------------------------- Start load {0} -----------------------------'.format(fx[1]+fx[2]))
        temp = load_daily_currency_data(fx[0], '{0}{1}'.format(fx[1], fx[2]), start_date_ux_timestampe, end_date_ux_timestampe)
        merge_fx_many(temp)
        print('--------------------------------- Finish load {0} -----------------------------'.format(fx[1]+fx[2]))
if __name__ == '__main__':
    main()
|
{"/DataLoading/yahoo_page.py": ["/Model/model.py", "/DataLoading/crud.py"], "/Analysis/analysis.py": ["/Analysis/sql.py", "/Analysis/analysis_date.py", "/DataLoading/yahoo_daily_data.py", "/DataLoading/yahoo_page.py"], "/DataLoading/crude_oil.py": ["/DataLoading/crud.py"], "/DataLoading/yahoo_daily_data.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/Strategy/daily_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/DataLoading/yahoo_minute_price_loading.py": ["/DataLoading/crud.py", "/Model/model.py"], "/Strategy/release_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/Analysis/sql.py": ["/Analysis/analysis_date.py"], "/DataLoading/release_earnings.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/DataLoading/sp500_wki.py": ["/DataLoading/crud.py", "/Model/model.py"], "/test/test_crud.py": ["/Model/model.py"]}
|
37,538,705
|
sunjianbo945/webcrawler
|
refs/heads/main
|
/Analysis/analysis_date.py
|
from datetime import datetime
from datetime import timedelta
def convert_str_to_date(datestr="", format="%Y-%m-%d"):
    """Parse *datestr* with *format* and return a ``datetime.date``.

    An empty (or omitted) *datestr* yields today's date.

    >>> convert_str_to_date('2018-01-01').isoformat()
    '2018-01-01'
    """
    if datestr:
        return datetime.strptime(datestr, format).date()
    return datetime.today().date()
def next_business_dates(datestr="", format="%Y-%m-%d", n=0):
    """Return the date *n* business days (Mon-Fri) away from *datestr*.

    datestr : e.g. '2018-01-01'; empty means today.
    format  : strptime format for *datestr*.
    n       : business days to step; negative steps backwards; 0 returns
              the parsed date unchanged (even when it falls on a weekend).

    Note: only weekends are skipped — holidays are NOT accounted for.
    (fix: the original docstring wrongly claimed "no input => yesterday".)
    """
    date = convert_str_to_date(datestr=datestr, format=format)
    if n == 0:
        return date
    step = timedelta(days=1 if n > 0 else -1)
    for _ in range(abs(n)):
        date += step
        # isoweekday(): Saturday == 6, Sunday == 7
        while date.isoweekday() > 5:
            date += step
    return date
def convert_date_to_str(date, format="%Y-%m-%d"):
    """Inverse of convert_str_to_date: format a date object as a string."""
    return date.strftime(format)
|
{"/DataLoading/yahoo_page.py": ["/Model/model.py", "/DataLoading/crud.py"], "/Analysis/analysis.py": ["/Analysis/sql.py", "/Analysis/analysis_date.py", "/DataLoading/yahoo_daily_data.py", "/DataLoading/yahoo_page.py"], "/DataLoading/crude_oil.py": ["/DataLoading/crud.py"], "/DataLoading/yahoo_daily_data.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/Strategy/daily_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/DataLoading/yahoo_minute_price_loading.py": ["/DataLoading/crud.py", "/Model/model.py"], "/Strategy/release_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/Analysis/sql.py": ["/Analysis/analysis_date.py"], "/DataLoading/release_earnings.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/DataLoading/sp500_wki.py": ["/DataLoading/crud.py", "/Model/model.py"], "/test/test_crud.py": ["/Model/model.py"]}
|
37,538,706
|
sunjianbo945/webcrawler
|
refs/heads/main
|
/Model/model.py
|
class StockBasicInfo:
    """Static listing info for a ticker: security name plus sector classification."""
    def __init__(self, ticker, security, sector, sub_sector):
        self.ticker = ticker
        self.security = security
        self.sector = sector
        self.sub_sector = sub_sector
class StockDailyData:
    """One daily OHLCV bar (plus adjusted close) for a ticker."""
    def __init__(self, ticker, date, open, high, low, close, adj_close, volume):
        self.ticker = ticker
        self.date = date
        self.open = open
        self.high = high
        self.low = low
        self.close = close
        self.adj_close = adj_close
        self.volume = volume
class StockMinData:
    """One intraday (1-minute) OHLCV bar for a ticker."""
    def __init__(self, ticker, datetime, high, low, open, close, volume):
        self.ticker = ticker
        self.datetime = datetime
        self.high = high
        self.low = low
        self.open = open
        self.close = close
        self.volume = volume
class ValuationMeasures:
    """Valuation-measures section of a ticker's statistics page."""
    def __init__(self, ticker, market_cap, enterprise_value, trailing_pe, forward_pe, peg_ratio_5y, price_to_sale,
                 price_to_book, enterprise_revenue, enterprise_ebitda ):
        self.ticker = ticker
        self.market_cap = market_cap
        self.enterprise_value = enterprise_value
        self.trailing_pe = trailing_pe
        self.forward_pe = forward_pe
        self.peg_ratio_5y = peg_ratio_5y
        self.price_to_sale = price_to_sale
        self.price_to_book = price_to_book
        self.enterprise_revenue = enterprise_revenue
        self.enterprise_ebitda = enterprise_ebitda
class StockPriceHistory:
    """Price-history statistics: beta plus the 52-week high/low."""
    def __init__(self, ticker, beta, high_52_week, low_52_week):
        self.ticker = ticker
        self.beta = beta
        self.high_52_week = high_52_week
        self.low_52_week = low_52_week
class ShareStatistics:
    """Share statistics: average volumes, ownership and short interest."""
    def __init__(self, ticker, avg_volume_3m, avg_volume_10d, share_outstanding, hold_insiders, hold_inst, shares_short,
                 short_ratio, shares_short_prev_m):
        self.ticker = ticker
        self.avg_volume_3m = avg_volume_3m
        self.avg_volume_10d = avg_volume_10d
        self.share_outstanding = share_outstanding
        self.hold_insiders = hold_insiders
        self.hold_inst = hold_inst
        self.shares_short = shares_short
        self.short_ratio = short_ratio
        self.shares_short_prev_m = shares_short_prev_m
class StockProfitability:
    """Profitability ratios: margins plus return on assets/equity."""
    def __init__(self, ticker, profit_margin, operating_margin, ret_asset, ret_equity):
        self.ticker = ticker
        self.profit_margin = profit_margin
        self.operating_margin = operating_margin
        self.ret_asset = ret_asset
        self.ret_equity = ret_equity
class StockIncomeStatment:
    """Income-statement highlights for a ticker.

    The class name keeps the original 'Statment' spelling; callers
    reference it.
    """
    def __init__(self, ticker, revenue, revenue_per_share, quarterly_revenue_growth, gross_profit, ebitda,
                 net_income_avi_to_common, trailing_eps, forward_eps, quarterly_earnings_growth):
        self.ticker = ticker
        self.revenue = revenue
        self.revenue_per_share = revenue_per_share
        self.quarterly_revenue_growth = quarterly_revenue_growth
        self.gross_profit = gross_profit
        self.ebitda = ebitda
        self.net_income_avi_to_common = net_income_avi_to_common
        self.trailing_eps = trailing_eps
        self.forward_eps = forward_eps
        # NOTE(review): stored as 'quarterly_earning_growth' (singular) while
        # the parameter is plural — confirm which spelling consumers read
        self.quarterly_earning_growth = quarterly_earnings_growth
class StockBalanceSheet:
    """Balance-sheet highlights: cash, debt and book-value figures."""
    def __init__(self, ticker, total_cash, total_cash_per_share, total_debt, total_debt_per_equity,
                 current_ratio, book_value_per_share):
        self.ticker = ticker
        self.total_cash = total_cash
        self.total_cash_per_share = total_cash_per_share
        self.total_debt = total_debt
        self.total_debt_per_equity = total_debt_per_equity
        self.current_ratio = current_ratio
        self.book_value_per_share = book_value_per_share
class StockCashFlowStatement:
    """Cash-flow highlights: operating and levered free cash flow."""
    def __init__(self, ticker, operating_cash_flow, levered_free_cash_flow):
        self.ticker = ticker
        self.operating_cash_flow = operating_cash_flow
        self.levered_free_cash_flow = levered_free_cash_flow
class StockDividendsAndSplits:
    """Dividend and split statistics for a ticker."""
    def __init__(self, ticker, forward_dividend_yield, forward_dividend_rate, trailing_dividend_yield,
                 trailing_dividend_rate, avg_dividend_yield_5y, payout_ratio, dividend_date, ex_dividend_date):
        self.ticker = ticker
        self.forward_dividend_yield = forward_dividend_yield
        self.forward_dividend_rate = forward_dividend_rate
        self.trailing_dividend_yield = trailing_dividend_yield
        self.trailing_dividend_rate = trailing_dividend_rate
        self.avg_dividend_yield_5y = avg_dividend_yield_5y
        self.payout_ratio = payout_ratio
        self.dividend_date = dividend_date
        self.ex_dividend_date = ex_dividend_date
class StockFundamentalStats:
    """Aggregate of every per-ticker fundamental section above."""
    def __init__(self, valuation_measures, stock_price_history, share_stats, stock_profitability, stock_income_statement,
                 stock_balance_sheet, cash_flow_statement, stock_dividend_split):
        self.valuation_measures = valuation_measures
        self.stock_price_history=stock_price_history
        self.share_stats = share_stats
        self.stock_profitability = stock_profitability
        self.stock_income_statement = stock_income_statement
        self.stock_balance_sheet = stock_balance_sheet
        self.cash_flow_statement = cash_flow_statement
        self.stock_dividend_split = stock_dividend_split
class Portfolio:
    """A single portfolio position: ticker plus its weight and dollar amount."""

    def __init__(self, ticker, weight=0, money=0):
        # assignments are independent; stored in reverse declaration order
        self.money = money
        self.weight = weight
        self.ticker = ticker
class StockEarningData:
    """One earnings-release record; '-' placeholders are normalised to None."""

    def __init__(self, ticker, release_date, time, expect_eps, actual_eps, surprise):
        def _clean(figure):
            # the scraped earnings pages use '-' for a missing EPS figure
            return None if figure == '-' else figure

        self.ticker = ticker
        self.release_date = release_date
        self.time = time
        self.expect_eps = _clean(expect_eps)
        self.actual_eps = _clean(actual_eps)
        self.surprise = _clean(surprise)
|
{"/DataLoading/yahoo_page.py": ["/Model/model.py", "/DataLoading/crud.py"], "/Analysis/analysis.py": ["/Analysis/sql.py", "/Analysis/analysis_date.py", "/DataLoading/yahoo_daily_data.py", "/DataLoading/yahoo_page.py"], "/DataLoading/crude_oil.py": ["/DataLoading/crud.py"], "/DataLoading/yahoo_daily_data.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/Strategy/daily_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/DataLoading/yahoo_minute_price_loading.py": ["/DataLoading/crud.py", "/Model/model.py"], "/Strategy/release_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/Analysis/sql.py": ["/Analysis/analysis_date.py"], "/DataLoading/release_earnings.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/DataLoading/sp500_wki.py": ["/DataLoading/crud.py", "/Model/model.py"], "/test/test_crud.py": ["/Model/model.py"]}
|
37,538,707
|
sunjianbo945/webcrawler
|
refs/heads/main
|
/Strategy/daily_strategy.py
|
import Analysis.analysis as analysis
import Analysis.analysis_date as analysis_date
import sys
import datetime
def big_plunge_strategy(date):
    """Write the day's ranked daily-return report to a dated log file.

    NOTE(review): the output path is hard-coded to C:\\Users\\Jianbo.
    """
    old_stdout = sys.stdout
    file_name = 'big_plunge_strategy.{day}'.format(day=datetime.datetime.now().strftime('%Y%m%d'))
    log_file = open('C:\\Users\\Jianbo\\{file}.log'.format(file=file_name), "w")
    sys.stdout = log_file
    try:
        print('------------Pay attention to the following tickers on date = {date}----------------'
              .format(date=date))
        info = analysis.analysis_tickers_daily_return_ranks(date)
        for i in info:
            print(i)
        print('\n')
    finally:
        # fix: restore stdout and release the file even when analysis raises
        sys.stdout = old_stdout
        log_file.close()
def main():
    # next_business_dates() with no args returns today's date (n defaults to 0)
    date = analysis_date.next_business_dates()
    big_plunge_strategy(date)
if __name__ == '__main__':
    main()
|
{"/DataLoading/yahoo_page.py": ["/Model/model.py", "/DataLoading/crud.py"], "/Analysis/analysis.py": ["/Analysis/sql.py", "/Analysis/analysis_date.py", "/DataLoading/yahoo_daily_data.py", "/DataLoading/yahoo_page.py"], "/DataLoading/crude_oil.py": ["/DataLoading/crud.py"], "/DataLoading/yahoo_daily_data.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/Strategy/daily_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/DataLoading/yahoo_minute_price_loading.py": ["/DataLoading/crud.py", "/Model/model.py"], "/Strategy/release_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/Analysis/sql.py": ["/Analysis/analysis_date.py"], "/DataLoading/release_earnings.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/DataLoading/sp500_wki.py": ["/DataLoading/crud.py", "/Model/model.py"], "/test/test_crud.py": ["/Model/model.py"]}
|
37,538,708
|
sunjianbo945/webcrawler
|
refs/heads/main
|
/DataLoading/yahoo_minute_price_loading.py
|
import time
import logging
from DataLoading.crud import select_all_loading_minute_tickers as select_minute_ticker
import requests
from DataLoading.crud import merge_many_into_stock_min_data as merge_many_data
from Model.model import StockMinData
def load_daily_minute_ticker_data():
    """Fetch today's 1-minute bars for every configured ticker and merge
    them into the database; tickers that fail are logged and skipped."""
    tickers = select_minute_ticker()
    program_start_time = time.time()
    for ticker in tickers:
        try:
            # BCR is known-bad and intentionally skipped
            if ticker == 'BCR':
                continue
            print('Start loading ticker = {0}'.format(ticker))
            start_time = time.time()
            data = get_ticker_min_data(ticker)
            merge_many_data(data)
            print('finish loading ticker = {0},'
                  ' and {1} data has been loaded'
                  ' and it totally takes {2}'.format(ticker, len(data), time.strftime("%H:%M:%S", time.gmtime(
                time.time() - start_time))))
        except Exception:
            # fix: was a bare `except:` which also swallowed SystemExit /
            # KeyboardInterrupt; keep the best-effort skip, but narrower
            logging.info('ticker = {ticker} is bad'.format(ticker=ticker))
    print('The whole load_daily_minute_ticker_data program takes {0}'.format(time.strftime("%H:%M:%S", time.gmtime(
        time.time() - program_start_time))))
def get_ticker_previous_min_data(position, cur_data, data_list):
    """Back-fill a missing minute value.

    Returns cur_data when present; otherwise walks backwards from
    *position* and returns the nearest earlier non-None entry, falling
    back to 0 when the whole prefix is missing.
    """
    if cur_data is not None:
        return cur_data
    try:
        for idx in range(position - 1, -1, -1):
            candidate = data_list[idx]
            if candidate is not None:
                return candidate
    except IndexError as e:
        # defensive: report where the lookup fell off the list, then re-raise
        print(e)
        print("position = {0}".format(position))
        raise
    return 0
def get_ticker_min_data(ticker):
    """Query the Yahoo v8 chart API for today's 1-minute bars of *ticker*
    and return them as StockMinData rows (empty list when the response
    carries no timestamps)."""
    url = "https://query1.finance.yahoo.com/v8/finance/chart/{ticker}?range=1d&includePrePost=false&interval=1" \
          "m&corsDomain=finance.yahoo.com&.tsrc=finance".format(ticker=ticker)
    headers = {'User-Agent': 'Mozilla/5.0'}
    html_file = requests.get(url, headers=headers)
    data = html_file.json()
    time_stamps = data['chart']['result'][0].get('timestamp')
    if time_stamps is None:
        return []
    high_datas=data['chart']['result'][0].get('indicators')["quote"][0]["high"]
    low_datas = data['chart']['result'][0].get('indicators')["quote"][0]["low"]
    open_datas = data['chart']['result'][0].get('indicators')["quote"][0]["open"]
    close_datas = data['chart']['result'][0].get('indicators')["quote"][0]["close"]
    volume_datas = data['chart']['result'][0]['indicators']["quote"][0]["volume"]
    # NOTE(review): `and` means this only raises when BOTH lists are empty —
    # confirm whether `or` (either empty) was the intent
    if len(time_stamps) ==0 and len(close_datas) == 0:
        raise Exception("time stampe size is {0} and close price size is {1}".format(len(time_stamps), len(close_datas)))
    # return [StockMinData(ticker=ticker, datetime=time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time_stamps[i])),
    #                      close=get_ticker_previous_min_data(i, close_datas[i], close_datas),
    #                      volume=get_ticker_previous_min_data(i, volume_datas[i], volume_datas)) for i in range(len(time_stamps))]
    return [StockMinData(ticker=ticker, datetime=time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time_stamps[i])),
                         high=high_datas[i],low=low_datas[i],open= open_datas[i],
                         close=close_datas[i],
                         volume=volume_datas[i]) for i in
            range(len(time_stamps))]
def main():
    # Entry point: load today's minute bars for every configured ticker.
    load_daily_minute_ticker_data()
if __name__=='__main__':
    main()
|
{"/DataLoading/yahoo_page.py": ["/Model/model.py", "/DataLoading/crud.py"], "/Analysis/analysis.py": ["/Analysis/sql.py", "/Analysis/analysis_date.py", "/DataLoading/yahoo_daily_data.py", "/DataLoading/yahoo_page.py"], "/DataLoading/crude_oil.py": ["/DataLoading/crud.py"], "/DataLoading/yahoo_daily_data.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/Strategy/daily_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/DataLoading/yahoo_minute_price_loading.py": ["/DataLoading/crud.py", "/Model/model.py"], "/Strategy/release_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/Analysis/sql.py": ["/Analysis/analysis_date.py"], "/DataLoading/release_earnings.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/DataLoading/sp500_wki.py": ["/DataLoading/crud.py", "/Model/model.py"], "/test/test_crud.py": ["/Model/model.py"]}
|
37,538,709
|
sunjianbo945/webcrawler
|
refs/heads/main
|
/Strategy/release_strategy.py
|
import Analysis.analysis as analysis
import Analysis.analysis_date as analysis_date
import sys
import datetime
def main():
    """Driver: run one release strategy (the others are left disabled)."""
    #jump_strategy()
    #after_release_overheat_strategy('2018-03-12')
    prerelease_strategy('2018-03-12')
def prerelease_strategy(date):
    """For each of the next 5 business days starting at *date*, log which
    pre-release candidates also follow the two-day pre-trade pattern.

    NOTE(review): the log path is hard-coded to C:\\Users\\Jianbo.
    """
    old_stdout = sys.stdout
    file_name = 'prerelease_strategy.{day}'.format(day=datetime.datetime.now().strftime('%Y%m%d'))
    log_file = open('C:\\Users\\Jianbo\\{file}.log'.format(file=file_name), "w")
    start_date = date
    # fix: anchor the window at the supplied date (was anchored at *today*),
    # matching after_release_overheat_strategy and jump_strategy
    end_date = analysis_date.next_business_dates(datestr=start_date, n=5)
    while analysis_date.convert_str_to_date(start_date) < end_date:
        # renamed from `str`, which shadowed the builtin
        report = analysis.find_interesting_prerelease_ticker(analysis_date.convert_str_to_date(start_date))
        sys.stdout = log_file
        print('------------Pay attention to the following tickers on date = {date}----------------'
              .format(date=start_date))
        print(report)
        log_file.flush()
        print('\n')
        print('\n')
        sys.stdout = old_stdout
        start_date = analysis_date.convert_date_to_str(analysis_date.next_business_dates(datestr=start_date, n=1))
        print('\n')
        print('\n')
    log_file.close()
def after_release_overheat_strategy(date ):
    """Walk 5 business days from *date* and log, per day, the tickers whose
    past releases show large (>8%) intraday gaps, with each one's history.

    NOTE(review): the log path is hard-coded to C:\\Users\\Jianbo, and stdout
    is not restored if the analysis raises mid-loop.
    """
    old_stdout = sys.stdout
    file_name = 'after_jump_strategy.{day}'.format(day=datetime.datetime.now().strftime('%Y%m%d'))
    log_file = open('C:\\Users\\Jianbo\\{file}.log'.format(file=file_name), "w")
    start_date = date
    end_date = analysis_date.next_business_dates(datestr=start_date, n=5)
    while analysis_date.convert_str_to_date(start_date) <= end_date:
        tickers = analysis.analysis_after_release_large_gap_ticker(start_date)
        # redirect stdout so the per-day report lands in the log file
        sys.stdout = log_file
        print('------------Pay attention to the following tickers on date = {date}----------------'
              .format(date=start_date))
        print(tickers)
        for ticker in tickers:
            analysis.analysis_ticker_release_info(ticker)
        log_file.flush()
        print('\n')
        print('\n')
        sys.stdout = old_stdout
        start_date = analysis_date.convert_date_to_str(analysis_date.next_business_dates(datestr=start_date, n=1))
        print('\n')
        print('\n')
    log_file.close()
def jump_strategy():
    """For 7 business days from a fixed start date, log tickers whose
    earnings releases produced notable jumps, plus their release info.

    stdout is temporarily redirected to a dated log file and is always
    restored, even when a query raises.
    """
    old_stdout = sys.stdout
    file_name = 'release_jump_strategy.{day}'.format(day=datetime.datetime.now().strftime('%Y%m%d'))
    log_file = open('C:\\Users\\Jianbo\\{file}.log'.format(file=file_name), "w")
    try:
        start_date = '2018-03-12'
        end_date = analysis_date.next_business_dates(datestr=start_date, n=7)
        while analysis_date.convert_str_to_date(start_date) <= end_date:
            tickers = analysis.analysis_release_ticker_earnings(start_date)
            sys.stdout = log_file
            print('------------Pay attention to the following tickers on date = {date}----------------'
                  .format(date=start_date))
            print(tickers)
            for ticker in tickers:
                analysis.analysis_ticker_release_info(ticker)
            log_file.flush()
            print('\n')
            print('\n')
            sys.stdout = old_stdout
            start_date = analysis_date.convert_date_to_str(analysis_date.next_business_dates(datestr=start_date, n=1))
        print('\n')
        print('\n')
    finally:
        # restore stdout and close the log even on error
        sys.stdout = old_stdout
        log_file.close()
# Script entry point.
if __name__ == '__main__':
    main()
|
{"/DataLoading/yahoo_page.py": ["/Model/model.py", "/DataLoading/crud.py"], "/Analysis/analysis.py": ["/Analysis/sql.py", "/Analysis/analysis_date.py", "/DataLoading/yahoo_daily_data.py", "/DataLoading/yahoo_page.py"], "/DataLoading/crude_oil.py": ["/DataLoading/crud.py"], "/DataLoading/yahoo_daily_data.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/Strategy/daily_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/DataLoading/yahoo_minute_price_loading.py": ["/DataLoading/crud.py", "/Model/model.py"], "/Strategy/release_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/Analysis/sql.py": ["/Analysis/analysis_date.py"], "/DataLoading/release_earnings.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/DataLoading/sp500_wki.py": ["/DataLoading/crud.py", "/Model/model.py"], "/test/test_crud.py": ["/Model/model.py"]}
|
37,538,710
|
sunjianbo945/webcrawler
|
refs/heads/main
|
/Analysis/sql.py
|
import psycopg2
from config import config
import pandas as pd
from Analysis.analysis_date import *
def get_portfolio_daily_time_series(tickers, start_date, end_date):
    """Return a DataFrame of daily close prices, one column per ticker,
    indexed by date, between start_date and end_date inclusive.

    Returns None for an empty/None ticker list and False on a database
    error (the error is printed, not raised).
    """
    if not tickers:
        return None
    # NOTE(review): ticker names are interpolated into the IN clause; fine
    # for internal symbols but not safe for untrusted input.
    sql = "select a.date, a.close, a.ticker from stock_daily_data a where a.ticker in ('" + "','".join(tickers)\
          +"') and a.date between cast(%s as date) and cast(%s as date);"
    conn = None
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        # DataFrame.append was removed in pandas 2.0; collect and concat
        chunks = list(pd.read_sql(sql, index_col='date', params=(start_date, end_date), con=conn, chunksize=5000))
        df = pd.concat(chunks) if chunks else pd.DataFrame()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        return False
    finally:
        if conn is not None:
            conn.close()
    df = df.sort_index()
    data_group = df.groupby('ticker')
    sorted_dates = sorted(set(df.index.values))
    ticker_dict = {'date': sorted_dates}
    for t in tickers:
        ticker_dict[t] = data_group.get_group(t).sort_index().close.values
    new_df = pd.DataFrame.from_dict(ticker_dict)
    new_df = new_df.set_index('date')
    return new_df
def find_all_tickers_cap():
    """Return a DataFrame (ticker, market_cap) using each ticker's most
    recent stock_fundamental_statistics row; False on database error."""
    sql = "with temp(ticker, latest_date) as (select ticker,max(update_date) from stock_fundamental_statistics " \
          "group by ticker)select a.ticker, market_cap from stock_fundamental_statistics a, temp t where a.market_cap " \
          "is not null and a.update_date = t.latest_date and t.ticker = a.ticker"
    conn = None
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        # DataFrame.append was removed in pandas 2.0; concat the chunks
        chunks = list(pd.read_sql(sql, con=conn, chunksize=5000))
        df = pd.concat(chunks) if chunks else pd.DataFrame()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        return False
    finally:
        if conn is not None:
            conn.close()
    return df
def find_ticker_low_position():
    """Return a DataFrame indexed by ticker with each ticker's 52-week low
    and latest adjusted close; False on database error."""
    sql = "with sdd_temp(ticker, latest_date) as (select ticker,max(update_date) from stock_fundamental_statistics " \
          "group by ticker), sfs_temp(ticker, latest_date) as (select ticker, max(date) from stock_daily_data " \
          "group by ticker)select a.ticker, a.low_52_week lowest, b.adj_close as close from stock_fundamental_statistics a, " \
          "sdd_temp t , sfs_temp t1 , stock_daily_data b where a.market_cap is not null and " \
          "a.update_date = t.latest_date and t.ticker = a.ticker and b.ticker = t1.ticker and " \
          "b.date=t1.latest_date and b.ticker=a.ticker"
    conn = None
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        # DataFrame.append was removed in pandas 2.0; concat the chunks
        chunks = list(pd.read_sql(sql, index_col='ticker', con=conn, chunksize=5000))
        df = pd.concat(chunks) if chunks else pd.DataFrame()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        return False
    finally:
        if conn is not None:
            conn.close()
    return df
def find_ticker_min_data(ticker):
    """Return a DataFrame of minute close prices for *ticker*, indexed by
    datetime; False on database error."""
    # parameterized instead of interpolating the ticker into the SQL text
    sql = "select datetime,close from stock_minute_data where ticker = %(ticker)s"
    conn = None
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        # DataFrame.append was removed in pandas 2.0; concat the chunks
        chunks = list(pd.read_sql(sql, index_col='datetime', con=conn,
                                  params={'ticker': ticker}, chunksize=5000))
        df = pd.concat(chunks) if chunks else pd.DataFrame()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        return False
    finally:
        if conn is not None:
            conn.close()
    return df
def find_ticker_daily_returns(ticker, date, format="%Y-%m-%d"):
    """Return intraday minute returns of *ticker* on *date* relative to the
    previous business day's adjusted close, as a DataFrame; False on error.

    Args:
        ticker: stock symbol.
        date: date string in the given *format*.
        format: strptime format of *date* (parameter name kept for
            existing keyword callers, although it shadows the builtin).
    """
    today = convert_str_to_date(datestr=date, format=format)
    p_date = convert_date_to_str(next_business_dates(datestr=date, format=format, n=-1))
    n_date = convert_date_to_str(next_business_dates(datestr=date, format=format, n=1))
    sql = "select (a.close-b.adj_close)/(b.adj_close)*100 as daily_return " \
          "from stock_minute_data a join stock_daily_data b on a.ticker = b.ticker " \
          "where a.ticker= '{ticker}' " \
          "and a.datetime between '{today}' and '{next_date}' " \
          "and b.date = '{pre_date}'".format(ticker=ticker, today=today, next_date=n_date, pre_date=p_date)
    conn = None
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        # DataFrame.append was removed in pandas 2.0; concat the chunks
        chunks = list(pd.read_sql(sql, con=conn, chunksize=5000))
        df = pd.concat(chunks) if chunks else pd.DataFrame()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        return False
    finally:
        if conn is not None:
            conn.close()
    return df
def find_stock_earings(ticker):
    """Return all stock_earnings rows for *ticker* as a list of tuples.

    Returns None when ticker is None or when the query fails (the error
    is printed rather than raised).
    """
    if ticker is None:
        return
    # parameterized query instead of string interpolation
    sql = "select * from stock_earnings where ticker = %s"
    conn = None
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        cur.execute(sql, (ticker,))
        earnings = cur.fetchall()
        # the original closed the cursor twice; once is enough
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        return None
    finally:
        if conn is not None:
            conn.close()
    return earnings
def find_tickers_release_earnings_by_date(date):
    """Return the tickers (first column) of all stock_earnings rows whose
    release_date equals *date*; None on error or when date is None."""
    if date is None:
        return
    # parameterized query instead of string interpolation
    sql = "select * from stock_earnings where release_date = cast(%s as date)"
    conn = None
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        cur.execute(sql, (date,))
        tickers = [row[0] for row in cur.fetchall()]
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        return None
    finally:
        if conn is not None:
            conn.close()
    return tickers
def find_ticker_daily_data(ticker, datastr):
    """Return the single stock_daily_data row for *ticker* on *datastr*,
    or None when there is no row, an error occurs, or ticker is None."""
    if ticker is None:
        return
    # parameterized query instead of string interpolation
    sql = "select * from stock_daily_data where ticker = %s and date = cast(%s as date)"
    conn = None
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        cur.execute(sql, (ticker, datastr))
        stock_daily_data = cur.fetchall()
        # the original closed the cursor twice; once is enough
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        return None
    finally:
        if conn is not None:
            conn.close()
    if stock_daily_data:
        return stock_daily_data[0]
    return None
def find_liquid_ticker(ticker):
    """Return the (ticker,) row when *ticker* traded with average volume
    > 200000 and average close > 30 over the last 30 business days,
    otherwise None. Also returns None on error or when ticker is None."""
    if ticker is None:
        return
    # parameterized query instead of string interpolation
    sql = """ select ticker FROM stock_daily_data where ticker= %s and date between cast(%s as date)
    and cast(%s as date) GROUP BY TICKER having avg(volume)>200000 and avg(close)>30 """
    conn = None
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        cur.execute(sql, (ticker,
                          convert_date_to_str(next_business_dates(n=-30)),
                          convert_date_to_str(next_business_dates())))
        stock_daily_data = cur.fetchall()
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        return None
    finally:
        if conn is not None:
            conn.close()
    if stock_daily_data:
        return stock_daily_data[0]
    return None
def get_ticker_oil_inventories_relationships(ticker):
    """Return (actual, expect, open, close, high, low) rows joining daily
    prices of *ticker* with crude-oil inventory announcements on the same
    date. Database errors are printed and re-raised."""
    # parameterized query instead of string interpolation
    sql = '''SELECT ACTUAL,EXPECT,OPEN,CLOSE ,HIGH,LOW FROM STOCK_DAILY_DATA A JOIN crude_oil_inventories B
    ON A.DATE=B.DATE WHERE TICKER = %s '''
    conn = None
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        cur.execute(sql, (ticker,))
        info = cur.fetchall()
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        raise
    finally:
        if conn is not None:
            conn.close()
    return info
def get_ticker_bench_return(ticker, start, end):
    """Return open(start)-to-close(end) returns for *ticker* together with
    the '^G…' benchmark indices over the same window.

    Database errors are printed and re-raised; the connection is always
    closed.
    """
    sql = ''' SELECT a.ticker, b.close/a.open-1 FROM STOCK_DAILY_DATA a join stock_daily_data b on a.ticker=b.ticker
    WHERE a.TICKER = '{ticker}' AND a.DATE = cast('{d1}' as date) and b.date = cast('{d2}' as date)
    union SELECT a.ticker, b.close/a.open-1 FROM STOCK_DAILY_DATA a join
    stock_daily_data b on a.ticker=b.ticker WHERE a.TICKER like '%^G%' AND a.DATE = cast('{d1}' as date)
    and b.date = cast('{d2}' as date)
    '''.format(ticker=ticker,d1=start,d2=end)
    conn = None
    try:
        conn = psycopg2.connect(**config())
        cur = conn.cursor()
        cur.execute(sql)
        rows = cur.fetchall()
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        raise
    finally:
        if conn is not None:
            conn.close()
    return rows
def find_all_tickers_daily_returns(date):
    """Return (ticker, daily_return, sector, sub_sector) rows for liquid
    tickers on *date*, sorted ascending by return.

    Args:
        date: a date object; the previous business day is derived from it.

    Database errors are printed and re-raised.
    """
    # NOTE(review): the liquidity-filter window is hard-coded to
    # 2018-02-07..2018-03-07 -- confirm whether it should track *date*.
    # The two dates are bound as parameters instead of interpolated.
    sql = ''' SELECT A.TICKER, A.CLOSE/B.CLOSE-1 AS R ,c.sector,c.sub_sector
    FROM STOCK_DAILY_DATA A JOIN STOCK_DAILY_DATA B ON A.TICKER=B.TICKER
    join stock_basic_info c on a.ticker=c.ticker
    WHERE A.DATE=cast(%s as date) AND B.DATE=cast(%s as date) AND B.CLOSE !=0
    AND A.TICKER IN (SELECT TICKER FROM STOCK_DAILY_DATA WHERE DATE BETWEEN '2018-02-07' AND '2018-03-07'
    GROUP BY TICKER HAVING( AVG(VOLUME)>40000 ) AND AVG(CLOSE)>30)
    ORDER BY R'''
    conn = None
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        cur.execute(sql, (date, next_business_dates(datestr=convert_date_to_str(date), n=-1)))
        info = cur.fetchall()
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        raise
    finally:
        if conn is not None:
            conn.close()
    return info
def find_top_tickers_by_sector(sector, date, number=100, is_large=True):
    """Return up to *number* tickers of *sector* ranked by market cap as of
    snapshot date *date*.

    Args:
        sector: sector name from stock_basic_info.
        date: stock_fundamental_statistics update_date to use.
        number: maximum rows returned.
        is_large: ordering flag (behavior preserved from the original).

    Database errors are printed and re-raised.
    """
    # NOTE(review): is_large=True sorts ASC (smallest caps first), which
    # looks inverted for a "top" query -- confirm before changing.
    order = 'ASC' if is_large else 'DESC'
    # sector/date/limit are bound as parameters; the ORDER BY direction is
    # interpolated from a fixed two-value set, which is safe
    sql = '''select b.ticker,market_cap
    from stock_fundamental_statistics a join stock_basic_info b on a.ticker=b.ticker and b.sector = %s
    where a.update_date = cast(%s as date)
    order by market_cap {order} limit %s'''.format(order=order)
    conn = None
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        cur.execute(sql, (sector, date, number))
        info = cur.fetchall()
        cur.close()
        return [i[0] for i in info]
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        raise
    finally:
        if conn is not None:
            conn.close()
def find_all_sectors():
    """Return every distinct, non-empty sector name present in
    stock_basic_info.

    Database errors are printed and re-raised; the connection is always
    closed.
    """
    sql = ''' select sector,count(1) from stock_basic_info
    where sector is not null and length(sector)!=0 group by sector'''
    conn = None
    try:
        conn = psycopg2.connect(**config())
        cur = conn.cursor()
        cur.execute(sql)
        rows = cur.fetchall()
        cur.close()
        return [row[0] for row in rows]
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        raise
    finally:
        if conn is not None:
            conn.close()
# Ad-hoc manual check when the module is run as a script.
if __name__ == '__main__':
    find_ticker_daily_returns('AAPL', '2018-02-23')
|
{"/DataLoading/yahoo_page.py": ["/Model/model.py", "/DataLoading/crud.py"], "/Analysis/analysis.py": ["/Analysis/sql.py", "/Analysis/analysis_date.py", "/DataLoading/yahoo_daily_data.py", "/DataLoading/yahoo_page.py"], "/DataLoading/crude_oil.py": ["/DataLoading/crud.py"], "/DataLoading/yahoo_daily_data.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/Strategy/daily_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/DataLoading/yahoo_minute_price_loading.py": ["/DataLoading/crud.py", "/Model/model.py"], "/Strategy/release_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/Analysis/sql.py": ["/Analysis/analysis_date.py"], "/DataLoading/release_earnings.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/DataLoading/sp500_wki.py": ["/DataLoading/crud.py", "/Model/model.py"], "/test/test_crud.py": ["/Model/model.py"]}
|
37,538,711
|
sunjianbo945/webcrawler
|
refs/heads/main
|
/DataLoading/release_earnings.py
|
from selenium import webdriver
from Model.model import StockEarningData
from DataLoading.crud import merge_into_stock_earings as merge_earning
from Analysis.analysis_date import *
import logging
def load_daily_earnings_info(date):
    """Scrape Yahoo Finance's earnings calendar for *date* and merge each
    row into the stock_earnings table.

    Pages through the calendar 100 rows at a time. Returns True unless the
    very first page already failed, in which case it returns False.
    """
    row_length = 101
    offset = 0
    browser = None
    success = True
    try:
        # a full page (100 data rows + header row) means there may be more
        while row_length > 100:
            if offset == 0:
                url = "https://finance.yahoo.com/calendar/earnings?day={date}".format(date=date)
            else:
                url = "https://finance.yahoo.com/calendar/earnings?day={date}".format(date=date) + \
                      "&offset={offset}&size=100".format(offset=offset)
            if browser is None:
                browser = webdriver.Chrome()
            browser.get(url)
            table = browser.find_element_by_xpath("//div[@id='fin-cal-table']/div/div/table")
            rows = table.find_elements_by_xpath('.//tr')
            row_length = len(rows)
            for row in rows[1:]:  # skip the header row
                ticker = row.find_element_by_xpath('.//td[1]').text
                time = row.find_element_by_xpath('.//td[3]').text
                expect_eps = row.find_element_by_xpath('.//td[4]').text
                actual_eps = row.find_element_by_xpath('.//td[5]').text
                surprise = row.find_element_by_xpath('.//td[6]').text
                print("{ticker},{time},{expect_eps},{actual_eps},{surprise}".format(ticker=ticker
                                                                                    ,time=time
                                                                                    ,expect_eps=expect_eps,
                                                                                    actual_eps=actual_eps,
                                                                                    surprise=surprise))
                stock_earning = StockEarningData(ticker, date, time, expect_eps, actual_eps, surprise)
                merge_earning(stock_earning)
            offset += 100
    except Exception as e:
        print(e)
        print(date + " does not have data")
        if offset == 0:
            success = False
    finally:
        # the original called browser.quit() unconditionally, which raised
        # AttributeError when webdriver.Chrome() itself had failed
        if browser is not None:
            browser.quit()
    return success
def main():
    """Load Yahoo earnings-calendar data for each business day in the
    range, retrying each day a few times before logging a failure."""
    logger = logging.getLogger('release_earnings')
    hdlr = logging.FileHandler('release_earnings.log')
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    hdlr.setFormatter(formatter)
    logger.addHandler(hdlr)
    logger.setLevel(logging.WARNING)
    start_date = '2019-02-20'
    end_date = next_business_dates(n=225)
    while convert_str_to_date(start_date) < end_date:
        print(start_date)
        # the original logged an error whenever 3 retries were consumed,
        # even if the final attempt succeeded; track success explicitly
        # (4 attempts total, matching the original call count)
        succeeded = False
        for _attempt in range(4):
            if load_daily_earnings_info(start_date):
                succeeded = True
                break
        if succeeded:
            print(start_date + " is complete")
        else:
            logger.error('tried more than 3 time for date {d}'.format(d=start_date))
            print('tried more than 3 time for date {d}'.format(d=start_date))
        start_date = convert_date_to_str(next_business_dates(datestr=start_date, n=1))
# Script entry point.
if __name__ == '__main__':
    main()
|
{"/DataLoading/yahoo_page.py": ["/Model/model.py", "/DataLoading/crud.py"], "/Analysis/analysis.py": ["/Analysis/sql.py", "/Analysis/analysis_date.py", "/DataLoading/yahoo_daily_data.py", "/DataLoading/yahoo_page.py"], "/DataLoading/crude_oil.py": ["/DataLoading/crud.py"], "/DataLoading/yahoo_daily_data.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/Strategy/daily_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/DataLoading/yahoo_minute_price_loading.py": ["/DataLoading/crud.py", "/Model/model.py"], "/Strategy/release_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/Analysis/sql.py": ["/Analysis/analysis_date.py"], "/DataLoading/release_earnings.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/DataLoading/sp500_wki.py": ["/DataLoading/crud.py", "/Model/model.py"], "/test/test_crud.py": ["/Model/model.py"]}
|
37,538,712
|
sunjianbo945/webcrawler
|
refs/heads/main
|
/DataLoading/crud.py
|
import psycopg2
from config import config
from pprint import pprint
import datetime
def merge_into_stock_basic_info(stock):
    """Upsert a single row into the stock_basic_info table.

    Args:
        stock: object exposing ticker, security, sector and sub_sector.

    Returns:
        True on success, False when the statement failed (the error is
        printed rather than raised).
    """
    # Writable-CTE upsert: UPDATE the existing ticker row, INSERT only
    # when the UPDATE matched nothing.
    sql = """
    WITH temp (ticker,instr_name,sector,sub_sector ) as (
    values(%s,%s,%s,%s)
    ),
    upsert as
    (
    UPDATE stock_basic_info SET instr_name = temp.instr_name
    , sector= trim(temp.sector) ,
    sub_sector=trim(temp.sub_sector)
    From temp where stock_basic_info.ticker = temp.ticker
    RETURNING stock_basic_info.*
    )
    INSERT INTO stock_basic_info(ticker,instr_name,sector,sub_sector)
    SELECT ticker,instr_name,trim(sector),trim(sub_sector)
    FROM temp
    WHERE NOT EXISTS (SELECT 1 FROM upsert up WHERE up.ticker = temp.ticker );
    """
    conn = None
    try:
        # read database configuration
        params = config()
        # connect to the PostgreSQL database
        conn = psycopg2.connect(**params)
        # create a new cursor
        cur = conn.cursor()
        # execute the upsert with the stock's attributes as parameters
        cur.execute(sql, (stock.ticker, stock.security, stock.sector, stock.sub_sector))
        # commit the changes to the database
        conn.commit()
        # close communication with the database
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        return False
    finally:
        if conn is not None:
            conn.close()
    return True
def delete_from_stock_basic_info(ticker):
    """Delete the row for *ticker* from stock_basic_info.

    Returns:
        True on success, False when the statement failed (the error is
        printed rather than raised).
    """
    # parameterized to avoid SQL injection via the ticker string
    sql = """DELETE FROM stock_basic_info where ticker = %s;"""
    conn = None
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        cur.execute(sql, (ticker,))
        conn.commit()
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        return False
    finally:
        if conn is not None:
            conn.close()
    return True
def select_all_tickers():
    """Return a generator of tickers from stock_basic_info that have an
    insert_date, in ascending order.

    Database errors are printed and re-raised; the connection is always
    closed.
    """
    sql = """SELECT ticker FROM stock_basic_info where insert_date is not null order by 1"""
    conn = None
    try:
        conn = psycopg2.connect(**config())
        cur = conn.cursor()
        cur.execute(sql)
        rows = cur.fetchall()
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        raise
    finally:
        if conn is not None:
            conn.close()
    return (row[0] for row in rows)
def select_all_fundamental_tickers(date):
    """Return tickers present in stock_fundamental_statistics that have no
    statistics row for *date* yet.

    Database errors are printed and re-raised.
    """
    # parameterized instead of interpolating the date into the SQL text
    sql = """select distinct ticker from stock_fundamental_statistics where ticker not in
    (SELECT ticker FROM stock_fundamental_statistics
    where update_date = cast(%s as date))"""
    conn = None
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        cur.execute(sql, (date,))
        tickers = cur.fetchall()
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        raise
    finally:
        if conn is not None:
            conn.close()
    return [ticker[0] for ticker in tickers]
def select_all_loading_tickers(start_date, end_date):
    """Return tickers from stock_basic_info that have no stock_daily_data
    rows in the [start_date, end_date] window.

    Database errors are printed and re-raised; the connection is always
    closed.
    """
    sql = """ select a.ticker from stock_basic_info a where a.ticker not in(
    SELECT distinct b.ticker FROM stock_daily_data b where b.date between %s and %s) """
    conn = None
    try:
        conn = psycopg2.connect(**config())
        cur = conn.cursor()
        cur.execute(sql, (start_date, end_date))
        rows = cur.fetchall()
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        raise
    finally:
        if conn is not None:
            conn.close()
    return [row[0] for row in rows]
def select_all_fx_currency(start_date, end_date):
    """Return (fx_id, from_ccy, to_ccy) tuples for currency pairs that
    have no fx_daily_rate rows in the [start_date, end_date] window.

    Database errors are printed and re-raised.
    """
    # NOTE(review): the outer filter uses a.ccy_id while the select list and
    # subquery use fx_id -- looks like a column mismatch; verify against the
    # fx_basic_info schema before relying on this query.
    sql = """ select a.fx_id, a.from_ccy, a.to_ccy from fx_basic_info a where a.ccy_id not in(
    SELECT distinct b.fx_id FROM fx_daily_rate b where b.date between %s and %s) """
    conn = None
    try:
        # read database configuration
        params = config()
        # connect to the PostgreSQL database
        conn = psycopg2.connect(**params)
        # create a new cursor
        cur = conn.cursor()
        # run the parameterized query
        cur.execute(sql, (start_date, end_date))
        tickers = cur.fetchall()
        # close communication with the database
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        raise
    finally:
        if conn is not None:
            conn.close()
    return tickers
def select_all_loading_minute_tickers():
    """Return tickers with an insert_date in stock_basic_info that have no
    minute data yet for the current day (BCR excluded in the subquery).

    Database errors are printed and re-raised; the connection is always
    closed.
    """
    sql = """ select a.ticker from stock_basic_info a where a.ticker not in(
    SELECT distinct b.ticker FROM stock_minute_data b where b.datetime >= current_date and b.ticker <>'BCR')
    and a.insert_date is not null """
    conn = None
    try:
        conn = psycopg2.connect(**config())
        cur = conn.cursor()
        cur.execute(sql)
        rows = cur.fetchall()
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        raise
    finally:
        if conn is not None:
            conn.close()
    return [row[0] for row in rows]
def merge_into_stock_daily_data(stock_daily_data):
    """Upsert a single daily OHLCV row into stock_daily_data.

    Args:
        stock_daily_data: object with ticker, date, open, high, low,
            close, adj_close and volume fields; prices are rounded to two
            decimal places before storage.

    Returns:
        True on success; database errors are printed and re-raised.
    """
    # NOTE(review): the UPDATE branch does not refresh adj_close (only the
    # INSERT path sets it) -- confirm that is intentional.
    sql = """ WITH temp (ticker,date,open,high,low, close, adj_close,volume) as (
    values(%s,%s,%s,%s,%s,%s,%s,%s)
    ),
    upsert as
    (
    UPDATE stock_daily_data SET OPEN = temp.open
    , close= temp.close ,
    high=temp.high ,low = temp.low
    , volume = temp.volume From temp where stock_daily_data.ticker = temp.ticker
    and stock_daily_data.date=temp.date
    RETURNING stock_daily_data.*
    )
    INSERT INTO stock_daily_data (ticker,date,open,high,low, close, adj_close,volume)
    SELECT ticker,date,open,high,low, close, adj_close,volume
    FROM temp
    WHERE NOT EXISTS (SELECT 1 FROM upsert up WHERE up.ticker = temp.ticker and up.date=temp.date);
    """
    conn = None
    try:
        # read database configuration
        params = config()
        # connect to the PostgreSQL database
        conn = psycopg2.connect(**params)
        # create a new cursor
        cur = conn.cursor()
        # bind the row's fields, rounding prices to 2 decimals
        cur.execute(sql, (stock_daily_data.ticker, stock_daily_data.date,
                          round(stock_daily_data.open, 2), round(stock_daily_data.high, 2),
                          round(stock_daily_data.low, 2), round(stock_daily_data.close, 2),
                          round(stock_daily_data.adj_close, 2), stock_daily_data.volume))
        # commit the changes to the database
        conn.commit()
        # close communication with the database
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        raise
    finally:
        if conn is not None:
            conn.close()
    return True
def merge_many_into_stock_daily_data(stock_daily_datas):
    """Upsert many daily OHLCV rows into stock_daily_data.

    Args:
        stock_daily_datas: iterable of objects with ticker, date, open,
            high, low, close, adj_close and volume fields; prices are
            rounded to two decimals, volume to an integer.

    Returns:
        True on success; database errors are printed (with the parameter
        list) and re-raised.
    """
    sql = """ WITH temp (ticker,date,open,high,low, close, adj_close,volume) as (
    values(%s,%s,%s,%s,%s,%s,%s,%s)
    ),
    upsert as
    (
    UPDATE stock_daily_data SET OPEN = temp.open
    , close= temp.close ,
    high=temp.high ,low = temp.low
    , volume = cast(temp.volume as integer) From temp where stock_daily_data.ticker = temp.ticker
    and stock_daily_data.date=temp.date
    RETURNING stock_daily_data.*
    )
    INSERT INTO stock_daily_data (ticker,date,open,high,low, close, adj_close,volume)
    SELECT ticker,date,open,high,low, close, adj_close,cast(temp.volume as integer)
    FROM temp
    WHERE NOT EXISTS (SELECT 1 FROM upsert up WHERE up.ticker = temp.ticker and up.date=temp.date);
    """
    conn = None
    # initialized before the try so the except handler can always pprint it
    # (the original raised UnboundLocalError when connect() failed)
    param_list = []
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        param_list = [(row.ticker, row.date,
                       round(row.open, 2), round(row.high, 2),
                       round(row.low, 2), round(row.close, 2),
                       round(row.adj_close, 2), round(row.volume, 0))
                      for row in stock_daily_datas]
        cur.executemany(sql, param_list)
        conn.commit()
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        pprint(param_list)
        raise
    finally:
        if conn is not None:
            conn.close()
    return True
def merge_many_into_fx_daily_data(fx_rates):
    """Upsert many (ccy_id, date, value) FX rate tuples into fx_daily_rate.

    Args:
        fx_rates: iterable of 3-tuples; a None value is stored as NULL.

    Returns:
        True on success; database errors are printed (with the parameter
        list) and re-raised.
    """
    sql = """ WITH temp (ccy_id, date, value) as (
    values(%s,%s,%s)
    ),
    upsert as
    (
    UPDATE fx_daily_rate SET value = cast(temp.value as double precision)
    From temp where fx_daily_rate.ccy_id = cast(temp.ccy_id as integer)
    and fx_daily_rate.date=cast(temp.date as date)
    RETURNING fx_daily_rate.*
    )
    INSERT INTO fx_daily_rate
    SELECT cast(ccy_id as integer), cast(date as date), cast(value as double precision)
    FROM temp
    WHERE NOT EXISTS (SELECT 1 FROM upsert up WHERE up.ccy_id = cast(temp.ccy_id as integer)
    and up.date=cast(temp.date as date));
    """
    conn = None
    # initialized before the try so the except handler can always pprint it
    param_list = []
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        # BUG FIX: the original conditional bound to the whole tuple, so a
        # missing rate produced a bare None parameter row (crashing
        # executemany); only the value itself should become NULL.
        param_list = [(fx_rate[0], fx_rate[1],
                       round(fx_rate[2], 2) if fx_rate[2] is not None else None)
                      for fx_rate in fx_rates]
        cur.executemany(sql, param_list)
        conn.commit()
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        pprint(param_list)
        raise
    finally:
        if conn is not None:
            conn.close()
    return True
def merge_many_into_stock_min_data(stock_min_datas):
    """Upsert many minute bars into stock_minute_data.

    Args:
        stock_min_datas: iterable of objects with ticker, datetime, high,
            low, open, close and volume fields; None fields are stored as
            NULL, prices are rounded to two decimals.

    Returns:
        True on success; database errors are printed (with the parameter
        list) and re-raised.
    """
    sql = """ WITH temp (ticker,datetime,high,low,open,close,volume) as (
    values(%s,
    to_timestamp(%s,'YYYY-MM-DD hh24:mi:ss'),
    cast(%s as double precision),
    cast(%s as double precision),
    cast(%s as double precision),
    cast(%s as double precision),
    cast(%s as bigint)
    )
    ),
    upsert as
    (
    UPDATE stock_minute_data SET close= temp.close ,open=temp.open ,high= temp.high ,low= temp.low
    , volume = temp.volume From temp where stock_minute_data.ticker = temp.ticker
    and stock_minute_data.datetime=temp.datetime
    RETURNING stock_minute_data.*
    )
    INSERT INTO stock_minute_data (ticker,datetime,high,low,open,close,volume)
    SELECT ticker,datetime,high,low,open,close,volume
    FROM temp
    WHERE NOT EXISTS (SELECT 1 FROM upsert up WHERE up.ticker = temp.ticker and up.datetime=temp.datetime);
    """
    conn = None
    # initialized before the try so the except handler can always pprint it
    # (the original raised UnboundLocalError when connect() failed)
    param_list = []
    try:
        # read database configuration and connect
        params = config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        param_list = [(bar.ticker, bar.datetime,
                       round(bar.high, 2) if bar.high is not None else None,
                       round(bar.low, 2) if bar.low is not None else None,
                       round(bar.open, 2) if bar.open is not None else None,
                       round(bar.close, 2) if bar.close is not None else None,
                       round(bar.volume, 0) if bar.volume is not None else None)
                      for bar in stock_min_datas]
        cur.executemany(sql, param_list)
        conn.commit()
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        pprint(param_list)
        raise
    finally:
        if conn is not None:
            conn.close()
    return True
def merge_many_into_stock_fundamental_data(stock_fundamental_statistics):
    """Upsert one day's fundamental-statistics snapshot for a ticker
    into stock_fundamental_statistics, keyed on (ticker, update_date).

    NOTE(review): despite the ``merge_many`` name this takes a single
    aggregate object, not a collection — confirm the naming is intended.

    Args:
        stock_fundamental_statistics: aggregate exposing
            valuation_measures, stock_price_history, share_stats,
            stock_profitability, stock_income_statement,
            stock_balance_sheet, cash_flow_statement and
            stock_dividend_split sub-objects whose fields are dicts
            carrying 'raw' (numeric) and 'fmt' (formatted) entries —
            presumably as scraped from Yahoo; verify against the caller.

    Returns:
        True on success, False when given None; re-raises database
        errors after printing them and the parameter tuple.
    """
    if stock_fundamental_statistics is None:
        return False
    # NOTE(review): the UPDATE branch below does not SET gross_profit,
    # ebitda, net_income_avi_to_common, trailing_eps, forward_eps or
    # quarterly_earning_growth although the INSERT branch populates
    # them — confirm whether these should also refresh on conflict.
    sql = """WITH t (ticker,market_cap,enterprise_value,trailing_pe,forward_pe,peg_ratio_5y,price_to_sale,
    price_to_book,enterprise_revenue,enterprise_ebitda,beta,high_52_week,low_52_week,avg_volume_3m,avg_volume_10d,
    share_outstanding,hold_insiders,hold_inst,shares_short,short_ratio,shares_short_prev_m,profit_margin,operating_margin,
    return_on_asset,return_on_equity,revenue,revenue_per_share,quarterly_revenue_growth,gross_profit,ebitda,
    net_income_avi_to_common,trailing_eps,forward_eps,quarterly_earning_growth,total_cash,total_cash_per_share,
    total_debt,total_debt_per_equity,current_ratio,book_value_per_share,operating_cash_flow,levered_free_cash_flow,
    forward_dividend_yield,forward_dividend_rate,trailing_dividend_yield,trailing_dividend_rate,avg_dividend_yield_5y,
    payout_ratio,dividend_date,ex_dividend_date,update_date) as (values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,
    %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)),
    upsert as
    (
        UPDATE stock_fundamental_statistics
        SET market_cap= cast(t.market_cap as bigint),
        enterprise_value=cast(t.enterprise_value as bigint),
        trailing_pe=cast(t.trailing_pe as double precision),
        forward_pe=cast(t.forward_pe as double precision),
        peg_ratio_5y=cast(t.peg_ratio_5y as double precision),
        price_to_sale=cast(t.price_to_sale as double precision),
        price_to_book=cast(t.price_to_book as double precision),
        enterprise_revenue=cast(t.enterprise_revenue as double precision),
        enterprise_ebitda=cast(t.enterprise_ebitda as double precision),
        beta=cast(t.beta as double precision),
        high_52_week=cast(t.high_52_week as double precision),
        low_52_week=cast(t.low_52_week as double precision),
        avg_volume_3m=cast(t.avg_volume_3m as bigint),
        avg_volume_10d=cast(t.avg_volume_10d as bigint),
        share_outstanding=cast(t.share_outstanding as bigint),
        hold_insiders=cast(t.hold_insiders as bigint),
        hold_inst=cast(t.hold_inst as bigint),
        shares_short=cast(t.shares_short as bigint),
        short_ratio=cast(t.short_ratio as double precision),
        shares_short_prev_m=cast(t.shares_short_prev_m as bigint),
        profit_margin=cast(t.profit_margin as double precision),
        operating_margin=cast(t.operating_margin as double precision),
        return_on_asset=cast(t.return_on_asset as double precision),
        return_on_equity=cast(t.return_on_equity as double precision),
        revenue=cast(t.revenue as bigint),
        revenue_per_share=cast(t.revenue_per_share as double precision),
        quarterly_revenue_growth=cast(t.quarterly_revenue_growth as double precision),
        total_cash=cast(t.total_cash as bigint),
        total_cash_per_share=cast(t.total_cash_per_share as double precision),
        total_debt=cast(t.total_debt as bigint),
        total_debt_per_equity=cast(t.total_debt_per_equity as double precision),
        current_ratio=cast(t.current_ratio as double precision),
        book_value_per_share=cast(t.book_value_per_share as double precision),
        operating_cash_flow=cast(t.operating_cash_flow as bigint),
        levered_free_cash_flow=cast(t.levered_free_cash_flow as bigint),
        forward_dividend_yield=cast(t.forward_dividend_yield as double precision),
        forward_dividend_rate=cast(t.forward_dividend_rate as double precision),
        trailing_dividend_yield=cast(t.trailing_dividend_yield as double precision),
        trailing_dividend_rate=cast(t.trailing_dividend_rate as double precision),
        avg_dividend_yield_5y=cast(t.avg_dividend_yield_5y as double precision),
        payout_ratio=cast(t.payout_ratio as double precision),
        dividend_date=cast(t.dividend_date as date),
        ex_dividend_date=cast(t.ex_dividend_date as date)
        From t where stock_fundamental_statistics.ticker = t.ticker
        and stock_fundamental_statistics.update_date = cast(t.update_date as date)
        RETURNING stock_fundamental_statistics.*
    )INSERT INTO stock_fundamental_statistics(ticker,market_cap,enterprise_value,trailing_pe,forward_pe,
    peg_ratio_5y,price_to_sale,price_to_book,enterprise_revenue,enterprise_ebitda,beta,high_52_week,low_52_week,avg_volume_3m,avg_volume_10d,
    share_outstanding,hold_insiders,hold_inst,shares_short,short_ratio,shares_short_prev_m,profit_margin,operating_margin,
    return_on_asset,return_on_equity,revenue,revenue_per_share,quarterly_revenue_growth,gross_profit,ebitda,
    net_income_avi_to_common,trailing_eps,forward_eps,quarterly_earning_growth,total_cash,total_cash_per_share,
    total_debt,total_debt_per_equity,current_ratio,book_value_per_share,operating_cash_flow,levered_free_cash_flow,
    forward_dividend_yield,forward_dividend_rate,trailing_dividend_yield,trailing_dividend_rate,avg_dividend_yield_5y,
    payout_ratio,dividend_date,ex_dividend_date,update_date
    )
    SELECT ticker,
        cast(market_cap as bigint),
        cast(enterprise_value as bigint),
        cast(trailing_pe as double precision),
        cast(forward_pe as double precision),
        cast(peg_ratio_5y as double precision),
        cast(price_to_sale as double precision),
        cast(price_to_book as double precision),
        cast(enterprise_revenue as double precision),
        cast(enterprise_ebitda as double precision),
        cast(beta as double precision),
        cast(high_52_week as double precision),
        cast(low_52_week as double precision),
        cast(avg_volume_3m as bigint),
        cast(avg_volume_10d as bigint),
        cast(share_outstanding as bigint),
        cast(hold_insiders as bigint),
        cast(hold_inst as bigint),
        cast(shares_short as bigint),
        cast(short_ratio as double precision),
        cast(shares_short_prev_m as bigint),
        cast(profit_margin as double precision),
        cast(operating_margin as double precision),
        cast(return_on_asset as double precision),
        cast(return_on_equity as double precision),
        cast(revenue as bigint),
        cast(revenue_per_share as double precision),
        cast(quarterly_revenue_growth as double precision),
        cast(gross_profit as bigint),
        cast(ebitda as bigint),
        cast(net_income_avi_to_common as bigint),
        cast(trailing_eps as double precision),
        cast(forward_eps as double precision),
        cast(quarterly_earning_growth as double precision),
        cast(total_cash as bigint),
        cast(total_cash_per_share as double precision),
        cast(total_debt as bigint),
        cast(total_debt_per_equity as double precision),
        cast(current_ratio as double precision),
        cast(book_value_per_share as double precision),
        cast(operating_cash_flow as bigint),
        cast(levered_free_cash_flow as bigint),
        cast(forward_dividend_yield as double precision),
        cast(forward_dividend_rate as double precision),
        cast(trailing_dividend_yield as double precision),
        cast(trailing_dividend_rate as double precision),
        cast(avg_dividend_yield_5y as double precision),
        cast(payout_ratio as double precision),
        cast(dividend_date as date),
        cast(ex_dividend_date as date),
        cast(update_date as date)
    FROM t WHERE NOT EXISTS (SELECT 1 FROM upsert up WHERE up.ticker = t.ticker and up.update_date=cast(t.update_date as date) );"""
    def get_data(json, field):
        # Safely pull one field out of a scraped dict; None or an empty
        # dict counts as "no data" and yields NULL in the database.
        if json is None or len(json) == 0:
            return None
        return json.get(field)
    conn = None
    try:
        # read database configuration
        params = config()
        # connect to the PostgreSQL database
        conn = psycopg2.connect(**params)
        # create a new cursor
        cur = conn.cursor()
        # execute the INSERT statement
        # Bind values in the same order as the CTE column list above:
        # numbers use the 'raw' entry, dates the 'fmt' entry, and
        # update_date stamps the snapshot with today's date.
        param = (stock_fundamental_statistics.valuation_measures.ticker,
                 get_data(stock_fundamental_statistics.valuation_measures.market_cap, 'raw'),
                 get_data(stock_fundamental_statistics.valuation_measures.enterprise_value, 'raw'),
                 get_data(stock_fundamental_statistics.valuation_measures.trailing_pe, 'raw'),
                 get_data(stock_fundamental_statistics.valuation_measures.forward_pe, 'raw'),
                 get_data(stock_fundamental_statistics.valuation_measures.peg_ratio_5y, 'raw'),
                 get_data(stock_fundamental_statistics.valuation_measures.price_to_sale, 'raw'),
                 get_data(stock_fundamental_statistics.valuation_measures.price_to_book, 'raw'),
                 get_data(stock_fundamental_statistics.valuation_measures.enterprise_revenue, 'raw'),
                 get_data(stock_fundamental_statistics.valuation_measures.enterprise_ebitda, 'raw'),
                 get_data(stock_fundamental_statistics.stock_price_history.beta, 'raw'),
                 get_data(stock_fundamental_statistics.stock_price_history.high_52_week, 'raw'),
                 get_data(stock_fundamental_statistics.stock_price_history.low_52_week, 'raw'),
                 get_data(stock_fundamental_statistics.share_stats.avg_volume_3m, 'raw'),
                 get_data(stock_fundamental_statistics.share_stats.avg_volume_10d, 'raw'),
                 get_data(stock_fundamental_statistics.share_stats.share_outstanding, 'raw'),
                 get_data(stock_fundamental_statistics.share_stats.hold_insiders, 'raw'),
                 get_data(stock_fundamental_statistics.share_stats.hold_inst, 'raw'),
                 get_data(stock_fundamental_statistics.share_stats.shares_short, 'raw'),
                 get_data(stock_fundamental_statistics.share_stats.short_ratio, 'raw'),
                 get_data(stock_fundamental_statistics.share_stats.shares_short_prev_m, 'raw'),
                 get_data(stock_fundamental_statistics.stock_profitability.profit_margin, 'raw'),
                 get_data(stock_fundamental_statistics.stock_profitability.operating_margin, 'raw'),
                 get_data(stock_fundamental_statistics.stock_profitability.ret_asset, 'raw'),
                 get_data(stock_fundamental_statistics.stock_profitability.ret_equity, 'raw'),
                 get_data(stock_fundamental_statistics.stock_income_statement.revenue, 'raw'),
                 get_data(stock_fundamental_statistics.stock_income_statement.revenue_per_share, 'raw'),
                 get_data(stock_fundamental_statistics.stock_income_statement.quarterly_revenue_growth, 'raw'),
                 get_data(stock_fundamental_statistics.stock_income_statement.gross_profit, 'raw'),
                 get_data(stock_fundamental_statistics.stock_income_statement.ebitda, 'raw'),
                 get_data(stock_fundamental_statistics.stock_income_statement.net_income_avi_to_common, 'raw'),
                 get_data(stock_fundamental_statistics.stock_income_statement.trailing_eps, 'raw'),
                 get_data(stock_fundamental_statistics.stock_income_statement.forward_eps, 'raw'),
                 get_data(stock_fundamental_statistics.stock_income_statement.quarterly_earning_growth, 'raw'),
                 get_data(stock_fundamental_statistics.stock_balance_sheet.total_cash, 'raw'),
                 get_data(stock_fundamental_statistics.stock_balance_sheet.total_cash_per_share, 'raw'),
                 get_data(stock_fundamental_statistics.stock_balance_sheet.total_debt, 'raw'),
                 get_data(stock_fundamental_statistics.stock_balance_sheet.total_debt_per_equity, 'raw'),
                 get_data(stock_fundamental_statistics.stock_balance_sheet.current_ratio, 'raw'),
                 get_data(stock_fundamental_statistics.stock_balance_sheet.book_value_per_share, 'raw'),
                 get_data(stock_fundamental_statistics.cash_flow_statement.operating_cash_flow, 'raw'),
                 get_data(stock_fundamental_statistics.cash_flow_statement.levered_free_cash_flow, 'raw'),
                 get_data(stock_fundamental_statistics.stock_dividend_split.forward_dividend_yield, 'raw'),
                 get_data(stock_fundamental_statistics.stock_dividend_split.forward_dividend_rate, 'raw'),
                 get_data(stock_fundamental_statistics.stock_dividend_split.trailing_dividend_yield, 'raw'),
                 get_data(stock_fundamental_statistics.stock_dividend_split.trailing_dividend_rate, 'raw'),
                 get_data(stock_fundamental_statistics.stock_dividend_split.avg_dividend_yield_5y, 'raw'),
                 get_data(stock_fundamental_statistics.stock_dividend_split.payout_ratio, 'raw'),
                 get_data(stock_fundamental_statistics.stock_dividend_split.dividend_date, 'fmt'),
                 get_data(stock_fundamental_statistics.stock_dividend_split.ex_dividend_date, 'fmt'),
                 datetime.datetime.today().strftime('%Y-%m-%d')
                 )
        cur.execute(sql, param)
        # commit the changes to the database
        conn.commit()
        # close communication with the database
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        pprint(param)
        raise
    finally:
        if conn is not None:
            conn.close()
    return True
def merge_into_stock_earings(stock_earing):
    """Upsert a single earnings record keyed on (ticker, release_date).

    Args:
        stock_earing: object exposing ticker, release_date, time,
            expect_eps, actual_eps and surprise attributes.

    Returns:
        True on success, False when the database raises, None when
        given no record.
    """
    if stock_earing is None:
        return
    # NOTE(review): the self-referential predicate
    # "stock_earnings.release_date=cast(stock_earnings.release_date as date)"
    # is always true — confirm it was meant to compare against temp.
    sql = """ WITH temp (ticker,release_date,time,expect_eps,actual_eps,surprise) as (
            values(%s,%s,%s,%s,%s,%s)
            ),
        upsert as
        (
            UPDATE stock_earnings SET expect_eps = cast( temp.expect_eps as double precision)
            , actual_eps= cast(temp.actual_eps as double precision) ,
            surprise= cast(temp.surprise as double precision)
            From temp where stock_earnings.ticker = temp.ticker
            and stock_earnings.release_date = cast(temp.release_date as date)
            and stock_earnings.release_date=cast(stock_earnings.release_date as date)
            RETURNING stock_earnings.*
        )
    INSERT INTO stock_earnings (ticker,release_date,time,expect_eps,actual_eps,surprise)
    SELECT ticker, cast(release_date as date),time,
        cast(expect_eps as double precision),
        cast(actual_eps as double precision),
        cast(surprise as double precision)
    FROM temp
    WHERE NOT EXISTS (SELECT 1 FROM upsert up WHERE up.ticker = temp.ticker
    and up.release_date=cast(temp.release_date as date));
    """
    bind_values = (stock_earing.ticker, stock_earing.release_date,
                   stock_earing.time, stock_earing.expect_eps,
                   stock_earing.actual_eps, stock_earing.surprise)
    connection = None
    try:
        # Open a connection with the configured parameters and run the upsert.
        connection = psycopg2.connect(**config())
        cursor = connection.cursor()
        cursor.execute(sql, bind_values)
        connection.commit()
        cursor.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        return False
    finally:
        # Always release the connection, success or failure.
        if connection is not None:
            connection.close()
    return True
def merge_into_crude_oil_inventories(date, actual, expect):
    """Upsert one crude-oil inventories reading keyed on its date.

    Args:
        date: report date (castable to SQL date).
        actual: observed inventory change.
        expect: forecast inventory change.

    Returns:
        True on success, False when the database raises, None when any
        argument is missing.
    """
    if date is None or actual is None or expect is None:
        return
    sql = """ WITH temp (date, actual, expect) as (
            values(%s,%s,%s)
            ),
        upsert as
        (
            UPDATE crude_oil_inventories SET actual = cast( temp.actual as double precision)
            , expect= cast(temp.expect as double precision)
            From temp where crude_oil_inventories.date = cast(temp.date as date)
            RETURNING crude_oil_inventories.*
        )
    INSERT INTO crude_oil_inventories (date, actual, expect)
    SELECT cast(date as date),
        cast(actual as double precision),
        cast(expect as double precision)
    FROM temp
    WHERE NOT EXISTS (SELECT 1 FROM upsert up WHERE up.date=cast(temp.date as date));
    """
    connection = None
    try:
        # Connect with the configured parameters and run the upsert.
        connection = psycopg2.connect(**config())
        cursor = connection.cursor()
        cursor.execute(sql, (date, actual, expect))
        connection.commit()
        cursor.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        return False
    finally:
        # Always release the connection, success or failure.
        if connection is not None:
            connection.close()
    return True
|
{"/DataLoading/yahoo_page.py": ["/Model/model.py", "/DataLoading/crud.py"], "/Analysis/analysis.py": ["/Analysis/sql.py", "/Analysis/analysis_date.py", "/DataLoading/yahoo_daily_data.py", "/DataLoading/yahoo_page.py"], "/DataLoading/crude_oil.py": ["/DataLoading/crud.py"], "/DataLoading/yahoo_daily_data.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/Strategy/daily_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/DataLoading/yahoo_minute_price_loading.py": ["/DataLoading/crud.py", "/Model/model.py"], "/Strategy/release_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/Analysis/sql.py": ["/Analysis/analysis_date.py"], "/DataLoading/release_earnings.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/DataLoading/sp500_wki.py": ["/DataLoading/crud.py", "/Model/model.py"], "/test/test_crud.py": ["/Model/model.py"]}
|
37,538,713
|
sunjianbo945/webcrawler
|
refs/heads/main
|
/DataLoading/sp500_wki.py
|
"""
Scrape a table from wikipedia using python. Allows for cells spanning multiple rows and/or columns. Outputs csv files for
each table
"""
from bs4 import BeautifulSoup
from urllib.request import urlopen
from DataLoading.crud import merge_into_stock_basic_info as insert
from Model.model import StockBasicInfo
def scrap_sp500():
    """Scrape the S&P 500 constituents table from Wikipedia and upsert
    each company into stock_basic_info.

    Cells spanning multiple rows/columns are handled by replicating
    their text into every grid position they cover.
    """
    wiki = "https://en.wikipedia.org/wiki/List_of_S%26P_500_companies"
    page = urlopen(wiki)
    # Explicit parser avoids the bs4 "no parser specified" warning and
    # keeps results consistent across environments.
    soup = BeautifulSoup(page, "html.parser")
    tables = soup.findAll("table", {"class": "wikitable"})
    # The constituents list is the first wikitable on the page.
    table = tables[0]
    rows = table.findAll("tr")
    nrows = len(rows)
    ncols = max(len(r.findAll(['th', 'td'])) for r in rows)
    # Pre-build an nrows x ncols grid of empty strings so that row/col
    # spans can write into cells of later rows.
    data = [['' for _ in range(ncols)] for _ in range(nrows)]
    # Fill the grid; the stray trailing append of an empty row that the
    # original loop performed has been removed.
    for i, row in enumerate(rows):
        cells = row.findAll(["td", "th"])
        for j, cell in enumerate(cells):
            # lots of cells span cols and rows so lets deal with that
            cspan = int(cell.get('colspan', 1))
            rspan = int(cell.get('rowspan', 1))
            # Strip per-cell text: wiki cells carry trailing newlines
            # that would otherwise end up in the stored ticker/name.
            text = cell.text.strip()
            for k in range(rspan):
                for l in range(cspan):
                    data[i + k][j + l] += text
    # Row 0 is the header; remaining rows are one company each.
    # NOTE(review): column positions (ticker=1, security=0, ...) assume a
    # specific Wikipedia table layout — verify against the live page.
    for i in range(1, nrows):
        stock = StockBasicInfo(ticker=data[i][1], security=data[i][0], sector=data[i][3], sub_sector=data[i][4])
        insert(stock)
if __name__ == '__main__':
    scrap_sp500()
|
{"/DataLoading/yahoo_page.py": ["/Model/model.py", "/DataLoading/crud.py"], "/Analysis/analysis.py": ["/Analysis/sql.py", "/Analysis/analysis_date.py", "/DataLoading/yahoo_daily_data.py", "/DataLoading/yahoo_page.py"], "/DataLoading/crude_oil.py": ["/DataLoading/crud.py"], "/DataLoading/yahoo_daily_data.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/Strategy/daily_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/DataLoading/yahoo_minute_price_loading.py": ["/DataLoading/crud.py", "/Model/model.py"], "/Strategy/release_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/Analysis/sql.py": ["/Analysis/analysis_date.py"], "/DataLoading/release_earnings.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/DataLoading/sp500_wki.py": ["/DataLoading/crud.py", "/Model/model.py"], "/test/test_crud.py": ["/Model/model.py"]}
|
37,538,714
|
sunjianbo945/webcrawler
|
refs/heads/main
|
/test/test_crud.py
|
import unittest
import config
import psycopg2
from DataLoading import crud
import Model.model as sbi
class TestCRUD(unittest.TestCase):
    """Integration test for the stock_basic_info CRUD helpers.

    Requires a reachable PostgreSQL instance configured via config().
    """
    def test_insert(self):
        """Delete any existing 'MMM' row, re-insert it, and assert success."""
        conn = None
        try:
            # read connection parameters
            params = config.config()
            # connect to the PostgreSQL server
            print('Connecting to the PostgreSQL database...')
            conn = psycopg2.connect(**params)
            # create a cursor
            cur = conn.cursor()
            # Parameterized query instead of str.format — avoids SQL
            # injection patterns even in test code.
            cur.execute("SELECT 1 from stock_basic_info where ticker = %s", ('MMM',))
            row = cur.fetchone()
            if row is not None:
                print(row)
                crud.delete_from_stock_basic_info('MMM')
            stock = sbi.StockBasicInfo('MMM', '3M Company', 'Industrials', 'Industrial Conglomerates')
            ret = crud.insert_into_stock_basic_info(stock)
            self.assertTrue(ret)
            # close the communication with the PostgreSQL
            cur.close()
        except (Exception, psycopg2.DatabaseError) as error:
            # Fail loudly instead of swallowing the error, which made
            # the test pass vacuously when the database was unreachable.
            self.fail(error)
        finally:
            if conn is not None:
                conn.close()
                print('Database connection closed.')
if __name__ == '__main__':
    unittest.main()
|
{"/DataLoading/yahoo_page.py": ["/Model/model.py", "/DataLoading/crud.py"], "/Analysis/analysis.py": ["/Analysis/sql.py", "/Analysis/analysis_date.py", "/DataLoading/yahoo_daily_data.py", "/DataLoading/yahoo_page.py"], "/DataLoading/crude_oil.py": ["/DataLoading/crud.py"], "/DataLoading/yahoo_daily_data.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/Strategy/daily_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/DataLoading/yahoo_minute_price_loading.py": ["/DataLoading/crud.py", "/Model/model.py"], "/Strategy/release_strategy.py": ["/Analysis/analysis.py", "/Analysis/analysis_date.py"], "/Analysis/sql.py": ["/Analysis/analysis_date.py"], "/DataLoading/release_earnings.py": ["/Model/model.py", "/DataLoading/crud.py", "/Analysis/analysis_date.py"], "/DataLoading/sp500_wki.py": ["/DataLoading/crud.py", "/Model/model.py"], "/test/test_crud.py": ["/Model/model.py"]}
|
37,577,414
|
pulkitmadaanit/medicalwebsite-team_work
|
refs/heads/master
|
/scienco_india_solutions/urls.py
|
from django.contrib import admin
from django.urls import path , include
# from scienco_india_solutions import views
# Setting and static file
from django.conf import settings
from django.conf.urls.static import static
# Project-level URL routes; media files are served from MEDIA_ROOT
# (development-style static serving appended to the patterns).
urlpatterns = [
    path('admin/', admin.site.urls),  # Django admin site
    path('', include('home_page.urls')),  # landing pages for both sub-sites
    path('site1_printing/', include('site_1_printing_advertisement.urls')),  # printing/advertisement sub-site
    path('site2_instruments/', include('site_2_process_instrument.urls')),  # process-instrument sub-site
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
{"/site_2_process_instrument/migrations/0003_alter_homeimageslider_image.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0006_auto_20210510_0746.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0001_initial.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/admin.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/views.py": ["/site_2_process_instrument/models.py"], "/home_page/views.py": ["/site_2_process_instrument/models.py"]}
|
37,577,415
|
pulkitmadaanit/medicalwebsite-team_work
|
refs/heads/master
|
/site_2_process_instrument/views.py
|
from django.shortcuts import render,HttpResponse, redirect
from django.views import View
from site_2_process_instrument.forms import ContactForm
from site_2_process_instrument.models import AboutUs, ContactModel, Contact_display,Blog
from django.conf import settings
from django.core.mail import send_mail
from django.core.mail import EmailMultiAlternatives
#imported by sunil
from .utils import Util
from django.contrib.sites.shortcuts import get_current_site
# Create your views here.
def home(request):
    """Render the site-2 home page."""
    template = "home/index.html"
    return render(request, template)
def about(request):
    """Render the about page with the about-us text and all blog entries."""
    return render(
        request,
        "site2_instrument/project/about.html",
        {"text": AboutUs.objects.all(), "data": Blog.objects.all()},
    )
def contact(request):
    """Handle the contact form: persist valid submissions, e-mail a
    confirmation to the sender, then render the contact page with the
    display details (location/email/phone)."""
    if request.method == "POST":
        form = ContactForm(request.POST)
        if form.is_valid():
            # if ContactModel.objects.filter(email).exists():
            form.save()  # persist the submission via the model-backed form
            name = form.cleaned_data.get('name')
            email = form.cleaned_data.get('email')
            subject = "Scienco Medical "
            message = f'Hi {name}, your response has been submitted .'
            email_from = settings.EMAIL_HOST_USER
            recipient_list = [email]
            # Confirmation mail to the submitter; uses the configured host user.
            send_mail( subject, message, email_from, recipient_list )
    # NOTE(review): invalid POSTs fall through without surfacing form
    # errors, and a successful POST re-renders instead of redirecting
    # (no POST-redirect-GET) — confirm this is intended.
    return render(request,"site2_instrument/project/contact.html",{"Contact_display":Contact_display.objects.all()[:1]})
def mega(request):
    """Render the mega-dropdown navigation partial."""
    template = "site2_instrument/megadropdown.html"
    return render(request, template)
def product_category(request):
    """Render the product-category listing page."""
    template = "site2_instrument/project/product_category.html"
    return render(request, template)
|
{"/site_2_process_instrument/migrations/0003_alter_homeimageslider_image.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0006_auto_20210510_0746.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0001_initial.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/admin.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/views.py": ["/site_2_process_instrument/models.py"], "/home_page/views.py": ["/site_2_process_instrument/models.py"]}
|
37,577,416
|
pulkitmadaanit/medicalwebsite-team_work
|
refs/heads/master
|
/site_2_process_instrument/admin.py
|
from django.contrib import admin
from .models import *
# Register every site-2 model with the default admin site; the tuple
# preserves the original registration order.
for _model in (AboutUs, ContactModel, Contact_display, HomePageData,
               HomeImageSlider, InstrumentsParametersWise, Blog,
               ProductDetail, SubCategory, ProductCategory):
    admin.site.register(_model)
|
{"/site_2_process_instrument/migrations/0003_alter_homeimageslider_image.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0006_auto_20210510_0746.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0001_initial.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/admin.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/views.py": ["/site_2_process_instrument/models.py"], "/home_page/views.py": ["/site_2_process_instrument/models.py"]}
|
37,577,417
|
pulkitmadaanit/medicalwebsite-team_work
|
refs/heads/master
|
/home_page/views.py
|
from django.shortcuts import render
from django.views import View
from django.views.generic import TemplateView, ListView
from site_2_process_instrument.models import HomeImageSlider, HomePageData,InstrumentsParametersWise
# class HomeView(TemplateView):
# """
# This is HOme view, This is page have both link
# """
# template_name = "scienco_home/index.html"
# class SiteOneHome(TemplateView):
# """
# Site One view for the Print Informations
# """
# template_name = "site1_printing/project/index.html"
# class SiteTwoHome(ListView):
# """
# This is SIte 2 Home page and This page is for showing instruments.
# """
# context_object_name = "pk"
# template_name = "site2_instruments/project/index.html"
# # queryset = HomeImageSlider.objects.all()
# # product = Product.objects.all()
# def get_context_data(self, **kwargs):
# context = super(SiteTwoHome, self).get_context_data(**kwargs)
# # context["images"] = self.queryset
# # context['product'] = self.product
# return context
def Home(request):
    """Render the project-wide landing page linking both sub-sites."""
    template = "scienco_home/index.html"
    return render(request, template)
def SiteOneHome(request):
    """Render the site-1 (printing/advertisement) home page."""
    template = "site1_printing/project/index.html"
    return render(request, template)
def SiteTwoHome(request):
    """Render the site-2 (instruments) home page with the header stats,
    slider images and a six-item instrument preview."""
    context = {
        "data": HomePageData.objects.all()[:1],
        "image_data": HomeImageSlider.objects.all(),
        "instrument": InstrumentsParametersWise.objects.all()[:6],
    }
    # Removed a dead duplicate `data = HomePageData.objects.all()[:1]`
    # local that was never used.
    return render(request, "site2_instrument/project/index.html", context)
|
{"/site_2_process_instrument/migrations/0003_alter_homeimageslider_image.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0006_auto_20210510_0746.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0001_initial.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/admin.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/views.py": ["/site_2_process_instrument/models.py"], "/home_page/views.py": ["/site_2_process_instrument/models.py"]}
|
37,577,418
|
pulkitmadaanit/medicalwebsite-team_work
|
refs/heads/master
|
/site_2_process_instrument/migrations/0001_initial.py
|
# Generated by Django 3.2 on 2021-05-12 07:52
import django.core.files.storage
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import site_2_process_instrument.models
# Auto-generated initial schema for the site_2_process_instrument app.
# NOTE(review): the FileSystemStorage instances below embed absolute
# machine-specific paths (/home/pulkit/...), which will break on any
# other machine or in production — confirm whether these should come
# from settings instead.
class Migration(migrations.Migration):
    # First migration for this app; no dependencies.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='AboutUs',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.TextField(help_text='enter description of the about us page.')),
            ],
        ),
        migrations.CreateModel(
            name='Blog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(help_text='Enter your <b>name</b> which will be display on blog', max_length=50)),
                ('Qualification', models.CharField(help_text='Enter your <b>Qualification</b> which will be display on blog', max_length=50)),
                ('description', models.CharField(help_text='Enter your <b>description</b> which will be display on blog', max_length=300)),
                ('image', models.FileField(default=None, storage=django.core.files.storage.FileSystemStorage(base_url='/media/my_sell/', location='/home/pulkit/practice/scienco-medical-instrument/media/my_sell/'), upload_to=site_2_process_instrument.models.image_directory_path)),
            ],
        ),
        migrations.CreateModel(
            name='Contact_display',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('display_location', models.CharField(help_text='Enter your <b>LOCATION</b> which will be display on contact form', max_length=100)),
                ('display_email', models.EmailField(help_text='Enter your <b>EMAIL</b> which will be display on contact form', max_length=254)),
                ('display_call', models.CharField(help_text='Enter your <b>PHONE_NUMBER</b> which will be display on contact form', max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='ContactModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('email', models.EmailField(max_length=50)),
                ('subject', models.CharField(max_length=50)),
                ('message', models.TextField(max_length=500)),
            ],
        ),
        migrations.CreateModel(
            name='HomeImageSlider',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image', models.FileField(help_text='Enter image you want to display on the home page slider ', storage=django.core.files.storage.FileSystemStorage(base_url='/media/my_sell/', location='/home/pulkit/practice/scienco-medical-instrument/media/my_sell/'), upload_to=site_2_process_instrument.models.image_directory_path)),
                ('image_description', models.CharField(help_text='Enter image description', max_length=250)),
                ('image_heading', models.CharField(help_text='Enter image Heading', max_length=250)),
            ],
        ),
        migrations.CreateModel(
            name='HomePageData',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('TechnicalStaff', models.IntegerField()),
                ('YearsOfExperience', models.IntegerField(validators=[django.core.validators.MaxValueValidator(99)])),
                ('NumberOfSatisfiedClient', models.IntegerField()),
                ('StatesCoveredInIndia', models.IntegerField(validators=[django.core.validators.MaxValueValidator(29)])),
            ],
        ),
        migrations.CreateModel(
            name='InstrumentsParametersWise',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(help_text='Enter <b>Title of "InstrumentsParametersWise"</b> which is displyed on home page ', max_length=100)),
                ('description', models.CharField(help_text='Enter <b>Description of "Title of InstrumentsParametersWise"</b> which is displyed on home page ', max_length=500)),
                ('bootstrap_icons', models.CharField(blank=True, help_text='<h4>do not play with this, this part is related to coding </h4>', max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='ProductCategory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(help_text='Enter your <b>ProductTableName</b> which will be display on ProdcutPage', max_length=50)),
            ],
        ),
        # SubCategory -> ProductCategory and ProductDetail -> SubCategory
        # form the category hierarchy; both cascade on delete.
        migrations.CreateModel(
            name='SubCategory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sub_title', models.CharField(help_text='Enter your <b>sub_title</b> which will be display on PRODUCT section', max_length=50)),
                ('product_category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='site_2_process_instrument.productcategory')),
            ],
        ),
        migrations.CreateModel(
            name='ProductDetail',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.CharField(help_text='Enter your <b>description</b> which will be display on PRODUCT section', max_length=100)),
                ('price', models.FloatField(help_text='Enter your <b>price</b> which will be display on PRODUCT section')),
                ('sub_category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='site_2_process_instrument.subcategory')),
            ],
        ),
    ]
|
{"/site_2_process_instrument/migrations/0003_alter_homeimageslider_image.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0006_auto_20210510_0746.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0001_initial.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/admin.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/views.py": ["/site_2_process_instrument/models.py"], "/home_page/views.py": ["/site_2_process_instrument/models.py"]}
|
37,577,419
|
pulkitmadaanit/medicalwebsite-team_work
|
refs/heads/master
|
/site_1_printing_advertisement/urls.py
|
from django.urls import path
# from site_1_printing_advertisement import views
# This app currently exposes no routes; all URL patterns are commented out.
urlpatterns = [
    # path('hello/', views.home,name="home"),
    # path('about/', views.about,name="about"),
    # path('contact/', views.contact, name="contact"),
]
|
{"/site_2_process_instrument/migrations/0003_alter_homeimageslider_image.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0006_auto_20210510_0746.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0001_initial.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/admin.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/views.py": ["/site_2_process_instrument/models.py"], "/home_page/views.py": ["/site_2_process_instrument/models.py"]}
|
37,577,420
|
pulkitmadaanit/medicalwebsite-team_work
|
refs/heads/master
|
/site_1_printing_advertisement/apps.py
|
from django.apps import AppConfig
class PrintingAdvertisementSolutionsConfig(AppConfig):
    """Django app configuration for the site_1_printing_advertisement app."""
    name = 'site_1_printing_advertisement'
|
{"/site_2_process_instrument/migrations/0003_alter_homeimageslider_image.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0006_auto_20210510_0746.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0001_initial.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/admin.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/views.py": ["/site_2_process_instrument/models.py"], "/home_page/views.py": ["/site_2_process_instrument/models.py"]}
|
37,577,421
|
pulkitmadaanit/medicalwebsite-team_work
|
refs/heads/master
|
/site_2_process_instrument/models.py
|
from django.db import models
from django.core.files.storage import FileSystemStorage
from django.conf import settings
from django.core.validators import MaxValueValidator
# from django.utils.translation import
# Dedicated storage for uploads: files live under MEDIA_ROOT/my_sell/ and are
# served from MEDIA_URL + 'my_sell/'.
image_storage = FileSystemStorage(
    # Physical file location ROOT
    location=u'{0}/my_sell/'.format(settings.MEDIA_ROOT),
    # Url for file
    base_url=u'{0}my_sell/'.format(settings.MEDIA_URL),
)
def image_directory_path(instance, filename):
    """Upload-path callable for FileField: place *filename* under picture/.

    *instance* is required by Django's upload_to contract but unused here;
    combined with image_storage the full path is MEDIA_ROOT/my_sell/picture/<filename>.
    """
    return 'picture/%s' % filename
#site 2 home page image slider model
class HomeImageSlider(models.Model):
    """One slide of the site-2 home-page image carousel."""
    image = models.FileField(upload_to=image_directory_path, storage=image_storage,help_text='Enter image you want to display on the home page slider ')
    image_description = models.CharField(max_length=250,help_text='Enter image description')
    image_heading = models.CharField(max_length=250,help_text='Enter image Heading')


#site 2 home page counts model
class HomePageData(models.Model):
    """Headline counters displayed on the home page."""
    TechnicalStaff = models.IntegerField()
    # Capped at 99 so the figure stays two digits on the page.
    YearsOfExperience = models.IntegerField(validators=[MaxValueValidator(99)])
    NumberOfSatisfiedClient = models.IntegerField()
    # India has at most 29 states at the time this was written.
    StatesCoveredInIndia = models.IntegerField(validators=[MaxValueValidator(29)])


class InstrumentsParametersWise(models.Model):
    """Home-page card describing one instrument parameter category."""
    title = models.CharField(max_length=100,help_text='Enter <b>Title of "InstrumentsParametersWise"</b> which is displyed on home page ')
    description = models.CharField(max_length=500,help_text='Enter <b>Description of "Title of InstrumentsParametersWise"</b> which is displyed on home page ')
    # Name of a Bootstrap icon class used by the template.
    bootstrap_icons = models.CharField(max_length=100,help_text='<h4>do not play with this, this part is related to coding </h4>',blank=True)

    def __str__(self):
        return self.title


#site 2 aboutus page description model
class AboutUs(models.Model):
    """Free-text body of the About Us page."""
    description = models.TextField(help_text='enter description of the about us page.' )


#site 2 contactus page which gather data from forms and store in models.
class ContactModel(models.Model):
    """A message submitted through the contact form."""
    name = models.CharField(max_length=50)
    email = models.EmailField(max_length=50)
    subject = models.CharField(max_length=50)
    message = models.TextField(max_length=500)

    def __str__(self):
        return self.name


#site 2 contactus page display contact information
class Contact_display(models.Model):
    """Static contact details (location/email/phone) shown next to the form."""
    display_location = models.CharField(max_length=100, help_text='Enter your <b>LOCATION</b> which will be display on contact form')
    display_email = models.EmailField(max_length=254, help_text='Enter your <b>EMAIL</b> which will be display on contact form')
    display_call = models.CharField(max_length=100, help_text='Enter your <b>PHONE_NUMBER</b> which will be display on contact form')

    def __str__(self):
        return self.display_email


class Blog(models.Model):
    """Author card shown on the blog page."""
    name= models.CharField(max_length=50,help_text='Enter your <b>name</b> which will be display on blog')
    Qualification=models.CharField(max_length=50, help_text='Enter your <b>Qualification</b> which will be display on blog')
    description=models.CharField(max_length=300,help_text='Enter your <b>description</b> which will be display on blog')
    image = models.FileField(upload_to=image_directory_path, storage=image_storage,max_length=100,default=None)

    def __str__(self):
        return self.name


class ProductCategory(models.Model):
    """Top level of the product hierarchy (category -> sub-category -> detail)."""
    name= models.CharField(max_length=50,help_text='Enter your <b>ProductTableName</b> which will be display on ProdcutPage')

    def __str__(self):
        return self.name


class SubCategory(models.Model):
    """Second level of the product hierarchy; belongs to one ProductCategory."""
    sub_title = models.CharField(max_length=50, help_text='Enter your <b>sub_title</b> which will be display on PRODUCT section')
    product_category = models.ForeignKey(ProductCategory, on_delete=models.CASCADE)

    def __str__(self):
        return self.sub_title


class ProductDetail(models.Model):
    """Leaf of the product hierarchy: a concrete product with a price."""
    description = models.CharField(max_length=100, help_text='Enter your <b>description</b> which will be display on PRODUCT section')
    price = models.FloatField(help_text='Enter your <b>price</b> which will be display on PRODUCT section')
    sub_category = models.ForeignKey(SubCategory, on_delete=models.CASCADE)

    def __str__(self):
        return self.description


# ------------------------------------------------------------------------------------------------------------
# class Table(models.Model):
#     name = models.CharField(max_length=100)
#     def __str__(self):
#         return self.name
# class Product(models.Model):
#     name = models.ForeignKey("Table",on_delete=models.CASCADE)
#     image = models.ImageField(upload_to=image_directory_path, height_field=None, width_field=None, max_length=None, storage=image_storage)
#     description = models.CharField(max_length=200)
#     title = models.CharField(max_length=100)
#     created_date = models.DateField(auto_now=False, auto_now_add=False)
#     def __str__(self):
#         return self.title
|
{"/site_2_process_instrument/migrations/0003_alter_homeimageslider_image.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0006_auto_20210510_0746.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0001_initial.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/admin.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/views.py": ["/site_2_process_instrument/models.py"], "/home_page/views.py": ["/site_2_process_instrument/models.py"]}
|
37,577,422
|
pulkitmadaanit/medicalwebsite-team_work
|
refs/heads/master
|
/site_2_process_instrument/apps.py
|
from django.apps import AppConfig
class ProcessInstrumentsSolutionsConfig(AppConfig):
    """Django app configuration for the site_2_process_instrument app."""
    name = 'site_2_process_instrument'
|
{"/site_2_process_instrument/migrations/0003_alter_homeimageslider_image.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0006_auto_20210510_0746.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0001_initial.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/admin.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/views.py": ["/site_2_process_instrument/models.py"], "/home_page/views.py": ["/site_2_process_instrument/models.py"]}
|
37,577,423
|
pulkitmadaanit/medicalwebsite-team_work
|
refs/heads/master
|
/site_1_printing_advertisement/views.py
|
from django.shortcuts import render, HttpResponse
from django.views import View
# from django.views.generic import TemplateView
# # Create your views here.
# def home(request):
# return render(request,"home/index.html")
# def about(request):
# return render(request,"site2/project/about.html")
# def contact(request):
# return render(request,"site2/project/contact.html")
|
{"/site_2_process_instrument/migrations/0003_alter_homeimageslider_image.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0006_auto_20210510_0746.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/migrations/0001_initial.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/admin.py": ["/site_2_process_instrument/models.py"], "/site_2_process_instrument/views.py": ["/site_2_process_instrument/models.py"], "/home_page/views.py": ["/site_2_process_instrument/models.py"]}
|
37,603,569
|
Duayt/card
|
refs/heads/master
|
/cardgames/engine/core.py
|
# %%
from cards import Deck, Card, Stack
from abc import ABC, abstractmethod
from dataclasses import dataclass
from collections import namedtuple
class Game(ABC):
    # Abstract marker base class for card games; concrete games (e.g. PokDeng)
    # subclass it. No abstract methods are declared yet.
    pass
class Player:
    """A game participant holding a hand of cards and a wallet balance."""

    def __init__(self, name, hand: Stack = None, wallet=100):
        # A fresh empty stack per player avoids sharing one mutable default.
        self.hand = hand if hand is not None else Stack.new_empty()
        self.name = name
        self.wallet = wallet

    def __str__(self):
        return f'{self.name}: {str(self.hand)}'

    def __repr__(self):
        return str(self)

    def check_hand(self):
        # Total pip value across every card currently in the hand.
        return sum(card.pip.value for card in self.hand)
@dataclass
class PokDeng(Game):
    """State container for a Pok Deng round: the players, their bets, the dealer."""
    # NOTE(review): `list[Player]` builtin generics require Python 3.9+.
    players: list[Player]
    player_bets: list[float]
    dealer: Player

    @classmethod
    def init_state(self, n_player=3):
        # NOTE(review): stub — returns None immediately and ignores n_player;
        # the first parameter of a classmethod is conventionally named `cls`.
        return
# Ad-hoc game session exercising the classes above (notebook-style script;
# the `# %%` markers below are cell delimiters).
num_player = 1
dealer = Player(name='dealer')
players = [Player(name=f'p{i}') for i in range(num_player)]
# Game session
# each player place bet
player_bets = [10]
# shuffle deck
deck = Deck(is_shuffle=True)
# deal 1 card each player including dealer
hands = 2
for i in range(hands):
    for p in players:
        deck.deal(p.hand, 1)
    # dealer receives one card per round after the players — TODO confirm
    # this indentation matches the original intent.
    deck.deal(dealer.hand, 1)
# check for player and dealer pokdeng
# session end if dealer pokdeng
# ask whether player will add one more card
# dealer decide to take one more cards
# check all cards and play bet
# %%
# %%
|
{"/cardgames/core.py": ["/cardgames/cards.py"], "/cardgames/main.py": ["/cardgames/games.py"], "/tests/test_pokdeng.py": ["/cardgames/core.py", "/cardgames/cards.py", "/cardgames/games.py"], "/cardgames/games.py": ["/cardgames/core.py", "/cardgames/cards.py"], "/tests/test_core.py": ["/cardgames/cards.py"]}
|
37,623,265
|
zachcheu/MLModels
|
refs/heads/master
|
/hw1/hw1.py
|
import math
def q1():
    """Decision-tree exercise scaffolding over the classic 14-row weather dataset.

    NOTE(review): this function does NOT run as written — `attributeNames`,
    `attr` and `group` are never defined anywhere in this file (NameError).
    Presumably `attributeDataMap` and the commented-out
    `for attr in availAttr:` loop were intended.
    """
    size = 14
    # Encoded columns: outlook 0/1/2, temperature/humidity numeric, windy 0/1,
    # play is the class label.
    outlook = [0,0,0,0,0,1,1,1,1,2,2,2,2,2]
    temp = [75,80,85,72,69,72,83,64,81,71,65,75,68,70]
    hum = [70,90,85,95,70,90,78,65,75,80,70,80,80,96]
    windy = [1,1,0,0,0,1,0,1,0,1,1,0,0,0]
    play = [1,0,0,0,1,1,1,1,1,0,0,1,1,1]
    #attributeSize = 4
    data = []
    for i in range(size):
        data.append((outlook[i], temp[i], hum[i], windy[i], play[i]))
    attributeDataMap = ["outlook", "temperature", "humidity", "windy"]
    # One grouping function per attribute; numeric ones split at <= 75.
    attributeGroupFunction = [equals, lessEqual(75), lessEqual(75), equals]
    availAttr = set(attributeNames)  # NameError: `attributeNames` is undefined
    outputData = []
    tempData = data
    tempOutputData = []
    # for attr in availAttr:
    attrIndex = attributeDataMap.index(attr)  # NameError: `attr` is undefined
    group(tempData, attrIndex, attributeGroupFunction[attrIndex])  # `group` is undefined
    # availAttr
    # getChildren(data)
def equals(val):
    # Identity grouping function: each distinct value forms its own group.
    return val
def lessEqual(bound):
    """Return a grouper that maps values <= *bound* to group 0, others to 1."""
    def grouper(val):
        return 0 if val <= bound else 1
    return grouper
def entropyCalc(tup):
    """Shannon entropy (in bits) of the class-count tuple *tup*.

    Zero counts contribute nothing (0 * log 0 is taken as 0).
    """
    total = sum(tup)
    acc = 0
    for count in tup:
        if count == 0:
            continue
        acc += -(count/total) * math.log2(count/total)
    return acc


def gain(children):
    """Information gain of a split.

    *children* is a list of per-child class-count pairs (neg, pos). Each
    child's entropy is printed as a side effect, matching the original.
    """
    total = sum(sum(pair) for pair in children)
    parent = [0, 0]
    weighted_child_entropy = 0
    for pair in children:
        parent[0] += pair[0]
        parent[1] += pair[1]
        print(entropyCalc(pair))
        weighted_child_entropy += (sum(pair)/total) * entropyCalc(pair)
    return entropyCalc(parent) - weighted_child_entropy
if __name__ == "__main__":
    # Ad-hoc smoke test: gain of a perfectly separating split (all-of-one-class
    # children), which should equal the parent entropy.
    # filter = (0, 0)
    print(gain([(0,2), (3,0)]))
    # print(sorted([75,80,85,72,69,72,83,64,81,71,65,75,68,70]))
    # print(sorted([70,90,85,95,70,90,78,65,75,80,70,80,80,96]))
    #print(entropyCalc((3,3)))
|
{"/testNeuralNetDigits.py": ["/nn.py"], "/challengeBoostedDT.py": ["/boostedDT.py", "/bestClassifier.py"], "/gridParameters.py": ["/svmKernels.py"]}
|
37,623,266
|
zachcheu/MLModels
|
refs/heads/master
|
/hw3/hw3_skeleton/bestClassifier.py
|
import sklearn
# Fixed: `import sklearn` alone does not load the `svm` submodule, so the
# base-class lookup below raised AttributeError.
import sklearn.svm


class BestClassifier(sklearn.svm.SVC):
    """Challenge classifier: currently a plain sklearn SVC with default params."""
    pass
|
{"/testNeuralNetDigits.py": ["/nn.py"], "/challengeBoostedDT.py": ["/boostedDT.py", "/bestClassifier.py"], "/gridParameters.py": ["/svmKernels.py"]}
|
37,623,267
|
zachcheu/MLModels
|
refs/heads/master
|
/hw3/hw3_skeleton/boostedDT.py
|
"""
TEMPLATE FOR MACHINE LEARNING HOMEWORK
AUTHOR Eric Eaton, Vishnu Purushothaman Sreenivasan
"""
import numpy as np
from sklearn import tree
class BoostedDT:
    """Multiclass boosted decision trees (SAMME-style AdaBoost over
    depth-limited sklearn trees — the log(K-1) term below is the SAMME
    multiclass correction)."""

    def __init__(self, num_boosting_iters=100, max_tree_depth=3):
        """
        Constructor
        """
        # TODO
        self.iters = num_boosting_iters  # number of boosting rounds
        self.depth = max_tree_depth      # depth cap on each weak learner
        self.tree_weight = []            # per-round model weight B
        self.models = []                 # fitted weak learner per round

    def fit(self, X, y):
        """
        Trains the model
        Arguments:
            X is a n-by-d numpy array
            y is an n-dimensional numpy array
        """
        # TODO: np.unique and np logical functions (logical_and/or/not) may be helpful to your implementation
        n,d = X.shape
        # Uniform initial sample weights.
        weight = np.zeros((n,))
        weight.fill(1/n)
        # Class count, assuming labels are consecutive integers — TODO confirm.
        self.K = int(np.amax(y) - np.amin(y)) + 1
        for i in range(self.iters):
            clf = tree.DecisionTreeClassifier(max_depth=self.depth)
            clf.fit(X, y, sample_weight=weight)
            trainResults = clf.predict(X)
            # print(weight)
            # print("predict: ", trainResults)
            # print("actual: ", y)
            resultsInputNotEqual = np.not_equal(trainResults, y)
            incorrectIndex = np.argwhere(resultsInputNotEqual)
            # print(incorrectIndex)
            # Weighted training error of this round's learner.
            error = weight[incorrectIndex].sum()
            print(error)
            # error = (weight * resultsInputNotEqual).sum()
            # NOTE(review): error == 0 divides by zero here — verify inputs.
            B = 0.5 * (np.log((1-error)/error) + np.log(self.K-1))
            # print(resultsInputNotEqual)
            # weightChangeScale = np.where(resultsInputNotEqual, 1, np.exp(B))
            # print(weightChangeScale)
            # weight = np.multiply(weight, weightChangeScale)
            # Up-weight misclassified samples, then renormalize to sum to 1.
            weight = weight * np.exp(B * resultsInputNotEqual)
            weight = np.multiply(weight, 1/weight.sum())
            # weight = weight/weight.sum()
            self.tree_weight.append(B)
            self.models.append(clf)

    def predict(self, X):
        """
        Used the model to predict values for each instance in X
        Arguments:
            X is a n-by-d numpy array
        Returns:
            an n-dimensional numpy array of the predictions
        """
        # TODO
        n,d = X.shape
        # Weighted vote: each learner adds its weight to its predicted class.
        counter = np.zeros((n,self.K))
        for i, model in enumerate(self.models):
            classified = model.predict(X)
            for index, val in np.ndenumerate(classified):
                counter[index, int(val)] += self.tree_weight[i]
        return np.argmax(counter, axis=1)
|
{"/testNeuralNetDigits.py": ["/nn.py"], "/challengeBoostedDT.py": ["/boostedDT.py", "/bestClassifier.py"], "/gridParameters.py": ["/svmKernels.py"]}
|
37,652,264
|
SimonGideon/Hands_out_fun
|
refs/heads/master
|
/Pie chart in py.py
|
# Draw a pie chart whose first slice size comes from user input.
import matplotlib.pyplot as a  # NOTE(review): alias `a` is opaque; `plt` is conventional
x = input("Enter Value: ")
x = int(x)  # non-numeric input raises ValueError
p=[x, 21, 17, 13, 10]  # slice sizes
pl=["Mark","Jayson","Victor","Loice","Maureen"]  # slice labels
a.pie(p, labels=pl)
a.show()
|
{"/Django_1/source/Sales/views.py": ["/Django_1/source/Sales/forms.py"], "/My_SIte/Source/Lethal/admin.py": ["/My_SIte/Source/Lethal/models.py"], "/Django_3/source/Polls/tests.py": ["/Django_3/source/Polls/views.py"], "/VEM/source/Can_v/admin.py": ["/VEM/source/Can_v/models.py"]}
|
37,652,265
|
SimonGideon/Hands_out_fun
|
refs/heads/master
|
/Audio.py
|
import winsound  # Windows-only stdlib module
freq = 2500  # beep frequency in Hz
dur = 1000   # beep duration in milliseconds
winsound.Beep(freq, dur)
class Deque:
    """Double-ended queue backed by a plain list whose tail is the 'front'.

    Front operations are O(1) (list tail); rear operations are O(n)
    because they insert/pop at index 0.
    """

    def __init__(self):
        self.items = []

    def isEmpty(self):
        return len(self.items) == 0

    def addFront(self, item):
        self.items.append(item)

    def addRear(self, item):
        self.items.insert(0, item)

    def removeFront(self):
        return self.items.pop()

    def removeRear(self):
        return self.items.pop(0)

    def size(self):
        return len(self.items)
# Audio with pyglet
import winsound  # NOTE(review): duplicate of the beep block above
freq = 2500
dur = 1000
winsound.Beep(freq, dur)
import pyglet  # third-party audio library
audio = pyglet.media.load("audio.wav")  # expects audio.wav in the working dir
audio.play()
# Enum demo: three ways to reference an enum member.
from enum import Enum


class Color(Enum):
    """Closed set of primary colour constants."""
    red = 1
    green = 2
    blue = 3


print(Color.red)     # fixed: was `Clor.red` (NameError)
print(Color(1))      # lookup by value
print(Color['red'])  # lookup by name
|
{"/Django_1/source/Sales/views.py": ["/Django_1/source/Sales/forms.py"], "/My_SIte/Source/Lethal/admin.py": ["/My_SIte/Source/Lethal/models.py"], "/Django_3/source/Polls/tests.py": ["/Django_3/source/Polls/views.py"], "/VEM/source/Can_v/admin.py": ["/VEM/source/Can_v/models.py"]}
|
37,652,266
|
SimonGideon/Hands_out_fun
|
refs/heads/master
|
/30 Prac.py
|
import string
print(string.ascii_lowercase)
from math import *  # star import so eval() below can use math names directly
# SECURITY: eval() on raw user input executes arbitrary Python — never do
# this with untrusted input.
print(eval(input()))
for i in range(0, 11):
    print(i)
|
{"/Django_1/source/Sales/views.py": ["/Django_1/source/Sales/forms.py"], "/My_SIte/Source/Lethal/admin.py": ["/My_SIte/Source/Lethal/models.py"], "/Django_3/source/Polls/tests.py": ["/Django_3/source/Polls/views.py"], "/VEM/source/Can_v/admin.py": ["/VEM/source/Can_v/models.py"]}
|
37,652,267
|
SimonGideon/Hands_out_fun
|
refs/heads/master
|
/Assingment1.py
|
# Deduplicate and sort a list of integers.
# Fixed: the original named its input `list`, shadowing the builtin, and
# deduplicated with an O(n^2) membership loop; set() does both dedup steps.
values = [1, 2, 2, 2, 3, 3, 4, 5, 6, 7, 7, 7, 8, 9, 9, 10]
f1 = sorted(set(values))
print(f1)
|
{"/Django_1/source/Sales/views.py": ["/Django_1/source/Sales/forms.py"], "/My_SIte/Source/Lethal/admin.py": ["/My_SIte/Source/Lethal/models.py"], "/Django_3/source/Polls/tests.py": ["/Django_3/source/Polls/views.py"], "/VEM/source/Can_v/admin.py": ["/VEM/source/Can_v/models.py"]}
|
37,652,268
|
SimonGideon/Hands_out_fun
|
refs/heads/master
|
/Rock Paper Scissors.py
|
# Rock paper Scissor
# Fixed: the trailing demo called random.choice() although only randint was
# imported, raising NameError after the game loop ended.
from random import randint, choice

# moves for the player
moves = ["rock", "paper", "scissors"]
while True:
    computer = moves[randint(0, 2)]
    player = input("rock, paper or scissor ? (or end the game) ").lower()
    if player == "end the game":
        print("The game has ended.")
        break
    elif player == computer:
        print("Tie")
    elif player == "rock":
        if computer == "paper":
            print("You lose!", computer, "beats", player)
        else:
            print("You win!", player, "beats", computer)
    elif player == "paper":
        if computer == "scissors":
            print("You lose !", computer, "beats", player)
        else:
            print("You win", player, "beats", computer)
    elif player == "scissors":
        if computer == "rock":
            print("You lose !", computer, "beats", player)
        else:
            print("You win", player, "beats", computer)
    else:
        print("Check your spelling.....")

l = ["rock", "scissor", "paper"]
computer = choice(l)
print(computer)
|
{"/Django_1/source/Sales/views.py": ["/Django_1/source/Sales/forms.py"], "/My_SIte/Source/Lethal/admin.py": ["/My_SIte/Source/Lethal/models.py"], "/Django_3/source/Polls/tests.py": ["/Django_3/source/Polls/views.py"], "/VEM/source/Can_v/admin.py": ["/VEM/source/Can_v/models.py"]}
|
37,652,269
|
SimonGideon/Hands_out_fun
|
refs/heads/master
|
/QR Code.py
|
import pyqrcode  # third-party QR library
from pyqrcode import *  # NOTE(review): redundant with the other two imports
from pyqrcode import QRCode
s = pyqrcode.create("998472951212")  # encode the phone-number-like payload
# NOTE(review): builder() looks like internal API; presumably a render call
# such as s.png(...) / s.terminal() was intended — verify against pyqrcode docs.
s.builder()
|
{"/Django_1/source/Sales/views.py": ["/Django_1/source/Sales/forms.py"], "/My_SIte/Source/Lethal/admin.py": ["/My_SIte/Source/Lethal/models.py"], "/Django_3/source/Polls/tests.py": ["/Django_3/source/Polls/views.py"], "/VEM/source/Can_v/admin.py": ["/VEM/source/Can_v/models.py"]}
|
37,652,270
|
SimonGideon/Hands_out_fun
|
refs/heads/master
|
/Calendar.py
|
# Print the plain-text calendar for one month.
import calendar
import datetime  # present in the original; unused below

# Target month: May 2021.
year, month = 2021, 5
print(calendar.month(year, month))
|
{"/Django_1/source/Sales/views.py": ["/Django_1/source/Sales/forms.py"], "/My_SIte/Source/Lethal/admin.py": ["/My_SIte/Source/Lethal/models.py"], "/Django_3/source/Polls/tests.py": ["/Django_3/source/Polls/views.py"], "/VEM/source/Can_v/admin.py": ["/VEM/source/Can_v/models.py"]}
|
37,652,271
|
SimonGideon/Hands_out_fun
|
refs/heads/master
|
/chrome.py
|
# Draw the Google Chrome logo with turtle graphics, then swap the drawing
# for a pre-rendered chrome.gif cursor shape.
from turtle import *
from time import sleep

colormode(255)  # accept 0-255 RGB tuples
red = (223, 35, 35); green = (75, 183, 75); yellow = (252, 210, 9);
blue = (86, 146, 195)
r = 120  # base radius of the logo
speed(2)

# Red sector (top)
seth(-150)
up()
color(red)
begin_fill()
fd(r)
down()
right(90)
circle(-r, 120)
fd(r*3**.5)  # chord length of a 120-degree arc: r * sqrt(3)
end_fill()

# Green sector (bottom-left)
left(180)
color(green)
begin_fill()
fd(r*3**.5)
left(120)
circle(2*r, 120)
left(60)
fd(r*3**.5)
left(180)
circle(-r, 120)
end_fill()

# Yellow sector (bottom-right)
left(180)
circle(r, 120)
color(yellow)
begin_fill()
circle(r, 120)
right(180)
fd(r*3**.5)
right(60)
circle(-2*r, 120)
right(120)
fd(r*3**.5)
end_fill()

# Blue centre disc
up()
left(90)
fd(r/20)
seth(60)
color(blue)
down()
begin_fill()
circle(distance(0, 0))  # radius = current distance from the origin
end_fill()
ht()
sleep(0.8)

screen = getscreen()
# Fixed: was `regester_shape`, which raises AttributeError — the turtle
# Screen method is register_shape().
screen.register_shape("chrome.gif")
clear()
ht()
up()
goto(-30, 0)
shape("chrome.gif")
|
{"/Django_1/source/Sales/views.py": ["/Django_1/source/Sales/forms.py"], "/My_SIte/Source/Lethal/admin.py": ["/My_SIte/Source/Lethal/models.py"], "/Django_3/source/Polls/tests.py": ["/Django_3/source/Polls/views.py"], "/VEM/source/Can_v/admin.py": ["/VEM/source/Can_v/models.py"]}
|
37,652,272
|
SimonGideon/Hands_out_fun
|
refs/heads/master
|
/Stock and Die.py
|
import random
def RollDice(rolls):
    """Print `rolls` random d6 results, then show the menu again."""
    for i in range(0, rolls):
        number = random.randint(1, 6)
        print(number)
    # NOTE(review): mutual recursion with Menu(); a very long session can
    # exhaust the recursion limit.
    Menu()


def Menu():
    """Interactive menu, implemented via recursion rather than a loop."""
    print("1. Roll a Dice")
    print("2. Roll multiple dice ")
    print(".........")
    print("Exit program")
    choice = int(input("Enter Here: "))  # non-numeric input raises ValueError
    if choice == 1:
        RollDice(1)
    if choice == 2:
        rolls = int(input("How many rolls?"))
        RollDice(rolls)
    if choice == 3:
        exit()


Menu()
|
{"/Django_1/source/Sales/views.py": ["/Django_1/source/Sales/forms.py"], "/My_SIte/Source/Lethal/admin.py": ["/My_SIte/Source/Lethal/models.py"], "/Django_3/source/Polls/tests.py": ["/Django_3/source/Polls/views.py"], "/VEM/source/Can_v/admin.py": ["/VEM/source/Can_v/models.py"]}
|
37,652,273
|
SimonGideon/Hands_out_fun
|
refs/heads/master
|
/B_W converter.py
|
# Convert a photo to greyscale, save it, and beep on success (Windows-only).
import PIL
import winsound
from PIL import Image

# Fixed: the paths used plain strings, where "\P" is an invalid escape
# sequence (SyntaxWarning on modern Python); raw strings keep the same
# byte-for-byte value without the warning.
img = Image.open(r"D:\PICTURES\PO74763662.jpg")
blackAndWhite = img.convert("L")  # "L" = 8-bit greyscale mode
blackAndWhite.save(r'D:\PICTURES\Bw_hahaha.png')
blackAndWhite.show()
if blackAndWhite:
    # Audible success signal.
    freq = 2500
    dur = 1000
    winsound.Beep(freq, dur)
else:
    print("gfgg")
|
{"/Django_1/source/Sales/views.py": ["/Django_1/source/Sales/forms.py"], "/My_SIte/Source/Lethal/admin.py": ["/My_SIte/Source/Lethal/models.py"], "/Django_3/source/Polls/tests.py": ["/Django_3/source/Polls/views.py"], "/VEM/source/Can_v/admin.py": ["/VEM/source/Can_v/models.py"]}
|
37,652,274
|
SimonGideon/Hands_out_fun
|
refs/heads/master
|
/R_P_S.py
|
from random import randint

# Rock/paper/scissors vs the computer.
move = ["rock", "paper", "scissors"]
while True:
    computer = move[randint(0, 2)]
    # Fixed: normalize the player's input to lowercase and compare against
    # lowercase literals. The original compared capitalized strings
    # ('Rock', 'Paper', 'Scissor') against the lowercase entries of `move`,
    # so the lose branches could never fire ('Scissor' was also a typo).
    player = input("Enter: Rock, Scissors, Paper ? or stop!" ).lower()
    if player == 'stop!':
        print("Game ended..")
        break
    elif player == computer:
        print("A ties!")
    elif player == 'rock':
        if computer == 'paper':
            print("You lose", computer, "beat", player)
        else:
            print("You win", player, "beat", computer)
    elif player == 'paper':
        if computer == 'scissors':
            print("You lose", computer, "beats", player)
        else:
            print("You win", player, "beats", computer)
    elif player == 'scissors':
        if computer == 'rock':
            print("You lose", computer, "beats", player)
        else:
            print("You win", player, "beats", computer)
    else:
        # (The original had a second, unreachable `player == computer` branch
        # here; ties are already handled above.)
        print("Check your spelling...")
|
{"/Django_1/source/Sales/views.py": ["/Django_1/source/Sales/forms.py"], "/My_SIte/Source/Lethal/admin.py": ["/My_SIte/Source/Lethal/models.py"], "/Django_3/source/Polls/tests.py": ["/Django_3/source/Polls/views.py"], "/VEM/source/Can_v/admin.py": ["/VEM/source/Can_v/models.py"]}
|
37,652,275
|
SimonGideon/Hands_out_fun
|
refs/heads/master
|
/Birthday Calculator.py
|
import datetime
import django_db.settings  # NOTE(review): unused here — candidate for removal
import odbc                # NOTE(review): unused here — candidate for removal

# NOTE(review): '%y' is the 2-digit year, so the age subtraction below only
# works for same-century dates — confirm whether '%Y' was intended.
current_date = datetime.date.today().strftime('%y-%m-%d')
current_date_1st = current_date.split('-')
b_date = input('Enter birthday in yy-mm-dd format:')
name = input('Name of Birthday Legend?')
# Fixed: the original referenced `b.date`, but no `b` exists (NameError);
# the input is stored in `b_date`.
b_date = b_date.split('-')
if current_date_1st[1] == b_date[1] and current_date_1st[2] == b_date[2]:
    age = int(current_date_1st[0]) - int(b_date[0])
ordinal_suffix = {1: 'st', 2: 'nd', 3: 'rd'}
|
{"/Django_1/source/Sales/views.py": ["/Django_1/source/Sales/forms.py"], "/My_SIte/Source/Lethal/admin.py": ["/My_SIte/Source/Lethal/models.py"], "/Django_3/source/Polls/tests.py": ["/Django_3/source/Polls/views.py"], "/VEM/source/Can_v/admin.py": ["/VEM/source/Can_v/models.py"]}
|
37,652,276
|
SimonGideon/Hands_out_fun
|
refs/heads/master
|
/Deb_recovery.py
|
# This articles is made by Simon
"""Don't edit any of this section because it wont work"""
# For more python pranks
"""whatsapp 0707455652,"""
"""email: netflixblackbelt@gmail.com"""
import os
# WARNING: destructive "prank" script — walks the current working directory
# tree deepest-first and removes every *empty* subdirectory. os.rmdir fails
# on non-empty directories, which lands in the except branch below.
a = os.getcwd()
f = sorted(list(os.walk(a))[1:], reverse=True)  # [1:] skips the root itself
for fld in f:
    try:
        os.rmdir(fld[0])  # fld[0] is the directory path from os.walk
    except OSError as error:
        print("\U0001F637: Share the prank with your Friend")
|
{"/Django_1/source/Sales/views.py": ["/Django_1/source/Sales/forms.py"], "/My_SIte/Source/Lethal/admin.py": ["/My_SIte/Source/Lethal/models.py"], "/Django_3/source/Polls/tests.py": ["/Django_3/source/Polls/views.py"], "/VEM/source/Can_v/admin.py": ["/VEM/source/Can_v/models.py"]}
|
37,686,764
|
MaxSurets/RoadSafety
|
refs/heads/master
|
/accidents.py
|
import numpy as np
import pandas as pd
def fix_zip(s):
    """Normalize a ZIP value to a float.

    ZIP+4 strings like '12345-6789' keep only the 5-digit prefix; anything
    else is converted with float() directly.
    """
    text = str(s)
    if "-" in text:
        return float(text[:text.index("-")])
    return float(s)
# Join accident records with per-zip population and index by zip code.
popdf = pd.read_csv("pop-by-zip-code.csv")  # population per zip, one column per year
popdf = popdf.loc[:,["zip_code","y-2016"]]  # keep zip and the 2016 figure
popdf.columns = ["Zipcode", "Population"]
#print(popdf)
df = pd.read_csv("gooddata.csv")  # accident records
df["Zipcode"] = df["Zipcode"].apply(fix_zip)  # normalize ZIP+4 to float zips
df = df.join(popdf.set_index("Zipcode"), on="Zipcode")  # attach population
#print(df.loc[:,["Zipcode", "Street", "Population"]])
df = df.set_index("Zipcode").sort_index().loc[:,["Street","Population"]]
print(df)
#print(df.groupby(["Zipcode"]).agg({"Street": "value_counts"}))
#print(df.sort_index().loc[:,"Street"])
#print(df.loc[:,["Zipcode", "Street"]].groupby(["Zipcode"]))
|
{"/road_safety/maps/views.py": ["/road_safety/maps/models.py"], "/loadAccidents.py": ["/road_safety/maps/models.py"]}
|
37,686,765
|
MaxSurets/RoadSafety
|
refs/heads/master
|
/loadAccidents.py
|
from road_safety.maps.models import Accidents, Scores
from csv import reader

# Bulk-load the accidents table, skipping the CSV header row.
# NOTE(review): rows are split with a naive str.split(",") — the imported
# csv.reader would handle quoted fields correctly.
with open('accidentsTable.csv', 'r') as read_obj:
    #csv_reader = reader(read_obj)
    for i, row in enumerate(read_obj, start=1):
        # Fixed: the original tested `start == 1`, which raises NameError
        # (`start` is only a keyword argument); the loop counter is `i`.
        if i == 1:
            continue
        entries = row.split(",")
        Accidents.objects.create(lat=entries[0], long=entries[1], street=entries[3], zip=int(entries[4]))

# Bulk-load the street-score table, same header-skip logic.
with open('scoresTable.csv', 'r') as read_obj:
    for i, row in enumerate(read_obj, start=1):
        if i == 1:
            continue
        entries = row.split(",")
        Scores.objects.create(street=entries[0], zip=int(entries[1]), score=float(entries[2]))
|
{"/road_safety/maps/views.py": ["/road_safety/maps/models.py"], "/loadAccidents.py": ["/road_safety/maps/models.py"]}
|
37,687,515
|
Moonjester/chess
|
refs/heads/master
|
/main2.py
|
import pygame
from board import Board
from pieces2 import *
from constants import *
pygame.init()
def main():
    """Event loop for the chess prototype: set up both armies, then handle
    click-to-select / click-to-move input until quit or Escape."""
    game_run = True
    clock = pygame.time.Clock()
    board = Board()
    board.draw_squares(SCREEN)
    # ---------------------------------- Main Function ------------------------------- #
    pieces_on_board = pygame.sprite.Group()
    # Piece name -> file (column) index; rank comes from the piece type below.
    w_pieces_coord = {
        'pawn_1' : 0, 'pawn_2' : 1, 'pawn_3' : 2, 'pawn_4' : 3, 'pawn_5' : 4,
        'pawn_6' : 5, 'pawn_7' : 6, 'pawn_8' : 7, 'castle_l' : 0,
        'castle_r' : 7, 'knight_r' : 6, 'knight_l' : 1, 'bishop_r' : 2,
        'bishop_l' : 5, 'queen' : 3, 'king' : 4
    }
    b_pieces_coord = {
        'pawn_1' : 0, 'pawn_2' : 1, 'pawn_3' : 2, 'pawn_4' : 3, 'pawn_5' : 4,
        'pawn_6' : 5, 'pawn_7' : 6, 'pawn_8' : 7, 'castle_l' : 0,
        'castle_r' : 7, 'knight_r' : 6, 'knight_l' : 1, 'bishop_r' : 2,
        'bishop_l' : 5, 'queen' : 3, 'king' : 4
    }
    # Replace each column index with an instantiated piece for both colours;
    # black sits on row 0/1 (y = 0 / S_SIZE), white on row 6/7.
    for pieces, coords in b_pieces_coord.items():
        scaled_xy = coords * S_SIZE
        if 'pawn' in pieces:
            b_pieces_coord[pieces] = Pawn(scaled_xy, S_SIZE, 'black')
            w_pieces_coord[pieces] = Pawn(scaled_xy, S_SIZE * 6, 'white')
        if 'castle' in pieces:
            b_pieces_coord[pieces] = Castle(scaled_xy, 0, 'black')
            w_pieces_coord[pieces] = Castle(scaled_xy, S_SIZE * 7, 'white')
        if 'knight' in pieces:
            b_pieces_coord[pieces] = Knight(scaled_xy, 0, 'black')
            w_pieces_coord[pieces] = Knight(scaled_xy, S_SIZE * 7, 'white')
        if 'bishop' in pieces:
            b_pieces_coord[pieces] = Bishop(scaled_xy, 0, 'black')
            w_pieces_coord[pieces] = Bishop(scaled_xy, S_SIZE * 7, 'white')
        if 'queen' in pieces:
            b_pieces_coord[pieces] = Queen(scaled_xy, 0, 'black')
            w_pieces_coord[pieces] = Queen(scaled_xy, S_SIZE * 7, 'white')
        if 'king' in pieces:
            b_pieces_coord[pieces] = King(scaled_xy, 0, 'black')
            w_pieces_coord[pieces] = King(scaled_xy, S_SIZE * 7, 'white')
        pieces_on_board.add(w_pieces_coord[pieces])
        pieces_on_board.add(b_pieces_coord[pieces])
    num = 0
    selected_coords = []
    selected = False
    double_clicked = len(selected_coords) == 2  # NOTE(review): computed once, never updated
    selected_piece = pygame.sprite.GroupSingle()
    #-------------------------------- While Loop -----------------------------------#
    while game_run:
        clock.tick(FPS)
        for event in pygame.event.get():
            if event.type == pygame.MOUSEBUTTONDOWN:
                mouse_loc = pygame.mouse.get_pos()
                currx, curry = mouse_loc
                originx, originy = mouse_loc
                # Snap the click to the top-left corner of its square;
                # `floor` comes from a star import — presumably math.floor,
                # and the -50/round(..., -2) pair implies 100px squares —
                # TODO confirm against constants.py.
                originx = round(floor(originx - 50), -2)
                originy = round(floor(originy - 50), -2)
                for pieces in pieces_on_board:
                    collide = pieces.piece_rect.collidepoint(currx, curry)
                    # checks if you selected a square with a piece on
                    if collide and not selected:
                        selected = True
                        selected_piece.add(pieces)
                        num += 1
                        selected_coords.append((originx, originy))
                        pieces.show_valid(selected_coords[0])
                        print(pieces)
                        print('collided', num, selected_coords[0])
                # checks that you have not selected the same square
                # and counts it as a move
                new_coords = (originx, originy) not in selected_coords
                if selected and new_coords:
                    print('uncollided', originx, originy)
                    for pieces in selected_piece:
                        pieces.isvalid((originx, originy), selected_coords[0])
                    selected_piece.empty()
                    selected = False
                    selected_coords.clear()
            if event.type == pygame.QUIT:
                game_run = False
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_ESCAPE:
                    game_run = False
        # -------------------------------- Update ---------------------------------- #
        pygame.display.update()


main()
|
{"/pieces.py": ["/constants.py"], "/main.py": ["/board.py", "/pieces.py", "/constants.py"], "/board.py": ["/constants.py"], "/main2.py": ["/board.py", "/pieces2.py", "/constants.py"], "/pieces2.py": ["/constants.py"]}
|
37,687,516
|
Moonjester/chess
|
refs/heads/master
|
/test.py
|
# NOTE(review): (100) is just a parenthesized int, not a tuple — a one-element
# tuple needs a trailing comma: (100,). Every comparison below is therefore a
# plain int comparison.
tuple1 = (100)
tuple2 = (900)
print(tuple1 == tuple2)  # False: 100 != 900
print(tuple1 >= tuple2)  # False
print(tuple1 <= tuple2)  # True
print(tuple1 < tuple2)   # True
print(tuple1 > tuple2)   # False
print((tuple2 and tuple1) < 0)  # `900 and 100` evaluates to 100; 100 < 0 is False
|
{"/pieces.py": ["/constants.py"], "/main.py": ["/board.py", "/pieces.py", "/constants.py"], "/board.py": ["/constants.py"], "/main2.py": ["/board.py", "/pieces2.py", "/constants.py"], "/pieces2.py": ["/constants.py"]}
|
37,687,517
|
Moonjester/chess
|
refs/heads/master
|
/test2.py
|
import pygame
TILESIZE = 32
BOARD_POS = (10, 10)
def create_board_surf():
    """Render and return an 8x8 checkerboard surface of TILESIZE squares."""
    surf = pygame.Surface((TILESIZE * 8, TILESIZE * 8))
    dark = False
    for row in range(8):
        for col in range(8):
            square = pygame.Rect(col * TILESIZE, row * TILESIZE, TILESIZE, TILESIZE)
            pygame.draw.rect(surf, pygame.Color('darkgrey' if dark else 'beige'), square)
            dark = not dark
        # Extra toggle at the end of each row offsets the next row's colours.
        dark = not dark
    return surf
def get_square_under_mouse(board):
    """Return (piece, x, y) for the board square under the mouse.

    Returns (None, None, None) when the cursor is off the board (negative
    coordinates or beyond the board edges).
    """
    rel = pygame.Vector2(pygame.mouse.get_pos()) - BOARD_POS
    x = int(rel[0] // TILESIZE)
    y = int(rel[1] // TILESIZE)
    if x >= 0 and y >= 0:
        try:
            return (board[y][x], x, y)
        except IndexError:
            pass
    return None, None, None
def create_board():
    """Build the starting 8x8 board: black pawns on row 1, white pawns on row 6.

    Every other square is None. Squares hold (colour, type) tuples.
    """
    board = [[None] * 8 for _ in range(8)]
    for col in range(8):
        board[1][col] = ('black', 'pawn')
        board[6][col] = ('white', 'pawn')
    return board
def draw_pieces(screen, board, font, selected_piece):
    """Render every piece as the first letter of its type, with a grey drop
    shadow; the currently selected piece is tinted red."""
    sx, sy = None, None
    if selected_piece:
        piece, sx, sy = selected_piece
    for y in range(8):
        for x in range(8):
            piece = board[y][x]
            if piece:
                selected = x == sx and y == sy
                color, type = piece  # NOTE(review): `type` shadows the builtin
                s1 = font.render(type[0], True, pygame.Color('red' if selected else color))
                s2 = font.render(type[0], True, pygame.Color('darkgrey'))  # shadow layer
                pos = pygame.Rect(BOARD_POS[0] + x * TILESIZE+1, BOARD_POS[1] + y * TILESIZE + 1, TILESIZE, TILESIZE)
                screen.blit(s2, s2.get_rect(center=pos.center).move(1, 1))  # shadow offset by 1px
                screen.blit(s1, s1.get_rect(center=pos.center))
def draw_selector(screen, piece, x, y):
    """Outline the hovered square in red when it holds a piece."""
    if piece is None:
        return
    outline = (BOARD_POS[0] + x * TILESIZE, BOARD_POS[1] + y * TILESIZE, TILESIZE, TILESIZE)
    pygame.draw.rect(screen, (255, 0, 0, 50), outline, 2)
def draw_drag(screen, board, selected_piece, font):
    """Draw the piece being dragged at the mouse cursor.

    Returns the (x, y) board square under the cursor as the prospective
    drop position, or None when nothing is selected or the cursor is off
    the board.

    Bug fix: the original returned (None, None) when the cursor left the
    board, which is a TRUTHY tuple, so the caller's `if drop_pos:` guard
    passed and `board[None][None]` raised TypeError on mouse-up.
    """
    if not selected_piece:
        return None
    piece, x, y = get_square_under_mouse(board)
    if x is not None:
        rect = (BOARD_POS[0] + x * TILESIZE, BOARD_POS[1] + y * TILESIZE, TILESIZE, TILESIZE)
        pygame.draw.rect(screen, (0, 255, 0, 50), rect, 2)
    colour, ptype = selected_piece[0]
    s1 = font.render(ptype[0], True, pygame.Color(colour))
    s2 = font.render(ptype[0], True, pygame.Color('darkgrey'))
    pos = pygame.Vector2(pygame.mouse.get_pos())
    screen.blit(s2, s2.get_rect(center=pos + (1, 1)))
    screen.blit(s1, s1.get_rect(center=pos))
    selected_rect = pygame.Rect(BOARD_POS[0] + selected_piece[1] * TILESIZE, BOARD_POS[1] + selected_piece[2] * TILESIZE, TILESIZE, TILESIZE)
    pygame.draw.line(screen, pygame.Color('red'), selected_rect.center, pos)
    # Only a real board square is a valid drop target.
    return (x, y) if x is not None else None
def main():
    """Run the drag-and-drop chess demo event loop until the window closes."""
    pygame.init()
    font = pygame.font.SysFont('', 32)
    screen = pygame.display.set_mode((640, 480))
    board = create_board()
    board_surf = create_board_surf()
    clock = pygame.time.Clock()
    selected_piece = None
    drop_pos = None
    while True:
        piece, x, y = get_square_under_mouse(board)
        for e in pygame.event.get():
            if e.type == pygame.QUIT:
                return
            if e.type == pygame.MOUSEBUTTONDOWN and piece is not None:
                selected_piece = piece, x, y
            if e.type == pygame.MOUSEBUTTONUP:
                # Guard against an off-board drop position such as
                # (None, None), which is truthy but not indexable.
                if drop_pos and drop_pos[0] is not None:
                    piece, old_x, old_y = selected_piece
                    # Was `= 0`; use None so empty squares are uniform
                    # with create_board()'s initial representation.
                    board[old_y][old_x] = None
                    new_x, new_y = drop_pos
                    board[new_y][new_x] = piece
                selected_piece = None
                drop_pos = None
        screen.fill(pygame.Color('grey'))
        screen.blit(board_surf, BOARD_POS)
        draw_pieces(screen, board, font, selected_piece)
        draw_selector(screen, piece, x, y)
        drop_pos = draw_drag(screen, board, selected_piece, font)
        pygame.display.flip()
        clock.tick(60)
if __name__ == '__main__':
    main()
|
{"/pieces.py": ["/constants.py"], "/main.py": ["/board.py", "/pieces.py", "/constants.py"], "/board.py": ["/constants.py"], "/main2.py": ["/board.py", "/pieces2.py", "/constants.py"], "/pieces2.py": ["/constants.py"]}
|
37,687,518
|
Moonjester/chess
|
refs/heads/master
|
/pieces2.py
|
import pygame
import os, sys
from constants import *
from math import floor
#-------------------------------- Main Parent Class -------------------------------#
class Piece(pygame.sprite.Sprite):
    """Base sprite for a chess piece.

    Loads the image matching (piece_type, colour) from images2/, blits
    itself onto the global SCREEN, and provides square-clearing and
    move-validation helpers shared by all piece subclasses.
    """
    def __init__(self, xloc, yloc, colour, piece_type):
        pygame.sprite.Sprite.__init__(self)
        self.pieces = []
        self.piece_type = piece_type
        self.xloc = xloc
        self.yloc = yloc
        self.xyloc = (self.xloc, self.yloc)
        self.colour = colour
        self.img = None
        self.select_piece = None
        self.rect = None
        self.valid_moves = []
        for items in IMAGE_COL:
            self.pieces.append(f'images2/{items}')
    #----------------------------------- Image Display --------------------------------#
        # Find the image whose filename contains both the piece type and
        # colour, scale it to one square, and draw it at the start square.
        # NOTE(review): if no filename matches, self.img stays None and
        # self.piece_rect is never set — confirm IMAGE_COL always covers
        # every (type, colour) pair.
        for piece in self.pieces:
            piece_match = self.piece_type in piece
            piece_colour = self.colour in piece
            if piece_match and piece_colour:
                self.img = pygame.image.load(piece)
                self.img = pygame.transform.scale(self.img, (S_SIZE, S_SIZE))
                self.piece_rect = pygame.rect.Rect(self.xyloc, (S_SIZE, S_SIZE))
                SCREEN.blit(self.img, self.xyloc)
    #----------------------------------- Update Function ------------------------------#
    def update(self, newxy, init_posxy):
        """Move the piece to newxy, repainting both the origin and the
        destination squares before blitting the image at newxy."""
        self.xloc, self.yloc = newxy
        self.init_posx, self.init_posy = init_posxy
        self.piece_rect = pygame.rect.Rect((self.xloc, self.yloc),
                                           (S_SIZE, S_SIZE))
        self.clear_square((self.xloc, self.yloc), 'cover')
        self.clear_square((self.init_posx, self.init_posy), 'cover')
        SCREEN.blit(self.img, (self.xloc, self.yloc))
    #----------------------------------- Clear Square ---------------------------------#
    def clear_square(self, posxy, square_type):
        """Repaint the square at posxy: 'highlight' draws a 1px green
        outline, 'cover' fills the square with its board colour.

        NOTE(review): the light/dark decision uses self.xloc/self.yloc,
        not posxy — when covering the ORIGIN square from update() this
        picks the colour of the DESTINATION square. Looks suspicious;
        verify against LIGHT_SQUARES semantics before changing.
        """
        x, y = posxy
        self.x_new_coords = self.xloc // S_SIZE
        self.y_new_coords = self.yloc // S_SIZE
        self.drawxy = (self.x_new_coords, self.y_new_coords)
        # Bug fix: `is` compared string IDENTITY (implementation-defined
        # and a SyntaxWarning on modern CPython); use equality instead.
        if square_type == 'highlight':
            if self.drawxy in LIGHT_SQUARES:
                pygame.draw.rect(SCREEN, L_GREEN, [x, y, S_SIZE, S_SIZE], 1)
            else:
                pygame.draw.rect(SCREEN, D_GREEN, [x, y, S_SIZE, S_SIZE], 1)
        if square_type == 'cover':
            if self.drawxy in LIGHT_SQUARES:
                pygame.draw.rect(SCREEN, L_GREEN, (x, y, S_SIZE, S_SIZE))
            else:
                pygame.draw.rect(SCREEN, D_GREEN, (x, y, S_SIZE, S_SIZE))
    # -------------------------------- Validator Function ----------------------------- #
    def isvalid(self, newxy, init_posxy):
        """Highlight all valid squares; perform the move if newxy is one
        of them, otherwise report an invalid move."""
        self.init_posx, self.init_posy = init_posxy
        self.newxy = newxy
        for squares in self.valid_moves:
            self.clear_square(squares, 'highlight')
        if self.newxy in self.valid_moves:
            self.update(newxy, init_posxy)
        else:
            print('invalid move')
# --------------------------------- Child Classes -------------------------------- #
class Pawn(Piece):
    """Pawn piece; move highlighting is not implemented yet."""
    def __init__(self, x, y, incolour):
        super().__init__(x, y, incolour, piece_type='pawn')
    def show_valid(self, init_posxy):
        # Records the origin square only; pawn move rules are still TODO.
        self.init_posx, self.init_posy = init_posxy
class Bishop(Piece):
    """Bishop: moves any distance along the diagonals."""
    def __init__(self, x, y, incolour):
        super().__init__(x, y, incolour, piece_type='bishop')
    def show_valid(self, init_posxy):
        """Outline in red and record every square diagonal to the origin."""
        self.init_posx, self.init_posy = init_posxy
        for cx, cy in SQUARE_COORD:
            # A square is on a diagonal when it differs in both axes by
            # the same (non-zero) amount.
            on_diagonal = (cx != self.init_posx
                           and cy != self.init_posy
                           and abs(cx - self.init_posx) == abs(cy - self.init_posy))
            if on_diagonal:
                pygame.draw.rect(SCREEN, RED, (cx, cy, S_SIZE, S_SIZE), 1)
                self.valid_moves.append((cx, cy))
class Castle(Piece):
    """Rook ("castle"); movement rules not implemented yet."""
    def __init__(self, x, y, incolour):
        super().__init__(x, y, incolour, piece_type='castle')
class Queen(Piece):
    """Queen; movement rules not implemented yet."""
    def __init__(self, x, y, incolour):
        super().__init__(x, y, incolour, piece_type='queen')
class Knight(Piece):
    """Knight; movement rules not implemented yet."""
    def __init__(self, x, y, incolour):
        super().__init__(x, y, incolour, piece_type='knight')
class King(Piece):
    """King; movement rules not implemented yet."""
    def __init__(self, x, y, incolour):
        super().__init__(x, y, incolour, piece_type='king')
|
{"/pieces.py": ["/constants.py"], "/main.py": ["/board.py", "/pieces.py", "/constants.py"], "/board.py": ["/constants.py"], "/main2.py": ["/board.py", "/pieces2.py", "/constants.py"], "/pieces2.py": ["/constants.py"]}
|
37,691,095
|
Cocococo11/MEG_internship
|
refs/heads/master
|
/older_agency_scripts/agency_task_BCI_20210601_CB_MV3.py
|
# Code written by Romain Quentin and Marine Vernet
# Modified by Fayed Rassoulou
# Integration of FieldTrip Buffer by Corentin Bel
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division
from psychopy import gui, visual, core, data, event, logging, parallel
from psychopy.constants import (NOT_STARTED, STARTED, PLAYING, PAUSED,
STOPPED, FINISHED, PRESSED, RELEASED, FOREVER)
import os
import os.path as op
import matplotlib.pyplot as plt
#TODO
#imports for the pyacq node
from pyacq.core.stream import InputStream
from MEGBuffer import MEGBuffer
from joblib import load
import yaml
import numpy as np
from numpy import random # Corentin deleted it?
from random import choice # for the BCI part
import time
# New for MEG: serial Port
from serial import Serial
from config import *
def serial_port(port='COM1', baudrate=9600, timeout=0):
    """Open *port* and return a ready serial interface.

    The port is opened, closed, re-opened and flushed — presumably to
    reset any stale state left by a previous session (TODO confirm this
    is required by the hardware).

    :param str port: which port to interface with.
    :param baudrate: transfer rate in bits per second.
    :param int timeout: waiting time in seconds for the port to respond.
    :return: the open serial port.
    """
    handle = Serial(port, baudrate, timeout=timeout)
    handle.close()
    handle = Serial(port, baudrate, timeout=timeout)
    handle.flush()
    return handle
def printTiming(trials, clock, taskEvent):
trials.addData(taskEvent, clock.getTime())
def plotDict(dict):
    """Display a bar chart of the answer counters (keys on x, counts on y).

    NOTE(review): the parameter shadows the builtin `dict`; the name is
    kept unchanged for interface compatibility with existing callers.
    """
    bar_colours = ['lightcoral', 'indianred', 'brown', 'olive', 'olivedrab',
                   'yellowgreen', 'magenta', 'orchid', 'hotpink', 'darkorange',
                   'goldenrod', 'moccasin']
    plt.bar(dict.keys(), dict.values(), color=bar_colours)
    plt.title("Early results of button presses")
    plt.show()
def closeMEGB():
    """Stop and close the MEG buffer node and its input stream.

    Only the 'S2_with' (BCI) experiment ever starts these globals, so the
    teardown is skipped for every other experiment choice.
    """
    if CHOICE_OF_EXPERIMENT != 'S2_with':
        return
    MEGB.stop()
    inputStream.close()
    MEGB.close()
# ******** PARAMETERS TO CHECK AT THE BEGINNING OF THE SESSION **************
# computer (MEG/EEG/MEG_NIH/Marine/Marine_perso/Salim/Fayed)
computer = 'Corentin'
# Session type defined in config.py
print("Session type chosen : ",sessionType)
DEBUG = True
trigger = False
eyelink = False
serialPort = False
if (sessionType == 'session2_part1'):
session_image_choice = 'AgencyImage_session2_part1'
if (sessionType == 'session2_part2'):
session_image_choice='AgencyImage_session2_part1'
# CHOICE_OF_EXPERIMENT = 'S1_random', 'S2_without', 'S2_with'
CHOICE_OF_EXPERIMENT = 'S2_without'
# number of trials TO BE CORRECTED FOR THE REAL EXPERIMENT !!!!!!!!!!!!!!!!!!!
nb_trials_before_short_break = 5 # 50 # TODO change
nb_trials_before_long_break = 10 # 200
max1_trials = 1200 # 1200
max2_trials = 1400 # 1400
threshold600 = 0 # did we reach 600 trials in each category?
nbPred = 0 # for 'S2_with'
# ******** END OF PARAMETERS TO CHECK AT THE BEGINNING OF THE SESSION ********
# for the BCI part (S2)
if (sessionType == 'session2_part1'):
nb_trials_before_question = 1 # MAX 4
nb_of_blocks_before_question = 1 # MAX 4 ?
if (sessionType == 'session2_part2'):
nb_trials_before_question = 1
nb_of_blocks_before_question = 0 # ALWAYS 0
nb_of_trials_within_little_block = 0 # initialize counter
nb_of_big_blocks_performed = 1 # initialize counter
timeEarlyBTNPress = 0
dictCounterAnswers = {
"nb_button_yes": 0,
"nb_button_no": 0,
"nb_button_nbw": 0,
"nb_clf_yes": 0,
"nb_clf_no": 0,
"nb_clf_nbw": 0,
"nb_button_and_button_yes": 0,
"nb_button_and_button_no": 0,
"nb_button_and_button_nbw": 0,
"nb_clf_and_button_yes": 0,
"nb_clf_and_button_no": 0,
"nb_clf_and_button_nbw": 0
}
# debug mode
if DEBUG:
fullscr = False
logging.console.setLevel(logging.DEBUG)
else:
fullscr = True
# logging.console.setLevel(logging.WARNING)
# Path to save the results
if computer == 'EEG':
home_folder = '/Users/chercheur/Documents/PythonScripts/Agency_Salim/scripts' # noqa
elif computer == 'MEG':
#home_folder = 'C:\\Python_users\\Agency\\scripts' #random session
home_folder = 'C:\\Python_users\\Agency\\bci_agency' #bci session
elif computer == 'MEG_NIH':
home_folder = 'C:\\Users\\meglab\\EExperiments\\Marine\\agentivity_task'
elif computer == 'Marine':
home_folder = '/Users/vernetmc/Documents/lab_NIH_Marine/Python/psychopy/agentivity' # noqa
elif computer == 'Marine_perso':
home_folder = '/Users/marinevernet/Documents/lab_Lyon/python/psychopy/agency' # noqa
elif computer == 'Salim':
home_folder = '/Users/Zephyrus/Dropbox/Agency_Salim/scripts'
elif computer == 'Fayed':
home_folder = '/Users/invitéLabo/Desktop/Fayed/scripts/pscyhopy'
elif computer == 'Fayed2':
home_folder = '/Users/Fayed/Desktop/PC_STAGE/mne_analysis/scripts/pscyhopy'
elif computer == 'Corentin':
home_folder = 'C:\\Users\\Coco'
results_folder = home_folder + '/data'
# Store info about the experiment session
if CHOICE_OF_EXPERIMENT == 'S1_random':
expName = 'AgentivityRandom'
elif CHOICE_OF_EXPERIMENT == 'S2_without':
expName = 'Agentivity_debug_BCI' # for the BCI part
elif CHOICE_OF_EXPERIMENT == 'S2_with':
expName = 'Agentivity_BCI' # for the BCI part
expInfo = {'participant': '', 'session': ''}
dlg = gui.DlgFromDict(dictionary=expInfo, title=expName)
if dlg.OK is False:
core.quit() # user pressed cancel
expInfo['expName'] = expName
expInfo['date'] = data.getDateStr() # will create str of current date/time
expInfo['frameRate'] = 60 # store frame rate of monitor
frameDur = 1.0 / 60.0
# Data file name
edfFileName = expInfo['participant']+expInfo['session']
filename = results_folder + '/%s_%s_%s_%s' % (expName, expInfo['participant'],
expInfo['session'],
expInfo['date'])
# params
if computer == 'EEG':
window_size = (1024, 768)
value_parallel_huma = 1
value_parallel_comp = 2
value_parallel_huma_early = 3
addressPortParallel = '0x0378'
elif computer == 'MEG': # CHECK THESE PARAMETERS
window_size = (1920, 1080)
value_parallel_huma = 20
value_parallel_comp = 40
value_parallel_huma_early = 10
value_parallel_huma_early_button = 5
value_answer_yes = 110
value_answer_no = 130
value_answer_nbw = 120
addressPortParallel = '0x3FE8'
elif computer == 'MEG_NIH':
window_size = (1024, 768)
value_parallel_huma = 20
value_parallel_comp = 40
value_parallel_huma_early = 10
value_parallel_huma_early_button = 5
value_answer_yes = 110
value_answer_no = 130
value_answer_nbw = 120
addressPortParallel = '0x0378'
elif computer == 'Marine':
window_size = (2880, 1800)
elif computer == 'Marine_perso':
window_size = (1792, 1120) # old mac (1440, 900)
elif computer == 'Fayed':
window_size = (1440, 900)
elif computer == 'Fayed2':
window_size = (1920, 1080)
if DEBUG:
window_size = (500, 500)
blank_time = 0.010 # in seconds
# number_of_images = 1500 # max2_trials # 600*2 # up to 1200
image_size = (0.6, 0.6*window_size[0]/window_size[1])
# set up the ports and Eyelink
if serialPort:
port_s = serial_port()
if trigger:
port = parallel.ParallelPort(address=addressPortParallel)
# port.setData(252)
if eyelink:
import EyeLink
selfEdf = EyeLink.tracker(window_size[0], window_size[1], edfFileName)
# list all images
images = list()
files_list = os.listdir(op.join(home_folder, session_image_choice))
for img in files_list:
if '.jpg' in img:
if img.startswith('A'):
images.append(img)
# images = images[:number_of_images] # take only the number_of_images defined
# build trials
conditions = []
for trial in range(len(images)):
conditions.append({'image_nb': trial})
trials = data.TrialHandler(trialList=conditions, nReps=1, method='random')
# An ExperimentHandler isn't essential but helps with data saving
thisExp = data.ExperimentHandler(dataFileName=filename)
thisExp.addLoop(trials)
# save a log file for detail verbose info
logFile = logging.LogFile(filename+'.log', level=logging.EXP)
logging.console.setLevel(logging.WARNING) # this outputs to the screen
# Setup the Window
win = visual.Window(
size=window_size, fullscr=fullscr, screen=0,
allowGUI=False, allowStencil=False,
monitor='testMonitor', color=[0, 0, 0], colorSpace='rgb',
blendMode='avg', useFBO=True)
# Setup the elements to display
White_screen = visual.Rect(
win=win, name='White_screen', units='cm',
width=(2000, 2000)[0], height=(2000, 2000)[1],
ori=0, pos=(0, 0),
lineWidth=1, lineColor=[1, 1, 1], lineColorSpace='rgb',
fillColor=[0.5, 0.5, 0.5], fillColorSpace='rgb',
opacity=1, interpolate=True)
Instructions = visual.TextStim(
win=win, name='Instructions',
text='''Une image va apparaitre à l'écran.
\nPrenez quelques secondes pour l'observer sans bouger les yeux de la croix centrale.
\nClignez les yeux le moins possible.
\nPour démarrer, appuyez sur le bouton de droite.''',
font='Arial',
pos=(0, 0), height=0.1, wrapWidth=None, ori=0,
color='black', colorSpace='rgb', opacity=1)
Cross = visual.ShapeStim(
win=win, name='Cross', vertices='cross', units='cm',
size=(0.8, 0.8),
ori=0, pos=(0, 0),
lineWidth=0.5, lineColor=[1, 0, 0], lineColorSpace='rgb',
fillColor=[1, 0, 0], fillColorSpace='rgb',
opacity=1, interpolate=True)
if computer == 'MEG_NIH':
Pixel = visual.Line(
win=win, name='topleftpixel', units='pix',
start=(-window_size[0]/2, window_size[1]/2),
end=(-window_size[0]/2+1, window_size[1]/2+1),
lineColor=[1, 1, 1])
else:
Pixel = visual.Rect(
win=win, name='topleftpixel', units='pix',
pos=(-window_size[1], window_size[1]/2),
size=(window_size[0]*2/5, 200),
fillColor=[-1, -1, -1],
lineColor=[-1, -1, -1])
# Initialize components for Routine "image"
fname = op.join(home_folder, session_image_choice, images[1])
Image = visual.ImageStim(
win, image=fname, pos=(0, 0), size=image_size)
preload_images = [
visual.ImageStim(win, op.join(home_folder, session_image_choice, img), size=image_size)
for img in images]
# for the BCI part
if CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with':
Question = visual.TextStim(win=win, name='Question', text="Avez-vous changé l'image ?",
font='Arial', pos=(0, 0.3), height=0.1, wrapWidth=None,
ori=0, color='black', colorSpace='rgb', opacity=1)
AnswerYes = visual.TextStim(win=win, name='AnswerYes', text='VOUS',
font='Arial', pos=(0, -0.1), height=0.06, wrapWidth=None,
ori=0, color='black', colorSpace='rgb', opacity=1)
AnswerNo = visual.TextStim(win=win, name='AnswerNo', text='ORDI',
font='Arial', pos=(0, -0.1), height=0.06, wrapWidth=None,
ori=0, color='black', colorSpace='rgb', opacity=1)
AnswerNoButWanted = visual.TextStim(win=win, name='AnswerNoButWanted', text='ORDI AVANT VOUS',
font='Arial', pos=(0, -0.1), height=0.06, wrapWidth=None,
ori=0, color='black', colorSpace='rgb', opacity=1)
# Create some handy timers
imageClock = core.Clock()
blankClock = core.Clock()
longBreackClock = core.Clock()
shortBreackClock = core.Clock()
blankBeforeQuestionClock = core.Clock() # for the BCI part
questionClock = core.Clock() # for the BCI part
globalClock = core.Clock() # to track the time since experiment started
globalClock.reset() # clock
# Create the parameters of the gamma function
k_shape = 3
theta_scale = 1
# Count number of button press and number of random changes
button_presses = 0
random_changes = 0
early_button_presses = 0
early_button_presses_button = 0
# Count number of yes and no response (for the BCI part)
button_yes = 0
button_no = 0
button_no_but_wanted = 0
this_is_a_long_break = 0
# Handy variable to know the previous trigger
previousTrigger = ''
is_there_an_early = 0
if CHOICE_OF_EXPERIMENT == 'S2_with':
#Loading the MEGBuffer node
MEGB = MEGBuffer()
inputStream = InputStream()
MEGB.configure()
MEGB.outputs['signals'].configure( transfermode='plaindata')
MEGB.outputs['triggers'].configure( transfermode='plaindata')
MEGB.initialize()
inputStream.connect(MEGB.outputs['signals'])
MEGB.start()
# ------Prepare to start Routine "Instructions"-------
continueRoutine = True
White_screen.setAutoDraw(True)
Instructions.setAutoDraw(True)
Pixel.setAutoDraw(True)
# -------Start Routine "Instructions"-------
key_from_serial = []
key_from_serial2 = ''
win.flip()
while continueRoutine:
if serialPort:
key_from_serial2 = str(port_s.readline())[2:-1]
if len(key_from_serial2) > 0:
key_from_serial2 = key_from_serial2[-1]
if key_from_serial2 == '2':
Instructions.setAutoDraw(False)
continueRoutine = False
# win.flip()
# else:
if event.getKeys(keyList=['y']):
Instructions.setAutoDraw(False)
continueRoutine = False
# win.flip()
if event.getKeys(keyList=["escape"]):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB()
win.close()
core.quit()
# Start MEG recordings
if trigger:
port.setData(0)
time.sleep(0.1)
port.setData(252)
# Start the trials
for trial in trials:
# ------Condition for Long Break-------
if ((trials.thisN % nb_trials_before_long_break) == 0 and trials.thisN != 0 and trials.thisN < max1_trials) or \
(button_presses >= max1_trials/2 and random_changes >= max1_trials/2 and threshold600 == 0) or \
(button_presses >= max2_trials/2 and random_changes >= max2_trials/2):
this_is_a_long_break = 1
if CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with':
nb_of_big_blocks_performed += 1 # for the BCI part
# ------Prepare to start Routine "Long Break"-------
continueRoutine = True
if ((trials.thisN % nb_trials_before_long_break) == 0 and trials.thisN != 0 and trials.thisN < max1_trials):
long_break_text = 'Pause ! Veuillez ne pas bouger et attendre les instructions. \n\nVous pouvez fermer les yeux.'
print(yaml.dump(dictCounterAnswers, sort_keys=False, default_flow_style=False))
elif (button_presses >= max1_trials/2 and random_changes >= max1_trials/2 and threshold600 == 0) or (button_presses >= max2_trials/2 and random_changes >= max2_trials/2):
long_break_text = 'Presque fini ! Veuillez ne pas bouger et attendre les instructions \n\nVous pouvez fermer les yeux.'
print(yaml.dump(dictCounterAnswers, sort_keys=False, default_flow_style=False))
threshold600 = 1
Instructions.setText(long_break_text)
Instructions.setAutoDraw(True)
Cross.setAutoDraw(False)
win.callOnFlip(longBreackClock.reset)
# -------Start Routine "Long Break"-------
win.flip()
if CHOICE_OF_EXPERIMENT == 'S1_random':
print('Long break') # for the random part
print('Partipant : ' + str(button_presses) + '\nOrdinateur : ' + str(random_changes) + '\n') # for the random part
print('Participant-Ordinateur : ' + str(button_presses-random_changes) + '\n') # for the random part
key_from_serial = []
key_from_serial2 = ''
# Stop MEG recordings
if trigger:
time.sleep(1)
port.setData(253)
time.sleep(1)
port.setData(0)
if button_presses >= max2_trials/2 and random_changes >= max2_trials/2:
time.sleep(5)
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
win.close()
core.quit()
while continueRoutine:
if event.getKeys(keyList=['a']):
Instructions.setAutoDraw(False)
continueRoutine = False
this_is_a_long_break = 0 # should we add this?
# win.flip()
if event.getKeys(keyList=["escape"]):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB()
win.close()
core.quit()
# Start MEG recordings
if trigger:
port.setData(252)
event.clearEvents(eventType='keyboard')
# ------Condition for Short Break-------
if (trials.thisN % nb_trials_before_short_break) == 0 and trials.thisN != 0:
# ------Prepare to start Routine "Short Break"-------
continueRoutine = True
if CHOICE_OF_EXPERIMENT == 'S1_random':
difference = button_presses - random_changes
if difference > 6:
break_text = "Petite pause ! \n\n Vous répondez trop souvent par rapport à l'ordinateur. \nAppuyez moins souvent. \n\n Pour continuer, appuyez sur le bouton de droite."
elif difference < -6:
break_text = "Petite pause ! \n\n Vous ne répondez pas assez souvent par rapport à l'ordinateur. \nAppuyez plus souvent. \n\n Pour continuer, appuyez sur le bouton de droite."
else:
break_text = "Petite pause ! \n\n Vous répondez aussi souvent que l'ordinateur, bravo ! \n\n Pour continuer, appuyez sur le bouton de droite."
# break_text = 'Petite pause !' + '\n\nVous : ' + str(button_presses) + '\n\n Ordinateur : ' + str(random_changes) + '\n\n Appuyez sur le bouton de droite pour continuer.' # for the random part
else: # for the BCI part
break_text = 'Petite pause !' # for the BCI part
Instructions.setText(break_text)
Instructions.setAutoDraw(True)
Pixel.setAutoDraw(True)
Cross.setAutoDraw(False)
win.callOnFlip(shortBreackClock.reset)
print('rate button presses: ', str(button_presses/trials.thisN))
print('rate of computer changes: ', str(random_changes/trials.thisN))
print('rate of early button presses: ', str(early_button_presses/trials.thisN))
if max(dictCounterAnswers.values())>0 :
# print('dict not empty')
plotDict(dictCounterAnswers)
print(yaml.dump(dictCounterAnswers, sort_keys=False, default_flow_style=False))
else:
# print('dict empty')
plt.pie([button_presses,random_changes,early_button_presses,early_button_presses_button],labels=['button presses','classifier change','early button press after classifier','early button press after button'])
plt.show()
# Waiting for us to press a before continuing
while continueRoutine:
if event.getKeys(keyList=['a']):
Instructions.setAutoDraw(False)
continueRoutine = False
if event.getKeys(keyList=["escape"]):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB()
win.close()
core.quit()
if imageClock.getTime()>30:
continueRoutine=False
# Print the nb stats of the current bloc
Instructions.setText(break_text)
Instructions.setAutoDraw(True)
Pixel.setAutoDraw(True)
Cross.setAutoDraw(False)
win.callOnFlip(shortBreackClock.reset)
continueRoutine = True
# -------Start Routine "Short Break"-------
win.flip()
if this_is_a_long_break == 0 and CHOICE_OF_EXPERIMENT == 'S1_random':
print('Partipant : ' + str(button_presses) + '\nOrdinateur : ' + str(random_changes) + '\n') # for the random part
print('Participant-Ordinateur : ' + str(button_presses-random_changes) + '\n') # for the random part
# else: # should we delete this?
# this_is_a_long_break = 0 # should we delete this?
# Reset nb of trials (for the BCI part)
if CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with':
nb_of_trials_within_little_block = 0
key_from_serial = []
key_from_serial2 = ''
while continueRoutine:
if serialPort:
key_from_serial2 = str(port_s.readline())[2:-1]
if len(key_from_serial2) > 0:
key_from_serial2 = key_from_serial2[-1]
if key_from_serial2 == '2':
Instructions.setAutoDraw(False)
continueRoutine = False
# win.flip()
# else:
if event.getKeys(keyList=['y']):
Instructions.setAutoDraw(False)
continueRoutine = False
# win.flip()
if event.getKeys(keyList=["escape"]):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB()
win.close()
core.quit()
if shortBreackClock.getTime() > 30: # noqa
Instructions.setAutoDraw(False)
continueRoutine = False
# ------Prepare to start Routine "Blank"-------
continueRoutine = True
Cross.setAutoDraw(True)
# -------Start Routine "Blank"-------
win.callOnFlip(blankClock.reset)
win.callOnFlip(printTiming, trials, globalClock, 'globalTiming')
win.flip()
if trigger:
port.setData(0)
while continueRoutine:
frameRemains = blank_time - win.monitorFramePeriod * 0.75 # most of one frame period left # noqa
if blankClock.getTime() >= frameRemains:
Cross.setAutoDraw(False)
continueRoutine = False
if event.getKeys(keyList=["escape"]):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB()
win.close()
core.quit()
# ------Prepare to start Routine "Image"-------
preload_images[trial['image_nb']].setAutoDraw(True)
Cross.setAutoDraw(True)
Pixel.setAutoDraw(False)
event.clearEvents(eventType='keyboard')
# Increase nb of trials (for the BCI part)
if CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with':
nb_of_trials_within_little_block += 1
# -------Start Routine "Image"-------
win.callOnFlip(imageClock.reset)
win.callOnFlip(printTiming, trials, blankClock, 'blank')
win.flip()
# while continueRoutine:
keyPress = []
key_from_serial = []
key_from_serial2 = ''
frameRemainsRT = np.maximum(0.5, np.random.gamma(k_shape, scale=theta_scale, size=1)) # noqa
if CHOICE_OF_EXPERIMENT == 'S2_with':
detectPrep = False
inputStream.empty_queue()
cond_for_loop = True
else:
if (imageClock.getTime() < frameRemainsRT):
cond_for_loop = True
else:
cond_for_loop = False
#TODO
ActiveStatus = 0
# while not keyPress and cond_for_loop and key_from_serial2 != '1': # noqa
while cond_for_loop: # noqa
keyPress = event.getKeys(keyList=['r', 'escape'],
timeStamped=imageClock)
if trigger :
port.setData(0)
#MEGBuffer Part
# Polling and receiving the data sent by the MEGBuffer node
if (CHOICE_OF_EXPERIMENT == 'S2_with') and (imageClock.getTime() > 0.5):
try :
inputStream.empty_queue()
dataIsAvailable = inputStream.poll(1000)
except :
print("Error with polling the input stream")
break
#nbPaquetsToTest = 10000 # represents the number of packages of 24 we want to test
if(dataIsAvailable):
data = inputStream.recv() # Pulling the data from the
# print(data)
# print(time.time())
if( data[1][0] == 1):
nbPred+=1
print('Classifier triggered an image change')
# print("Detections since the beggining : ",nbPred)
preload_images[trial['image_nb']].setAutoDraw(False)
Pixel.setAutoDraw(True)
detectPrep = True
cond_for_loop=False
# port.setData(value_parallel_comp)
RT = frameRemainsRT[0]
previousTrigger = 'clf'
if serialPort: # and (imageClock.getTime() > 0.5):
key_from_serial2 = str(port_s.readline())[2:-1]
if len(key_from_serial2) > 0:
key_from_serial2 = key_from_serial2[-1]
if key_from_serial2 == '1':
if imageClock.getTime() > 0.5:
if trigger:
port.setData(value_parallel_huma)
previousTrigger = 'button'
if eyelink:
EyeLink.tracker.sendMessage(selfEdf, 'H')
preload_images[trial['image_nb']].setAutoDraw(False)
print("Image change triggered by button press MEG")
Pixel.setAutoDraw(True)
RT = imageClock.getTime()
ActiveStatus = 1
button_presses += 1
cond_for_loop = False
else:
if trigger:
if(previousTrigger =='button'):
port.setData(value_parallel_huma_early_button)
print("Early button press after a previous button press")
early_button_presses_button += 1
else :
port.setData(value_parallel_huma_early)
print("Early button press after a previous clf trigger")
early_button_presses += 1
if eyelink:
EyeLink.tracker.sendMessage(selfEdf, 'E')
timeEarlyBTNPress = imageClock.getTime()
# else:
if keyPress and keyPress[0][0] == 'r' :
if imageClock.getTime() > 0.5:
if trigger:
port.setData(value_parallel_huma)
if eyelink:
EyeLink.tracker.sendMessage(selfEdf, 'H')
preload_images[trial['image_nb']].setAutoDraw(False)
print("Image change triggered by button press")
Pixel.setAutoDraw(True)
RT = keyPress[0][1]
ActiveStatus = 1
button_presses += 1
cond_for_loop = False
previousTrigger = 'button'
else:
if trigger:
port.setData(value_parallel_huma_early_button)
if eyelink:
EyeLink.tracker.sendMessage(selfEdf, 'E')
print("Early button press MEG")
early_button_presses += 1
print(trials.thisN)
previousTrigger='button_and_button'
if (keyPress and keyPress[0][0] == 'escape'):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB()
win.close()
core.quit()
if (imageClock.getTime() > frameRemainsRT) and (CHOICE_OF_EXPERIMENT == 'S1_random' or CHOICE_OF_EXPERIMENT == 'S2_without'):
cond_for_loop = False
if not keyPress and key_from_serial2 != '1':
if trigger:
port.setData(value_parallel_comp)
if eyelink:
EyeLink.tracker.sendMessage(selfEdf, 'C')
preload_images[trial['image_nb']].setAutoDraw(False)
Pixel.setAutoDraw(True)
RT = frameRemainsRT[0]
ActiveStatus = 0
random_changes += 1
# print("Image change triggered by computer")
win.callOnFlip(printTiming, trials, imageClock, 'image')
win.flip()
if trigger :
port.setData(0)
# for the BCI part
if CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with':
# ------Condition for Question -------
if ((trials.thisN % nb_trials_before_question) == 0 and
(nb_of_trials_within_little_block != 0) and
(nb_of_big_blocks_performed > nb_of_blocks_before_question)):
if(sessionType == 'session2_part1'):
print('Leaving the experiment : please modify part 1 to part 2 to continue, and get to the question part')
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB()
win.close()
core.quit()
else :
if trigger :
port.setData(0)
win.callOnFlip(blankBeforeQuestionClock.reset)
win.flip()
is_there_an_early = 0
while blankBeforeQuestionClock.getTime() < 0.5:
if serialPort:
key_from_serial2 = str(port_s.readline())[2:-1]
if len(key_from_serial2) > 0:
key_from_serial2 = key_from_serial2[-1]
keyPress = event.getKeys(keyList=['r'])
if ((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1'):
print("Early button press MEG")
early_button_presses += 1
if trigger:
if(previousTrigger=='button'):
port.setData(value_parallel_huma_early_button)
previousTrigger=='button_and_button'
else:
port.setData(value_parallel_huma_early)
previousTrigger=='clf_and_button'
if eyelink:
EyeLink.tracker.sendMessage(selfEdf, 'E')
is_there_an_early = 1
timeEarlyBTNPress = blankBeforeQuestionClock.getTime()
if trigger :
port.setData(0)
# ------Prepare to start Routine "Question"-------
continueRoutine = True
Question.setAutoDraw(True)
AnswerYes.setAutoDraw(True)
AnswerNo.setAutoDraw(True)
AnswerNoButWanted.setAutoDraw(True)
AnswerYes.alignText = 'left'
AnswerNo.alignText = 'right'
AnswerNoButWanted.alignText== 'middle'
# AnswerYes.alignText=choice(['right', 'left'])
# AnswerNoButWanted.alignText== 'middle'
# if AnswerYes.alignText == 'left':
# AnswerNo.alignText = 'right'
# else:
# AnswerNo.alignText = 'left'
Cross.setAutoDraw(False)
win.callOnFlip(questionClock.reset)
AnswerYes.setColor(color = 'black')
AnswerNo.setColor(color = 'black')
AnswerNoButWanted.setColor(color = 'black')
selectedAnswer = ''
# -------Start Routine "Question"-------
win.flip()
key_from_serial = []
key_from_serial2 = ''
while continueRoutine:
if serialPort:
key_from_serial2 = str(port_s.readline())[2:-1]
if len(key_from_serial2) > 0:
key_from_serial2 = key_from_serial2[-1]
keyPress = event.getKeys(keyList=['r', 'y', 'c', 'escape'])
# Switching buttons
# press r/1 to go left
# press y/2 to go right
# press c/3 to validate
triggerBeforeTheQuestion = ''
if ((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1') and selectedAnswer=='':
AnswerYes.setColor('white')
selectedAnswer='Y'
elif ((keyPress and keyPress[0][0] == 'y') or key_from_serial2 == '2') and selectedAnswer=='':
AnswerNo.setColor('white')
selectedAnswer='N'
elif ((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1') and selectedAnswer=='NBW':
AnswerNoButWanted.setColor('black')
AnswerYes.setColor('white')
selectedAnswer='Y'
elif ((keyPress and keyPress[0][0] == 'y') or key_from_serial2 == '2') and selectedAnswer=='NBW':
AnswerNoButWanted.setColor('black')
AnswerNo.setColor('white')
selectedAnswer='N'
elif ((keyPress and keyPress[0][0] == 'y') or key_from_serial2 == '2') and selectedAnswer=='Y':
AnswerYes.setColor('black')
AnswerNoButWanted.setColor('white')
selectedAnswer='NBW'
elif ((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1') and selectedAnswer=='N':
AnswerNo.setColor('black')
AnswerNoButWanted.setColor('white')
selectedAnswer='NBW'
# from one side to the other one
# extreme right + right = left
elif ((keyPress and keyPress[0][0] == 'y') or key_from_serial2 == '2') and selectedAnswer=='N':
AnswerNo.setColor('black')
AnswerYes.setColor('white')
selectedAnswer='Y'
# extreme left + left = right
elif ((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1') and selectedAnswer=='Y':
AnswerYes.setColor('black')
AnswerNo.setColor('white')
selectedAnswer='N'
elif ((keyPress and keyPress[0][0] == 'c') or key_from_serial2 == '8') and selectedAnswer != '':
Question.setAutoDraw(False)
AnswerYes.setAutoDraw(False)
AnswerNo.setAutoDraw(False)
AnswerNoButWanted.setAutoDraw(False)
continueRoutine = False
if selectedAnswer == 'Y':
button_yes += 1
active_answer = 1
print('yes chosen')
if trigger :
port.setData(value_answer_yes)
# TODO adding +1 depending on the trigger that created the question
dictKey = "nb_"+previousTrigger+"_"+"yes"
dictCounterAnswers[dictKey]=dictCounterAnswers[dictKey]+1
elif selectedAnswer == 'N':
button_no += 1
active_answer = 0
print('no chosen')
if trigger :
port.setData(value_answer_no)
dictKey = "nb_"+previousTrigger+"_"+"no"
dictCounterAnswers[dictKey]=dictCounterAnswers[dictKey]+1
elif selectedAnswer == 'NBW':
button_no_but_wanted += 1
active_answer = 0.5
print('nbw chosen')
if trigger :
port.setData(value_answer_nbw)
dictKey = "nb_"+previousTrigger+"_"+"nbw"
dictCounterAnswers[dictKey]=dictCounterAnswers[dictKey]+1
win.flip()
if trigger :
port.setData(0)
if event.getKeys(keyList=["escape"]):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB()
win.close()
core.quit()
trials.addData('RT', RT)
trials.addData('ActiveStatus', ActiveStatus)
# for the BCI part
if CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with':
if (nb_of_big_blocks_performed > nb_of_blocks_before_question):
trials.addData('ActiveAnswer', active_answer)
# trials.addData('ActiveAnswerPosition', AnswerYes.alignText)
else:
trials.addData('ActiveAnswer', 99)
# trials.addData('ActiveAnswerPosition', 'None')
print(is_there_an_early)
trials.addData('EarlyBP', is_there_an_early)
trials.addData('RT2', timeEarlyBTNPress-RT)
thisExp.nextEntry()
# -------Ending Trials loop -------
# Final teardown: persist all experiment data, flush the logs, shut down the
# BCI streaming chain (if any), the eye tracker and the window, then quit.
print('saving')
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
print('closing exp')
logging.flush()
print('closing log')
# Only the 'with classifier' session opened the MEG buffer nodes
if CHOICE_OF_EXPERIMENT == 'S2_with':
    MEGB.stop()
    inputStream.close()
    MEGB.close()
    print('closing megb')
print('closing')
# make sure everything is closed down
thisExp.abort() # or data files will save again on exit
if eyelink:
    EyeLink.tracker.close(selfEdf, edfFileName)
win.close()
core.quit()
exit()
|
{"/older_agency_scripts/agency_task_BCI_20210601_CB_MV3.py": ["/MEGBuffer.py"], "/older_agency_scripts/agency_task_BCI_20210623.py": ["/MEGBuffer.py"], "/agency_task_BCI_20210712.py": ["/configs/basemy_agency.py", "/configs/config_agency.py"], "/configs/basemy_agency.py": ["/configs/config_agency.py"], "/configs/config_agency.py": ["/configs/MEGBuffer.py"]}
|
37,691,096
|
Cocococo11/MEG_internship
|
refs/heads/master
|
/configs/MEGBuffer.py
|
# Code written by Corentin Bel with the help of Marine Vernet, Samuel Garcia, Emmanuel Maby and Fayed Rassoulou
# -------------------- Imports -------------------------- #
# Pyacq imports
from pyacq.core import Node
from pyacq.core.stream import InputStream
from pyqtgraph.Qt import QtCore
# General imports
import os
import time
from pyqtgraph.util.mutex import Mutex
from joblib import load
import mne
from mne.io import read_raw_ctf
import matplotlib.pyplot as plt
from datetime import datetime
import numpy as np
import pandas as pd
import psychopy as psy
# Make sure you have the fieldtrip.py file in your current folder !
try:
import FieldTrip
HAVE_FIELDTRIP = True
except ImportError:
HAVE_FIELDTRIP = False
# Could be useful in a potential next version needing to output the events too
_dtype_trigger = [('pos', 'int64'),
('points', 'int64'),
('channel', 'int64'),
('type', 'S16'), # TODO check size
('description', 'S16'), # TODO check size
]
# This is the thread that will be launched whenever we call the start function of the MEGBuffer
# The order of execution is : init, run, etc
class MEGBuffer_Thread(QtCore.QThread):
    """Worker thread behind the MEGBuffer node.

    Polls a FieldTrip buffer for 24-sample MEG packets, feeds each packet's
    channel means to a pre-trained classifier, and pushes a detection signal
    to the node's 'signals' output. Raw/diagnostic values are accumulated in
    matrices that stop() flushes to a CSV file under ./saves.
    """

    # We need all these parameters from the psychopy script to make an easier and more intuitive save function
    def __init__(self, ftc, outputs, parent=None, ch_names=None, sample_rate=None,
                 nb_steps=5, clf_name=None, run_nbr=0, subjectId='default',
                 partType='part1', dateStart='', MEGsave=''):
        """Store the FieldTrip client, node outputs and session parameters.

        The session parameters (subject, run, part, classifier, ...) mirror
        the psychopy configuration so that stop() can build an unambiguous
        save-file name.
        """
        assert HAVE_FIELDTRIP, "MEGBuffer node depends on the `FieldTrip` package, but it could not be imported. Please make sure to download FieldTrip.py and store it next to this file "
        print('Thread initialized')
        QtCore.QThread.__init__(self)
        # Shared state / configuration forwarded by the psychopy script
        self.lock = Mutex()
        self.running = False
        self.ftc = ftc
        self.outputs = outputs
        self.ch_names = ch_names
        self.Fs = sample_rate
        self.nb_steps_chosen = nb_steps
        self.clf_name = clf_name
        self.run_nbr = run_nbr
        self.subjectId = subjectId
        self.dateStart = dateStart
        self.partType = partType
        self.MEGsave = MEGsave
        # Matrices accumulated in run() and saved by stop()
        self.matSaveNbSamples = np.zeros(1)    # sample indexes per packet
        self.matSaveData = np.zeros(1)         # raw values of channel 200
        self.matDetect = np.zeros(1)           # detection markers (50 / 0.5 / 0)
        self.matProbas = np.zeros(1)           # classifier P(class 0) per packet
        self.matProbas2 = np.zeros(1)          # classifier P(class 1) per packet
        self.matSaveDataTrigger = np.zeros(1)  # UPPT002 trigger channel values

    # Function to extract the 274 channels used for the training of the classifier
    def extract_right_channels(self, data, ch_names):
        """Return only the MEG channels (names containing ML/MR/MZ) used to
        train the classifier, excluding EEG and trigger (UPPT) channels."""
        ch = ch_names
        picks = []
        for i in range(len(ch)):
            # FIX: parenthesize the or-group. In the original, `and` bound
            # tighter than `or`, so the 'EEG'/'UPPT' exclusions only applied
            # to the 'MZ' term instead of all three prefixes.
            if (('ML' in ch[i]) or ('MR' in ch[i]) or ('MZ' in ch[i])) and ('EEG' not in ch[i]) and ('UPPT' not in ch[i]):
                picks.append(i)
        # Between two MEG save files the acquisition can expose more than the
        # usual 303 channels: fall back to the first 274 instead of crashing.
        try:
            data = data[:, picks]
        except Exception:
            print("Error with channels : unusually high number")
            data = data[:, :274]
        return data

    # This is the main function of the script, where the while loop containing the poll, the getdata etc is
    # That's also where we call the classifier and choose the method we want to use to send the signal to change
    # the image or not to the psychopy script
    def run(self):
        """Main acquisition loop: poll -> getData -> classify -> send."""
        print('Thread running')
        clfName = self.clf_name
        classifier = load('./classifiers/meg/'+clfName)  # Loading the classifier
        lastIndex = None
        self.probaSample = 8      # Vector length for the second validation method
        self.probaThreshold = 4   # Numeric threshold for the third validation method
        self.currentProbaSum = 0  # Value reset on every positive classification
        self.nbSteps = int(self.nb_steps_chosen)
        # Quick confirmation to make sure we chose the right classifier
        print('chosen classifier : ' + clfName + 'with nb of step : ' + str(self.nbSteps))
        # Our homemade ring buffers (2.0 means "no prediction yet"):
        prediction = list(2*(np.ones(self.nbSteps)))
        predictionProbas = list(2*(np.zeros(self.probaSample)))
        i = 0
        with self.lock:
            self.running = True
        while True:
            # Simple lock system: leave the loop when stop() clears the flag
            with self.lock:
                if not self.running:
                    break
            # The FieldTrip buffer can briefly disconnect (change of MEG save).
            # FIX: on failure, retry instead of falling through -- the original
            # could hit a NameError on `globalIndex` on the very first
            # iteration.
            try:
                globalIndex, _ = self.ftc.poll()
            except Exception:
                print("polling failed")
                time.sleep(0.1)
                continue
            # Basic packet sent to the next node : zeros
            toSend = np.zeros(1)
            # For the first poll
            if lastIndex is None:
                lastIndex = globalIndex
            # No new package received: go back to the start of the loop.
            # The sleep avoids overcharging the network.
            if globalIndex == lastIndex:
                time.sleep(0.005)
                continue
            # Getting the data from the fieldtrip client.
            # FIX: on failure, retry -- the original kept going with a stale
            # (or undefined) `data` after the 5 s sleep.
            try:
                data = self.ftc.getData([lastIndex, globalIndex-1])
            except Exception:
                time.sleep(5)
                continue
            nsamples = lastIndex
            # We only want 24-sample packages: if a bigger one arrives,
            # keep only its tail.
            if data[:, 40].shape[0] > 24:
                data = data[data[:, 40].shape[0]-24:data[:, 40].shape[0], :]
            # Append a column carrying the sample number of this packet
            arrayIndexes = np.ones(24).reshape(24, 1)
            arrayIndexes = arrayIndexes*nsamples
            extracted_data = self.extract_right_channels(data, self.ch_names)
            extracted_data_plus_indexes = np.append(extracted_data, arrayIndexes, axis=1)
            # The raw values we want to save
            values = extracted_data_plus_indexes[:, :274]
            values_mean = np.mean(values, axis=0)
            values_mean_reshaped = values_mean.reshape(1, 274)
            sampleIndex = np.ones(24)*nsamples
            for p in range(0, 24):
                sampleIndex[p] = sampleIndex[p]+p
            # Channel 200 is saved to compare with the MEG's own save files
            dataFrom200chan = values[:, 200]
            # UPPT002 channel (used mostly to compare the timing
            # "CLF trigger -> image change")
            try:
                dataFromTrigger = data[:, 319]
            except Exception:
                print("Problem with channels")
                dataFromTrigger = np.zeros(24)
            # Accumulate everything gathered from this 24-sample packet
            self.matSaveNbSamples = np.append(self.matSaveNbSamples, sampleIndex, axis=0)
            self.matSaveData = np.append(self.matSaveData, dataFrom200chan, axis=0)
            self.matSaveDataTrigger = np.append(self.matSaveDataTrigger, dataFromTrigger, axis=0)
            # Feed the packet mean to the classifier: save output + probabilities
            prediction[i] = classifier.predict(values_mean_reshaped)[0]
            predictionProbas[i] = classifier.predict_proba(values_mean_reshaped)[0]
            prediction_proba = classifier.predict_proba(values_mean_reshaped)[0]
            # Both class probabilities are saved (they sum up to 1)
            mat_prediction_proba = np.ones(24)*prediction_proba[0]
            mat_prediction_proba2 = np.ones(24)*prediction_proba[1]
            self.matProbas = np.append(self.matProbas, mat_prediction_proba, axis=0)
            self.matProbas2 = np.append(self.matProbas2, mat_prediction_proba2, axis=0)
            # Validation method (3rd variant): cumulative probability with an
            # adjustable threshold. Two earlier variants -- "all of the last
            # nbSteps predictions positive" and "probability density over a
            # fixed-length window" -- were removed here; see repo history.
            #
            # Marker convention: 50 marks the FIRST positive packet of a run,
            # 0.5 the following ones, which makes run starts easy to find in
            # post-processing.
            self.currentProbaSum += prediction_proba[0]
            if self.currentProbaSum > self.probaThreshold:
                print("Sum of probability reached threshold")
                # Resetting the currentProbaSum to zero:
                self.currentProbaSum = 0
                toSend = np.ones(1)
                if self.matDetect[-1] == 50 or self.matDetect[-1] == 0.5:
                    toAdd = 0.5
                else:
                    toAdd = 50
            else:
                toAdd = 0
            self.matDetect = np.append(self.matDetect, toAdd*np.ones(24), axis=0)
            # Send the data (a packet marker) to the next node (an inputstream
            # in the agency script)
            self.outputs['signals'].send(toSend.astype('float32'))
            lastIndex = globalIndex
            # Advance the ring-buffer index
            if (i+1) >= self.nbSteps:
                i = 0
            else:
                i = i+1

    # This is the stop function of the thread that will be called when we are done with the MEGBuffer
    def stop(self):
        """Stop the polling loop, post-process matDetect, save all matrices
        to a CSV file under ./saves and disconnect the FieldTrip socket."""
        print('Thread stopped')
        print("Modifying the saveDataMat")
        # Replace each 50 by the number of subsequent positive packets in a
        # row (run length); the trailing 0.5 fillers remain.
        for a in range(1, self.matDetect.size, 24):
            if self.matDetect[a] == 50:
                y = 24
                nbDetected = 0
                if a+y < self.matDetect.size:
                    while self.matDetect[a+y] == 0.5 and (a+y+24 < self.matDetect.size):
                        y += 24
                        nbDetected += 1
                    self.matDetect[a] = nbDetected+1
                    # Erasing all the 50 values that are not needed anymore
                    for k in range(1, 24):
                        self.matDetect[a+k] = 0.5
        current_directory = os.getcwd()
        final_directory = os.path.join(current_directory, r'saves')
        if not os.path.exists(final_directory):
            os.makedirs(final_directory)
        # File-name components (timeStamp/datePsy are kept for the
        # commented-out local-analysis code that used them; see repo history)
        dateT = datetime.now()
        timet = dateT.strftime("%H:%M:%S")
        timeStamp = timet.replace(':', '')
        datePsy = psy.data.getDateStr()
        # Saving is based on the parameters transmitted from the psychopy script
        print("Saving data in the MEGBuffer...")
        savingFileName = 'saves/Agentivity_BCI_' + self.subjectId +'_' +str(self.run_nbr)+'_'+ str(self.partType)+'_'+ str(self.dateStart) +'_'+ str(self.clf_name)+'_'+str(self.nbSteps) +'steps_megsave'+str(self.MEGsave)+'.csv'
        # Quick fix: if polling stopped at the wrong time, a classification may
        # be missing -- pad matDetect so all columns have the same length.
        if self.matDetect.shape[0] < self.matSaveData.shape[0]:
            self.matDetect = np.append(self.matDetect, np.zeros(24), axis=0)
        # Stack all the matrices as columns of a single array
        matSaveData = np.c_[self.matSaveData, self.matSaveNbSamples, self.matDetect, self.matProbas, self.matProbas2, self.matSaveDataTrigger]
        # Use fmt='%d' if only the triggers matter; '%5.5g' keeps the small
        # raw values readable (they would round to 0 with '%d').
        np.savetxt(savingFileName, matSaveData, delimiter=',', fmt='%5.5g')
        # Stop the polling loop and disconnect the socket
        with self.lock:
            self.running = False
        self.ftc.disconnect()
        print('Socket disconnected')
class MEGBuffer(Node):
    """Pyacq node wrapping a FieldTrip client.

    Connects to the MEG's FieldTrip buffer in _configure(), then delegates
    polling/classification to a MEGBuffer_Thread created in _initialize().
    """

    _output_specs = {
        'signals': dict(streamtype='analogsignal', dtype='float32', shape=(-1, 1)),
        'triggers': dict(streamtype='event', dtype=_dtype_trigger, shape=(-1,)),
    }

    def __init__(self, **kargs):
        Node.__init__(self, **kargs)

    def _configure(self, nb_steps_chosen, clf_name, run_nbr, subjectId, partType, timeStart, MEGsave):
        """Connect to the FieldTrip buffer and store the session parameters.

        Connection notes (see the README for details):
        - 'localhost' only works offline, on your own computer (with MATLAB).
        - On site, the MEG's IP is 100.1.1.5 (last working 08/07/2021);
          connect with an RJ-45 cable and a manual IP (e.g. 100.1.1.10,
          last byte >= 10), subnet mask 4, gateway 10.1.1.4.
        - The port is fortunately always 1972.
        """
        self.hostname = 'localhost'
        self.port = 1972
        self.ftc = FieldTrip.Client()
        self.ftc.connect(self.hostname, self.port)  # might throw IOError
        # Print the header to make sure everything is normal
        self.H = self.ftc.getHeader()
        print(self.H)
        # Session parameters, forwarded later to the worker thread
        self.nb_steps_chosen = nb_steps_chosen
        self.clf_name = clf_name
        self.run_nbr = run_nbr
        self.subjectId = subjectId
        self.partType = partType
        self.timeStart = timeStart
        self.MEGsave = MEGsave
        # Stream geometry taken from the FieldTrip header
        self.nb_channel = self.H.nChannels
        self.sample_rate = self.H.fSample
        self.nb_samples = self.H.nSamples
        self.nb_events = self.H.nEvents
        self.data_type = self.H.dataType
        self.chan_names = self.H.labels
        # Output settings for the next pyacq node
        self.outputs['signals'].spec['shape'] = (-1, 1)
        self.outputs['signals'].spec['nb_channel'] = self.nb_channel
        self.outputs['signals'].spec['sample_rate'] = self.sample_rate
        print(self.nb_channel)

    def _initialize(self):
        # Hand every session parameter over to the worker thread
        self._thread = MEGBuffer_Thread(
            self.ftc, outputs=self.outputs, parent=self,
            ch_names=self.chan_names, sample_rate=self.sample_rate,
            nb_steps=self.nb_steps_chosen, clf_name=self.clf_name,
            run_nbr=self.run_nbr, subjectId=self.subjectId,
            partType=self.partType, dateStart=self.timeStart,
            MEGsave=self.MEGsave)

    def _start(self):
        self._thread.start()

    def _stop(self):
        self._thread.stop()
        # self._thread.wait()  # kept disabled, as before

    def _close(self):
        pass
# This will hopefully be done soon with the help of Samuel Garcia to fully integrate the node to pyacq
#register_node_type(MEGBuffer)
# Just a quick testing main to make sure everything related to fieldtrip and pyacq works before starting to use the script!
if __name__ == "__main__":
    # Nodes
    MEGB = MEGBuffer()
    inputStream = InputStream()
    # Configuring MEGBuffer node.
    # FIX: _configure() also requires partType, timeStart and MEGsave --
    # the original call omitted them and raised a TypeError before any
    # polling could happen.
    MEGB.configure(nb_steps_chosen=5, clf_name='classifiers/FAY_meg_CLF [-0.3,-0.1].joblib',
                   run_nbr=4, subjectId='0991', partType='part1', timeStart='', MEGsave='')
    MEGB.outputs['signals'].configure(transfermode='plaindata')
    MEGB.outputs['triggers'].configure(transfermode='plaindata')
    MEGB.initialize()
    inputStream.connect(MEGB.outputs['signals'])
    MEGB.start()
    # Polling and receiving the data sent by the MEGBuffer node
    dataIsAvailable = inputStream.poll()
    data = inputStream.recv()
    i = 0
    nbPaquetsToTest = 200  # number of 24-sample packages we want to test
    nbPred = 0
    while dataIsAvailable and i < nbPaquetsToTest:
        data = inputStream.recv()  # Pulling the data from the stream
        if data[1][0] == 1:
            print('Detection')
            nbPred += 1
        try:
            dataIsAvailable = inputStream.poll(1000)
        except Exception:
            print("Error with polling the input stream")
            break
        i = i+1
    print("Nb detek : ", nbPred)
    # Closing the sockets and threads
    MEGB.stop()
    inputStream.close()
    MEGB.close()
|
{"/older_agency_scripts/agency_task_BCI_20210601_CB_MV3.py": ["/MEGBuffer.py"], "/older_agency_scripts/agency_task_BCI_20210623.py": ["/MEGBuffer.py"], "/agency_task_BCI_20210712.py": ["/configs/basemy_agency.py", "/configs/config_agency.py"], "/configs/basemy_agency.py": ["/configs/config_agency.py"], "/configs/config_agency.py": ["/configs/MEGBuffer.py"]}
|
37,691,097
|
Cocococo11/MEG_internship
|
refs/heads/master
|
/older_agency_scripts/agency_task_BCI_20210623.py
|
# Code written by Romain Quentin and Marine Vernet
# Modified by Fayed Rassoulou
# Integration of FieldTrip Buffer by Corentin Bel
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division
from psychopy import gui, visual, core, data, event, logging, parallel
from psychopy.constants import (NOT_STARTED, STARTED, PLAYING, PAUSED,
STOPPED, FINISHED, PRESSED, RELEASED, FOREVER)
import os
import os.path as op
import matplotlib.pyplot as plt
#imports for the pyacq node
from pyacq.core.stream import InputStream
from MEGBuffer import MEGBuffer
from joblib import load
# import yaml
import numpy as np
import time
from serial import Serial
# In case you're tired of avbin.dll
# import warnings
# warnings.filterwarnings("ignore")
# /!\
def serial_port(port='COM1', baudrate=9600, timeout=0):
    """Open (and reset) a serial port interface.

    The port is opened, closed and re-opened before flushing, which clears
    any stale state left over from a previous session.

    :param str port:
        Which port to interface with.
    :param baudrate:
        Rate at which information is transferred in bits per second.
    :param int timeout:
        Waiting time in seconds for the port to respond.
    :return: serial port interface
    """
    ser = Serial(port, baudrate, timeout=timeout)
    ser.close()
    ser = Serial(port, baudrate, timeout=timeout)
    ser.flush()
    return ser
def printTiming(trials, clock, taskEvent):
    """Record the current clock time under *taskEvent* in the trial data."""
    now = clock.getTime()
    trials.addData(taskEvent, now)
# !!Make sure that the classifiers are in a directory under /classifiers
def listClassifiers():
    """Return the file names found in the ./classifiers directory."""
    return os.listdir('./classifiers')
# def plotDict(dict):
# listKeys = dict.keys()
# values = dict.values()
# plt.bar(listKeys,values,color=['lightcoral','indianred','brown','olive','olivedrab','yellowgreen','magenta','orchid','hotpink','darkorange','goldenrod','moccasin'])
# plt.title("Early results of button presses")
# plt.show()
def plotDict2(dict):
    """Show a stacked bar chart of yes/no/nbw answer counts per condition.

    FIX: the original ignored its parameter and read the global
    dictCounterAnswers directly; the passed-in dictionary is now used
    (all callers pass dictCounterAnswers, so behaviour is unchanged).
    NOTE(review): the parameter name shadows the builtin `dict`; kept for
    backward compatibility with keyword callers.
    """
    A = [dict['H_yes'], dict['C_yes'], dict['HB_yes'], dict['CB_yes']]
    B = [dict['H_no'], dict['C_no'], dict['HB_no'], dict['CB_no']]
    C = [dict['H_nbw'], dict['C_nbw'], dict['HB_nbw'], dict['CB_nbw']]
    X = ['Hum', 'Comp', 'Hum+But', 'Comp+But']
    plt.bar(X, A, color='brown', label='yes')
    plt.bar(X, B, color='olive', bottom=A, label='no')
    plt.bar(X, C, color='darkorange', bottom=np.sum([A, B], axis=0), label='nbw')
    plt.legend()
    plt.show()
def closeMEGB():
    """Shut down the MEG buffer chain when running the BCI session."""
    if CHOICE_OF_EXPERIMENT != 'S2_with':
        return
    MEGB.stop()
    inputStream.close()
    MEGB.close()
def prepare_pie_plot(button_presses, random_changes, early_button_presses_after_computer, early_button_presses_after_human, nb_trials):
    """Print change/early-press rates and return the non-zero pie slices.

    Returns a (values, labels) pair containing only the categories with a
    non-zero count, in a fixed order: human, computer, early BP after
    computer, early BP after human.
    """
    human = button_presses - early_button_presses_after_human
    computer = random_changes - early_button_presses_after_computer
    print('\n' + 'Since the start of the recordings:')
    print('rate of human changes: ', str(human), '/', str(nb_trials), ' = ', str(human/nb_trials))
    print('rate of computer changes: ', str(computer), '/', str(nb_trials), ' = ', str(computer/nb_trials))
    print('rate of early button presses after computer: ', str(early_button_presses_after_computer), '/', str(nb_trials), ' = ', str(early_button_presses_after_computer/nb_trials))
    print('rate of early button presses after human: ', str(early_button_presses_after_human), '/', str(nb_trials), ' = ', str(early_button_presses_after_human/nb_trials))
    print('\n')
    pietoplot = []
    pielabels = []
    for value, label in ((human, 'human'),
                         (computer, 'computer'),
                         (early_button_presses_after_computer, 'early BP after computer'),
                         (early_button_presses_after_human, 'early BP after human')):
        if value != 0:
            pietoplot.append(value)
            pielabels.append(label)
    return pietoplot, pielabels
# ******** PARAMETERS TO CHECK AT THE BEGINNING OF THE SESSION **************
# computer (MEG/EEG/MEG_NIH/Marine/Marine_perso/Salim/Fayed)
computer = 'Corentin'
DEBUG = True
trigger = False
eyelink = False
serialPort = False
few_trials = False
# CHOICE_OF_EXPERIMENT = 'S1_random', 'S2_without', 'S2_with'
CHOICE_OF_EXPERIMENT = 'S2_without'
threshold600 = 0 # did we reach 600 trials in each category?
nbPred = 0 # for 'S2_with'
# GUI to define the participant, session and part (if session 2)
# debug mode
if DEBUG:
fullscr = False
logging.console.setLevel(logging.DEBUG)
else:
fullscr = True
# logging.console.setLevel(logging.WARNING)
if CHOICE_OF_EXPERIMENT == 'S1_random':
expName = 'AgentivityRandom'
elif CHOICE_OF_EXPERIMENT == 'S2_without':
expName = 'Agentivity_debug_BCI' # for the BCI part
elif CHOICE_OF_EXPERIMENT == 'S2_with':
expName = 'Agentivity_BCI' # for the BCI part
if CHOICE_OF_EXPERIMENT == 'S1_random':
expInfo = {'participant': '', 'run': ''}
else:
expInfo = {'participant': '', 'run': '','nbSteps':'', 'part': '', 'classifier': ''}
dlg = gui.Dlg(title=expName)
dlg.addField('participant:')
dlg.addField('run:')
if CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with':
dlg.addField('nbSteps:')
dlg.addField('part:', choices=["part1", "part2"])
listClf = listClassifiers()
dlg.addField('classifier:', choices=listClf)
expInfo['participant'], expInfo['run'] , expInfo['nbSteps'], expInfo['part'] ,expInfo['classifier'] = dlg.show() # show dialog and wait for OK or Cancel
else:
expInfo['participant'], expInfo['run'] = dlg.show() # show dialog and wait for OK or Cancel
expInfo['part'] = ''
if dlg.OK is False: # or if ok_data is not None
core.quit() # user pressed cancel
expInfo['expName'] = expName
expInfo['date'] = data.getDateStr() # will create str of current date/time
expInfo['frameRate'] = 60 # store frame rate of monitor
frameDur = 1.0 / 60.0
# number of trials TO BE CORRECTED FOR THE REAL EXPERIMENT !!!!!!!!!!!!!!!!!!!
if few_trials:
nb_trials_before_short_break = 20 # 50
nb_trials_before_long_break = 80 # 200
max1_trials = 40 # 1200
max2_trials = 50 # 1400
elif CHOICE_OF_EXPERIMENT == 'S1_random' or expInfo['part']=='part1' :
nb_trials_before_short_break = 50 # 50 for S1_random
nb_trials_before_long_break = 200 # 200 for S1_random,
max1_trials = 1200 # 1200
max2_trials = 1400 # 1400
else :
nb_trials_before_short_break = 20 # 20 for S2
nb_trials_before_long_break = 80 # 80 for S2
max1_trials = 1200 # 1200
max2_trials = 1400 # 1400
print('Going for nb_trials_before_short_break = %d , nb_trials_before_long_break = %d' %(nb_trials_before_short_break ,nb_trials_before_long_break))
# image folders
if CHOICE_OF_EXPERIMENT == 'S1_random':
session_image_choice = 'AgencyImage_session1'
elif expInfo['part'] == 'part1':
session_image_choice = 'AgencyImage_session2_part1'
elif expInfo['part'] == 'part2':
session_image_choice = 'AgencyImage_session2_part2'
# Path to save the results
if computer == 'EEG':
home_folder = '/Users/chercheur/Documents/PythonScripts/Agency_Salim/scripts' # noqa
elif computer == 'MEG':
if CHOICE_OF_EXPERIMENT == 'S1_random':
home_folder = 'C:\\Python_users\\Agency\\scripts' #random session
else:
home_folder = 'C:\\Python_users\\Agency\\bci_agency' #bci session
elif computer == 'Marine_perso':
home_folder = '/Users/marinevernet/Documents/lab_Lyon/python/psychopy/agency' # noqa
elif computer == 'Salim':
home_folder = '/Users/Zephyrus/Dropbox/Agency_Salim/scripts'
elif computer == 'Fayed':
home_folder = '/Users/invitéLabo/Desktop/Fayed/scripts/pscyhopy'
elif computer == 'Fayed2':
home_folder = '/Users/Fayed/Desktop/PC_STAGE/mne_analysis/scripts/pscyhopy'
elif computer == 'Corentin':
home_folder = 'C:\\Users\\Coco'
results_folder = home_folder + '/data'
# Data file name
edfFileName = expInfo['participant']+expInfo['run']
if CHOICE_OF_EXPERIMENT == 'S1_random':
filename = results_folder + '/%s_%s_%s_%s' % (expName, expInfo['participant'],
expInfo['run'],
expInfo['date'])
else:
filename = results_folder + '/%s_%s_%s_%s_%s' % (expName,
expInfo['participant'],
expInfo['run'],
expInfo['part'],
expInfo['date'])
# ******** END OF PARAMETERS TO CHECK AT THE BEGINNING OF THE SESSION ********
# for the BCI part (S2)
# nb_of_trials_within_little_block = 0 # initialize counter
dictCounterAnswers = {
"H_yes": 0,
"H_no": 0,
"H_nbw": 0,
"C_yes": 0,
"C_no": 0,
"C_nbw": 0,
"HB_yes": 0,
"HB_no": 0,
"HB_nbw": 0,
"CB_yes": 0,
"CB_no": 0,
"CB_nbw": 0
}
# params
if computer == 'EEG':
window_size = (1024, 768)
value_parallel_huma = 1
value_parallel_comp = 2
value_parallel_huma_early_after_comp = 6
value_parallel_huma_early_after_huma = 5
value_parallel_huma_early_after_begin = 4
value_parallel_huma_early_after_early = 3
value_answer_yes = 10
value_answer_no = 30
value_answer_nbw = 20
addressPortParallel = '0x0378'
elif computer == 'MEG': # CHECK THESE PARAMETERS
window_size = (1920, 1080)
value_parallel_huma = 20
value_parallel_comp = 40
value_parallel_huma_early_after_comp = 10
value_parallel_huma_early_after_huma = 6
value_parallel_huma_early_after_begin = 4
value_parallel_huma_early_after_early = 2
value_answer_yes = 110
value_answer_no = 130
value_answer_nbw = 120
addressPortParallel = '0x3FE8'
elif computer == 'Marine_perso':
window_size = (1792, 1120) # old mac (1440, 900)
elif computer == 'Fayed':
window_size = (1440, 900)
elif computer == 'Fayed2':
window_size = (1920, 1080)
if DEBUG:
window_size = (500, 500)
blank_time = 0.010 # in seconds
# number_of_images = 1500 # max2_trials # 600*2 # up to 1200
image_size = (0.6, 0.6*window_size[0]/window_size[1])
# set up the ports and Eyelink
if serialPort:
port_s = serial_port()
if trigger:
port = parallel.ParallelPort(address=addressPortParallel)
if eyelink:
import EyeLink
selfEdf = EyeLink.tracker(window_size[0], window_size[1], edfFileName)
# list all images
images = list()
files_list = os.listdir(op.join(home_folder, session_image_choice))
for img in files_list:
if '.jpg' in img:
if img.startswith('A'):
images.append(img)
# build trials
conditions = []
for trial in range(len(images)):
conditions.append({'image_nb': trial})
trials = data.TrialHandler(trialList=conditions, nReps=1, method='random')
# An ExperimentHandler isn't essential but helps with data saving
thisExp = data.ExperimentHandler(dataFileName=filename)
thisExp.addLoop(trials)
# save a log file for detail verbose info
logFile = logging.LogFile(filename+'.log', level=logging.EXP)
logging.console.setLevel(logging.WARNING) # this outputs to the screen
# Setup the Window
win = visual.Window(
size=window_size, fullscr=fullscr, screen=0,
allowGUI=False, allowStencil=False,
monitor='testMonitor', color=[0, 0, 0], colorSpace='rgb',
blendMode='avg', useFBO=True)
# Setup the elements to display
White_screen = visual.Rect(
win=win, name='White_screen', units='cm',
width=(2000, 2000)[0], height=(2000, 2000)[1],
ori=0, pos=(0, 0),
lineWidth=1, lineColor=[1, 1, 1], lineColorSpace='rgb',
fillColor=[0.5, 0.5, 0.5], fillColorSpace='rgb',
opacity=1, interpolate=True)
Instructions = visual.TextStim(
win=win, name='Instructions',
text='''Une image va apparaitre à l'écran.
\nPrenez quelques secondes pour l'observer sans bouger les yeux de la croix centrale.
\nClignez les yeux le moins possible.
\nPour démarrer, appuyez sur le bouton de droite.''',
font='Arial',
pos=(0, 0), height=0.1, wrapWidth=None, ori=0,
color='black', colorSpace='rgb', opacity=1)
Cross = visual.ShapeStim(
win=win, name='Cross', vertices='cross', units='cm',
size=(0.8, 0.8),
ori=0, pos=(0, 0),
lineWidth=0.5, lineColor=[1, 0, 0], lineColorSpace='rgb',
fillColor=[1, 0, 0], fillColorSpace='rgb',
opacity=1, interpolate=True)
Pixel = visual.Rect(
win=win, name='topleftpixel', units='pix',
pos=(-window_size[1], window_size[1]/2),
size=(window_size[0]*2/5, 200),
fillColor=[-1, -1, -1],
lineColor=[-1, -1, -1])
# Initialize components for Routine "image"
fname = op.join(home_folder, session_image_choice, images[1])
Image = visual.ImageStim(
win, image=fname, pos=(0, 0), size=image_size)
preload_images = [
visual.ImageStim(win, op.join(home_folder, session_image_choice, img), size=image_size)
for img in images]
# for the BCI part (part 2)
if (CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with') and expInfo['part'] == 'part2':
Question = visual.TextStim(win=win, name='Question', text="Avez-vous changé l'image ?",
font='Arial', pos=(0, 0.3), height=0.1, wrapWidth=None,
ori=0, color='black', colorSpace='rgb', opacity=1)
AnswerYes = visual.TextStim(win=win, name='AnswerYes', text='VOUS',
font='Arial', pos=(0, -0.1), height=0.06, wrapWidth=None,
ori=0, color='black', colorSpace='rgb', opacity=1)
AnswerNo = visual.TextStim(win=win, name='AnswerNo', text='ORDI',
font='Arial', pos=(0, -0.1), height=0.06, wrapWidth=None,
ori=0, color='black', colorSpace='rgb', opacity=1)
AnswerNoButWanted = visual.TextStim(win=win, name='AnswerNoButWanted', text='ORDI AVANT VOUS',
font='Arial', pos=(0, -0.1), height=0.06, wrapWidth=None,
ori=0, color='black', colorSpace='rgb', opacity=1)
# Create some handy timers
imageClock = core.Clock()
blankClock = core.Clock()
longBreackClock = core.Clock()
shortBreackClock = core.Clock()
blankBeforeQuestionClock = core.Clock() # for the BCI part
questionClock = core.Clock() # for the BCI part
globalClock = core.Clock() # to track the time since experiment started
globalClock.reset() # clock
# Create the parameters of the gamma function
k_shape = 3
theta_scale = 1
# Count number of button press and number of random changes
button_presses = 0
random_changes = 0
early_button_presses_after_computer = 0
early_button_presses_after_human = 0
# # Count number of yes and no response (for the BCI part)
# button_yes = 0
# button_no = 0
# button_no_but_wanted = 0
# Handy variable to know the previous trigger
previousTrigger = ''
timeEarlyBTNPress = 0
is_there_an_early = 0
print('\n')
if CHOICE_OF_EXPERIMENT == 'S2_with':
#Loading the MEGBuffer node
MEGB = MEGBuffer()
inputStream = InputStream()
nbSteps_chosen = expInfo['nbSteps']
clfname = expInfo['classifier']
MEGB.configure(nb_steps_chosen =nbSteps_chosen,clf_name =clfname )
MEGB.outputs['signals'].configure( transfermode='plaindata')
MEGB.outputs['triggers'].configure( transfermode='plaindata')
MEGB.initialize()
inputStream.connect(MEGB.outputs['signals'])
MEGB.start()
# ------Prepare to start Routine "Instructions"-------
continueRoutine = True
White_screen.setAutoDraw(True)
Instructions.setAutoDraw(True)
Pixel.setAutoDraw(True)
# -------Start Routine "Instructions"-------
key_from_serial = []
key_from_serial2 = ''
win.flip()
while continueRoutine:
if serialPort:
key_from_serial2 = str(port_s.readline())[2:-1]
if len(key_from_serial2) > 0:
key_from_serial2 = key_from_serial2[-1]
if key_from_serial2 == '2':
Instructions.setAutoDraw(False)
continueRoutine = False
if event.getKeys(keyList=['y']):
Instructions.setAutoDraw(False)
continueRoutine = False
if event.getKeys(keyList=["escape"]):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB()
win.close()
core.quit()
# Start MEG recordings
if trigger:
port.setData(0)
time.sleep(0.1)
port.setData(252)
# Start the trials
for trial in trials:
# ------ Stop the recordings and close everything for S2 part 1 -------
if (CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with') and expInfo['part'] == 'part1' and \
(trials.thisN % nb_trials_before_long_break) == 0 and trials.thisN != 0:
# Stop MEG recordings
if trigger:
time.sleep(1)
port.setData(253)
time.sleep(1)
port.setData(0)
end_text = 'Pause ! Veuillez ne pas bouger et attendre les instructions. \n\nVous pouvez fermer les yeux.'
Instructions.setText(end_text)
Instructions.setAutoDraw(True)
Cross.setAutoDraw(False)
win.flip()
pietoplot, pielabels = prepare_pie_plot(button_presses, random_changes, early_button_presses_after_computer, early_button_presses_after_human, trials.thisN)
plt.pie(pietoplot,labels=pielabels)
plt.show()
time.sleep(5)
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
win.close()
core.quit()
# ------Condition for Long Break-------
if (CHOICE_OF_EXPERIMENT == 'S1_random' or expInfo['part'] == 'part2') and \
((trials.thisN % nb_trials_before_long_break) == 0 and trials.thisN != 0 and trials.thisN < max1_trials) or \
(button_presses >= max1_trials/2 and random_changes >= max1_trials/2 and threshold600 == 0) or \
(button_presses >= max2_trials/2 and random_changes >= max2_trials/2):
# ------Prepare to start Routine "Long Break"-------
continueRoutine = True
if ((trials.thisN % nb_trials_before_long_break) == 0 and trials.thisN != 0 and trials.thisN < max1_trials):
long_break_text = 'Pause ! Veuillez ne pas bouger et attendre les instructions. \n\nVous pouvez fermer les yeux.'
elif (button_presses >= max1_trials/2 and random_changes >= max1_trials/2 and threshold600 == 0) or (button_presses >= max2_trials/2 and random_changes >= max2_trials/2):
long_break_text = 'Presque fini ! Veuillez ne pas bouger et attendre les instructions \n\nVous pouvez fermer les yeux.'
threshold600 = 1
Instructions.setText(long_break_text)
Instructions.setAutoDraw(True)
Cross.setAutoDraw(False)
win.callOnFlip(longBreackClock.reset)
# -------Start Routine "Long Break"-------
win.flip()
if CHOICE_OF_EXPERIMENT == 'S1_random':
print('Long break') # for the random part
print('Partipant : ' + str(button_presses) + '\nOrdinateur : ' + str(random_changes) + '\n') # for the random part
print('Participant-Ordinateur : ' + str(button_presses-random_changes) + '\n') # for the random part
key_from_serial = []
key_from_serial2 = ''
# Stop MEG recordings
if trigger:
time.sleep(1)
port.setData(253)
time.sleep(1)
port.setData(0)
if button_presses >= max2_trials/2 and random_changes >= max2_trials/2:
time.sleep(5)
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
win.close()
core.quit()
while continueRoutine:
if event.getKeys(keyList=['a']):
Instructions.setAutoDraw(False)
continueRoutine = False
# win.flip()
if event.getKeys(keyList=["escape"]):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB()
win.close()
core.quit()
# Start MEG recordings
if trigger:
port.setData(252)
event.clearEvents(eventType='keyboard')
# ------Condition for Short Break-------
if (trials.thisN % nb_trials_before_short_break) == 0 and trials.thisN != 0:
# ------Prepare to start Routine "Short Break"-------
continueRoutine = True
if CHOICE_OF_EXPERIMENT == 'S1_random':
difference = button_presses - random_changes
if difference > 6:
break_text = "Petite pause ! \n\n Vous répondez trop souvent par rapport à l'ordinateur. \nAppuyez moins souvent. \n\n Pour continuer, appuyez sur le bouton de droite."
elif difference < -6:
break_text = "Petite pause ! \n\n Vous ne répondez pas assez souvent par rapport à l'ordinateur. \nAppuyez plus souvent. \n\n Pour continuer, appuyez sur le bouton de droite."
else:
break_text = "Petite pause ! \n\n Vous répondez aussi souvent que l'ordinateur, bravo ! \n\n Pour continuer, appuyez sur le bouton de droite."
# break_text = 'Petite pause !' + '\n\nVous : ' + str(button_presses) + '\n\n Ordinateur : ' + str(random_changes) + '\n\n Appuyez sur le bouton de droite pour continuer.' # for the random part
else: # for the BCI part
break_text = 'Petite pause \n\n Veuillez ne pas bouger et attendre les instructions' # for the BCI part
Instructions.setText(break_text)
Instructions.setAutoDraw(True)
Pixel.setAutoDraw(True)
Cross.setAutoDraw(False)
win.callOnFlip(shortBreackClock.reset)
continueRoutine = True
# -------Start Routine "Short Break"-------
win.flip()
if CHOICE_OF_EXPERIMENT == 'S1_random':
print('Partipant : ' + str(button_presses) + '\nOrdinateur : ' + str(random_changes) + '\n') # for the random part
print('Participant-Ordinateur : ' + str(button_presses-random_changes) + '\n') # for the random part
else:
if expInfo['part'] == 'part2': # max(dictCounterAnswers.values())>0 :
plotDict2(dictCounterAnswers)
# print(yaml.dump(dictCounterAnswers, sort_keys=False, default_flow_style=False))
elif expInfo['part'] == 'part1':
pietoplot, pielabels = prepare_pie_plot(button_presses, random_changes, early_button_presses_after_computer, early_button_presses_after_human, trials.thisN)
plt.pie(pietoplot,labels=pielabels)
plt.show()
# nb_of_trials_within_little_block = 0
key_from_serial = []
key_from_serial2 = ''
while continueRoutine:
if serialPort:
key_from_serial2 = str(port_s.readline())[2:-1]
if len(key_from_serial2) > 0:
key_from_serial2 = key_from_serial2[-1]
if key_from_serial2 == '2' and CHOICE_OF_EXPERIMENT == 'S1_random':
Instructions.setAutoDraw(False)
continueRoutine = False
if (event.getKeys(keyList=['y']) and CHOICE_OF_EXPERIMENT == 'S1_random') or \
(event.getKeys(keyList=['a']) and (CHOICE_OF_EXPERIMENT == 'S2_with' or CHOICE_OF_EXPERIMENT == 'S2_without')):
Instructions.setAutoDraw(False)
continueRoutine = False
if event.getKeys(keyList=["escape"]):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB()
win.close()
core.quit()
if shortBreackClock.getTime() > 30 and CHOICE_OF_EXPERIMENT == 'S1_random': # noqa
Instructions.setAutoDraw(False)
continueRoutine = False
# ------Prepare to start Routine "Blank"-------
continueRoutine = True
Cross.setAutoDraw(True)
# -------Start Routine "Blank"-------
win.callOnFlip(blankClock.reset)
win.callOnFlip(printTiming, trials, globalClock, 'globalTiming')
win.flip()
if trigger:
port.setData(0)
while continueRoutine:
frameRemains = blank_time - win.monitorFramePeriod * 0.75 # most of one frame period left # noqa
if blankClock.getTime() >= frameRemains:
Cross.setAutoDraw(False)
continueRoutine = False
if event.getKeys(keyList=["escape"]):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB()
win.close()
core.quit()
# ------Prepare to start Routine "Image"-------
preload_images[trial['image_nb']].setAutoDraw(True)
Cross.setAutoDraw(True)
Pixel.setAutoDraw(False)
event.clearEvents(eventType='keyboard')
# # Increase nb of trials (for the BCI part)
# if CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with':
# nb_of_trials_within_little_block += 1
# -------Start Routine "Image"-------
win.callOnFlip(imageClock.reset)
win.callOnFlip(printTiming, trials, blankClock, 'blank')
win.flip()
keyPress = []
key_from_serial = []
key_from_serial2 = ''
is_there_an_early = 0
frameRemainsRT = np.maximum(0.5, np.random.gamma(k_shape, scale=theta_scale, size=1)) # noqa
if CHOICE_OF_EXPERIMENT == 'S2_with':
detectPrep = False
inputStream.empty_queue()
cond_for_loop = True
else:
if (imageClock.getTime() < frameRemainsRT):
cond_for_loop = True
else:
cond_for_loop = False
ActiveStatus = 0
while cond_for_loop: # noqa
if trigger :
port.setData(0)
#MEGBuffer Part
# Polling and receiving the data sent by the MEGBuffer node
if (CHOICE_OF_EXPERIMENT == 'S2_with') and (imageClock.getTime() > 0.5):
try :
inputStream.empty_queue()
dataIsAvailable = inputStream.poll(1000)
except :
print("Error with polling the input stream")
break
#nbPaquetsToTest = 10000 # represents the number of packages of 24 we want to test
if(dataIsAvailable):
data = inputStream.recv() # Pulling the data from the
# print(data)
# print(time.time())
if( data[1][0] == 1):
if trigger:
port.setData(value_parallel_comp)
if eyelink:
EyeLink.tracker.sendMessage(selfEdf, 'C')
RT = imageClock.getTime()
ActiveStatus = 0
random_changes += 1
nbPred+=1
detectPrep = True
cond_for_loop=False
print('computer change')
previousTrigger = 'C'
preload_images[trial['image_nb']].setAutoDraw(False)
Pixel.setAutoDraw(True)
if serialPort: # and (imageClock.getTime() > 0.5):
key_from_serial2 = str(port_s.readline())[2:-1]
if len(key_from_serial2) > 0:
key_from_serial2 = key_from_serial2[-1]
keyPress = event.getKeys(keyList=['r', 'escape'])
if ((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1'):
if imageClock.getTime() > 0.5 or CHOICE_OF_EXPERIMENT == 'S1_random' :
if trigger:
port.setData(value_parallel_huma)
if eyelink:
EyeLink.tracker.sendMessage(selfEdf, 'H')
previousTrigger = 'H'
print("Human Change")
preload_images[trial['image_nb']].setAutoDraw(False)
Pixel.setAutoDraw(True)
RT = imageClock.getTime()
# RT = keyPress[0][1]
ActiveStatus = 1
button_presses += 1
cond_for_loop = False
else:
if previousTrigger == '':
if trigger:
port.setData(value_parallel_huma_early_after_begin)
print("Early BP after beginning!")
if previousTrigger == 'H':
if trigger:
port.setData(value_parallel_huma_early_after_huma)
print("Early BP after human")
is_there_an_early += 1
early_button_presses_after_human += 1
previousTrigger='HB'
elif previousTrigger == 'C':
if trigger:
port.setData(value_parallel_huma_early_after_comp)
print("Early BP after computer")
is_there_an_early += 1
early_button_presses_after_computer += 1
previousTrigger='CB'
elif previousTrigger == 'HB' or previousTrigger == 'CB':
if trigger:
port.setData(value_parallel_huma_early_after_early)
print("Early BP after early!")
if eyelink:
EyeLink.tracker.sendMessage(selfEdf, 'E')
timeEarlyBTNPress = imageClock.getTime()
if (keyPress and keyPress[0][0] == 'escape'):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB()
win.close()
core.quit()
if (imageClock.getTime() > frameRemainsRT) and (CHOICE_OF_EXPERIMENT == 'S1_random' or CHOICE_OF_EXPERIMENT == 'S2_without'):
cond_for_loop = False
if trigger:
port.setData(value_parallel_comp)
if eyelink:
EyeLink.tracker.sendMessage(selfEdf, 'C')
print('computer change')
previousTrigger = 'C'
preload_images[trial['image_nb']].setAutoDraw(False)
Pixel.setAutoDraw(True)
RT = frameRemainsRT[0]
ActiveStatus = 0
random_changes += 1
win.callOnFlip(printTiming, trials, imageClock, 'image')
win.flip()
if trigger :
port.setData(0)
# ------Condition for Question, BCI part (part 2) -------
if (CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with') and expInfo['part'] == 'part2':
if trigger :
port.setData(0)
win.callOnFlip(blankBeforeQuestionClock.reset)
win.flip()
is_there_an_early = 0
while blankBeforeQuestionClock.getTime() < 0.5:
if serialPort:
key_from_serial2 = str(port_s.readline())[2:-1]
if len(key_from_serial2) > 0:
key_from_serial2 = key_from_serial2[-1]
keyPress = event.getKeys(keyList=['r'])
if ((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1'):
print("Early BP")
if previousTrigger == 'H' or previousTrigger == 'HB' or previousTrigger == 'CB':
if trigger:
port.setData(value_parallel_huma_early_after_huma)
if previousTrigger == 'H':
early_button_presses_after_human += 1
previousTrigger = 'HB'
elif previousTrigger=='C':
if trigger:
port.setData(value_parallel_huma_early_after_comp)
early_button_presses_after_computer += 1
previousTrigger = 'CB'
if eyelink:
EyeLink.tracker.sendMessage(selfEdf, 'E')
is_there_an_early += 1
timeEarlyBTNPress = blankBeforeQuestionClock.getTime()
if trigger :
port.setData(0)
# ------Prepare to start Routine "Question"-------
continueRoutine = True
Question.setAutoDraw(True)
AnswerYes.setAutoDraw(True)
AnswerNo.setAutoDraw(True)
AnswerNoButWanted.setAutoDraw(True)
AnswerYes.alignText = 'left'
AnswerNo.alignText = 'right'
AnswerNoButWanted.alignText== 'middle'
Cross.setAutoDraw(False)
win.callOnFlip(questionClock.reset)
AnswerYes.setColor(color = 'black')
AnswerNo.setColor(color = 'black')
AnswerNoButWanted.setColor(color = 'black')
selectedAnswer = ''
# -------Start Routine "Question"-------
win.flip()
key_from_serial = []
key_from_serial2 = ''
while continueRoutine:
if serialPort:
key_from_serial2 = str(port_s.readline())[2:-1]
if len(key_from_serial2) > 0:
key_from_serial2 = key_from_serial2[-1]
keyPress = event.getKeys(keyList=['r', 'y', 'c', 'escape'])
# Switching buttons
# press r/1 to go left
# press y/2 to go right
# press c/3 to validate
if (((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1') and selectedAnswer=='') or \
(((keyPress and keyPress[0][0] == 'y') or key_from_serial2 == '2') and selectedAnswer=='N') or \
(((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1') and selectedAnswer=='NBW'):
AnswerYes.setColor('white')
AnswerNo.setColor('black')
AnswerNoButWanted.setColor('black')
selectedAnswer='Y'
elif (((keyPress and keyPress[0][0] == 'y') or key_from_serial2 == '2') and selectedAnswer=='') or \
(((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1') and selectedAnswer=='Y') or \
(((keyPress and keyPress[0][0] == 'y') or key_from_serial2 == '2') and selectedAnswer=='NBW'):
AnswerYes.setColor('black')
AnswerNo.setColor('white')
AnswerNoButWanted.setColor('black')
selectedAnswer='N'
elif ((keyPress and keyPress[0][0] == 'y') or key_from_serial2 == '2') and selectedAnswer=='Y' or \
(((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1') and selectedAnswer=='N'):
AnswerYes.setColor('black')
AnswerNo.setColor('black')
AnswerNoButWanted.setColor('white')
selectedAnswer='NBW'
elif ((keyPress and keyPress[0][0] == 'c') or key_from_serial2 == '8') and selectedAnswer != '':
Question.setAutoDraw(False)
AnswerYes.setAutoDraw(False)
AnswerNo.setAutoDraw(False)
AnswerNoButWanted.setAutoDraw(False)
continueRoutine = False
if selectedAnswer == 'Y':
if trigger :
port.setData(value_answer_yes)
# button_yes += 1
active_answer = 1
print('yes chosen' + '\n')
# TODO adding +1 depending on the trigger that created the question
dictKey = previousTrigger + '_yes'
dictCounterAnswers[dictKey]=dictCounterAnswers[dictKey]+1
elif selectedAnswer == 'N':
if trigger :
port.setData(value_answer_no)
# button_no += 1
active_answer = 0
print('no chosen' + '\n')
dictKey = previousTrigger + '_no'
dictCounterAnswers[dictKey]=dictCounterAnswers[dictKey]+1
elif selectedAnswer == 'NBW':
if trigger :
port.setData(value_answer_nbw)
# button_no_but_wanted += 1
active_answer = 0.5
print('nbw chosen' + '\n')
dictKey = previousTrigger + '_nbw'
dictCounterAnswers[dictKey]=dictCounterAnswers[dictKey]+1
previousTrigger = ''
win.flip()
if trigger :
port.setData(0)
if event.getKeys(keyList=["escape"]):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB()
win.close()
core.quit()
trials.addData('RT', RT)
trials.addData('ActiveStatus', ActiveStatus)
# for the BCI part
if CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with':
if expInfo['part'] == 'part1':
trials.addData('ActiveAnswer', 99)
trials.addData('EarlyBP_trialbefore', is_there_an_early)
if is_there_an_early != 0:
trials.addData('RT_earlyBP_trialbefore', timeEarlyBTNPress)
else:
trials.addData('RT_earlyBP_trialbefore', 99)
elif expInfo['part'] == 'part2':
trials.addData('ActiveAnswer', active_answer)
trials.addData('EarlyBP', is_there_an_early)
if is_there_an_early != 0:
trials.addData('RT_earlyBP', timeEarlyBTNPress)
else:
trials.addData('RT_earlyBP', 99)
thisExp.nextEntry()
# -------Ending Trials loop -------
print('saving')
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
print('closing exp')
logging.flush()
print('closing log')
if CHOICE_OF_EXPERIMENT == 'S2_with':
MEGB.stop()
inputStream.close()
MEGB.close()
print('closing megb')
print('closing')
# make sure everything is closed down
thisExp.abort() # or data files will save again on exit
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
win.close()
core.quit()
exit()
|
{"/older_agency_scripts/agency_task_BCI_20210601_CB_MV3.py": ["/MEGBuffer.py"], "/older_agency_scripts/agency_task_BCI_20210623.py": ["/MEGBuffer.py"], "/agency_task_BCI_20210712.py": ["/configs/basemy_agency.py", "/configs/config_agency.py"], "/configs/basemy_agency.py": ["/configs/config_agency.py"], "/configs/config_agency.py": ["/configs/MEGBuffer.py"]}
|
37,691,098
|
Cocococo11/MEG_internship
|
refs/heads/master
|
/testing/basemy.py
|
import os
import os.path as op
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.backends.backend_pdf import PdfPages
# def thecodes(file_to_ana, decod_scr_with_trigger):
# if file_to_ana == 'Guided':
# if decod_scr_with_trigger == 1:
# cow_codes_scr = [40, 42]
# else:
# cow_codes_scr = [40, 50]
# textleft_codes = [40, 45]
# cow_codes = [1060, 1065, 1070, 1075,
# 2060, 2065, 2070, 2075]
# round_codes = [1060, 1065, 1070, 1075,
# 1560, 1565, 1570, 1575]
# left_codes = [1060, 1065, 1560, 1565,
# 2060, 2065, 2560, 2565]
# text_codes = [1060, 1070, 1560, 1570,
# 2060, 2070, 2560, 2570]
# if file_to_ana == 'JustCue':
# cow_codes_scr = []
# textleft_codes = []
# cow_codes = []
# round_codes = [10100, 10110, 10120]
# left_codes = []
# text_codes = []
# if file_to_ana == 'Spontaneous':
# cow_codes_scr = []
# textleft_codes = [40]
# cow_codes = []
# round_codes = []
# left_codes = [1060, 1065]
# text_codes = [1060, 1070]
# return cow_codes_scr, textleft_codes, cow_codes, round_codes, left_codes, text_codes #noqa
#which channel we want to decode
def datatodec(which_channels, epochs):
    """Return the data array (trials x channels x times) restricted to the
    channel group named by *which_channels*.

    The pick-based options modify *epochs* in place (``pick_types`` /
    ``pick_channels``) before reading the private ``_data`` buffer; the
    name-indexed options (eyes / pupil / trigger) only slice ``_data``.
    Any unrecognised value falls back to all channels.
    """
    # Groups selected through an in-place MNE-style type pick.
    type_picks = {
        'meg': dict(meg=True, ref_meg=False),
        'meg&ref': dict(meg=True),
        'meg&emg': dict(meg=True, eeg=True, ref_meg=False),  # EMG sits on an EEG channel
        'emg': dict(eeg=True),
    }
    # Groups selected by a substring match on the channel name (in-place).
    name_tags = {
        'mlc': ('MLC',),                   # left motor channels
        'mrc': ('MRC',),                   # right motor channels
        'motor': ('MLC', 'MRC', 'MZC'),    # left + right + central motor
    }
    if which_channels in type_picks:
        epochs.pick_types(**type_picks[which_channels])
        return epochs._data
    if which_channels in name_tags:
        tags = name_tags[which_channels]
        epochs.pick_channels([ch for ch in epochs.ch_names
                              if any(tag in ch for tag in tags)])
        return epochs._data
    if which_channels == 'mlc&emg&eyes':
        # left motor channels plus EMG, eye position and pupil traces
        chosen = [ch for ch in epochs.ch_names if 'MLC' in ch]
        chosen.extend(['UADC010-2800', 'UADC011-2800', 'UADC012-2800', 'EEG064-2800'])
        epochs.pick_channels(chosen)
        return epochs._data
    if which_channels == 'eyes':
        # two adjacent channels: horizontal + vertical eye position
        names = epochs.ch_names
        return epochs._data[:, names.index('UADC010-2800'):names.index('UADC011-2800') + 1, :]
    if which_channels == 'pupil':
        # keep a singleton channel axis so downstream code still sees 3-D data
        return epochs._data[:, None, epochs.ch_names.index('UADC012-2800'), :]
    if which_channels == 'trigger':
        return epochs._data[:, None, epochs.ch_names.index('UPPT002'), :]
    # default: decode on every available channel
    return epochs._data
def datatodec_conc(which_channels, epochs, epochs_times, package):
    """Build a time-lagged copy of the selected channels for decoding.

    The selected channels are stacked along the channel axis 24 times, each
    copy shifted back by 0..23 samples (one MEG-buffer package), so the
    output has 24x the selected channels.  ``package`` is the first usable
    sample index and ``epochs_times`` the exclusive end index, which makes
    all shifted slices share the length ``epochs_times - package``.

    Bug fix: the 'eyes' branch previously read ``epochs.ch__lenames`` (a
    typo for ``ch_names``) and raised AttributeError; the channel-index
    lookups are now also hoisted out of the lag loop.
    """
    n_lags = 24  # current sample + 23 preceding ones
    if which_channels == 'emg':
        idx = epochs.ch_names.index('EEG064-2800')
        data_to_dec = epochs._data[:, None, idx, package:epochs_times]
        for lag in range(1, n_lags):
            data_to_dec = np.concatenate(
                (data_to_dec,
                 epochs._data[:, None, idx, package - lag:epochs_times - lag]),
                axis=1)
    elif which_channels == 'eyes':
        # two adjacent channels: horizontal + vertical eye position
        lo = epochs.ch_names.index('UADC010-2800')
        hi = epochs.ch_names.index('UADC011-2800') + 1
        data_to_dec = epochs._data[:, lo:hi, package:epochs_times]
        for lag in range(1, n_lags):
            data_to_dec = np.concatenate(
                (data_to_dec,
                 epochs._data[:, lo:hi, package - lag:epochs_times - lag]),
                axis=1)
    elif which_channels == 'pupil':
        idx = epochs.ch_names.index('UADC012-2800')
        data_to_dec = epochs._data[:, None, idx, package:epochs_times]
        for lag in range(1, n_lags):
            data_to_dec = np.concatenate(
                (data_to_dec,
                 epochs._data[:, None, idx, package - lag:epochs_times - lag]),
                axis=1)
    else:
        # default: lag every channel
        data_to_dec = epochs._data[:, :, package:epochs_times]
        for lag in range(1, n_lags):
            data_to_dec = np.concatenate(
                (data_to_dec,
                 epochs._data[:, :, package - lag:epochs_times - lag]),
                axis=1)
    return data_to_dec
def whattodecod(somecode, y):
    """Binarise the label vector *y* for decoding.

    Returns an array with *y*'s dtype and shape holding 0 where the label
    belongs to *somecode* and 1 everywhere else.
    """
    labels = np.ones_like(y)
    labels[np.isin(y, somecode)] = 0
    return labels
def smooth_to_plot(array_of_decod):
    """Smooth a 1-D decoding-score array with a 5-sample window, for plotting.

    The first two and last two samples keep their original values.

    NOTE(review): the averages are written back into the array being read,
    so positions i and i+1 already hold smoothed values when index i+2 is
    computed — this is a recursive filter, not a plain moving average.
    Confirm this is intended before reusing elsewhere.
    """
    array_of_decod2 = array_of_decod.copy()
    for i in range(len(array_of_decod)-4):
        # window centre is i+2; loop stops 4 short of the end so every
        # index i..i+4 stays in bounds
        array_of_decod2[i+2] = (array_of_decod2[i] +
                                array_of_decod2[i+1] +
                                array_of_decod2[i+2] +
                                array_of_decod2[i+3] +
                                array_of_decod2[i+4])/5
    return array_of_decod2
def thedirectory(path_analyzed_data, which_channels, RAW=False):
    """Return (and create if needed) the epochs output directory.

    *which_channels* is currently unused but kept for interface
    compatibility with existing callers.  With ``RAW=True`` epochs built
    from raw (unfiltered) data go into a separate ``raw/`` subtree.
    """
    if RAW:
        directory = op.join(path_analyzed_data, 'results_agency/raw/epochs_allchannels') #if you want to epochs with raw data
    else:
        directory = op.join(path_analyzed_data, 'results_agency/epochs_allchannels') # noqa
    # makedirs(exist_ok=True) replaces the exists()+mkdir pair: it creates
    # missing parent directories (plain mkdir failed whenever
    # 'results_agency' did not exist yet) and has no check-then-create race.
    os.makedirs(directory, exist_ok=True)
    return directory
def thedirectorydecod(path_analyzed_data, endofpath):
    """Return (and create if needed) the decoding-results directory
    *endofpath* under *path_analyzed_data*."""
    directorydecod = op.join(path_analyzed_data, endofpath)
    # makedirs(exist_ok=True): creates missing parents (plain mkdir failed
    # for a nested endofpath) and avoids the exists()/mkdir race.
    os.makedirs(directorydecod, exist_ok=True)
    return directorydecod
def plotchanceandlimit(horizontal_extent, horizontal_borders, vertical_extent, vertical_borders): # noqa
    """Overlay dashed black guide lines (chance level / limits) on the
    current matplotlib axes."""
    for level in horizontal_borders:
        plt.plot(horizontal_extent, [level] * 2, color='black', linestyle='dashed', alpha=0.5) # noqa
    for bound in vertical_borders:
        plt.plot([bound] * 2, vertical_extent, color='black', linestyle='dashed', alpha=0.5) # noqa
def transfo_sign_analog(signal, targetValueMin, targetValueMax, targetStep1, targetStep2, halfDist): # noqa
    """Clean an analog signal into a near-binary min/max trace.

    Three passes: (1) snap samples within ``targetStep1`` of either extreme
    onto that extreme; (2) snap samples within ``targetStep2`` of an extreme
    onto it when the sample ``halfDist`` later is also near that extreme;
    (3) force every remaining intermediate sample to whichever extreme the
    sample ``halfDist`` later is closer to.  Finally, samples equal to
    ``targetValueMin`` are set to 0.

    NOTE(review): the first two look-ahead loops index ``signal2[item+halfDist]``
    without excluding the last ``halfDist`` samples (unlike the third pass,
    which slices ``[:-halfDist]``) — an IndexError is possible when a
    near-extreme sample sits in the tail.  Confirm against real data.
    """
    # example calibration values used during development:
    # targetValueMin = 0
    # targetValueMax = 37.75
    # targetStep1 = 0.25
    # targetStep2 = 7.5
    # halfDist = 30
    # pass 1: snap samples already within targetStep1 of an extreme
    signal2 = np.copy(signal)
    signal_high = np.where([item > (targetValueMax-targetStep1) for item in signal]) # 37.5 # noqa
    signal_low = np.where([item < (targetValueMin+targetStep1) for item in signal]) # 0.25 # noqa
    signal2[signal_high] = targetValueMax # 37.75
    signal2[signal_low] = targetValueMin # 0
    # pass 2: snap samples within targetStep2 of an extreme when the sample
    # halfDist later confirms the same extreme
    signal3 = np.copy(signal2)
    # signal_high = np.where([(item < targetValueMax and item > (targetValueMax-targetStep2)) for item in signal2[:-halfDist]]) # noqa
    # signal_low = np.where([(item < (targetValueMin+targetStep2) and item > targetValueMin) for item in signal2[:-halfDist]]) # noqa
    signal_high = np.where([(item < targetValueMax and item > (targetValueMax-targetStep2)) for item in signal2]) # noqa
    signal_low = np.where([(item < (targetValueMin+targetStep2) and item > targetValueMin) for item in signal2]) # noqa
    for item in signal_high[0]:
        if signal2[item+halfDist] > (targetValueMax-targetStep2):
            signal3[item] = targetValueMax
    for item in signal_low[0]:
        if signal2[item+halfDist] < (targetValueMin+targetStep2):
            signal3[item] = targetValueMin
    # pass 3: force the remaining intermediate samples (tail excluded) to
    # whichever extreme the sample halfDist later is closer to
    signal4 = np.copy(signal3)
    signal_inter = np.where([(item < targetValueMax and item > targetValueMin) for item in signal3[:-halfDist]]) # noqa
    for item in signal_inter[0]:
        if signal3[item+halfDist] > (targetValueMax-targetValueMin)/2:
            signal4[item] = targetValueMax
        else:
            signal4[item] = targetValueMin
    # map the low plateau onto exactly 0 for downstream use
    signal5 = np.copy(signal4)
    signal5[np.where(signal5 == targetValueMin)] = 0
    # ** plot to check and remove the last glinch for signal **
    # plt.plot(signal)
    # plt.plot(signal4)
    # plt.show()
    return signal5
def detect_emg(signal, threshold1, threshold2, length1, length2):
    """Detect EMG activity bursts and return a 0/threshold1 marker trace.

    A window of ``+-length1`` samples around every |signal| > threshold1
    crossing is marked when it contains more than one sample above
    threshold2; marked stretches are then extended forward by ``length2``
    samples when another marked sample sits ``length2`` later.

    NOTE(review): ``emg5 = emg4`` aliases, not copies — the extension loop
    reads ``emg4`` while writing through ``emg5`` into the same buffer, so
    extensions can cascade; and ``emg4[item+length2]`` can index past the
    end for detections near the tail.  Confirm both are intended.
    """
    emg3 = np.abs(signal)  # rectified EMG
    emg4 = np.zeros(signal.shape)
    # first pass: mark windows around strong crossings that are confirmed
    # by at least two samples above the secondary threshold
    for item in np.where(emg3 > threshold1)[0]:
        liminf = np.max([0, item - length1])
        limsup = np.min([item + length1, emg3.shape[0]])
        if np.where(emg3[liminf:limsup]>threshold2)[0].shape[0]>1:
            emg4[liminf:limsup] = threshold1
    # second pass: bridge gaps of up to length2 samples between marks
    emg5 = emg4
    for item in np.where(emg4 == threshold1)[0]:
        if emg4[item+length2]==threshold1:
            emg5[item:item+length2] =threshold1
    # plt.plot(emg2)
    # plt.plot(emg4)
    # plt.plot(emg5)
    # plt.show()
    return emg5
def plot_photodiode(photodiode, photodiode2, trigger):
    """Show both photodiode traces together with the trigger channel
    (visual sanity check of stimulus timing)."""
    for trace in (photodiode, photodiode2, trigger):
        plt.plot(trace)
    plt.title('Photodiode and Trigger')
    plt.legend(('Photodiode', 'Photodiode2', 'Trigger'), loc='upper left')
    plt.show()
def plot_eyetracker(raw_eyeA, raw_eyeB, raw_eyeC, subject, run_number, file):
    """Save a figure of the eye-tracker traces for one run.

    Channel B is plotted as X, channel A as Y and channel C as pupil; the
    time axis assumes 600 Hz sampling.  The figure is written to
    ``<file>/<subject>_run<run_number>_eye.jpg``.
    """
    t_sec = np.arange(0, raw_eyeA.shape[0]/600, 1/600)
    for trace, lab in ((raw_eyeB, 'X'), (raw_eyeA, 'Y'), (raw_eyeC, 'Pupil')):
        plt.plot(t_sec, trace, alpha=0.7, label=lab)
    plt.xlabel('Temps en seconde')
    plt.ylabel('Valeur arbitraire')
    plt.title(subject + ' run n°' + str(run_number) + ' Eye Tracker')
    plt.legend(loc='upper right')
    plt.savefig(op.join(file, '%s_run%s_eye.jpg' % (subject, run_number)))
    plt.clf()
def plot_detect_emg(emg, emg2, trigger, subject):
    """Show raw EMG, its detection trace and the event channel.

    The detection trace is rescaled per subject group (recording gain
    differed across participants).

    NOTE(review): for a subject outside both gain groups the detection
    trace is skipped while the legend still lists three entries — the
    labels then shift onto the wrong curves; verify the subject lists
    are complete.
    """
    plt.plot(emg*30000)
    gain_100k = ('FAY', '0992', '0993', '0994', '0995')
    gain_30k = ('0986', '0987', '0989', '0990', '0991', '0996', '1059', '1060', '1061')
    if subject in gain_100k:
        plt.plot(emg2*100000)
    elif subject in gain_30k:
        plt.plot(emg2*30000)
    plt.plot(trigger)
    plt.xlabel('Hz') #a changer
    plt.ylabel('Valeur arbitraire')
    plt.title(subject + ' Detection of EMG when button press')
    plt.legend(('EMG', 'Détection EMG', 'Evènement'), loc='upper left')
    plt.show()
def plot_event_before(events_tri, events_ima, E, F, subject, run_number, file):
    """Histogram of trigger-minus-image delays before trial rejection, saved as JPEG.

    `E` indexes trials with a button press (reaction times) and `F` trials
    where the image changed on its own; delays are converted from samples
    to seconds assuming a 600 Hz rate.
    """
    rt_button = (events_tri[E, 0]-events_ima[E, 0])/600
    rt_nothing = (events_tri[F, 0]-events_ima[F, 0])/600
    plt.hist(rt_button, bins=40, label='Button', alpha = 0.5)
    plt.hist(rt_nothing, bins=40, label='Nothing', fc=(1, 0, 0, 0.5))
    plt.legend()
    plt.xlabel('Times in second')
    plt.ylabel('Trials')
    plt.title(subject + ' run n°' + str(run_number) + ' Distribution of events through time (before delete)')
    fname = op.join(file, '%s_run%s_eventbefore.jpg' %(subject, run_number))
    plt.savefig(fname)
    plt.clf()
def plot_event_after(events_tri, events_ima, E, F, subject, run_number, file):
    """Histogram of trigger-minus-image delays after trial rejection, saved as JPEG.

    Same layout as plot_event_before: `E` indexes button-press trials,
    `F` spontaneous image changes; 600 Hz sample-to-second conversion.
    """
    rt_button = (events_tri[E, 0]-events_ima[E, 0])/600
    rt_nothing = (events_tri[F, 0]-events_ima[F, 0])/600
    plt.hist(rt_button, bins=40, label='Button', alpha = 0.5)
    plt.hist(rt_nothing, bins=40, label='Nothing', fc=(1, 0, 0, 0.5))
    plt.legend()
    plt.xlabel('Times in second')
    plt.ylabel('Trials')
    # plt.xlim(right=0.7, left=0.4) #detect button press before 500 ms
    plt.title(subject + ' run n°' + str(run_number) + ' Distribution of events through time (after delete)')
    fname = op.join(file, '%s_run%s_eventafter.jpg' %(subject, run_number))
    plt.savefig(fname)
    plt.clf()
def plot_emg(G, subject, run_number, file):
    """Histogram of EMG onsets that immediately precede a button press, saved as JPEG.

    `G` holds latencies in samples; division by 600 converts to seconds.
    """
    latencies_sec = G/600
    plt.hist(latencies_sec, bins=40)
    plt.xlabel('Times in second')
    plt.ylabel('Trials')
    plt.title(subject + ' run n°' + str(run_number) + ' Distribution of EMG immediately preceding a button press')
    fname = op.join(file, '%s_run%s_emg.jpg' %(subject, run_number))
    plt.savefig(fname)
    plt.clf()
def plot_blank(events_bla, events_ima, events_tri, subject, run_number, file):
    """Save two blank-duration histograms as JPEGs.

    Figure 1: blank between a trial's blank onset and the next image onset.
    Figure 2: delay between the trigger and the blank onset.
    All delays are samples divided by 600, i.e. seconds at 600 Hz.
    """
    # --- all trials ---
    trial_blanks = (events_ima[1:, 0]-events_bla[0:-1, 0])/600
    plt.hist(trial_blanks, bins=40)
    plt.xlabel('Times in second')
    plt.ylabel('Trials')
    plt.title(subject + ' run n°' + str(run_number) + ' Distribution of blank for all trials' )
    plt.savefig(op.join(file, '%s_run%s_blanktrials.jpg' %(subject, run_number)))
    plt.clf()
    # --- triggers ---
    trigger_blanks = (events_bla[:, 0]-events_tri[:, 0])/600
    plt.hist(trigger_blanks, bins=40)
    plt.xlabel('Times in second')
    plt.ylabel('Trials')
    plt.title(subject + ' run n°' + str(run_number) +' Distribution of blank for triggers')
    plt.savefig(op.join(file, '%s_run%s_blanktriggers.jpg' %(subject, run_number)))
    plt.clf()
#save multiple fig in pdf
def multipage(filename, figs=None, dpi=300):
    """Save several matplotlib figures into a single multi-page PDF.

    Parameters
    ----------
    filename : str
        Path of the output PDF file.
    figs : list of matplotlib figures, optional
        Figures to save, one per page. Defaults to every currently open
        figure (``plt.get_fignums()``).
    dpi : int, optional
        Resolution forwarded to ``savefig`` for rasterized elements.
        (The original accepted this parameter but never used it.)
    """
    if figs is None:
        figs = [plt.figure(n) for n in plt.get_fignums()]
    pp = PdfPages(filename)
    try:
        for fig in figs:
            fig.savefig(pp, format='pdf', dpi=dpi)
    finally:
        # Always close the PdfPages handle: the original leaked it (and left
        # an unfinalized PDF on disk) whenever a savefig call raised.
        pp.close()
# def plot_all_emg(all_G, runs, subject):
# plt.figure(figsize=(19.20, 10.80))
# emg_rt = []
# emg_rt_percent = []
# for i in range(len(runs)):
# plt.hist(all_G[i]/600, bins=40, color = 'seagreen')
# emg_rt.append(all_G[i]/600) #add tout les temps de réactions dans une liste
# emg_rt_percent.append(len(np.where(emg_rt[i] <=1)[0]) / len(emg_rt[i]) * 100) #récupère le % lorsque temps de reaction < 1
# plt.xlabel('Times in second')
# plt.ylabel('Trials')
# plt.title(subject + 'Distribution of all EMG immediately preceding a button press'
# +'\nPercentage under 1 sec : ' + str(round(sum(emg_rt_percent) / len(emg_rt_percent),3)) + '%')
# def plot_all_emg_subj(new_G, total_run):
# plt.figure(figsize=(19.20, 10.80))
# emg_rt = []
# emg_rt_percent = []
# for i in range(total_run):
# plt.hist(new_G[i]/600, bins=40, color = 'seagreen')
# emg_rt.append(new_G[i]/600) #add tout les temps de réactions dans une liste
# emg_rt_percent.append(len(np.where(emg_rt[i] <=1)[0]) / len(emg_rt[i]) * 100) #récupère le % lorsque temps de reaction < 1
# plt.xlabel('Times in second')
# plt.ylabel('Trials')
# plt.title('all subjects Distribution of all EMG immediately preceding a button press'
# +'\nPercentage under 1 sec : ' + str(round(sum(emg_rt_percent) / len(emg_rt_percent),3)) + '%')
#a refaire avec le new plot des BP
# def plot_all_event(all_events_tri, all_events_ima, all_E, all_F, runs, subject, subj_nb):
# from statistics import mean
# from statistics import median
# human_rt_mean = []
# machine_rt_mean = []
# human_rt_median = []
# machine_rt_median = []
# plt.figure(figsize=(19.20, 10.80))
# for i in range(len(runs)):
# plt.hist((all_events_tri[i][all_E[i], 0]-all_events_ima[i][all_E[i], 0])/600, bins=20, fc=(0.2, 0.2, 1, 1)) # distribution of reaction time # noqa
# plt.hist((all_events_tri[i][all_F[i], 0]-all_events_ima[i][all_F[i], 0])/600, bins=20, fc=(1, 0.2, 0.2, 0.5)) # distribution of random changes of image # noqa
# human_rt_mean.append(mean(((all_events_tri[i][all_E[i], 0]-all_events_ima[i][all_E[i], 0])/600)))
# machine_rt_mean.append(mean(((all_events_tri[i][all_F[i], 0]-all_events_ima[i][all_F[i], 0])/600)))
# human_rt_median.append(median(((all_events_tri[i][all_E[i], 0]-all_events_ima[i][all_E[i], 0])/600)))
# machine_rt_median.append(median(((all_events_tri[i][all_F[i], 0]-all_events_ima[i][all_F[i], 0])/600)))
# mean_diff = abs(mean(human_rt_mean) - mean(machine_rt_mean))
# median_diff = abs(median(human_rt_median) - median(machine_rt_median))
# plt.legend(('Button', 'Nothing'))
# plt.xlabel('Times in second')
# plt.ylabel('Trials')
# plt.title(subject + ' Distribution of all events through time (after delete)'
# + '\nMean difference between Human and Machine : ' + str(round(mean_diff,3))
# + '\nMedian difference between Human and Machine : ' + str(round(median_diff,3)))
# #**** save results for corr ****
# save_diff_RT = thedirectorydecod('/Users/Fayed/Desktop/PC_STAGE/analysis/scripts/decoding/MEG/MEG_analyzed_data', 'preprocessing/results/') #noqa
# DiffRT_Mean = op.join(save_diff_RT, '%s_DiffRT_Mean.npy' %(subject))
# DiffRT_Median = op.join(save_diff_RT, '%s_DiffRT_Median.npy' %(subject))
# np.save(DiffRT_Mean, mean_diff)
# np.save(DiffRT_Median, median_diff)
#a refaire avec le new plot des BP ou laisser le script à part maintenant qu'on save
# def plot_all_event_subj(all_events_tri_subj, all_events_ima_subj, all_E_subj, all_F_subj, total_run):
# from statistics import mean
# from statistics import median
# human_rt_mean = []
# machine_rt_mean = []
# human_rt_median = []
# machine_rt_median = []
# plt.figure(figsize=(19.20, 10.80))
# for i in range(total_run):
# #all_events_tri = tout les events tri de tt les sujets, all_e l'index
# plt.hist((all_events_tri_subj[i][all_E_subj[i], 0]-all_events_ima_subj[i][all_E_subj[i], 0])/600, bins=5, fc=(0.2, 0.2, 1, 1)) # distribution of reaction time # noqa
# plt.hist((all_events_tri_subj[i][all_F_subj[i], 0]-all_events_ima_subj[i][all_F_subj[i], 0])/600, bins=5, fc=(1, 0.2, 0.2, 0.5)) # distribution of random changes of image # noqa
# human_rt_mean.append(mean(((all_events_tri_subj[i][all_E_subj[i], 0]-all_events_ima_subj[i][all_E_subj[i], 0])/600)))
# machine_rt_mean.append(mean(((all_events_tri_subj[i][all_F_subj[i], 0]-all_events_ima_subj[i][all_F_subj[i], 0])/600)))
# human_rt_median.append(median(((all_events_tri_subj[i][all_E_subj[i], 0]-all_events_ima_subj[i][all_E_subj[i], 0])/600)))
# machine_rt_median.append(median(((all_events_tri_subj[i][all_F_subj[i], 0]-all_events_ima_subj[i][all_F_subj[i], 0])/600)))
# mean_diff = abs(mean(human_rt_mean) - mean(machine_rt_mean))
# median_diff = abs(median(human_rt_median) - median(machine_rt_median))
# plt.legend(('Bouton', 'Gamma'))
# plt.xlabel('Temps en millisecondes')
# plt.ylabel('Essais')
# # plt.title('all subjects Distribution of all events through time (after delete)'
# # + '\nMean difference between Human and Machine : ' + str(round(mean_diff,3))
# # + '\nMedian difference between Human and Machine : ' + str(round(median_diff,3)))
# #**** save results for corr****
# save_diff_RT = thedirectorydecod('/Users/Fayed/Desktop/PC_STAGE/analysis/scripts/decoding/MEG/MEG_analyzed_data', 'preprocessing/results/')
# DiffRT_Mean = op.join(save_diff_RT, 'allsubject_DiffRT_Mean.npy')
# DiffRT_Median = op.join(save_diff_RT, 'allsubject_DiffRT_Median.npy')
# np.save(DiffRT_Mean, mean_diff)
# np.save(DiffRT_Median, median_diff)
|
{"/older_agency_scripts/agency_task_BCI_20210601_CB_MV3.py": ["/MEGBuffer.py"], "/older_agency_scripts/agency_task_BCI_20210623.py": ["/MEGBuffer.py"], "/agency_task_BCI_20210712.py": ["/configs/basemy_agency.py", "/configs/config_agency.py"], "/configs/basemy_agency.py": ["/configs/config_agency.py"], "/configs/config_agency.py": ["/configs/MEGBuffer.py"]}
|
37,691,099
|
Cocococo11/MEG_internship
|
refs/heads/master
|
/agency_task_BCI_20210712.py
|
# Code written by Romain Quentin and Marine Vernet
# Modified by Fayed Rassoulou
# Integration of FieldTrip Buffer by Corentin Bel
# -*- coding: utf-8 -*-
# The files imported contain everything we need to run the code : libraries, functions, parameters, variables
# For the functions : make sure the files follow Github's package placement
from configs.basemy_agency import *
from configs.config_agency import *
# ***********************************************************************
# ********************** STARTING MEGBUFFER *****************************
# ***********************************************************************
# Starting the MEGBuffer, the thread associated and the pull of data from the MEG
if CHOICE_OF_EXPERIMENT == 'S2_with':
# Loading the MEGBuffer node
MEGB = MEGBuffer()
inputStream = InputStream()
nbSteps_chosen = expInfo['nbSteps']
clfname = expInfo['classifier']
partTypeChosen = expInfo['part']
timeBeforeStart = data.getDateStr()
# Configuring with all the parameters from the information window
MEGB.configure(nb_steps_chosen =nbSteps_chosen,clf_name =clfname,run_nbr = run_nbr,subjectId = participant ,partType = partTypeChosen, timeStart = timeBeforeStart,MEGsave = MEGsave)
MEGB.outputs['signals'].configure( transfermode='plaindata')
MEGB.outputs['triggers'].configure( transfermode='plaindata')
MEGB.initialize()
inputStream.connect(MEGB.outputs['signals'])
# Connection it to the InputStream, and starting it
MEGB.start()
# ------Prepare to start Routine "Instructions"-------
continueRoutine = True
White_screen.setAutoDraw(True)
Instructions.setAutoDraw(True)
Pixel.setAutoDraw(True)
# -------Start Routine "Instructions"-------
# This is where the participant gets told all the basic instructions
# That is also when the participant can start the whole experiment
key_from_serial = []
key_from_serial2 = ''
win.flip()
while continueRoutine:
if serialPort:
key_from_serial2 = str(port_s.readline())[2:-1]
if len(key_from_serial2) > 0:
key_from_serial2 = key_from_serial2[-1]
if key_from_serial2 == '2':
Instructions.setAutoDraw(False)
continueRoutine = False
if event.getKeys(keyList=['y']):
Instructions.setAutoDraw(False)
continueRoutine = False
if event.getKeys(keyList=["escape"]):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB(MEGB,inputStream)
win.close()
core.quit()
# Start MEG recordings
if trigger:
port.setData(0)
time.sleep(0.1)
port.setData(252)
########################################################################################################################
# **************************************** BEGGINING OF THE EXPERIMENT *************************************************
########################################################################################################################
# Start the trials : this is the loop that will go on until the experiment finishes
for trial in trials:
# ************************************************************
# *************** 1st CONDITION TO TRY ***********************
# ************************************************************
#
# ------ Stop the recordings and close everything for S2 part 1 -------
# Conditions for stopping and closing everything : has to be the first thing checked for every trial (trial = image change)
# We can ignore this part this for now, we want the part 1 to go on forever (and we stop it during a short break with escape)
if (CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with') and expInfo['part'] == 'part1' and \
(trials.thisN % nb_trials_before_long_break) == 0 and trials.thisN != 0:
# Stop MEG recordings
if trigger:
time.sleep(1)
port.setData(253)
time.sleep(1)
port.setData(0)
end_text = 'Pause ! Veuillez ne pas bouger et attendre les instructions. \n\nVous pouvez fermer les yeux.'
Instructions.setText(end_text)
Instructions.setAutoDraw(True)
Cross.setAutoDraw(False)
win.flip()
timeBeforeStart = data.getDateStr()
bloc_nb = int(trials.thisN/nb_trials_before_short_break)
# Plot of the current bloc
plt.subplot(2, 1, 1)
pietoplot, pielabels = prepare_pie_plot(button_presses_bloc, random_changes_bloc, early_button_presses_after_computer_bloc, early_button_presses_after_human_bloc, trials.thisN,participant,run_nbr,timeBeforeStart,bloc_nb)
plt.pie(pietoplot,labels=pielabels)
plt.title('Current bloc: %d'%bloc_nb)
if (nbSteps_chosen == None): # For testing in S2_withouts
nbSteps_chosen = 777
# Resetting the counts :
button_presses_bloc = 0
random_changes_bloc = 0
early_button_presses_after_computer_bloc = 0
early_button_presses_after_human_bloc = 0
# Plot of the next bloc
plt.subplot(2, 1, 2)
pietoplot, pielabels = prepare_pie_plot(button_presses, random_changes, early_button_presses_after_computer, early_button_presses_after_human, trials.thisN,participant,run_nbr,timeBeforeStart,bloc_nb)
plt.pie(pietoplot,labels=pielabels)
plt.title("Total on the current run")
if (nbSteps_chosen == None): # For testing in S2_withouts
nbSteps_chosen = 777
plt.savefig('fig/' + str(participant) + '_' + str(nbSteps_chosen)+'steps_run'+str(run_nbr)+'_'+expInfo['date']+'_megsave'+str(MEGsave))
if DEBUG == False:
mngr = plt.get_current_fig_manager()
mngr.window.setGeometry(2000, 100, 1000, 700)
plt.show()
time.sleep(5)
plt.close()
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB(MEGB,inputStream)
win.close()
core.quit()
# ************************************************************
# *************** 2nd CONDITION TO TRY ***********************
# ************************************************************
#
# ------Condition for Long Break-------
# Happens after a certain number of trials : check the variable nb_trials_before_long_break
# Print the current stats of the run, and send a trigger to start another recording if it's needed at the MEG
if (CHOICE_OF_EXPERIMENT == 'S1_random' or expInfo['part'] == 'part2') and \
((trials.thisN % nb_trials_before_long_break) == 0 and trials.thisN != 0 and trials.thisN < max1_trials) or \
(button_presses >= max1_trials/2 and random_changes >= max1_trials/2 and threshold600 == 0) or \
(button_presses >= max2_trials/2 and random_changes >= max2_trials/2):
# ------Prepare to start Routine "Long Break"-------
continueRoutine = True
if ((trials.thisN % nb_trials_before_long_break) == 0 and trials.thisN != 0 and trials.thisN < max1_trials):
long_break_text = 'Pause ! Veuillez ne pas bouger et attendre les instructions. \n\nVous pouvez fermer les yeux.'
elif (button_presses >= max1_trials/2 and random_changes >= max1_trials/2 and threshold600 == 0) or (button_presses >= max2_trials/2 and random_changes >= max2_trials/2):
long_break_text = 'Presque fini ! Veuillez ne pas bouger et attendre les instructions \n\nVous pouvez fermer les yeux.'
threshold600 = 1
Instructions.setText(long_break_text)
Instructions.setAutoDraw(True)
Cross.setAutoDraw(False)
win.callOnFlip(longBreackClock.reset)
# -------Start Routine "Long Break"-------
win.flip()
if CHOICE_OF_EXPERIMENT == 'S1_random':
print('Long break') # for the random part
print('Partipant : ' + str(button_presses) + '\nOrdinateur : ' + str(random_changes) + '\n') # for the random part
print('Participant-Ordinateur : ' + str(button_presses-random_changes) + '\n') # for the random part
key_from_serial = []
key_from_serial2 = ''
# Stop MEG recordings
if trigger:
time.sleep(1)
port.setData(253)
time.sleep(1)
port.setData(0)
if button_presses >= max2_trials/2 and random_changes >= max2_trials/2:
time.sleep(5)
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
win.close()
core.quit()
while continueRoutine:
if event.getKeys(keyList=['a']):
Instructions.setAutoDraw(False)
continueRoutine = False
# win.flip()
if event.getKeys(keyList=["escape"]):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB(MEGB,inputStream)
win.close()
core.quit()
# Start MEG recordings
if trigger:
port.setData(252)
event.clearEvents(eventType='keyboard')
# ************************************************************
# *************** 3rd CONDITION TO TRY ***********************
# ************************************************************
#
# ------Condition for Short Break-------
# This is the short break that happens both in part 1 or part 2
# For part 1 : it allows us to look at the figure generated and potentially adjust the classifier parameters
# For part 2 : it allows us to change the MEGsave file number if needed (for the part 2, we loop until we manually
# leave the experiment, and since the meg saves are only 7 minutes long, for better file's saving names, we want to make
# sure we use the right meg save number if the current run is longer than 7 minutes)
if (trials.thisN % nb_trials_before_short_break) == 0 and trials.thisN != 0:
# ------Prepare to start Routine "Short Break"-------
continueRoutine = True
if CHOICE_OF_EXPERIMENT == 'S1_random':
difference = button_presses - random_changes
if difference > 6:
break_text = "Petite pause ! \n\n Vous répondez trop souvent par rapport à l'ordinateur. \nAppuyez moins souvent. \n\n Pour continuer, appuyez sur le bouton de droite."
elif difference < -6:
break_text = "Petite pause ! \n\n Vous ne répondez pas assez souvent par rapport à l'ordinateur. \nAppuyez plus souvent. \n\n Pour continuer, appuyez sur le bouton de droite."
else:
break_text = "Petite pause ! \n\n Vous répondez aussi souvent que l'ordinateur, bravo ! \n\n Pour continuer, appuyez sur le bouton de droite."
# break_text = 'Petite pause !' + '\n\nVous : ' + str(button_presses) + '\n\n Ordinateur : ' + str(random_changes) + '\n\n Appuyez sur le bouton de droite pour continuer.' # for the random part
else: # for the BCI part
break_text = 'Petite pause \n\n Veuillez ne pas bouger et attendre les instructions' # for the BCI part
Instructions.setText(break_text)
Instructions.setAutoDraw(True)
Pixel.setAutoDraw(True)
Cross.setAutoDraw(False)
win.callOnFlip(shortBreackClock.reset)
continueRoutine = True
# -------Start Routine "Short Break"-------
win.flip()
if CHOICE_OF_EXPERIMENT == 'S1_random':
print('Partipant : ' + str(button_presses) + '\nOrdinateur : ' + str(random_changes) + '\n') # for the random part
print('Participant-Ordinateur : ' + str(button_presses-random_changes) + '\n') # for the random part
else:
timeBeforeStart = data.getDateStr()
bloc_nb = int(trials.thisN/nb_trials_before_short_break)
if expInfo['part'] == 'part2': # max(dictCounterAnswers.values())>0 :
# Show the windows to potentially change the MEGsave number :
print('here')
isLongBreak = ((trials.thisN+1 % nb_trials_before_long_break) == 0)
if(isLongBreak):
plotDict2(dictCounterAnswers ,dictCounterAnswersTotal,participant,run_nbr,timeBeforeStart,bloc_nb,MEGsave )
dictCounterAnswers = dict.fromkeys(dictCounterAnswers, 0) # Resetting the temporary dict to 0
else :
print('there')
plotDict2(dictCounterAnswers,dictCounterAnswersTotal,participant,run_nbr,timeBeforeStart,bloc_nb, MEGsave)
dictCounterAnswers = dict.fromkeys(dictCounterAnswers, 0)
# print(yaml.dump(dictCounterAnswers, sort_keys=False, default_flow_style=False))
elif expInfo['part'] == 'part1':
print("and everywhere")
if DEBUG == False:
mngr = plt.get_current_fig_manager()
mngr.window.setGeometry(2000, 100, 1000, 700)
# fig = plt.figure()
# timer = fig.canvas.new_timer(interval = 1000)
# timer.add_callback(close_event)
# timer.start()
# Plot of the current bloc
plt.subplot(2, 1, 1)
pietoplot, pielabels = prepare_pie_plot(button_presses_bloc, random_changes_bloc, early_button_presses_after_computer_bloc, early_button_presses_after_human_bloc, trials.thisN,participant,run_nbr,timeBeforeStart,bloc_nb)
plt.pie(pietoplot,labels=pielabels)
plt.title('Current bloc: %d'%bloc_nb)
if (nbSteps_chosen == None): # For testing in S2_withouts
nbSteps_chosen = 777
# Resetting the counts :
button_presses_bloc = 0
random_changes_bloc = 0
early_button_presses_after_computer_bloc = 0
early_button_presses_after_human_bloc = 0
# Plot of the next bloc
plt.subplot(2, 1, 2)
pietoplot, pielabels = prepare_pie_plot(button_presses, random_changes, early_button_presses_after_computer, early_button_presses_after_human, trials.thisN,participant,run_nbr,timeBeforeStart,bloc_nb)
plt.pie(pietoplot,labels=pielabels)
plt.title("Total on the current run")
if not os.path.exists('./fig/%s'%participant):
os.makedirs('./fig/%s'%participant)
# print(bloc_nb)
plt.savefig('./fig/%s/fig_%s_part1_run%s_bloc%s_%s'%(participant,participant,run_nbr,bloc_nb,timeBeforeStart))
# plt.close()
plt.show()
dictCounterAnswers = dict.fromkeys(dictCounterAnswers, 0) # Resetting the temporary dict to 0
# nb_of_trials_within_little_block = 0
key_from_serial = []
key_from_serial2 = ''
while continueRoutine:
if serialPort:
key_from_serial2 = str(port_s.readline())[2:-1]
if len(key_from_serial2) > 0:
key_from_serial2 = key_from_serial2[-1]
if key_from_serial2 == '2' and CHOICE_OF_EXPERIMENT == 'S1_random':
Instructions.setAutoDraw(False)
continueRoutine = False
if (event.getKeys(keyList=['y']) and CHOICE_OF_EXPERIMENT == 'S1_random') or \
(event.getKeys(keyList=['a']) and (CHOICE_OF_EXPERIMENT == 'S2_with' or CHOICE_OF_EXPERIMENT == 'S2_without')):
if trigger:
port.setData(252)
Instructions.setAutoDraw(False)
continueRoutine = False
if event.getKeys(keyList=["escape"]):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB(MEGB,inputStream)
win.close()
core.quit()
if shortBreackClock.getTime() > 30 and CHOICE_OF_EXPERIMENT == 'S1_random': # noqa
Instructions.setAutoDraw(False)
continueRoutine = False
textMEGsave = "Current run : "+str(MEGsave)
dlgMEGsave.addText(textMEGsave)
expMEGsave['MEGsave'] = dlgMEGsave.show()
MEGsave = expMEGsave['MEGsave']
if(CHOICE_OF_EXPERIMENT=='S2_with' and partTypeChosen=='part1'):
expOk['OK'] = dlgOk.show()
if dlgOk.OK :# and (expOk['OK']==True ):
try :
expChange['nbSteps'],expChange['clf'] =dlgChange.show()
except:
print('Change cancelled')
if dlgChange.OK and (expChange['clf'] != clfname or expChange['nbSteps']!=nbSteps_chosen):
print('Change successful: MEGBuffer will restart with the new values ')
closeMEGB(MEGB,inputStream)
plt.savefig('fig/' + str(participant) + '_' + str(nbSteps_chosen)+'steps_run'+str(run_nbr)+'_'+expInfo['date'])
print("We saved the fig generated with the previous values ! ")
# inputStream.close() # Do we have to close it ?
# Restart everything :
MEGB = MEGBuffer()
inputStream = InputStream()
nbSteps_chosen = expInfo['nbSteps']
clfname = expInfo['classifier']
timeBeforeStart = data.getDateStr()
MEGB.configure(nb_steps_chosen =expChange['nbSteps'],clf_name =expChange['clf'],run_nbr = run_nbr,subjectId = participant ,partType = partTypeChosen, timeStart = timeBeforeStart,MEGsave = MEGsave)
# MEGB.configure(nb_steps_chosen =nbSteps_chosen,clf_name =clfname,run_nbr = run_nbr,subjectId = participant ,partType = partTypeChosen, timeStart = timeBeforeStart)
MEGB.outputs['signals'].configure( transfermode='plaindata')
MEGB.outputs['triggers'].configure( transfermode='plaindata')
MEGB.initialize()
inputStream.connect(MEGB.outputs['signals'])
MEGB.start()
# Resend a trigger to show we started again
if trigger:
port.setData(252)
else :
print("Same values as previous run kept")
else :
print('User cancelled : acting as if there was no change in clf')
# elif(CHOICE_OF_EXPERIMENT=='S2_with' and partTypeChosen=='part2'):
# expMEGsave['MEGsave'] = dlgMEGsave.show()
# if dlgMEGsave.OK :# and (expOk['OK']==True ):
# MEGsaveNumber = expMEGsave['MEGsave']
time.sleep(0.01)
if trigger:
port.setData(0)
# ------Prepare to start Routine "Blank"-------
continueRoutine = True
Cross.setAutoDraw(True)
# -------Start Routine "Blank"-------
win.callOnFlip(blankClock.reset)
win.callOnFlip(printTiming, trials, globalClock, 'globalTiming')
win.flip()
if trigger:
port.setData(0)
while continueRoutine:
frameRemains = blank_time - win.monitorFramePeriod * 0.75 # most of one frame period left # noqa
if blankClock.getTime() >= frameRemains:
Cross.setAutoDraw(False)
continueRoutine = False
if event.getKeys(keyList=["escape"]):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB(MEGB,inputStream)
win.close()
core.quit()
# ------Prepare to start Routine "Image"-------
preload_images[trial['image_nb']].setAutoDraw(True)
Cross.setAutoDraw(True)
Pixel.setAutoDraw(False)
event.clearEvents(eventType='keyboard')
# # Increase nb of trials (for the BCI part)
# if CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with':
# nb_of_trials_within_little_block += 1
# -------Start Routine "Image"-------
win.callOnFlip(imageClock.reset)
win.callOnFlip(printTiming, trials, blankClock, 'blank')
win.flip()
keyPress = []
key_from_serial = []
key_from_serial2 = ''
is_there_an_early = 0
frameRemainsRT = np.maximum(0.5, np.random.gamma(k_shape, scale=theta_scale, size=1)) # noqa
if CHOICE_OF_EXPERIMENT == 'S2_with':
detectPrep = False
inputStream.empty_queue()
cond_for_loop = True
else:
if (imageClock.getTime() < frameRemainsRT):
cond_for_loop = True
else:
cond_for_loop = False
ActiveStatus = 0
while cond_for_loop: # noqa
if trigger :
port.setData(0)
#MEGBuffer Part
# Polling and receiving the data sent by the MEGBuffer node
if (CHOICE_OF_EXPERIMENT == 'S2_with') and (imageClock.getTime() > 0.5):
try :
inputStream.empty_queue()
dataIsAvailable = inputStream.poll(1000)
except :
print("Error with polling the input stream")
break
#nbPaquetsToTest = 10000 # represents the number of packages of 24 we want to test
if(dataIsAvailable):
dataMEG = inputStream.recv() # Pulling the data from the
# print("Time after receiving the data : ",time.time())
#
# print(data)
# print(time.time())
if( dataMEG[1][0] == 1):
if trigger:
port.setData(value_parallel_comp)
if eyelink:
EyeLink.tracker.sendMessage(selfEdf, 'C')
RT = imageClock.getTime()
ActiveStatus = 0
random_changes += 1
random_changes_bloc+=1
detectPrep = True
cond_for_loop=False
print('computer change')
# print("Time after confirming the data for a change : ",time.time())
previousTrigger = 'C'
preload_images[trial['image_nb']].setAutoDraw(False)
Pixel.setAutoDraw(True)
if serialPort: # and (imageClock.getTime() > 0.5):
key_from_serial2 = str(port_s.readline())[2:-1]
if len(key_from_serial2) > 0:
key_from_serial2 = key_from_serial2[-1]
keyPress = event.getKeys(keyList=['r', 'escape'])
if ((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1'):
if imageClock.getTime() > 0.5 or CHOICE_OF_EXPERIMENT == 'S1_random' :
if trigger:
port.setData(value_parallel_huma)
if eyelink:
EyeLink.tracker.sendMessage(selfEdf, 'H')
previousTrigger = 'H'
print("Human Change")
preload_images[trial['image_nb']].setAutoDraw(False)
Pixel.setAutoDraw(True)
RT = imageClock.getTime()
# RT = keyPress[0][1]
ActiveStatus = 1
button_presses += 1
button_presses_bloc+=1
cond_for_loop = False
else:
if previousTrigger == '':
if trigger:
port.setData(value_parallel_huma_early_after_begin)
print("Early BP after beginning!")
if previousTrigger == 'H':
if trigger:
port.setData(value_parallel_huma_early_after_huma)
print("Early BP after human")
is_there_an_early += 1
early_button_presses_after_human += 1
early_button_presses_after_human_bloc+=1
previousTrigger='HB'
elif previousTrigger == 'C':
if trigger:
port.setData(value_parallel_huma_early_after_comp)
time.sleep(0.01)
print("Early BP after computer")
is_there_an_early += 1
early_button_presses_after_computer += 1
early_button_presses_after_computer_bloc+=1
previousTrigger='CB'
elif previousTrigger == 'HB' or previousTrigger == 'CB':
if trigger:
port.setData(value_parallel_huma_early_after_early)
print("Early BP after early!")
if eyelink:
EyeLink.tracker.sendMessage(selfEdf, 'E')
timeEarlyBTNPress = imageClock.getTime()
if (keyPress and keyPress[0][0] == 'escape'):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB(MEGB,inputStream)
win.close()
core.quit()
if (imageClock.getTime() > frameRemainsRT) and (CHOICE_OF_EXPERIMENT == 'S1_random' or CHOICE_OF_EXPERIMENT == 'S2_without'):
cond_for_loop = False
if trigger:
port.setData(value_parallel_comp)
if eyelink:
EyeLink.tracker.sendMessage(selfEdf, 'C')
print('computer change')
previousTrigger = 'C'
preload_images[trial['image_nb']].setAutoDraw(False)
Pixel.setAutoDraw(True)
RT = frameRemainsRT[0]
ActiveStatus = 0
random_changes += 1
random_changes_bloc+=1
win.callOnFlip(printTiming, trials, imageClock, 'image')
win.flip()
# print("Time when imagine changed : ",time.time())
if trigger :
port.setData(0)
# ------Condition for Question, BCI part (part 2) -------
if (CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with') and (expInfo['part'] == 'part2' or expInfo['part']== 'part2_blank') :
if trigger :
port.setData(0)
win.callOnFlip(blankBeforeQuestionClock.reset)
win.flip()
is_there_an_early = 0
while blankBeforeQuestionClock.getTime() < 0.5:
if serialPort:
key_from_serial2 = str(port_s.readline())[2:-1]
if len(key_from_serial2) > 0:
key_from_serial2 = key_from_serial2[-1]
keyPress = event.getKeys(keyList=['r'])
if ((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1'):
print("Early BP")
if previousTrigger == 'H' or previousTrigger == 'HB' or previousTrigger == 'CB':
if trigger:
port.setData(value_parallel_huma_early_after_huma)
if previousTrigger == 'H':
early_button_presses_after_human += 1
early_button_presses_after_human_bloc +=1
previousTrigger = 'HB'
elif previousTrigger=='C':
if trigger:
port.setData(value_parallel_huma_early_after_comp)
time.sleep(0.01)
early_button_presses_after_computer += 1
early_button_presses_after_computer_bloc+=1
previousTrigger = 'CB'
if eyelink:
EyeLink.tracker.sendMessage(selfEdf, 'E')
is_there_an_early += 1
timeEarlyBTNPress = blankBeforeQuestionClock.getTime()
if trigger :
port.setData(0)
# ------Prepare to start Routine "Question"-------
continueRoutine = True
Question.setAutoDraw(True)
AnswerYes.setAutoDraw(True)
AnswerNo.setAutoDraw(True)
AnswerNoButWanted.setAutoDraw(True)
AnswerYes.alignText = 'left'
AnswerNo.alignText = 'right'
AnswerNoButWanted.alignText== 'middle'
Cross.setAutoDraw(False)
win.callOnFlip(questionClock.reset)
AnswerYes.setColor(color = 'black')
AnswerNo.setColor(color = 'black')
AnswerNoButWanted.setColor(color = 'black')
selectedAnswer = ''
# -------Start Routine "Question"-------
win.flip()
key_from_serial = []
key_from_serial2 = ''
while continueRoutine:
if serialPort:
key_from_serial2 = str(port_s.readline())[2:-1]
if len(key_from_serial2) > 0:
key_from_serial2 = key_from_serial2[-1]
keyPress = event.getKeys(keyList=['r', 'y', 'c', 'escape'])
# Switching buttons
# press r/1 to go left
# press y/2 to go right
# press c/3 to validate
if (((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1') and selectedAnswer=='') or \
(((keyPress and keyPress[0][0] == 'y') or key_from_serial2 == '2') and selectedAnswer=='N') or \
(((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1') and selectedAnswer=='NBW'):
AnswerYes.setColor('white')
AnswerNo.setColor('black')
AnswerNoButWanted.setColor('black')
selectedAnswer='Y'
elif (((keyPress and keyPress[0][0] == 'y') or key_from_serial2 == '2') and selectedAnswer=='') or \
(((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1') and selectedAnswer=='Y') or \
(((keyPress and keyPress[0][0] == 'y') or key_from_serial2 == '2') and selectedAnswer=='NBW'):
AnswerYes.setColor('black')
AnswerNo.setColor('white')
AnswerNoButWanted.setColor('black')
selectedAnswer='N'
elif ((keyPress and keyPress[0][0] == 'y') or key_from_serial2 == '2') and selectedAnswer=='Y' or \
(((keyPress and keyPress[0][0] == 'r') or key_from_serial2 == '1') and selectedAnswer=='N'):
AnswerYes.setColor('black')
AnswerNo.setColor('black')
AnswerNoButWanted.setColor('white')
selectedAnswer='NBW'
elif ((keyPress and keyPress[0][0] == 'c') or key_from_serial2 == '8') and selectedAnswer != '':
Question.setAutoDraw(False)
AnswerYes.setAutoDraw(False)
AnswerNo.setAutoDraw(False)
AnswerNoButWanted.setAutoDraw(False)
continueRoutine = False
if selectedAnswer == 'Y':
if trigger :
port.setData(value_answer_yes)
# button_yes += 1
active_answer = 1
print('Human choisi' + '\n')
# TODO adding +1 depending on the trigger that created the question
dictKey = previousTrigger + '_yes'
dictCounterAnswers[dictKey]=dictCounterAnswers[dictKey]+1
dictCounterAnswersTotal[dictKey]=dictCounterAnswersTotal[dictKey]+1
elif selectedAnswer == 'N':
if trigger :
port.setData(value_answer_no)
# button_no += 1
active_answer = 0
print('Computer choisi' + '\n')
dictKey = previousTrigger + '_no'
dictCounterAnswers[dictKey]=dictCounterAnswers[dictKey]+1
dictCounterAnswersTotal[dictKey]=dictCounterAnswersTotal[dictKey]+1
elif selectedAnswer == 'NBW':
if trigger :
port.setData(value_answer_nbw)
# button_no_but_wanted += 1
active_answer = 0.5
print('Ordi avant vous choisi' + '\n')
dictKey = previousTrigger + '_nbw'
dictCounterAnswers[dictKey]=dictCounterAnswers[dictKey]+1
dictCounterAnswersTotal[dictKey]=dictCounterAnswersTotal[dictKey]+1
previousTrigger = ''
win.flip()
if trigger :
port.setData(0)
if event.getKeys(keyList=["escape"]):
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
closeMEGB(MEGB,inputStream)
win.close()
core.quit()
trials.addData('RT', RT)
trials.addData('ActiveStatus', ActiveStatus)
trials.addData('blocNumber',bloc_nb)
trials.addData('MEGsave',MEGsave)
if CHOICE_OF_EXPERIMENT == 'S2_with':
trials.addData('nbSteps',nbSteps_chosen)
# for the BCI part
if CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with':
if expInfo['part'] == 'part1':
trials.addData('ActiveAnswer', 99)
trials.addData('EarlyBP_trialbefore', is_there_an_early)
if is_there_an_early != 0:
trials.addData('RT_earlyBP_trialbefore', timeEarlyBTNPress)
else:
trials.addData('RT_earlyBP_trialbefore', 99)
elif expInfo['part'] == 'part2':
trials.addData('ActiveAnswer', active_answer)
trials.addData('EarlyBP', is_there_an_early)
if is_there_an_early != 0:
trials.addData('RT_earlyBP', timeEarlyBTNPress)
else:
trials.addData('RT_earlyBP', 99)
thisExp.nextEntry()
# -------Ending Trials loop -------
print('saving')
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
print('closing exp')
logging.flush()
print('closing log')
if CHOICE_OF_EXPERIMENT == 'S2_with':
MEGB.stop()
inputStream.close()
MEGB.close()
print('closing megb')
print('closing')
# make sure everything is closed down
thisExp.abort() # or data files will save again on exit
if eyelink:
EyeLink.tracker.close(selfEdf, edfFileName)
win.close()
core.quit()
exit()
|
{"/older_agency_scripts/agency_task_BCI_20210601_CB_MV3.py": ["/MEGBuffer.py"], "/older_agency_scripts/agency_task_BCI_20210623.py": ["/MEGBuffer.py"], "/agency_task_BCI_20210712.py": ["/configs/basemy_agency.py", "/configs/config_agency.py"], "/configs/basemy_agency.py": ["/configs/config_agency.py"], "/configs/config_agency.py": ["/configs/MEGBuffer.py"]}
|
37,691,100
|
Cocococo11/MEG_internship
|
refs/heads/master
|
/configs/basemy_agency.py
|
#######################################################################################################################
######################### File regrouping all the functions used in the agency_task_bci script#########################
#######################################################################################################################
# All the imports needed for the agency script :
# Imports for psychopy
from __future__ import absolute_import, division
from psychopy import gui, visual, core, data, event, logging, parallel,monitors
from psychopy.constants import (NOT_STARTED, STARTED, PLAYING, PAUSED,
STOPPED, FINISHED, PRESSED, RELEASED, FOREVER)
# Imports for the os and plots and other useful libraries
import os
import os.path as op
import matplotlib.pyplot as plt
import numpy as np
import time
from serial import Serial
from configs.config_agency import *
# Imports for the pyacq node
from pyacq.core.stream import InputStream
from joblib import load
def serial_port(port='COM1', baudrate=9600, timeout=0):
    """Open *port* and return a ready-to-use serial connection.

    The port is deliberately opened, closed and opened again: the quick
    open/close cycle resets the device state before the connection that is
    actually handed back to the caller.

    :param str port:
        Which port to interface with.
    :param baudrate:
        Rate at which information is transferred in bits per second.
    :param int timeout:
        Waiting time in seconds for the port to respond.
    :return: serial port interface
    """
    connection = Serial(port, baudrate, timeout=timeout)
    connection.close()
    connection = Serial(port, baudrate, timeout=timeout)
    connection.flush()
    return connection
def printTiming(trials, clock, taskEvent):
    """Record the current time of *clock* in *trials* under the column *taskEvent*."""
    timestamp = clock.getTime()
    trials.addData(taskEvent, timestamp)
# Simple closing plot function
def close_event():
    """Close the current matplotlib figure (used as a figure-closing callback)."""
    plt.close()
def plotDict2(dictCounterAnswers,dictCounterAnswersTotal,participant,run_nbr,timeBeforeStart,bloc_nb,fileMEG):
    """Show two stacked bar charts of answer counts and save the figure.

    Top subplot: counts for the current bloc (``dictCounterAnswers``);
    bottom subplot: cumulative counts (``dictCounterAnswersTotal``).
    Each x position is a trigger condition (Hum / Comp / Hum+But / Comp+But);
    the three stacked segments per bar are the yes / no / no-but-wanted
    answers. The figure is written under ``./fig/<participant>/``.
    """
    # --- top subplot: current bloc only ---
    plt.subplot(2, 1, 1)
    # A = 'yes' answers, B = 'no' answers, C = 'no but wanted' answers,
    # one entry per trigger condition.
    A = [dictCounterAnswers['H_yes'], dictCounterAnswers['C_yes'], dictCounterAnswers['HB_yes'], dictCounterAnswers['CB_yes']]
    B = [dictCounterAnswers['H_no'], dictCounterAnswers['C_no'], dictCounterAnswers['HB_no'], dictCounterAnswers['CB_no']]
    C = [dictCounterAnswers['H_nbw'], dictCounterAnswers['C_nbw'], dictCounterAnswers['HB_nbw'], dictCounterAnswers['CB_nbw']]
    X = ['Hum', 'Comp', 'Hum+But', 'Comp+But']
    # NOTE(review): the legend labels ('Comp'/'Human'/'Comp + But') do not
    # obviously match the plotted series (yes/no/nbw answers) -- confirm the
    # intended legend before trusting these figures.
    plt.bar(X, A, color = 'brown', label='Comp')
    plt.bar(X, B, color = 'olive', bottom = A, label='Human')
    plt.bar(X, C, color = 'darkorange', bottom = np.sum([A, B], axis=0), label='Comp + But')
    plt.legend()
    plt.title("Results from the current bloc")
    # --- bottom subplot: same layout, cumulative session counts ---
    plt.subplot(2, 1, 2)
    A = [dictCounterAnswersTotal['H_yes'], dictCounterAnswersTotal['C_yes'], dictCounterAnswersTotal['HB_yes'], dictCounterAnswersTotal['CB_yes']]
    B = [dictCounterAnswersTotal['H_no'], dictCounterAnswersTotal['C_no'], dictCounterAnswersTotal['HB_no'], dictCounterAnswersTotal['CB_no']]
    C = [dictCounterAnswersTotal['H_nbw'], dictCounterAnswersTotal['C_nbw'], dictCounterAnswersTotal['HB_nbw'], dictCounterAnswersTotal['CB_nbw']]
    X = ['Hum', 'Comp', 'Hum+But', 'Comp+But']
    plt.bar(X, A, color = 'brown', label='Comp')
    plt.bar(X, B, color = 'olive', bottom = A, label='Human')
    plt.bar(X, C, color = 'darkorange', bottom = np.sum([A, B], axis=0), label='Comp + but')
    plt.legend()
    plt.title("General results")
    if DEBUG == False:
        # During a real run, push the figure window off the stimulus screen.
        mngr = plt.get_current_fig_manager()
        mngr.window.setGeometry(2000, 100, 1000, 700)
    if not os.path.exists('./fig/%s'%participant):
        os.makedirs('./fig/%s'%participant)
    plt.savefig('./fig/%s/fig_%s_part2_run%s_bloc%s_%s_megSave%s'%(participant,participant,run_nbr,bloc_nb,timeBeforeStart,fileMEG))
    plt.show()
def closeMEGB(MEGB,inputStream):
    """Shut down the MEG buffer node and its pyacq input stream.

    Does nothing unless the experiment is running online ('S2_with').
    """
    if CHOICE_OF_EXPERIMENT != 'S2_with':
        return
    MEGB.stop()
    inputStream.close()
    MEGB.close()
def prepare_pie_plot(button_presses, random_changes, early_button_presses_after_computer, early_button_presses_after_human, nb_trials, participant, run_nbr, timeBeforeStart, bloc_nb):
    """Print the change-rate summary and build the data for a pie chart.

    Returns ``(pietoplot, pielabels)``: per-category counts and matching
    labels, with zero-count categories dropped so the pie chart draws no
    empty wedges. The trailing parameters (participant, run_nbr,
    timeBeforeStart, bloc_nb) are kept for caller compatibility but are
    not used inside this function.
    """
    human_changes = button_presses - early_button_presses_after_human
    computer_changes = random_changes - early_button_presses_after_computer
    print('\n' + 'Since the start of the recordings:')
    print('rate of human changes: ', str(human_changes), '/', str(nb_trials), ' = ', str(human_changes/nb_trials))
    print('rate of computer changes: ', str(computer_changes), '/', str(nb_trials), ' = ', str(computer_changes/nb_trials))
    print('rate of early button presses after computer: ', str(early_button_presses_after_computer), '/', str(nb_trials), ' = ', str(early_button_presses_after_computer/nb_trials))
    print('rate of early button presses after human: ', str(early_button_presses_after_human), '/', str(nb_trials), ' = ', str(early_button_presses_after_human/nb_trials))
    print('\n')
    # (count, label) pairs in display order; empty categories are filtered out.
    slices = [
        (human_changes, 'human'),
        (computer_changes, 'computer'),
        (early_button_presses_after_computer, 'early BP after computer'),
        (early_button_presses_after_human, 'early BP after human'),
    ]
    pietoplot = [count for count, label in slices if count != 0]
    pielabels = [label for count, label in slices if count != 0]
    return pietoplot, pielabels
# Old version of the plot
# def plotDict(dict):
# listKeys = dict.keys()
# values = dict.values()
# plt.bar(listKeys,values,color=['lightcoral','indianred','brown','olive','olivedrab','yellowgreen','magenta','orchid','hotpink','darkorange','goldenrod','moccasin'])
# plt.title("Early results of button presses")
# plt.show()
|
{"/older_agency_scripts/agency_task_BCI_20210601_CB_MV3.py": ["/MEGBuffer.py"], "/older_agency_scripts/agency_task_BCI_20210623.py": ["/MEGBuffer.py"], "/agency_task_BCI_20210712.py": ["/configs/basemy_agency.py", "/configs/config_agency.py"], "/configs/basemy_agency.py": ["/configs/config_agency.py"], "/configs/config_agency.py": ["/configs/MEGBuffer.py"]}
|
37,691,101
|
Cocococo11/MEG_internship
|
refs/heads/master
|
/testing/plot_score.py
|
import numpy as np
from basemy import *
from config import *
import mne
#**** PLOT THE SCORES *****
file_to_ana = 'npy'
file_to_ana_session = 'session1'
which_channels = 'meg'
file = thedirectorydecod(path_saving, 'results/decoding/meg/') #noqa
fname = op.join(file)
files = os.listdir(fname)
runs = list()
runs.extend(f for f in files if file_to_ana in f and file_to_ana_session in f and which_channels in f and 'probas' not in f) #only for score
subj_nb = 0
all_scores_subjects = []
timeforplot = np.linspace(-1, 1, 241)

def _nearest_time_index(t):
    """Index of the sample of ``timeforplot`` closest to *t* seconds."""
    return int(np.argmin(np.abs(timeforplot - t)))

# BUG FIX: the indices were looked up with exact float equality,
# e.g. np.where(timeforplot == -0.7)[0][0]; but -0.7 and -0.35 are not
# exactly representable among the linspace samples, so np.where returned an
# empty array and the [0][0] indexing raised IndexError. Nearest-sample
# lookup returns the indices the equality test was meant to find.
lb = _nearest_time_index(-1)      # -1 sec
ub = _nearest_time_index(-0.7)    # -0.7 sec
lb2 = _nearest_time_index(-0.35)
ub2 = _nearest_time_index(-0)
# Stack every matching score file into one array (one row per subject/run).
for run_number, this_run in enumerate(runs):
    run_to_load = op.join(fname, this_run)
    scores_subjects = np.load(run_to_load)
    all_scores_subjects.append(scores_subjects)
    subj_nb += 1
all_scores_subjects = np.array(all_scores_subjects)
# print('number of subject %s, shape of scores %s' % (subj_nb, all_scores_subjects.shape))
all_scores_subjects_sem = np.std(all_scores_subjects, axis=0)/np.sqrt(len(all_scores_subjects))
all_scores_subjects_mean = all_scores_subjects.mean(0,)
all_scores_subjects_mean = smooth_to_plot(all_scores_subjects_mean)
mean1 = all_scores_subjects_mean + all_scores_subjects_sem
mean2 = all_scores_subjects_mean - all_scores_subjects_sem
minforplot = min(mean2)
maxforplot = max(mean1)
# plt.fill_between(timeforplot, mean1, mean2, color='green', alpha=0.5, label=which_channels) #-1 to 0
# plotchanceandlimit([timeforplot[0], timeforplot[-1]], [0.5], [minforplot, maxforplot], [0])
# plt.axvline(-0.35, ymin=0.05, ymax=0.953, color='r', linestyle='--', linewidth = 0.8, alpha=0.5) #beginning of RP
# plt.xlabel('Temps en seconde')
# plt.ylabel('Score')
# plt.legend()
# # plt.title('Decoding tous les canaux MEG'
# # + '\nChannels : ' + which_channels
# # + '\n[Mean at [' + str(timeforplot[lb]) + ':' + str(timeforplot[ub]) + '] = ' + str(round(np.mean(all_scores_subjects_mean[lb:ub])*100,1))
# # + '\n[Mean at [' + str(timeforplot[lb2]) + ':' + str(timeforplot[ub2]) + '] = ' + str(round(np.mean(all_scores_subjects_mean[lb2:ub2])*100,1)))
# print('T350 ms score : ', round(np.mean(all_scores_subjects_mean[lb2])*100,1), '\nT0 ms score : ', round(np.mean(all_scores_subjects_mean[ub2])*100,1))
# plt.show()
# # 4. save figure and all subject score
# fname = op.join(directorydecod,
# 'allsubject_%s_session%s_%s_average.jpg' # noqa
# % (file_to_ana, session_nb, which_channels))
# plt.savefig(fname, dpi=300)
# plt.show()
#**** PLOT THE PROBAS *****
subj_nb = 0
runs = list()
runs.extend(f for f in files if file_to_ana in f and file_to_ana_session in f and which_channels in f and 'probas' in f) #only for probas
# for run_number, this_run in enumerate(runs):
for run_number, this_run in enumerate([runs[1]]):
run_to_load = op.join(fname, this_run)
all_probas_subjects = np.load(run_to_load)
subj_nb += 1
all_probas_subjects_sem = np.std(all_probas_subjects, axis=0)/np.sqrt(len(all_probas_subjects))
all_probas_subjects_mean = all_probas_subjects.mean(0,)
all_probas_subjects_mean = smooth_to_plot(all_probas_subjects_mean)
mean1 = all_probas_subjects_mean + all_probas_subjects_sem
mean2 = all_probas_subjects_mean - all_probas_subjects_sem
minforplot = min(mean2)
maxforplot = max(mean1)
plt.plot(timeforplot, all_probas_subjects_mean)
plotchanceandlimit([timeforplot[0], timeforplot[-1]], [0.5], [minforplot, maxforplot], [0])
plt.xlabel('Temps en seconde')
plt.ylabel('Score')
plt.legend()
plt.show()
plt.fill_between(timeforplot, mean1, mean2, color='green', alpha=0.5, label=which_channels)
plotchanceandlimit([timeforplot[0], timeforplot[-1]], [0.5], [minforplot, maxforplot], [0])
plt.xlabel('Temps en seconde')
plt.ylabel('Score')
plt.legend()
plt.show()
print('T350 ms score : ', round(np.mean(all_probas_subjects_mean[lb2])*100,1), '\nT0 ms score : ', round(np.mean(all_probas_subjects_mean[ub2])*100,1))
#split human machine
#moyenne de prediction
#moyenne de proba
|
{"/older_agency_scripts/agency_task_BCI_20210601_CB_MV3.py": ["/MEGBuffer.py"], "/older_agency_scripts/agency_task_BCI_20210623.py": ["/MEGBuffer.py"], "/agency_task_BCI_20210712.py": ["/configs/basemy_agency.py", "/configs/config_agency.py"], "/configs/basemy_agency.py": ["/configs/config_agency.py"], "/configs/config_agency.py": ["/configs/MEGBuffer.py"]}
|
37,691,102
|
Cocococo11/MEG_internship
|
refs/heads/master
|
/testing/test_dataloss.py
|
# Testing data losses
import mne
import numpy as np
import os.path as op
import pandas as pd
import matplotlib.pyplot as plt
directory = 'C:/Users/Coco/Desktop/simple-megbuffer and script/saves/file/'
subject = 'fay'
session = ''
filename = 'saves/file/fa_agency_session0_run0_filter.fif'
fname = op.join(directory, '%s.fif' % (filename))
diff = 298
# epochs = mne.read_epochs(fname)
# raw = mne.io.read_raw_fif(filename)
# data_to_dec = datatodec('meg', epochs)
# events = mne.read_events(filename)
# print(events)
# for every run generated
# Compare the triggers recorded in the MEG acquisition file with the ones
# saved by the MEGBuffer node, run by run, and report the discrepancies.
# NOTE(review): the offsets 20052-298 and 6770 look like hard-coded sample
# alignment constants for this particular recording -- confirm before reuse.
best = 10000
for run in range(12,13):
    # TRIGGERS FROM MEG SAVING
    trigger_MEG = pd.read_csv('saves/trigger/fa_agency_session0_run'+str(run)+'_trigger.csv')
    events_meg = np.array(trigger_MEG) #array of original trigger file
    # Shift the MEG sample indices so they line up with the buffer indexing.
    events_meg[:,0] = events_meg[:,0]+20052-298
    startingIndexMEG = events_meg[1,0]
    endIndexMEG = events_meg[-1,0]
    # print(trigger_MEG)
    # print("indexes : ", startingIndexMEG,endIndexMEG)
    total_of_samplesMEG = endIndexMEG-startingIndexMEG
    # print(np.where(events_meg[:,2]==20))
    # Trigger codes: 20 = human (button press), 40 = computer (classifier).
    nb_human_MEG= np.where(events_meg[:,2]==20)[0].shape[0]
    nb_comp_MEG= np.where(events_meg[:,2]==40)[0].shape[0]
    # print('La sauvegarde MEG a enregistré %d triggers humains et %d triggers du clf, sur %d samples'%(nb_human_MEG,nb_comp_MEG,total_of_samplesMEG))
    # TRIGGERS RETRIEVED FROM MEGBUFFER SAVINGS
    trigger_MEGBuffer = pd.read_csv('saves_fay/savedDataFAY_155208_5steps.csv')
    # events_tri = np.array(trigger_MEGBuffer[startingIndexMEG:endIndexMEG,:]) # only looking for the indexes that were saved on the MEG
    events_tri = np.array(trigger_MEGBuffer)
    indexStartCorresponding = np.where(events_tri[:,1]==startingIndexMEG)
    indexEndCorresponding = np.where(events_tri[:,1]==endIndexMEG)
    events_tri = events_tri[6770:,:] #
    startingIndex = events_tri[1,1]
    endIndex = events_tri[-1,1]
    total_of_samples = endIndex-startingIndex
    # Making sure there is only one timestamp equal to 20 everytime for easier counting
    for a in range(2,events_tri[:,5].size):
        if( events_tri[a,5]==20 and (events_tri[a-1,5]==20 or events_tri[a-1,5]==0.5) and a<events_tri[:,5].size-1):
            events_tri[a,5]=0.5
    events_tri= events_tri[np.where( (events_tri[:,5]==20))]
    # print(events_tri)
    total_button_press = events_tri[:,5].shape[0]
    # Same de-duplication, this time for the classifier (40) triggers.
    clf_tri = np.array(trigger_MEGBuffer)
    clf_tri = clf_tri[6770:,:]
    for a in range(1,clf_tri[:,5].size-2):
        if(a+2<clf_tri.size and (clf_tri[a,5]==40 or clf_tri[a,5]==0.5) and clf_tri[a+1,5]==40):
            clf_tri[a+1,5]=0.5
    clf_first_tri = clf_tri[np.where(clf_tri[:, 5] ==40)[0]]
    total_clf_trigger = clf_first_tri.shape[0]
    clf_tri = np.array(trigger_MEGBuffer)
    # True positives
    clf_tri=clf_tri[6770:]
    for a in range(1,clf_tri[:,5].size-2):
        if(a+2<clf_tri.size and (clf_tri[a,5]==10 or clf_tri[a,5]==0.5) and clf_tri[a+1,5]==10):
            clf_tri[a+1,5]=0.5
    clf_true_pos = clf_tri[np.where(clf_tri[:, 5] ==10)[0]]
    total_clf_trigger_early = clf_true_pos.shape[0]
    nb_human_MEGB= np.where(events_tri[:,5]==20)[0].shape[0]
    nb_comp_MEGB= np.where(events_tri[:,5]==40)[0].shape[0]
    # print("Nombre de trigger early : ",total_clf_trigger_early)
    # Early presses are presumably folded into the human count on the MEG
    # side, hence the +total_clf_trigger_early correction -- TODO confirm.
    diffTriggerClf = abs(nb_comp_MEGB-nb_comp_MEG)
    diffTriggerButton = abs(nb_human_MEGB-nb_human_MEG+total_clf_trigger_early)
    totalDiff = diffTriggerButton + diffTriggerClf
    if(totalDiff<best):
        best = totalDiff
        best_run = (run)
    # transforming the megsave to get something like the trigger save
    meg_alike_array = np.array(trigger_MEGBuffer)
    meg_alike_array = meg_alike_array[6770:,:]
    # Collapse consecutive duplicates of each trigger value (10/40/20) to -1
    # so only the first sample of every trigger stays positive.
    for a in range(1,meg_alike_array[:,5].size-2):
        if(a+2<meg_alike_array.size and (meg_alike_array[a,5]==10 or meg_alike_array[a,5]==-1) and meg_alike_array[a+1,5]==10):
            meg_alike_array[a+1,5]=-1
        if(a+2<meg_alike_array.size and (meg_alike_array[a,5]==40 or meg_alike_array[a,5]==-1) and meg_alike_array[a+1,5]==40):
            meg_alike_array[a+1,5]=-1
        if(a+2<meg_alike_array.size and (meg_alike_array[a,5]==20 or meg_alike_array[a,5]==-1) and meg_alike_array[a+1,5]==20):
            meg_alike_array[a+1,5]=-1
    tresholdIndex = clf_tri[2,1]
    # print(startingIndexMEG+tresholdIndex,endIndexMEG+tresholdIndex)
    listIndexes = np.where(meg_alike_array[:,5]>0)[0]
    # print(listIndexes)
    meg_alike_array = meg_alike_array[listIndexes]
    # print(meg_alike_array)
    # plt.plot(clf_tri[int(startingIndexMEG+tresholdIndex):int(endIndexMEG+tresholdIndex),1], clf_tri[int(startingIndexMEG+tresholdIndex):int(endIndexMEG+tresholdIndex),5],'b',label='MEGBuffer save')
    plt.plot(meg_alike_array[:,1], meg_alike_array[:,5],'b',label='MEGBuffer save')
    # firstIndexMEGB = np.where([events_meg[:,1]>startingIndex])[0][0]
    # print(firstIndexMEGB)
    # plt.plot(clf_true_pos[startingIndexMEG:endIndexMEG,1], clf_true_pos[startingIndexMEG:endIndexMEG,5],'b',label='MEGBuffer save')
    plt.plot(events_meg[:,0], events_meg[:,2],'r',label ='MEG saves')
    # plt.xlim([-0.6,0])
    plt.title('Figure of triggers saved from two different methods (run '+str(run)+')')
    plt.legend()
    plt.ylabel('Triggers (20 human or 40 computer)')
    plt.xlabel('Sample number')
    plt.show()
    # NB BUTTON ETC TWO CORRECT ARRAYS :
    trigger_vec_megb = meg_alike_array[:,5]
    trigger_vec_meg = events_meg[:,2]
    nb_human_MEGB= np.where(trigger_vec_megb[:]==20)[0].shape[0]
    nb_comp_MEGB= np.where(trigger_vec_megb[:]==40)[0].shape[0]
    nb_early_MEGB = np.where(trigger_vec_megb[:]==10)[0].shape[0]
    nb_human_MEG= np.where(trigger_vec_meg[:]==20)[0].shape[0]
    nb_comp_MEG= np.where(trigger_vec_meg[:]==40)[0].shape[0]
    nb_early_MEG = np.where(trigger_vec_meg[:]==10)[0].shape[0]
    print(clf_tri[:,0])
    print('For the MEGBuffer: %d early, %d comp, %d human'%(nb_early_MEGB,nb_comp_MEGB,nb_human_MEGB))
    print('For the MEG: %d early, %d comp, %d human'%(nb_early_MEG,nb_comp_MEG,nb_human_MEG))
    # print('La sauvegarde MEGBuffer a enregistré %d triggers humains et %d triggers du clf, sur un total de %d samples'%(nb_human_MEGB+total_clf_trigger_early,total_clf_trigger, total_of_samples))
    # print('La différence en terme de nombre de button press est de : %d et de clf_trigger : %d'%(diffTriggerButton,diffTriggerClf))
    # print('La meilleure run est la : %d avec %d de différences'%(best_run,best))
|
{"/older_agency_scripts/agency_task_BCI_20210601_CB_MV3.py": ["/MEGBuffer.py"], "/older_agency_scripts/agency_task_BCI_20210623.py": ["/MEGBuffer.py"], "/agency_task_BCI_20210712.py": ["/configs/basemy_agency.py", "/configs/config_agency.py"], "/configs/basemy_agency.py": ["/configs/config_agency.py"], "/configs/config_agency.py": ["/configs/MEGBuffer.py"]}
|
37,691,103
|
Cocococo11/MEG_internship
|
refs/heads/master
|
/configs/config_agency.py
|
#######################################################################################################################
######################### File regrouping all the variables used in the agency_task_bci script#########################
#######################################################################################################################
# All the imports needed for the agency script :
# Imports for psychopy
from __future__ import absolute_import, division
from psychopy import gui, visual, core, data, event, logging, parallel,monitors
from psychopy.constants import (NOT_STARTED, STARTED, PLAYING, PAUSED,
STOPPED, FINISHED, PRESSED, RELEASED, FOREVER)
# Imports for the os and plots and other useful libraries
import os
import os.path as op
import matplotlib.pyplot as plt
import numpy as np
import time
from serial import Serial
# Imports for the pyacq node
from pyacq.core.stream import InputStream
from configs.MEGBuffer import MEGBuffer
from joblib import load
# ******** PARAMETERS TO CHECK AT THE BEGINNING OF THE SESSION **************
# computer (MEG/EEG/MEG_NIH/Marine/Marine_perso/Salim/Fayed/Corentin)
computer = 'Corentin' # MUST BE MEG IF ONLINE
# If not connected to the MEG, everything has to be false except for DEBUG and few_trials
DEBUG = True
trigger = False
eyelink = False
serialPort = False
few_trials = True # False for experiment
# CHOICE_OF_EXPERIMENT = 'S1_random', 'S2_without', 'S2_with'
# 'S1_random' :
# Script used for the first session : images only change randomly or after a button press
# 'S2_with' :
# The MEGBuffer will try to connect to a fieldtrip buffer : works only if
# you are connected to the MEG, or to a virtual fieldtripbuffer (MATLAB script)
# Images will change depending on the data sent (classifier) or button press
# 'S2_without' :
# Used mostly for debugging : no connection to the MEGBuffer will be tried, but you can
# access all the functions for the second session
CHOICE_OF_EXPERIMENT = 'S2_without' # MYST BE S2_WITH IF ONLINE
# **************END OF PARAMETERS TO CHECK AT THE BEGINNING OF THE SESSION **************
# GUI to define the participant, session and part (if session 2)
# debug mode
if DEBUG:
fullscr = False
logging.console.setLevel(logging.DEBUG)
else:
fullscr = True
# logging.console.setLevel(logging.WARNING)
if CHOICE_OF_EXPERIMENT == 'S1_random':
expName = 'AgentivityRandom'
elif CHOICE_OF_EXPERIMENT == 'S2_without':
expName = 'Agentivity_debug_BCI' # for the BCI part
elif CHOICE_OF_EXPERIMENT == 'S2_with':
expName = 'Agentivity_BCI' # for the BCI part
# These variables are used for the textStim : windows with text areas to fill
# expInfo is the starting window
# expOk is the window asking if we should change the clf
# expChange is the actual window where you write the changes you want
# expMEGsave is the window where you write the current meg save number
if CHOICE_OF_EXPERIMENT == 'S1_random':
expInfo = {'participant': '', 'run': ''}
else:
expInfo = {'MEGsave' : '','participant': '', 'run': '','nbSteps':'', 'part': '', 'classifier': ''}
expOk = {'OK':''}
expChange = {'clf':'','nbSteps':''}
expMEGsave = {'MEGsave':''}
# After the first time you fill in the informations on the first window, these informations are saved
# If it is the first time, we will fill with nothing
try:
    # BUG FIX: np.loadtxt used dtype=np.str, a deprecated alias that was
    # removed in NumPy 1.24 (AttributeError on modern numpy); the builtin
    # str is the exact equivalent.
    previousSessionInfos = np.loadtxt('./saves/previousSession.txt', dtype=str, delimiter='\n')
    print("Loaded previous session's infos")
except Exception:
    # Narrowed from a bare 'except:' so KeyboardInterrupt is not swallowed.
    # Missing/unreadable file -> first session: start with empty fields.
    previousSessionInfos = ['','','','','']
# Make sure that the classifiers are in a directory under /classifiers/meg
# Returns the list of the classifiers that are inside that directory
def listClassifiers():
    """Return the file names of the classifiers stored under ./classifiers/meg."""
    classifier_dir = './classifiers/meg'
    return os.listdir(classifier_dir)
listClf = listClassifiers()
clfname = ''
MEGsave = 0
#
# GUI Part : creating the different windows and filling them up
#
# The information window : asking for all the informations about the subject
dlg = gui.Dlg(title=expName)
dlg.addField('participant:',previousSessionInfos[0])
dlg.addField('run:',previousSessionInfos[1])
if CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with':
dlg.addField('MEGsave:',previousSessionInfos[3])
dlg.addField('nbSteps:',previousSessionInfos[2])
dlg.addField('part:', choices=["part1", "part2","part2_blank"])
dlg.addField('classifier:', choices=listClf)
expInfo['participant'], expInfo['run'] , expInfo['MEGsave'] ,expInfo['nbSteps'], expInfo['part'] ,expInfo['classifier'] = dlg.show() # show dialog and wait for OK or Cancel
else:
expInfo['participant'], expInfo['run'] = dlg.show() # show dialog and wait for OK or Cancel
expInfo['part'] = ''
if dlg.OK is False: # or if ok_data is not None
core.quit() # user pressed cancel
# Saving the informations filled in the information window
savePreviousSession = [expInfo['participant'],expInfo['run'],expInfo['nbSteps'],expInfo['MEGsave'],expMEGsave['MEGsave']]
if not os.path.exists('./saves'):
os.makedirs('./saves')
np.savetxt('./saves/previousSession.txt',savePreviousSession,fmt='%s')
# The basic 'asking for a change' window
dlgOk = gui.Dlg(title='Do you want to change the classifier ?',screen=1)
dlgOk.addText("Do you want to change the current classifier of its number of steps ?")
dlgOk.addText("Press OK if so, cancel if not")
# The parameters change for the clf window
dlgChange = gui.Dlg(title='Classifier or nbSteps change',screen=1)
dlgChange.addText("Please enter the new values you want, or press cancel")
dlgChange.addField('nbSteps:',previousSessionInfos[2])
dlgChange.addField('classifier:', choices=listClf)
# The megsave window
MEGsave = expInfo['MEGsave']
dlgMEGsave = gui.Dlg(title='Current MEG save file Number',screen=1)
dlgMEGsave.addText("This is the current MEG save run number, please modify it if it changed \n You will have to re-enter it at the beggining of the part2")
dlgMEGsave.addField('MEGsave:',MEGsave)
# Parameters of the information window
expInfo['expName'] = expName
expInfo['date'] = data.getDateStr() # will create str of current date/time
expInfo['frameRate'] = 60 # store frame rate of monitor
frameDur = 1.0 / 60.0
# image folders
if CHOICE_OF_EXPERIMENT == 'S1_random':
session_image_choice = 'AgencyImage_session1'
elif expInfo['part'] == 'part1' or expInfo['part'] == 'part2_blank':
session_image_choice = 'AgencyImage_session2_part1'
elif expInfo['part'] == 'part2':
session_image_choice = 'AgencyImage_session2_part2'
# Path to save the results
if computer == 'EEG':
home_folder = '/Users/chercheur/Documents/PythonScripts/Agency_Salim/scripts' # noqa
elif computer == 'MEG':
if CHOICE_OF_EXPERIMENT == 'S1_random':
home_folder = 'C:\\Python_users\\Agency\\scripts' #random session
else:
home_folder = 'C:\\Python_users\\Agency\\bci_agency' #bci session
elif computer == 'Marine_perso':
home_folder = '/Users/marinevernet/Documents/lab_Lyon/python/psychopy/agency' # noqa
elif computer == 'Salim':
home_folder = '/Users/Zephyrus/Dropbox/Agency_Salim/scripts'
elif computer == 'Fayed':
home_folder = '/Users/invitéLabo/Desktop/Fayed/scripts/pscyhopy'
elif computer == 'Fayed2':
home_folder = '/Users/Fayed/Desktop/PC_STAGE/mne_analysis/scripts/pscyhopy'
elif computer == 'Corentin':
home_folder = 'C:\\Users\\Coco'
results_folder = home_folder + '/data'
# Data file name
edfFileName = expInfo['participant']+expInfo['run']
if CHOICE_OF_EXPERIMENT == 'S1_random':
filename = results_folder + '/%s_%s_%s_%s' % (expName, expInfo['participant'],
expInfo['run'],
expInfo['date'])
else:
filename = results_folder + '/%s_%s_%s_%s_%s' % (expName,
expInfo['participant'],
expInfo['run'],
expInfo['part'],
expInfo['date'])
participant = expInfo['participant']
run_nbr = expInfo['run']
# for the BCI part (S2)
# nb_of_trials_within_little_block = 0 # initialize counter
dictCounterAnswers = {
"H_yes": 0,
"H_no": 0,
"H_nbw": 0,
"C_yes": 0,
"C_no": 0,
"C_nbw": 0,
"HB_yes": 0,
"HB_no": 0,
"HB_nbw": 0,
"CB_yes": 0,
"CB_no": 0,
"CB_nbw": 0
}
dictCounterAnswersTotal = {
"H_yes": 0,
"H_no": 0,
"H_nbw": 0,
"C_yes": 0,
"C_no": 0,
"C_nbw": 0,
"HB_yes": 0,
"HB_no": 0,
"HB_nbw": 0,
"CB_yes": 0,
"CB_no": 0,
"CB_nbw": 0
}
nbSteps_chosen = None
# Parameters about the triggers sent depending on the event
if computer == 'EEG':
window_size = (1024, 768)
value_parallel_huma = 1
value_parallel_comp = 2
value_parallel_huma_early_after_comp = 6
value_parallel_huma_early_after_huma = 5
value_parallel_huma_early_after_begin = 4
value_parallel_huma_early_after_early = 3
value_answer_yes = 10
value_answer_no = 30
value_answer_nbw = 20
addressPortParallel = '0x0378'
elif computer == 'MEG': # CHECK THESE PARAMETERS
window_size = (1920, 1080)
value_parallel_huma = 20
value_parallel_comp = 40
value_parallel_huma_early_after_comp = 10
value_parallel_huma_early_after_huma = 6
value_parallel_huma_early_after_begin = 4
value_parallel_huma_early_after_early = 2
value_answer_yes = 110
value_answer_no = 130
value_answer_nbw = 120
addressPortParallel = '0x3FE8'
elif computer == 'Marine_perso':
window_size = (1792, 1120) # old mac (1440, 900)
elif computer == 'Fayed':
window_size = (1440, 900)
elif computer == 'Fayed2':
window_size = (1920, 1080)
elif computer == 'Corentin':
window_size = (1920, 1080)
if DEBUG:
window_size = (500, 500)
blank_time = 0.010 # in seconds
# number_of_images = 1500 # max2_trials # 600*2 # up to 1200
image_size = (0.6, 0.6*window_size[0]/window_size[1])
# number of trials TO BE CORRECTED FOR THE REAL EXPERIMENT !!!!!!!!!!!!!!!!!!!
if few_trials:
nb_trials_before_short_break = 4 # 50
nb_trials_before_long_break = 8 # 200
max1_trials = 40 # 1200
max2_trials = 50 # 1400
elif CHOICE_OF_EXPERIMENT == 'S1_random' or expInfo['part']=='part1' :
nb_trials_before_short_break = 50 # 50 for S1_random
nb_trials_before_long_break = 1000 # 200 for S1_random, 1000 for part 1 (infinite so it never ends except when pressing escape)
max1_trials = 1200 # 1200
max2_trials = 1400 # 1400
elif CHOICE_OF_EXPERIMENT == 'S2_with' and expInfo['part']=='part2_blank':
nb_trials_before_short_break = 500 # 50 for S1_random
nb_trials_before_long_break = 1000 # 200 for S1_random, 1000 for part 1 (infinite so it never ends except when pressing escape)
max1_trials = 1200 # 1200
max2_trials = 1400 # 1400
else :
nb_trials_before_short_break = 20 # 20 for S2
nb_trials_before_long_break = 80 # 80 for S2
max1_trials = 1200 # 1200
max2_trials = 1400 # 1400
print('Going for nb_trials_before_short_break = %d , nb_trials_before_long_break = %d' %(nb_trials_before_short_break ,nb_trials_before_long_break))
# Create some handy timers
imageClock = core.Clock()
blankClock = core.Clock()
longBreackClock = core.Clock()
shortBreackClock = core.Clock()
blankBeforeQuestionClock = core.Clock() # for the BCI part
questionClock = core.Clock() # for the BCI part
globalClock = core.Clock() # to track the time since experiment started
globalClock.reset() # clock
# Create the parameters of the gamma function
k_shape = 3
theta_scale = 1
# For part 1
# Count number of button press and number of random changes
button_presses = 0
random_changes = 0
early_button_presses_after_computer = 0
early_button_presses_after_human = 0
# Button presses with resetting count for the part 1 plots
button_presses_bloc = 0
random_changes_bloc = 0
early_button_presses_after_computer_bloc = 0
early_button_presses_after_human_bloc = 0
# Handy variable to know the previous trigger
previousTrigger = ''
timeEarlyBTNPress = 0 # Variable that stores the time when the button was pressed after the image change triggered by the clf
is_there_an_early = 0 # Variable that stores if there was an early button press or not
bloc_nb = 0 # Keeping track of the bloc number we are in (how many short breaks have passed)
threshold600 = 0 # did we reach 600 trials in each category?
###########################################################################################################################
# ***********************************INITIALIZATION OF ALL THE IMAGES, MODULES, WINDOWS etc *******************************
###########################################################################################################################
# Maybe it can be better to put all of this in the configs.config_agency module ?
# Make sure the dir to save images exists
if not os.path.exists('./fig'):
os.makedirs('./fig')
# set up the ports and Eyelink
if serialPort:
port_s = serial_port()
if trigger:
port = parallel.ParallelPort(address=addressPortParallel)
if eyelink:
import EyeLink #noqa
selfEdf = EyeLink.tracker(window_size[0], window_size[1], edfFileName)
# list all images
images = list()
files_list = os.listdir(op.join(home_folder, session_image_choice))
for img in files_list:
if '.jpg' in img:
if img.startswith('A'):
images.append(img)
# build trials
conditions = []
for trial in range(len(images)):
conditions.append({'image_nb': trial})
trials = data.TrialHandler(trialList=conditions, nReps=1, method='random')
# An ExperimentHandler isn't essential but helps with data saving
thisExp = data.ExperimentHandler(dataFileName=filename)
thisExp.addLoop(trials)
# save a log file for detail verbose info
logFile = logging.LogFile(filename+'.log', level=logging.EXP)
logging.console.setLevel(logging.WARNING) # this outputs to the screen
# Setup the Window
win = visual.Window(
size=window_size, fullscr=fullscr, screen=0,
allowGUI=False, allowStencil=False,
monitor='testMonitor', color=[0, 0, 0], colorSpace='rgb',
blendMode='avg', useFBO=True)
# Setup the elements to display
White_screen = visual.Rect(
win=win, name='White_screen', units='cm',
width=(2000, 2000)[0], height=(2000, 2000)[1],
ori=0, pos=(0, 0),
lineWidth=1, lineColor=[1, 1, 1], lineColorSpace='rgb',
fillColor=[0.5, 0.5, 0.5], fillColorSpace='rgb',
opacity=1, interpolate=True)
Instructions = visual.TextStim(
win=win, name='Instructions',
text='''Une image va apparaitre à l'écran.
\nPrenez quelques secondes pour l'observer sans bouger les yeux de la croix centrale.
\nClignez les yeux le moins possible.
\nPour démarrer, appuyez sur le bouton de droite.''',
font='Arial',
pos=(0, 0), height=0.1, wrapWidth=None, ori=0,
color='black', colorSpace='rgb', opacity=1)
Cross = visual.ShapeStim(
win=win, name='Cross', vertices='cross', units='cm',
size=(0.8, 0.8),
ori=0, pos=(0, 0),
lineWidth=0.5, lineColor=[1, 0, 0], lineColorSpace='rgb',
fillColor=[1, 0, 0], fillColorSpace='rgb',
opacity=1, interpolate=True)
Pixel = visual.Rect(
win=win, name='topleftpixel', units='pix',
pos=(-window_size[1], window_size[1]/2),
size=(window_size[0]*2/5, 200),
fillColor=[-1, -1, -1],
lineColor=[-1, -1, -1])
# Initialize components for Routine "image"
fname = op.join(home_folder, session_image_choice, images[1])
Image = visual.ImageStim(
win, image=fname, pos=(0, 0), size=image_size)
preload_images = [
visual.ImageStim(win, op.join(home_folder, session_image_choice, img), size=image_size)
for img in images]
# for the BCI part (part 2) : create the question window
if (CHOICE_OF_EXPERIMENT == 'S2_without' or CHOICE_OF_EXPERIMENT == 'S2_with') and (expInfo['part'] == 'part2' or expInfo['part']== 'part2_blank') :
if (expInfo['part'] == 'part2'):
Question = visual.TextStim(win=win, name='Question', text="Avez-vous changé l'image ?",
font='Arial', pos=(0, 0.3), height=0.1, wrapWidth=None,
ori=0, color='black', colorSpace='rgb', opacity=1)
else : # If we are in the blank part 2 : participant discovering the buttons and how they work
Question = visual.TextStim(win=win, name='Question',
text="Avez-vous changé l'image ? \n \n Utilisez les boutons du haut pour vous déplacer d'un côté à l'autre \n et celui de gauche pour valider ! ",
font='Arial', pos=(0, 0.5 ), height=0.1, wrapWidth=None,
ori=0, color='black', colorSpace='rgb', opacity=1)
AnswerYes = visual.TextStim(win=win, name='AnswerYes', text='VOUS',
font='Arial', pos=(0, -0.1), height=0.06, wrapWidth=None,
ori=0, color='black', colorSpace='rgb', opacity=1)
AnswerNo = visual.TextStim(win=win, name='AnswerNo', text='ORDI',
font='Arial', pos=(0, -0.1), height=0.06, wrapWidth=None,
ori=0, color='black', colorSpace='rgb', opacity=1)
AnswerNoButWanted = visual.TextStim(win=win, name='AnswerNoButWanted', text='ORDI AVANT VOUS',
font='Arial', pos=(0, -0.1), height=0.06, wrapWidth=None,
ori=0, color='black', colorSpace='rgb', opacity=1)
print('\n')
###########################################################################################################################
# *************************** END OF INITIALIZATION OF ALL THE IMAGES, MODULES, WINDOWS etc *******************************
###########################################################################################################################
|
{"/older_agency_scripts/agency_task_BCI_20210601_CB_MV3.py": ["/MEGBuffer.py"], "/older_agency_scripts/agency_task_BCI_20210623.py": ["/MEGBuffer.py"], "/agency_task_BCI_20210712.py": ["/configs/basemy_agency.py", "/configs/config_agency.py"], "/configs/basemy_agency.py": ["/configs/config_agency.py"], "/configs/config_agency.py": ["/configs/MEGBuffer.py"]}
|
37,691,104
|
Cocococo11/MEG_internship
|
refs/heads/master
|
/configs/extract.py
|
import numpy as np
import mne
from mne.io import read_raw_ctf
import os.path as op
import os
from config import * #check ce fichier où tu vas mettre ta config, les path, les subject que tu choisis etc.
#THIS SCRIPT IS FOR EXTRACT FILE FOR MATLAB AND EXTRACT TRIGGER
# **** loading files ****
# Walk every (subject, session, file-type) combination, detect the trigger
# events in the raw CTF recording and export them as CSV for MATLAB/Python
# downstream scripts.  Relies on names star-imported from config:
# subjects_id, session_subject, files_to_ana, path_data, path_saving.
subj_nb = 0
total_run = 0
for subject, sessions in zip(subjects_id, session_subject):
    subj_nb += 1
    # Bug fix: the session index was previously discarded
    # (`for _, session in enumerate(sessions)`), so every file was named
    # with the config's module-level `session_nb` instead of the session
    # actually being processed; predicting_clf.py expects `session0`.
    for session_nb, session in enumerate(sessions):
        for file_to_ana in files_to_ana:
            fname7 = op.join(path_data, session, 'MEG')  # session's MEG folder
            files = os.listdir(fname7)                   # all files in the folder
            runs = list()                                # all the .ds runs matching file_to_ana
            runs.extend(f for f in files if file_to_ana in f)
            # for run_number, this_run in enumerate(runs): #all the run
            for run_number, this_run in enumerate([runs[0]]):  # only the first run
                run_to_load = op.join(fname7, this_run)
                raw = read_raw_ctf(run_to_load, preload=True)
                # **** reading the triggering channel ****
                trigger_ch_number = raw.ch_names.index('UPPT002')
                trigger = raw.get_data()[trigger_ch_number]
                # **** detecting the events from the trigger channel ****
                events_tri = mne.find_events(raw, stim_channel="UPPT002", consecutive=True, shortest_event=1)
                # **** delete triggers associated to start/end of file (codes 252, 253, 4) ****
                events_tri_to_del_id = list()
                for parcours_eve_tri_ix in range(events_tri.shape[0]):
                    if events_tri[parcours_eve_tri_ix, 2] in (252, 253, 4):
                        events_tri_to_del_id.append(parcours_eve_tri_ix)
                events_tri = events_tri[np.delete(range(0, events_tri.shape[0]), events_tri_to_del_id)]
                # **** saving trigger ****
                directory = op.join(path_saving, 'trigger')  # folder for the trigger CSVs
                fname1 = op.join(directory, '%s_%s_session%s_run%s_trigger.csv'  # noqa
                                 % (subject, file_to_ana, session_nb, run_number))
                np.savetxt(fname1, events_tri, fmt='%d', delimiter=',')
                # **** raw/filtered FIF export paths (what MATLAB reads) ****
                directory = op.join(path_saving, 'file/')
                fname2 = op.join(directory, '%s_%s_session%s_run%s_raw.fif'  # raw name
                                 % (subject, file_to_ana, session_nb, run_number))
                fname3 = op.join(directory, '%s_%s_session%s_run%s.fif'  # filtered name
                                 % (subject, file_to_ana, session_nb, run_number))
                # raw.save(fname2, overwrite=True)
                # raw.filter(0.6, 30)
                # raw.save(fname3, overwrite=True)
|
{"/older_agency_scripts/agency_task_BCI_20210601_CB_MV3.py": ["/MEGBuffer.py"], "/older_agency_scripts/agency_task_BCI_20210623.py": ["/MEGBuffer.py"], "/agency_task_BCI_20210712.py": ["/configs/basemy_agency.py", "/configs/config_agency.py"], "/configs/basemy_agency.py": ["/configs/config_agency.py"], "/configs/config_agency.py": ["/configs/MEGBuffer.py"]}
|
37,691,105
|
Cocococo11/MEG_internship
|
refs/heads/master
|
/testing/predicting_clf.py
|
import pandas as pd
import matplotlib.pyplot as plt
import copy
import numpy as np
import mne
from mne.io import read_raw_ctf
import os.path as op
import os
from config import * #check ce fichier où tu vas mettre ta config, les path, les subject que tu choisis etc.
def predicting_clf(timeStamp, subjectId, nbSteps):
    """Compare classifier-detected motor preparations with the MEG triggers.

    Loads the trigger CSV exported by extract.py and the online classifier
    output CSV, pairs each detection with the first trigger that follows it,
    then plots the cumulative proportion of detections preceding human
    (code 20) and machine (code 40) image changes.

    Parameters
    ----------
    timeStamp : str
        Timestamp embedded in the classifier-output file name.
    subjectId : str
        Subject identifier embedded in both file names.
    nbSteps : str
        Number of classifier steps embedded in the file name.
    """
    # Original trigger file (column 0 = sample index, column 2 = event code).
    trigger = pd.read_csv('saves/trigger/'+subjectId+'_agency_session0_run0_trigger.csv')
    events_tri = np.array(trigger)
    # Keep only the rows whose 6th column exceeds 19.
    # NOTE(review): assumes a 6-column trigger CSV — confirm against the
    # file actually written by extract.py (which saves 3 columns).
    events_tri = events_tri[np.where(events_tri[:, 5] > 19)]
    print(events_tri)
    # Detected motor-preparation file produced by the online classifier.
    clf_trigger = pd.read_csv('saves/savedData'+subjectId+'_'+timeStamp+'_'+nbSteps+'steps.csv')
    clf_tri = np.array(clf_trigger.iloc[:, 1])  # detection sample indices
    clf_first_tri = clf_tri[np.where(clf_trigger.iloc[:, 2] > 1)[0]]
    # Build `value`, one row per retained detection:
    # [detection time, trigger time, (detection - trigger) / 600 (seconds),
    #  trigger index, trigger code (20 human / 40 machine)].
    value = []
    for clf_trial in clf_first_tri:
        # Index of the first trigger occurring after this detection.
        index_tri = np.where((clf_trial - events_tri[:, 0]) < 0)[0][0]
        # Keep only the first detection preceding a given trigger.
        if not value or index_tri not in np.array(value)[:, 3]:
            value.append([clf_trial, events_tri[index_tri, 0],
                          (clf_trial - events_tri[index_tri, 0]) / 600,
                          index_tri, events_tri[index_tri, 2]])
    # Total human/machine triggers between the first and the last detected
    # motor preparation (+1 so the last trigger is included).
    nb_human = np.where(events_tri[value[0][3]:(value[-1][3]+1), 2] == 20)[0].shape[0]
    nb_machine = np.where(events_tri[value[0][3]:(value[-1][3]+1), 2] == 40)[0].shape[0]
    # Sort by the detection-to-trigger delay (third column).
    value.sort(key=lambda row: row[2])
    # Deep copy so `value` keeps the raw trigger codes.
    value2 = copy.deepcopy(value)
    # Split into hits (human trigger, code 20) and false alarms (machine).
    hit = [row for row in value2 if row[4] == 20]
    fa = [row for row in value2 if row[4] != 20]
    # Replace the code column with the cumulative proportion
    # (+1 because the first detection counts as 1/N, not 0).
    for i, row in enumerate(hit):
        row[4] = (i + 1) / nb_human
    for i, row in enumerate(fa):
        row[4] = (i + 1) / nb_machine
    hit = np.array(hit, dtype='object')
    fa = np.array(fa, dtype='object')
    # Cumulative detection curves over the delay axis [-3.3 s, 0].
    # NOTE(review): the -3.3 lower bound is hard-coded; the original author
    # noted it should probably be the minimum observed delay instead.
    steppredic = np.linspace(-3.3, 0, 331)
    cumul_mov = np.zeros(len(steppredic))
    cumul_not = np.zeros(len(steppredic))
    cumul_dif = np.zeros(len(steppredic))  # computed for inspection; not plotted
    for lim_n, lim in enumerate(steppredic):
        cumul_mov[lim_n] = sum(hit[:, 2] < lim) / nb_human
        cumul_not[lim_n] = sum(fa[:, 2] < lim) / nb_machine
        cumul_dif[lim_n] = cumul_mov[lim_n] - cumul_not[lim_n]
    plt.bar(steppredic, cumul_mov, align='edge', width=-0.01, alpha=0.5)
    plt.bar(steppredic, cumul_not, align='edge', width=-0.01, alpha=0.5)
    plt.show()
|
{"/older_agency_scripts/agency_task_BCI_20210601_CB_MV3.py": ["/MEGBuffer.py"], "/older_agency_scripts/agency_task_BCI_20210623.py": ["/MEGBuffer.py"], "/agency_task_BCI_20210712.py": ["/configs/basemy_agency.py", "/configs/config_agency.py"], "/configs/basemy_agency.py": ["/configs/config_agency.py"], "/configs/config_agency.py": ["/configs/MEGBuffer.py"]}
|
37,691,106
|
Cocococo11/MEG_internship
|
refs/heads/master
|
/testing/config.py
|
'''
This is the configuration used by the analysis scripts.
session_nb = which recording session to analyse (1 or 2)
subjects_id = the list of subject identifiers to include
session_subject = one list of session folder names per subject
'''
# **** path ************************************************************
computer = 'corentin' # 'home' 'work' 'biowulf' 'fayed'
if computer == 'ssd':
path_data = '/Volumes/SSD_Marine/agency/Data_MEG'
path_analyzed_data = '/Volumes/SSD_Marine/agency/MEG_analyzed_data'
elif computer == 'fayed':
path_data = '/Users/Fayed/Desktop/StageM2/scripts/decoding/MEG/MEG_data'
path_analyzed_data = '/Users/Fayed/Desktop/StageM2/scripts/decoding/MEG/MEG_analyzed_data'
path_saving = '/Users/Fayed/Desktop/StageM2/scripts/decoding/MEG/MEG_save' #where you want to save some file, like clf, trigger etc.
elif computer == 'corentin':
path_data = '/Users/Fayed/Desktop/StageM2/scripts/decoding/MEG/MEG_data'
path_analyzed_data = '/Users/Fayed/Desktop/StageM2/scripts/decoding/MEG/MEG_analyzed_data'
path_saving = '/Users/Coco'
# **** sessionType for scripts agency BCI *****
# sessionType = 'session2_part1','session2_part2'
sessionType = 'session2_part2'
# **** subjects ************************************************************
session_nb = 1
subjects_id = ['MAR', 'SAL', 'FAY', '0986', '0987', '0989', '0990', '0991', '0992', '0993', '0994', '0995', '0996'] #all session 1
# subjects_id = ['0986', '0987', '0989', '0990', '0991', '0992', '0993', '0994', '0995', '0996'] #subject without pilot
# subjects_id = ['0986', '0987', '0989', '0990', '0991', '0992', '0993', '0994'] #subject memoire
# session_nb = 2
# subjects_id = ['MAR2', 'SAL2', 'FAY2', '1059', '1060', '1061', '1062'] #all session 2 : these data are filtred
#**** group of subject by session ****
# session_nb = 1
# subjects_id = ['MAR', 'SAL', 'FAY'] #pilot
# subjects_id = ['0986', '0987'] #18 mars
# subjects_id = ['0989', '0990', '0991'] #15 avril
# subjects_id = ['0992', '0993', '0994', '0995' ] #29 avril
# subjects_id = ['0996'] #20 mai
# session_nb = 2
# subjects_id = ['FAY_test'] #20 mai
# subjects_id = ['MAR2', 'FAY2'] #03 juin
# subjects_id = ['1059', '1060', '1061'] #24 juin
# subjects_id = ['SAL2', '1062'] #08 juillet
#**** each subject ****
#12 décembre
# subjects_id = ['MAR']
# subjects_id = ['SAL']
#18 février
# subjects_id = ['FAY']
#18 mars
# subjects_id = ['0986'] #RJ weird event on file 04.ds #1059
# subjects_id = ['0987'] #AEG #1060
#15 avril
# subjects_id = ['0989'] #MY #1062
# subjects_id = ['0990'] #TZ not very good decoding for this subject
# subjects_id = ['0991'] #MA #1061
#29 avril
# subjects_id = ['0992'] #ES
# # subjects_id = ['0993'] #DAA
# subjects_id = ['0994'] #CC press too fast BP, many BP in a row.
# subjects_id = ['0995'] #DC bad eye tracker, last file only 50 trial, weird RP.
#20 mai
# subjects_id = ['0996'] #LB
# subjects_id = ['FAY_test'] #pilot : test bci for the 1st time.
#03 juin
# session_nb = 2
# subjects_id = ['MAR2'] #pilot
# subjects_id = ['FAY2'] #pilot
# 24 juin
# session_nb = 2
# subjects_id = ['1059'] #RJ emg can't analyse yet
# subjects_id = ['1060'] #AEG
# subjects_id = ['1061'] #MA
#08 juillet
# session_nb = 2
# subjects_id = ['SAL2'] #pilot
# subjects_id = ['1062'] #MY
# **** sessions ************************************************************
session_nb = 1
session_subject = [['MAR20201210_session1'], ['SAL20201210_session1'], ['FA20210218_session1'],['RJ20210318_session1'], ['AEG20210318_session1'], ['MY20210415_session1'],
['TZ20210415_session1'], ['MA20210415_session1'], ['ES20210429_session1'], ['DAA20210429_session1'], ['CC20210429_session1'], ['DC20210429_session1'], ['LB20210520_session1']] #all session 1
# session_subject = [['RJ20210318_session1'], ['AEG20210318_session1'], ['MY20210415_session1'], ['TZ20210415_session1'], ['MA20210415_session1'],
# ['ES20210429_session1'], ['DAA20210429_session1'], ['CC20210429_session1'], ['DC20210429_session1'], ['LB20210520_session1']] #subject without pilot
# session_subject = [['RJ20210318_session1'], ['AEG20210318_session1'], ['MY20210415_session1'], ['TZ20210415_session1'], ['MA20210415_session1'],
# ['ES20210429_session1'], ['DAA20210429_session1'], ['CC20210429_session1']] #subject memoire
# session_nb = 2
# session_subject = [['MAR20210603_session2'], ['SAL20210708_session2'], ['FA20210603_session2'], ['RJ20210624_session2'], ['AEG20210624_session2'], ['MA20210624_session2'], ['MY20210708_session2']] #all session 2
#**** group of subject by session ****
# session_nb = 1
# session_subject = [['MAR20201210'], ['SAL20201210'], ['FA20210218']] #pilot
# session_subject = [['RJ20210318_session1'], ['AEG20210318_session1']] #18 mars
# session_subject = [['MY20210415_session1'], ['TZ20210415_session1'], ['MA20210415_session1']] #15 avril
# session_subject = [['ES20210429_session1'], ['DAA20210429_session1'], ['CC20210429_session1'], ['DC20210429_session1']] #29 avril
# session_subject = [['LB20210520_session1']] #20 mai
# session_nb = 2
# session_subject = [['FA20210520_session2_test']] #20 mai
# session_subject = [['MAR20210603_session2'], ['FA20210603_session2']] #03 juin
# session_subject = [['RJ20210624_session2'], ['AEG20210624_session2'], ['MA20210624_session2']] #24 juin
# session_subject = [['SAL20210708_session2'], ['MY20210708_session2']] #08 juillet
#**** each subject ****
#12 décembre
# session_subject = [['MAR20201210_session1']]
# session_subject = [['SAL20201210_session1']]
# #18 février
# session_subject = [['FA20210218_session1']]
#18 mars
# session_subject = [['RJ20210318_session1']]
# session_subject = [['AEG20210318_session1']]
#15 avril
# session_subject = [['MY20210415_session1']]
# # session_subject = [['TZ20210415_session1']]
# session_subject = [['MA20210415_session1']]
#29 avril
# session_subject = [['ES20210429_session1']]
# session_subject = [['DAA20210429_session1']]
# session_subject = [['CC20210429_session1']]
# session_subject = [['DC20210429_session1']]
#20 mai
# session_subject = [['LB20210520_session1']]
# session_subject = [['FA20210520_session2_test']]
#03 juin
# session_subject = [['MAR20210603_session2']]
# session_subject = [['FA20210603_session2']]
#24 juin
# session_subject = [['RJ20210624_session2']]
# session_subject = [['AEG20210624_session2']]
# session_subject = [['MA20210624_session2']]
#08 juillet
# session_subject = [['SAL20210708_session2']]
# session_subject = [['MY20210708_session2']]
# **** file ************************************************************
files_to_ana = ['agency']
# **** which channels to decode *****************************************
which_channels = 'meg'
# which_channels = 'meg&ref'
# which_channels = 'meg&emg'
# which_channels = 'meg&pupil'
# which_channels = 'meg&eyes'
# which_channels = 'motor'
# which_channels = 'mlc'
# which_channels = 'mrc'
# which_channels = 'eyes'
# which_channels = 'pupil'
# which_channels = 'trigger'
# which_channels = 'emg'
# what = 'trigger'
def thedirectory(path_saving, what):
    """Return the sub-directory of *path_saving* used to save *what*,
    creating it if it does not exist yet.

    Returns None for categories other than 'trigger'.
    """
    # Bug fix: this module never imported os / os.path, so calling this
    # function raised NameError; import locally to keep the fix contained.
    import os
    import os.path as op
    if what == 'trigger':
        directory = op.join(path_saving, 'trigger')
        if not op.exists(directory):
            os.mkdir(directory)
        return directory
    return None
|
{"/older_agency_scripts/agency_task_BCI_20210601_CB_MV3.py": ["/MEGBuffer.py"], "/older_agency_scripts/agency_task_BCI_20210623.py": ["/MEGBuffer.py"], "/agency_task_BCI_20210712.py": ["/configs/basemy_agency.py", "/configs/config_agency.py"], "/configs/basemy_agency.py": ["/configs/config_agency.py"], "/configs/config_agency.py": ["/configs/MEGBuffer.py"]}
|
37,714,363
|
sukritkapil2/DSIR-System
|
refs/heads/master
|
/category.py
|
# NOTE: `import jsonnn` removed — ./jsonnn is the data directory, not a
# Python package; importing it only worked by accident (as an empty
# namespace package) and served no purpose.
import json
import os
import numpy as np

# Collect every category from every document JSON in ./jsonnn.
docFiles = [f for f in os.listdir('./jsonnn') if f.endswith(".json")]
category = []
for file in docFiles:
    with open("./jsonnn/" + file) as json_data:
        document = json.load(json_data)
    for ct in document["categories"]:
        category.append(ct)

# Persist the raw (possibly duplicated) category list.
with open('savers/category.json', 'w') as fp:
    json.dump(category, fp)

# Deduplicated, sorted view kept in memory.
# NOTE(review): computed *after* saving, so the JSON file still contains
# duplicates — confirm whether the unique list should be the one persisted.
category = np.unique(category).tolist()
|
{"/final_gui.py": ["/store_scores_gui.py"], "/1-click-run.py": ["/final_gui.py", "/jsonconverter.py", "/store_categories_authors.py", "/store_document_tokens_list.py", "/store_vocabulary.py", "/store_megadict.py", "/document_normalized_denominator.py"]}
|
37,714,364
|
sukritkapil2/DSIR-System
|
refs/heads/master
|
/store_document_tokens_list.py
|
from math import log
import nltk
from nltk import word_tokenize
from nltk import FreqDist
import sys
import math
import os
from nltk.stem.snowball import SnowballStemmer
from collections import defaultdict
import pickle
import json
nltk.download('punkt')
nltk.download('stopwords')
vocabulary = {}      # placeholder, unused in this script (kept for parity with sibling scripts)
vocabulary_idf = {}  # placeholder, unused in this script
freqDist = {}        # placeholder, unused in this script
document_tokens_list= []  # one token list per document, filled by create_document_tokens_list()
temp_doc_tokens = []
snowball_stemmer = SnowballStemmer('english')
# Collect the document JSON files and sort them numerically by file-name
# stem ("12.json" -> 12) so documents are processed in ascending order.
docFiles = [f for f in os.listdir('./jsonnn') if f.endswith(".json")]
for i in range(len(docFiles)):
    docFiles[i] = int(docFiles[i].split(".")[0])
    # print(docFiles[i])
docFiles.sort()
# print(docFiles)
def create_document_tokens_list():
    """
    Build document_tokens_list (one token list per document) from the JSON
    files in ./jsonnn and store it in savers/document_tokens_list.json.

    Each document's title, authors and categories are concatenated, then
    word-tokenized, lower-cased, Snowball-stemmed and stripped of English
    stopwords.
    """
    count = 0
    # Hoisted out of the loop: stopwords.words() re-reads the corpus on every
    # call, and set membership is O(1) versus O(n) on a list.
    stopword_set = set(nltk.corpus.stopwords.words('english'))
    for file in docFiles:
        with open("./jsonnn/" + str(file) + ".json") as json_data:
            document = json.load(json_data)
        count += 1
        # Concatenate title, authors and categories into one text blob.
        words = str(document["title"])
        for author in document["authors"]:
            words += str(" " + author)
        for category in document["categories"]:
            words += str(" " + category)
        print(count)  # progress indicator
        temp_doc_tokens = nltk.word_tokenize(words)
        temp_doc_tokens = [w.lower() for w in temp_doc_tokens]
        temp_doc_tokens = [snowball_stemmer.stem(token) for token in temp_doc_tokens]
        temp_doc_tokens = [token for token in temp_doc_tokens if token not in stopword_set]
        document_tokens_list.append(temp_doc_tokens)
    # Persist for the downstream scoring scripts.
    with open('savers/document_tokens_list.json', 'w') as fp:
        json.dump(document_tokens_list, fp)
# calling the function
create_document_tokens_list()
|
{"/final_gui.py": ["/store_scores_gui.py"], "/1-click-run.py": ["/final_gui.py", "/jsonconverter.py", "/store_categories_authors.py", "/store_document_tokens_list.py", "/store_vocabulary.py", "/store_megadict.py", "/document_normalized_denominator.py"]}
|
37,778,643
|
fanieblesat/proyectoMintic
|
refs/heads/main
|
/test.py
|
from db import get_db, close_db
# NOTE(review): `app` is never imported or defined in this script, so the
# next line raises NameError; Flask's app_context() also takes no `app=`
# keyword. The intent is presumably `from app import app` followed by
# `with app.app_context():` — confirm before using this script.
app.app_context(app=db)
# Drop any stale connection in flask.g, then open a fresh one and run a
# smoke-test query against the user table.
close_db()
db=get_db()
db.execute('SELECT * FROM user')
|
{"/app.py": ["/db.py", "/inventory.py", "/models.py"], "/test.py": ["/db.py"]}
|
37,778,644
|
fanieblesat/proyectoMintic
|
refs/heads/main
|
/inventory.py
|
class Inventory:
    """Simple record describing one product held in the store inventory."""

    def __init__(self, ID, name, numInStock, img, cost):
        self.ID = ID                  # product reference / primary key
        self.name = name              # display name
        self.numInStock = numInStock  # units currently in stock
        self.img = img                # path or filename of the product image
        self.cost = cost              # unit price

    def __repr__(self):
        # Debug-friendly representation mirroring the constructor call.
        return (f"{type(self).__name__}(ID={self.ID!r}, name={self.name!r}, "
                f"numInStock={self.numInStock!r}, img={self.img!r}, cost={self.cost!r})")
|
{"/app.py": ["/db.py", "/inventory.py", "/models.py"], "/test.py": ["/db.py"]}
|
37,778,645
|
fanieblesat/proyectoMintic
|
refs/heads/main
|
/db.py
|
import sqlite3
from sqlite3 import Error
from flask import g
def get_db():
    """Return the request-scoped SQLite connection, creating it on first use.

    The connection is cached on flask.g so the same one is reused within a
    request.  Returns None if the connection cannot be opened.
    """
    try:
        if 'db' not in g:
            g.db = sqlite3.connect('almacen.db')
        # Bug fix: the return was previously inside the `if`, so a second
        # call within the same request (when 'db' was already in g)
        # silently returned None — callers worked around it by calling
        # close_db() first.
        return g.db
    except Error as e:
        # Bug fix: previously printed the Error *class*, not the exception.
        print(e)
        return None
def close_db():
    """Close and discard the request-scoped SQLite connection, if any."""
    connection = g.pop('db', None)
    if connection is not None:
        connection.close()
|
{"/app.py": ["/db.py", "/inventory.py", "/models.py"], "/test.py": ["/db.py"]}
|
37,778,646
|
fanieblesat/proyectoMintic
|
refs/heads/main
|
/app.py
|
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
from flask import Flask, render_template, request, flash, session, url_for, redirect
from flask_sqlalchemy import SQLAlchemy
from flask_login import login_required, LoginManager, login_user, logout_user, UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
import utils
import os
import yagmail
import inventory
import sqlite3
from __init__ import db, create_app
from models import User
from datetime import datetime
from flask import Flask, render_template, request, flash, jsonify, \
redirect, session, g, url_for, send_file, make_response, send_from_directory, abort
from db import get_db, close_db
from werkzeug.local import LocalProxy as LP
from passlib.hash import sha256_crypt
from werkzeug.utils import secure_filename
import usuario
from flask_mail import Mail, Message
from flask_jwt_extended import jwt_required, create_access_token, get_jwt_identity
import jwt
from time import time
import imghdr
import functools
app = Flask(__name__)
# Configure the app for image-upload validation:
# max request size of 5 MB; only these image extensions are accepted.
app.config['MAX_CONTENT_LENGTH'] = 5 * 1024 * 1024
app.config['UPLOAD_EXTENSIONS'] = ['.jpg', '.png', '.gif', '.jpeg']
# Outgoing-mail configuration (Gmail SMTP over SSL).
# NOTE(review): credentials are hard-coded in source control — move them to
# environment variables or a secrets store.
app.config['MAIL_SERVER'] = 'smtp.gmail.com'
app.config['MAIL_PORT'] = 465
app.config['MAIL_USERNAME'] = "almacengrupoamisiontic2020@gmail.com"
app.config['MAIL_PASSWORD'] = "Almacen.Grupo.2020"
app.config['UPLOAD_PATH'] = 'uploads'
app.config['MAIL_USE_TLS'] = False
app.config['MAIL_USE_SSL'] = True
mail = Mail(app)
#Validate image extension
def validate_image(stream):
    """Sniff the image type from *stream*'s first 512 bytes and return the
    matching extension ('.jpg', '.png', '.gif', ...), or None when the
    header is not a recognized image format.  The stream is rewound to the
    start before returning."""
    header_bytes = stream.read(512)
    stream.seek(0)
    detected = imghdr.what(None, header_bytes)
    if detected is None:
        return None
    # imghdr reports 'jpeg'; the app's allow-list uses '.jpg'.
    return ".jpg" if detected == "jpeg" else "." + detected
app.secret_key = os.urandom(24)
# NOTE(review): the secret key is regenerated on every restart, which
# invalidates all existing sessions and reset tokens — confirm intended.
# Start the connection to the database (drop any stale one first).
with app.app_context():
    close_db()
    db=get_db()
# Start the flask-login manager.
login_manager=LoginManager()
login_manager.init_app(app)
#Configure the login manager
@login_manager.user_loader
def load_user(user_id):
    """flask-login user-loader callback.

    NOTE(review): this implementation ignores `user_id`, runs an unused
    SELECT and always returns None, so flask-login can never restore a
    user from the session cookie — confirm whether it should look up
    `user_id` and return a `models.User` instance instead.
    """
    # since the user_id is just the primary key of our user table, use it in the query for the user
    db.execute("SELECT * FROM user")
    return
#Validate the user is logged in.
@app.before_request
def load_logged_user():
    """Before every request, load the session's user row into flask.g.user
    (or set it to None when no user_id is stored in the session)."""
    user_id = session.get('user_id')
    print(user_id)  # NOTE(review): debug print left in — consider logging instead
    if user_id is None:
        g.user = None
    else:
        # Drop any stale connection before querying on a fresh one.
        close_db()
        g.user = get_db().execute(
            'SELECT * FROM user WHERE id_user=?', (user_id,)
        ).fetchone()
#Redirect to login if the user is not logged in
def login_required(view):
    """Decorator: allow the wrapped view only for authenticated users,
    redirecting anonymous visitors to the login page with a flash."""
    @functools.wraps(view)
    def wrapped_view(**kwargs):
        if g.user is not None:
            return view(**kwargs)
        flash("Inicia sesión para poder acceder al contenido de la aplicación")
        return redirect(url_for('login'))
    return wrapped_view
# APP
#LOGIN
@app.route('/login', methods = ['POST', 'GET'])
def login():
    """Log a user in.

    GET: render the login page (redirecting to `principal` when a session
    already exists).  POST: look the user up by e-mail, verify the
    sha256_crypt-hashed password and populate the session on success.
    """
    try:
        # Already authenticated -> straight to the main page.
        if g.user:
            flash("Ya realizaste el login, "+session['name'])
            return redirect(url_for("principal"))
        if(request.method == 'POST'):
            username = request.form.get("usuario")
            password = request.form.get("contrasena")
            # Refresh the request-scoped DB connection.
            close_db()
            db=get_db()
            if not username:
                error="Debe ingresar un usuario"
                flash(error)
                return render_template('index.html')
            # Bug fix: a stray `flash(error)` here ran whenever a username
            # WAS provided, with `error` unbound -> UnboundLocalError
            # (500 on every real login attempt). Removed.
            if not password:
                error = "Contraseña es requerida"
                flash(error)
                return render_template('login.html')
            user = db.execute(
                'SELECT * FROM user WHERE email = ?', (username, )
            ).fetchone()
            if user is None:
                error = 'Usuario no registrado'
                flash(error)
            else:
                # user row layout: (id, name, email, password_hash, is_admin)
                if (sha256_crypt.verify(password,user[3])):
                    session.clear()
                    session['user_id'] = user[0]
                    session['name']=user[1]
                    session['mail']=user[2]
                    session['admin']=user[4]
                    resp = make_response(redirect(url_for('principal')))
                    resp.set_cookie('username', username)
                    return resp
                else: flash("Contraseña no concuerda con los registros para el usuario")
                return render_template('index.html')
        return render_template('index.html')
    except TypeError as e:
        # Bug fix: flash("Ocurrio un eror:", e) passed `e` as the flash
        # *category*, so the error detail never reached the user.
        flash("Ocurrio un eror: %s" % e)
        return render_template('index.html')
@app.route('/registrar', methods = ['POST', 'GET'])
@login_required
def registrar():
    """Admin-only user registration.

    GET: render the registration form.  POST: when the e-mail is not
    already in the `user` table, insert the new user (password hashed and
    salted with sha256_crypt) and e-mail the credentials to them.
    Non-admin users are redirected to `principal` with an error flash.

    NOTE(review): the plain-text password is included in the e-mail —
    confirm this is acceptable for the deployment.
    """
    if session['admin']=='true':
        if(request.method=='POST'):
            # Refresh the request-scoped DB connection.
            close_db()
            db=get_db()
            user_mail = request.form.get("mail")
            user_name = request.form.get("name")
            pass_w=request.form.get("contrasena")
            # Hash + salt before storing; only the hash reaches the DB.
            password=sha256_crypt.using(rounds=535000).hash(str(pass_w))
            user = db.execute(
                'SELECT * FROM user WHERE email = ?', (user_mail, )
            ).fetchone()
            if user:
                error = 'Correo ya registrado. Por favor, prueba con otro correo'
                flash(error)
            else:
                close_db()
                db = get_db()
                db.execute(
                    'INSERT INTO user ( name, email, pass, is_admin)'
                    ' VALUES (?,?,?,?)',
                    (user_name, user_mail, password, 'false'))
                db.commit()
                flash("Has registrado a "+user_name+" correctamente. Se le han enviado sus credenciales al usuario creado.")
                ##################################################################################################################
                # Send the welcome e-mail containing the new credentials.
                msg = Message()
                msg.subject = "Bienvenid@ "+user_name+ " a Almacen G2SGA"
                msg.recipients = [user_mail]
                msg.sender = "almacengrupoamisiontic2020@gmail.com"
                msg.html = "<html> \
                <head><title>Hola "+ user_name +" </title></head> \
                <h2>Hola "+ user_name +"</h2> \
                <body><h3>El administrador "+session['name']+" te ha registrado en la aplicación de inventario. Tus credenciales son: <br> Email: "+user_mail+" <br> Contraseña: "+ pass_w+". Accede a ella en https://3.80.19.135:2022/login</h3> \
                <hr> \
                <h4>Cordialmente,</h4> \
                <h4>Almacen G2SGA</h4> \
                </body> \
                </html>"
                mail.send(msg)
                return redirect(url_for("principal"))
        ##################################################################################################################
        return render_template('registrar_usuario.html')
    else:
        flash("No tienes permiso para ver esto.")
        return redirect(url_for("principal"))
@app.route('/recuperar')
def recuperar():
    """Render the password-recovery request page."""
    return render_template('recuperar.html')
##################################################################################################################
@app.route('/recuperar_contrasena', methods = ['POST', 'GET'])
def recuperar_contrasena():
    """Password-recovery endpoint.

    POST: look the submitted e-mail up in the `user` table; when found,
    issue a 24-hour JWT reset token and e-mail a reset link rendered from
    `reset_email.html`.  Always redirects back to `recuperar` after a POST,
    and to `login` otherwise.
    """
    if(request.method == 'POST'):
        email_psw = request.form.get("emailpsw")
        # Direct sqlite3 connection (bypasses the flask.g-cached one).
        with sqlite3.connect("almacen.db") as dbC:
            cursorPass = dbC.cursor()
            find_pass = ('SELECT * FROM user WHERE email = ?')
            cursorPass.execute(find_pass, [(email_psw)])
            resultsContra = cursorPass.fetchall()
            if resultsContra:
                # Wrap matching rows in Usuario objects; only the first is used.
                rec_usuario = [usuario.Usuario(i[0], i[1], i[2], i[3], i[4]) for i in resultsContra]
                # Token expires in 24 hours (60*60*24 seconds).
                token = jwt.encode({'reset_password': rec_usuario[0].email, 'exp': time() + 60*60*24},
                            key=app.secret_key)
                msg = Message()
                msg.subject = "Almacen Grupo A - Restablecer contraseña"
                msg.sender = "almacengrupoamisiontic2020@gmail.com"
                msg.recipients = [rec_usuario[0].email]
                msg.html = render_template('reset_email.html', user=rec_usuario[0].email, token=token)
                mail.send(msg)
                flash("Se ha enviado el correo de recuperación. Revisa en SPAM si no lo ves en tu inbox")
            else:
                flash("El correo no se encuentra en nuestros registros")
            return redirect(url_for('recuperar'))
    return redirect(url_for('login'))
##################################################################################################################
@app.route('/', methods = ['POST', 'GET'])
@login_required
def principal():
    #Main page. Requires login.
    #All of the subpages generate POST requests to this page when their forms are used.
    #If it's "agregar" (adding a product), it adds the item to the database.
    #If it's "editar" (edit a product), it updates its values.
    #If it's "delete" (delete a product), it deletes it from the database.
    #Agregar and delete require admin status and will flash an error if a non-admin user tries to do them.
    #Before the queries, the app validates if the query is possible and shows an error:
    #To edit, the name or the id (at least one) must stay the same.
    #To add, the name or id must not be in the database.
    #There is no extra validation on delete; the product's existence is checked by ID first.
    if(request.method == 'POST'):
        ### ADD ("agregar") ###
        if request.args.get("agregar"):
            print("agregar")
            id_item= request.form.get("id")
            qty = request.form.get("qty")
            name= request.form.get("name")
            mail=session['mail']
            datetimeval=datetime.now()
            uploaded_file = request.files["image_file"]
            filename= secure_filename(uploaded_file.filename)
            if session['admin']=='true':
                if filename != '':
                    # NOTE(review): unlike the edit branch, no extension/content
                    # validation is done here before saving — confirm intended.
                    uploaded_file.save(os.getcwd()+os.path.join('/static/avatars',id_item))
                close_db()
                db = get_db()
                # Reject duplicates: the reference must not already exist.
                filter_query=db.execute('SELECT * FROM product WHERE ref=? ',(id_item,)).fetchall()
                if filter_query:
                    flash("El producto ya existe. Use otro ID o nombre")
                    return render_template("agregar.html")
                else:
                    close_db()
                    db = get_db()
                    db.execute(
                        'INSERT INTO product (ref, nom, cant, email_last_modified, date_last_modified )'
                        ' VALUES (?, ?, ?,?,?)',
                        (id_item, name, qty, mail, datetimeval))
                    db.commit()
                    flash("Producto agregado. Referencia: "+id_item+", Nombre: "+name+", Inventario Inicial: "+qty )
            else: flash("No tienes permiso para realizar esta acción")
        ### EDIT ("editar") ###
        if request.args.get("editar"):
            print("editar")
            id_item= request.form.get("id")
            qty = request.form.get("qty")
            name= request.form.get("name")
            mail=session['mail']
            datetimeval=datetime.now()
            if session['admin']=='true':
                uploaded_file = request.files["image_file"]
                filename= secure_filename(uploaded_file.filename)
                if filename != '':
                    # Validate both the file extension and the actual image content.
                    file_ext = os.path.splitext(filename)[1]
                    if file_ext not in app.config['UPLOAD_EXTENSIONS'] or file_ext != validate_image(uploaded_file.stream):
                        abort(400)
                    # NOTE(review): Windows-style separator here vs '/' in the
                    # add branch — one of the two is wrong for the deployed OS.
                    uploaded_file.save(os.getcwd()+os.path.join('\\static\\avatars',id_item))
                close_db()
                db = get_db()
                # At least one of ref/nom must still match an existing row.
                filter_query=db.execute('SELECT * FROM product WHERE ref=? or nom=?', (id_item, name)).fetchall()
                if not filter_query:
                    flash("Fallo en edición. No cambie a la vez nombre e ID. Intente nuevamente.")
                else:
                    close_db()
                    db = get_db()
                    print((id_item, name, qty, mail, datetimeval,id_item, name))
                    db.execute(
                        'UPDATE product \
                        SET ref = ?, nom=?, cant=?, email_last_modified=?, date_last_modified=?\
                        WHERE ref=? OR nom=?',
                        (id_item, name, qty, mail, datetimeval,id_item, name))
                    db.commit()
                    flash("Producto editado. Referencia: "+id_item+", Nombre: "+name+", Inventario: "+qty)
        ### DELETE ###
        if request.args.get("delete"):
            print("Eliminar")
            id_item= request.args.get("id")
            name = request.args.get("name")
            stock=request.args.get("stock")
            print(id_item)
            if session['admin']=='true':
                close_db()
                db = get_db()
                if db.execute("SELECT * FROM product where ref=?",(id_item,)).fetchone():
                    close_db()
                    db = get_db()
                    db.execute('DELETE FROM product WHERE ref=?',(id_item,))
                    db.commit()
                    flash("Producto eliminado. Sus datos eran ID: "+id_item+", Nombre: "+name+", Inventario: "+stock)
                else: flash("No hay producto con el ID suministrado")
            else : flash("No tienes permiso para realizar esta acción")
    close_db()
    # Render the inventory, optionally filtered by the search term.
    with sqlite3.connect("almacen.db") as dbP:
        cursorProd = dbP.cursor()
        if not request.form.get("term"):
            find_prod = ("SELECT * FROM product ORDER BY nom ASC")
            cursorProd.execute(find_prod)
            resultsProd = cursorProd.fetchall()
        else:
            # Case-insensitive substring match on reference or name.
            find_prod = ("SELECT * FROM product WHERE (instr(lower(ref), lower(?))>0) OR (instr(lower(nom), lower(?))>0)\
                ORDER BY nom ASC")
            cursorProd.execute(find_prod, (request.form.get("term"),request.form.get("term")))
            resultsProd = cursorProd.fetchall()
    inventory1=[inventory.Inventory(i[0], i[1], i[2], i[3], i[4]) for i in resultsProd]
    return render_template('principal.html', inventory=inventory1, user_name =session['name'], admin=session['admin'])
@app.route('/logout')
@login_required
def logout():
    # Drop the whole session (mail, name, admin flag, ...) and return to login.
    session.clear()
    return redirect(url_for('login'))
@app.route('/agregar')
@login_required
def agregar():
    # Add-product form; admins only — everyone else is bounced to the main page.
    if session['admin']=='false':
        return redirect(url_for('principal'))
    return render_template('agregar.html')
@app.route('/editar', methods = ['POST', 'GET'])
@login_required
def editar():
    """Render the edit-product page for the item selected on the main page.

    The item's id/name/stock arrive as query-string parameters and are passed
    through to the template so the form can be pre-filled.
    """
    # Bind the item fields for both GET and POST. Previously they were only
    # assigned inside `if request.method == 'POST'`, so a plain GET raised
    # UnboundLocalError at the render_template call below. request.args is
    # available on both methods, so POST behavior is unchanged.
    item_id = request.args.get("id")
    item_name = request.args.get("name")
    item_stock = request.args.get("stock")
    return render_template('editar.html', admin=session['admin'], item_id=item_id, item_name=item_name, item_stock=item_stock)
##################################################################################################################
@app.route('/password_reset_verified/<token>', methods=['GET', 'POST'])
def reset_verified(token):
    """Landing page for the e-mailed reset link.

    GET renders the new-password form; POST (with a valid token) hashes the
    submitted password, stores it, and e-mails a confirmation.
    """
    # Token decodes to the account e-mail, or None if invalid/expired.
    username=verify_reset_token(token)
    if username:
        with sqlite3.connect("almacen.db") as con:
            cur = con.cursor()
            user = cur.execute('SELECT * FROM user WHERE email = ?', (username, )).fetchone()
        con.commit()
        con.close()
        if not user:
            print('no user found')
            return redirect(url_for('login'))
        password = request.form.get('password')
        if password:
            with sqlite3.connect("almacen.db") as cond:
                curs = cond.cursor()
                # Hash with the same sha256_crypt parameters used at signup.
                contra = sha256_crypt.using(rounds=535000).hash(str(password))
                curs.execute(
                    'UPDATE user \
                    SET pass=?\
                    WHERE email=?',
                    (contra, username))
            cond.commit()
            cond.close()
            # Confirmation e-mail.
            # NOTE(review): this mails the new password back in plain text —
            # confirm this is intended; it weakens the reset flow.
            msg = Message()
            msg.subject = "Contraseña reestablecida correctamente"
            msg.recipients = [username]
            msg.sender = "almacengrupoamisiontic2020@gmail.com"
            msg.html = "<html> \
                <head><title>Contraseña reestablecida</title></head> \
                <h2>Hola recibe un cordial saludo</h2> \
                <body><h3>Tu contraseña se ha restablecido correctamente, recuerda que tus credenciales son: <br> Email: "+username+" <br> Contraseña: "+ password+". Accede a la aplicación en https://3.80.19.135:2022/login</h3> \
                <hr> \
                <h4>Cordialmente,</h4> \
                <h4>Almacen G2SGA</h4> \
                </body> \
                </html>"
            mail.send(msg)
            flash("Inicia sesión con tu nueva contraseña")
            return redirect(url_for('login'))
        else: flash("Ingresa una contraseña")
    else: return redirect(url_for('recuperar'))
    return render_template('reset_verified.html')
##################################################################################################################
def verify_reset_token(token):
    """Decode a password-reset JWT and return the e-mail it was issued for.

    Returns None (after flashing an error) when the token is invalid or
    expired.
    """
    try:
        # Tokens are issued by recuperar_contrasena() via
        # jwt.encode(..., key=app.secret_key), which defaults to HS256.
        # Pin the algorithm explicitly: PyJWT >= 2.0 requires `algorithms`
        # at decode time, and pinning also prevents algorithm-confusion
        # attacks on the token header.
        username = jwt.decode(token, key=app.secret_key, algorithms=["HS256"])['reset_password']
    except Exception:
        flash("El token es inválido o ha expirado. Intenta nuevamente recuperar tu contraseña")
        return
    return username
if __name__ == '__main__':
    # Serve over HTTPS (ad-hoc self-signed cert) on all interfaces, port 2022.
    app.run(ssl_context='adhoc',host='0.0.0.0', port=2022)
|
{"/app.py": ["/db.py", "/inventory.py", "/models.py"], "/test.py": ["/db.py"]}
|
37,778,647
|
fanieblesat/proyectoMintic
|
refs/heads/main
|
/models.py
|
from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
from __init__ import db
class User(db.Model):
    """Data model for user accounts (table ``usuario``)."""
    __tablename__ = 'usuario'
    # Surrogate primary key.
    id = db.Column(
        db.Integer,
        primary_key=True
    )
    # Login identifier; unique and indexed.
    email = db.Column(
        db.String(80),
        index=True,
        unique=True,
        nullable=False
    )
    # Administrator flag.
    isadmin = db.Column(
        db.Boolean,
        index=False,
        unique=False,
        nullable=False
    )
    # Werkzeug password hash; the plain password is never stored.
    password_hash = db.Column(
        db.String(128),
        index=False,
        unique=False,
        nullable=False)

    def get_reset_token(self, expires=500):
        """Return a signed JWT identifying this account for a password reset,
        valid for ``expires`` seconds.

        NOTE(review): relies on module-level ``jwt``, ``time`` and ``os``,
        which are not imported in this file — confirm they are provided.
        """
        return jwt.encode({'reset_password': self.email, 'exp': time() + expires},
                          key=os.getenv('SECRET_KEY_FLASK'))

    @staticmethod
    def verify_reset_token(token):
        """Decode a reset token and return the matching User, or None."""
        try:
            username = jwt.decode(token, key=os.getenv('SECRET_KEY_FLASK'))['reset_password']
            print(username)
        except Exception as e:
            print(e)
            return
        # The token payload stores the account e-mail (see get_reset_token),
        # and this model has no `username` column, so the lookup must filter
        # on `email`. (Previously filtered on a non-existent `username`
        # attribute, which raised for every valid token.)
        return User.query.filter_by(email=username).first()

    @staticmethod
    def verify_email(email):
        """Return the user with this e-mail, or None."""
        user = User.query.filter_by(email=email).first()
        return user

    @property
    def password(self):
        """Prevent the plain password from being read back."""
        raise AttributeError('password is not a readable attribute.')

    @password.setter
    def password(self, password):
        """Store only the hash of the given password."""
        self.password_hash = generate_password_hash(password)

    def verify_password(self, password):
        """Check a candidate password against the stored hash."""
        return check_password_hash(self.password_hash, password)

    def __repr__(self):
        # The model has no `username` attribute; identify users by e-mail.
        return '<User {}>'.format(self.email)
|
{"/app.py": ["/db.py", "/inventory.py", "/models.py"], "/test.py": ["/db.py"]}
|
37,790,932
|
metaphysicser/DP-Star
|
refs/heads/main
|
/metrics/query_avre.py
|
# -*- encoding:utf-8 -*-
import numpy as np
import random
import os
import datetime
# Counter incremented by query() when an answer pair is strongly divergent
# (exactly one of real/synthetic counts is zero while the other is >= 10).
count_get = 0
# Real-trajectory directory followed by synthetic datasets generated with
# MDL threshold 1100 at privacy budgets ep = 0.1, 0.5, 1.0, 2.0.
path_list = ['../../data/Geolife Trajectories 1.3/Trajectories7000/', '../../data/Geolife Trajectories '
             '1.3/sd/sd_final_MDL1100_ep0.1/',
             '../../data/Geolife '
             'Trajectories '
             '1.3/sd/sd_final_MDL1100_ep0.5/',
             '../../data/Geolife Trajectories 1.3/sd/sd_final_MDL1100_ep1.0/', '../../data/Geolife Trajectories '
             '1.3/sd/sd_final_MDL1100_ep2.0/']
# Test-set directories used by the __main__ driver below.
path_test = ['../../data/Geolife Trajectories 1.3/Trajectories/', '../../data/Geolife Trajectories '
             '1.3/test/0/', '../../data/Geolife '
             'Trajectories '
             '1.3/test/1/',
             '../../data/Geolife Trajectories 1.3/test/2/', '../../data/Geolife Trajectories '
             '1.3/test/3/']
def query(start_point, end_point, radius, D, SD):
    """
    Run one circular range query at a uniformly random centre and return the
    relative error between the real and synthetic answer counts.

    Args:
        start_point: (lat, lon) lower corner of the centre-sampling region
        end_point  : (lat, lon) upper corner of the centre-sampling region
        radius     : query radius
        D          : real trajectories (list of lists of points)
        SD         : synthetic trajectories, aligned with D by index

    Returns:
        |hits(D) - hits(SD)| / max(hits(D), 1% of |D|)
    """
    centre_row = random.uniform(start_point[0], end_point[0])
    centre_col = random.uniform(start_point[1], end_point[1])
    r_sq = radius ** 2
    sanity_bound = int(len(D) * 0.01)  # 1% of |D|, floor of the denominator
    hits_real = 0
    hits_synth = 0
    for idx in range(len(D)):
        # A trajectory counts as one hit if any of its points lies inside the circle.
        if any((p[0] - centre_row) ** 2 + (p[1] - centre_col) ** 2 <= r_sq for p in D[idx]):
            hits_real += 1
        if any((p[0] - centre_row) ** 2 + (p[1] - centre_col) ** 2 <= r_sq for p in SD[idx]):
            hits_synth += 1
    # Track queries where exactly one side is empty while the other side has
    # at least 10 hits (a strongly divergent answer pair).
    if (hits_real == 0) != (hits_synth == 0) and max(hits_real, hits_synth) >= 10:
        global count_get
        count_get += 1
    return abs(hits_real - hits_synth) / max(hits_real, sanity_bound)
def get_data(init_path='../data/Geolife Trajectories 1.3/Trajectories/'):
    """
    Extract trajectory data from text files.

    Each file under ``init_path`` holds one trajectory, one "lat, lon" pair
    per line.

    :param init_path: directory with the trajectory files; must end with a
        path separator, because filenames are appended to it directly
    :return: list of trajectories, each a list of (lat, lon) float tuples
    """
    D = []
    for path in os.listdir(init_path):
        # `with` closes each file; the original leaked every handle.
        with open(init_path + path, 'r') as file_object:
            T0 = []
            for line in file_object:
                parts = line.strip().split(',')
                T0.append((float(parts[0].strip()), float(parts[1].strip())))
        D.append(T0)
    return D
def get_QA(D, SD, min_latitude=39.6,
           min_longitude=115.8,
           len_latitude=1.2,
           len_longitude=1.6):
    """
    Query-answer (QA) error metric: the relative range-query error between
    the real data D and the synthetic data SD, averaged over 10 random
    queries of radius 0.01 inside the given bounding box.

    :param D: original trajectories
    :param SD: generated (synthetic) trajectories
    :param min_latitude: lower latitude bound of the box
    :param min_longitude: lower longitude bound of the box
    :param len_latitude: latitude extent of the box
    :param len_longitude: longitude extent of the box
    :return: mean relative error over the 10 queries
    """
    lower = (min_latitude, min_longitude)
    upper = (min_latitude + len_latitude, min_longitude + len_longitude)
    total_error = 0
    for _ in range(10):  # average over repeated random queries
        total_error += query(lower, upper, 0.01, D, SD)
    return total_error / 10
if __name__ == '__main__':
    # Load the original (real) trajectories as the query baseline.
    D = get_data(path_list[0])
    # with open("QAD.txt", "r") as output:
    #     D = eval(output.read())
    # print(D)
    for i in range(1, len(path_test)):
        # Only the first and last test datasets are evaluated here.
        if i == 1 or i == len(path_test) - 1:
            SD = get_data(path_test[i])
            RE = get_QA(D, SD)
            print(path_test[i], RE)
|
{"/dpstar/synthetic_trajectory_generation.py": ["/utils/__init__.py"], "/dpstar/__init__.py": ["/dpstar/adaptive_grid_construction.py", "/dpstar/trip_distribution_extraction.py", "/dpstar/mobility_model_construction.py", "/dpstar/route_length_estimation.py", "/dpstar/synthetic_trajectory_generation.py"], "/dpstar/route_length_estimation.py": ["/utils/__init__.py"], "/dpstar/trip_distribution_extraction.py": ["/utils/__init__.py"], "/dpstar/adaptive_grid_construction.py": ["/utils/__init__.py"], "/dpstar/mobility_model_construction.py": ["/utils/__init__.py"], "/dpstar/mdl.py": ["/utils/__init__.py"], "/dpstar/extract_data.py": ["/utils/__init__.py"], "/utils/__init__.py": ["/utils/_plot.py", "/utils/_vector_cal.py"], "/main.py": ["/dpstar/__init__.py", "/config.py"], "/utils/_plot.py": ["/utils/__init__.py"], "/metrics/diameter_error.py": ["/utils/__init__.py"]}
|
37,790,933
|
metaphysicser/DP-Star
|
refs/heads/main
|
/dpstar/mdl.py
|
"""
-------------------------------------
# -*- coding: utf-8 -*-
# @Author : nomalocaris、Giyn、HZT
# @File : mdl.py
# @Software: PyCharm
-------------------------------------
"""
import os
import numpy as np
from utils import vlen
from utils import ProgressBar
from utils import to_vec_add
from utils import to_vec_sub
from utils import to_vec_times
from utils import to_vec_dot
def lt(t_traj, start_ind, curr_ind):
    """
    Compute L(T~): the log2 description length of the single segment from
    t_traj[start_ind] to t_traj[curr_ind].

    Returns a machine-epsilon placeholder when the two points coincide
    (zero-length segment), avoiding log2(0).
    """
    seg_len = vlen(t_traj[start_ind], t_traj[curr_ind])
    if not seg_len:
        return np.spacing(1)
    return np.log2(seg_len)
def cal_perpendicular(si, ei, sj, ej):
    """
    Perpendicular distance between segment (si, ei) and segment (sj, ej):
    project sj and ej onto the line through si-ei and combine the two
    point-to-projection distances as (d1^2 + d2^2) / (d1 + d2).

    :param si: start point of the base segment
    :param ei: end point of the base segment
    :param sj: start point of the compared segment
    :param ej: end point of the compared segment
    """
    vec_sj = to_vec_sub(sj, si)
    vec_ei = to_vec_sub(ei, si)
    vec_ej = to_vec_sub(ej, si)
    denom = to_vec_dot(vec_ei, vec_ei)
    if denom == 0:
        # Degenerate base segment: return machine epsilon instead of dividing by zero.
        return np.spacing(1)
    coeff_s = to_vec_dot(vec_sj, vec_ei) / denom
    coeff_e = to_vec_dot(vec_ej, vec_ei) / denom
    proj_s = to_vec_add(si, to_vec_times(coeff_s, vec_ei))
    proj_e = to_vec_add(si, to_vec_times(coeff_e, vec_ei))
    d1 = vlen(proj_s, sj)
    d2 = vlen(proj_e, ej)
    if d1 + d2 == 0:
        return 0
    return (d1 ** 2 + d2 ** 2) / (d1 + d2)
def angular(si, ei, sj, ej):
    """
    Angular distance between segments (si, ei) and (sj, ej): the length of
    (sj, ej) scaled by the sine of the angle between the two direction
    vectors (the full length when the angle is 90 degrees or more).
    """
    dir_i = to_vec_sub(ei, si)
    dir_j = to_vec_sub(ej, sj)
    # Identical direction vectors: zero angular distance.
    if dir_i[0] == dir_j[0] and dir_i[1] == dir_j[1]:
        return 0
    dot_ij = to_vec_dot(dir_i, dir_j)
    len_j = np.sqrt(to_vec_dot(dir_j, dir_j))
    if dot_ij <= 0:  # 90 <= angle <= 180 degrees
        return len_j
    cos_a = dot_ij / (np.sqrt(to_vec_dot(dir_i, dir_i)) * len_j)
    sin_sq = 1 - cos_a ** 2
    # Guard against tiny negative values from floating-point round-off.
    sin_a = np.sqrt(sin_sq) if sin_sq > 0 else 0
    return len_j * sin_a
def lttilde(t_traj, start_ind, curr_ind):
    """
    Compute L(T|T~): the encoding cost of the sub-trajectory
    t_traj[start_ind..curr_ind] given the single representative segment
    (t_traj[start_ind], t_traj[curr_ind]) — the log2 of the summed
    perpendicular distances plus the log2 of the summed angular distances.
    """
    rep_s, rep_e = t_traj[start_ind], t_traj[curr_ind]
    perp_sum = 0
    ang_sum = 0
    for j in range(start_ind, curr_ind):
        seg_s, seg_e = t_traj[j], t_traj[j + 1]
        if rep_s == seg_s and rep_e == seg_e:
            continue  # an identical segment contributes nothing
        # The distance helpers expect the longer segment as the first pair.
        if vlen(rep_s, rep_e) > vlen(seg_s, seg_e):
            perp_sum += cal_perpendicular(rep_s, rep_e, seg_s, seg_e)
            ang_sum += angular(rep_s, rep_e, seg_s, seg_e)
        else:
            perp_sum += cal_perpendicular(seg_s, seg_e, rep_s, rep_e)
            ang_sum += angular(seg_s, seg_e, rep_s, rep_e)
    cost_p = np.log2(perp_sum) if perp_sum != 0 else 0
    cost_a = np.log2(ang_sum) if ang_sum != 0 else 0
    return cost_p + cost_a
def Tmdl(t_traj):
    """
    MDL-based trajectory simplification (the partition phase of the
    Partition-and-Group clustering framework): return the list of
    characteristic points approximating t_traj.

    :param t_traj: list of trajectory points
    :return: characteristic points (empty list for an empty trajectory)
    """
    n_points = len(t_traj)
    if n_points == 0:
        return []
    cp = [t_traj[0]]  # characteristic points, seeded with the start point
    start = 0         # index of the current partition origin
    step = 1
    while start + step < n_points:
        curr = start + step
        # A positive L(T|T~) means approximating up to `curr` with one
        # segment costs more than keeping the previous point as a boundary.
        if lttilde(t_traj, start, curr) > 0:
            cp.append(t_traj[curr - 1])
            start = curr - 1
            step = 1
        else:
            step += 1
    cp.append(t_traj[-1])
    return cp
def mdl_main(min_latitude, min_longitude, init_path, preserve_path, rate):
    """
    Simplify every trajectory file with MDL and write the results.

    Args:
        min_latitude : latitude of the bounding-box origin (subtracted from each point)
        min_longitude: longitude of the bounding-box origin
        init_path    : directory of raw trajectory files ("lat, lon" per line)
        preserve_path: output directory (created if missing)
        rate         : scale factor applied after shifting to the origin
    """
    if not os.path.exists(preserve_path):
        os.makedirs(preserve_path)
    base_path_list = os.listdir(init_path)
    tot_len = len(base_path_list)  # 14,650 files in the original dataset
    p = ProgressBar(tot_len, 'MDL轨迹简化')
    for i in range(tot_len):
        path = base_path_list[i]
        p.update(i)
        # Read one trajectory file.
        with open(init_path + path, 'r') as file_object:
            T = []
            for line in file_object.readlines():
                jw = line.strip().split(',')
                w = float(jw[0].strip())
                j = float(jw[1].strip())
                # Map each point into box-relative, scaled coordinates.
                T.append(((w - min_latitude)*rate, (j - min_longitude)*rate))
        t_tilde = Tmdl(T)  # simplified trajectory (characteristic points)
        # Log the simplified length and flag any empty result for inspection.
        print("代表点的长度", len(t_tilde))
        if not len(t_tilde):
            print(init_path + path)
            print(T)
        # Write one characteristic point per line, keeping the base filename.
        with open(preserve_path + path.strip().split('.')[0] + '.txt', 'w') as f3:
            for item in t_tilde:
                f3.writelines(str(item) + '\n')
def cheak(path='../data/Geolife Trajectories 1.3/MDL/'):
    """Report (print) any simplified-trajectory files that came out empty.

    NOTE(review): the name keeps the original typo ('cheak') because the
    __main__ driver calls it by this name.
    """
    for fname in os.listdir(path):
        with open(path + fname, 'r') as handle:
            if not handle.readlines():
                print("空的:", fname)
def check_data(path='../data/Geolife Trajectories 1.3/MDL/'):
    """
    Print simple statistics of the simplified trajectories: the per-file
    line counts, their mean, and their sample standard deviation (ddof=1).

    :param path: directory of simplified trajectory files. The default is the
        previously hard-coded location, so existing callers are unaffected,
        but the helper is now reusable for other output directories.
    """
    length_ = []
    for fname in os.listdir(path):
        # `with` closes each file; the original leaked every handle.
        with open(path + fname, 'r') as file_object:
            length_.append(len(file_object.readlines()))
    print(length_)
    print(np.mean(length_))
    print(np.std(length_, ddof=1))
if __name__ == '__main__':
    # Simplify the raw Geolife trajectories (Beijing bounding box origin
    # 39.6N / 115.8E, scale 1200) and write the characteristic points.
    mdl_main(39.6, 115.8,
             '../data/Geolife Trajectories 1.3/Trajectories/',
             '../data/Geolife Trajectories 1.3/MDL/', 1200)
    cheak()       # flag any empty output files
    check_data()  # print summary statistics of the results
|
{"/dpstar/synthetic_trajectory_generation.py": ["/utils/__init__.py"], "/dpstar/__init__.py": ["/dpstar/adaptive_grid_construction.py", "/dpstar/trip_distribution_extraction.py", "/dpstar/mobility_model_construction.py", "/dpstar/route_length_estimation.py", "/dpstar/synthetic_trajectory_generation.py"], "/dpstar/route_length_estimation.py": ["/utils/__init__.py"], "/dpstar/trip_distribution_extraction.py": ["/utils/__init__.py"], "/dpstar/adaptive_grid_construction.py": ["/utils/__init__.py"], "/dpstar/mobility_model_construction.py": ["/utils/__init__.py"], "/dpstar/mdl.py": ["/utils/__init__.py"], "/dpstar/extract_data.py": ["/utils/__init__.py"], "/utils/__init__.py": ["/utils/_plot.py", "/utils/_vector_cal.py"], "/main.py": ["/dpstar/__init__.py", "/config.py"], "/utils/_plot.py": ["/utils/__init__.py"], "/metrics/diameter_error.py": ["/utils/__init__.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.